

Python Process.join Method Code Examples

This article collects typical usage examples of the Process.join method from Python's multiprocessing.process module. If you are looking for concrete answers to questions such as how to call Process.join, what it does, or what real-world code using it looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, multiprocessing.process.Process.


The sections below present 15 code examples of Process.join, drawn from open-source projects and ordered by popularity. Note that most of these projects target Python 2 (they use print statements and modules such as urllib2), so minor adaptation is needed to run them under Python 3.
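
Before turning to the project excerpts, here is a minimal, self-contained sketch of the basic start/join pattern (written for Python 3; the count_to worker and its arguments are illustrative and not taken from any project below). Note that the public import path is multiprocessing.Process; the snippets below import the same class through the internal multiprocessing.process module.

from multiprocessing import Process

def count_to(n):
    # Illustrative worker: does a little work in the child process.
    total = 0
    for i in range(n):
        total += i
    print("counted to", n)

if __name__ == "__main__":
    workers = [Process(target=count_to, args=(10 ** 6,)) for _ in range(2)]
    for p in workers:
        p.start()   # launch the child processes
    for p in workers:
        p.join()    # block until each child has exited
    print("all workers finished")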

Example 1: main

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
def main(gcm="", rcm="", out_folder=""):
    pc = None
    pf = None

    kwargs = {
        "start_year": 1970,
        "end_year": 1999,
        "rcm": rcm, "gcm": gcm,
        "out_folder": out_folder
    }
    in_folder = "data/narccap/{0}-{1}/current".format(gcm, rcm)

    if os.path.isdir(in_folder):
        pc = Process(target=interpolate_to_amno, args=(in_folder, ), kwargs=kwargs)
        pc.start()
    else:
        print "{0} does not exist, ignoring the period ...".format(in_folder)

    kwargs = {
        "start_year": 2041,
        "end_year": 2070,
        "rcm": rcm, "gcm": gcm,
        "out_folder": out_folder
    }
    in_folder = "data/narccap/{0}-{1}/future".format(gcm, rcm)
    if os.path.isdir(in_folder):
        pf = Process(target=interpolate_to_amno, args=(in_folder, ), kwargs=kwargs)
        pf.start()
    else:
        print "{0} does not exist, ignoring the period ...".format(in_folder)

    #do current and future climates in parallel
    if pc is not None: pc.join()
    if pf is not None: pf.join()
Author: guziy, Project: PlotWatrouteData, Lines: 36, Source: prepare_runoff_for_routing.py

Example 2: process_input_dir

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
def process_input_dir(args, input_path, output_path):
    patt = input_path + os.sep + "*" + args.extension
    files = glob.glob(patt)
    docs_num = len(files)
    if docs_num > args.threads:
        slice_size = docs_num / args.threads
    else:
        slice_size = 1
    print "Threads:", args.threads
    print "Documents number:", docs_num
    print "Documents per thread:", slice_size

    start = 0
    jobs = []
    for job_num in range(args.threads):
        print "Initializing process", job_num
        end = start + slice_size
        p = Process(target=lemmatize_files, args=(files[start:end], output_path, args))
        print files[start:end]
        jobs.append(p)
        p.start()
        start += slice_size

    for p in jobs:
        p.join()
    
    if (docs_num % 2) == 1:
        lemmatize_files(files, output_path, args)
Author: jsouza, Project: text_utils, Lines: 30, Source: corpora_lemmatizer.py

Example 3: test_litmus_with_authentication

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
    def test_litmus_with_authentication(self):
        """Run litmus test suite on HTTP with authentification.

        This test passes
        """
        try:
            proc = Process(target=run_wsgidav_server, args=(True, False))
            proc.daemon = True
            proc.start()
            time.sleep(1)

            try:
                self.assertEqual(subprocess.call(["litmus", "http://127.0.0.1:8080/", "tester", "secret"]),
                                 0,
                                 "litmus suite failed: check the log")
            except OSError:
                print "*" * 70
                print "This test requires the litmus test suite."
                print "See http://www.webdav.org/neon/litmus/"
                print "*" * 70
                raise

        finally:
            proc.terminate()
            proc.join()
Author: Nonolost, Project: wsgidav, Lines: 27, Source: test_litmus.py

Example 4: test_mcdpweb_server

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
def test_mcdpweb_server(dirname):
    port = random.randint(11000, 15000)
    base = 'http://127.0.0.1:%s' % port

    p = Process(target=start_server, args=(dirname, port,))
    p.start()

    print('sleeping')
    time.sleep(5)

    try:
        url_wrong = base + '/not-existing'
        urllib2.urlopen(url_wrong).read()
    except HTTPError:
        pass
    else:
        raise Exception('Expected 404')

    # now run the spider
    tmpdir = tempfile.mkdtemp(prefix='wget-output')
    cwd = '.'
    cmd = ['wget', '-nv', '-P', tmpdir, '-m', base]
#     res = system_cmd_result(
#             cwd, cmd,
#             display_stdout=True,
#             display_stderr=True,
#             raise_on_error=True)
    sub = subprocess.Popen(
                cmd,
                bufsize=0,
                cwd=cwd)
    sub.wait()

    exc = get_exceptions(port)

    if len(exc) == 0:
        msg = 'Expected at least a not-found error'
        raise Exception(msg)

    if not 'not-existing' in exc[0]:
        raise Exception('Could not find 404 error')

    exc = exc[1:]

    if exc:
        msg = 'Execution raised errors:\n\n'
        msg += str("\n---\n".join(exc))
        raise_desc(Exception, msg)

    url_exit = base + '/exit'
    urllib2.urlopen(url_exit).read()

    print('waiting for start_server() process to exit...')
    p.join()
    print('...clean exit')
Author: AndreaCensi, Project: mcdp, Lines: 57, Source: test_server.py

Example 5: Downloader

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
class Downloader(object):
    def __init__(self, timeout=30, retries=100, wait=1):
        self.timeout = timeout
        self.retries = retries
        self.wait = wait
        
        self.manager = SyncManager()
        self.manager.start()
        
    def retry_fetch_data(self, url):
        market_data = self.fetch_data(url)
        
        retries = 1
        while not market_data and retries < self.retries:
            print "Retry #%s..." % str(retries)
            market_data = self.fetch_data(url)
            if market_data:
                print "Fetched: " + str(len(market_data))
            else:
                print "Fetched nothing!"
            retries += 1
        
        return market_data
    
    def fetch_data(self, url):
        limit = 60
        msg = "Downloading " + url[0: min(limit, len(url))] 
        if len(url) > limit:
            msg += "(+" + str(len(url) - limit) + ")"
        print msg
            
        return_dict = self.manager.dict()
        self.job = Process(target=get_page_data, args=(url, return_dict))
        self.job.start()
        
        self.job.join(self.timeout)
        if self.job.is_alive():
            self.job.terminate()
        self.job = None
        
        market_data = None
        if 'page' in return_dict:
            market_data = return_dict['page']
        
        if self.wait > 0:
            time.sleep(self.wait)
        
        return market_data
Author: supremefist, Project: KinectBats, Lines: 50, Source: downloader.py
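
Example 5 above calls join(self.timeout) and then checks is_alive(): Process.join with a timeout simply returns when the timeout expires and does not stop the child, so the caller must terminate() and reap it explicitly. Below is a minimal sketch of that watchdog pattern, assuming a hypothetical slow_task worker (Python 3, not code from the KinectBats project).

import time
from multiprocessing import Process

def slow_task():
    # Hypothetical worker that takes longer than we are willing to wait.
    time.sleep(10)

if __name__ == "__main__":
    p = Process(target=slow_task)
    p.start()
    p.join(timeout=2)    # returns after about 2 seconds even if the child is still running
    if p.is_alive():     # the timeout elapsed and the child has not finished
        p.terminate()    # stop it explicitly ...
        p.join()         # ... and join again to reap it
    print("exit code:", p.exitcode)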

Example 6: set_from_file

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
    def set_from_file(self, varfile_path, args):
        q = Queue()
        p = Process(target=set_from_file, args=(q, varfile_path, args))
        p.start()
        p.join()
        there_are_results = False
        while True:
            try:
                results = q.get_nowait()
                there_are_results = True
                if len(results) == 1:
                    raise DataError(results[0])
                self.set(*results)
            except Empty:
                if not there_are_results:
                    raise DataError('No variables')
                return
Author: pxnj63, Project: RIDE, Lines: 19, Source: namespace.py

Example 7: prepare_proxies

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
def prepare_proxies(configdata):
    
    if configdata[const.PROXY_CONFIG].get(const.PROXY_CONFIG_SOURCE_TYPE, u'1') != u'2':
        return 
    
    p = Process(group=None, target=fetch_proxy,)
    p.start()
    p.join()
    
    print u'%s get %d free proxy' % (datetime.datetime.now(),
                                   len(open(u'proxy.txt', u'r').readlines()))
    
    c = Process(group=None, target=valid_proxy,)
    c.start()
    
    valid_time = int(configdata[const.PROXY_CONFIG].get(const.PROXY_VALID_TIME))
    print u'%s following %d seconds will valid the proxy' % (datetime.datetime.now(), valid_time)
    time.sleep(valid_time)
    c.terminate()
    
    print u'%s get %d effective proxy' % (datetime.datetime.now(),
                                len(open(u'enable_proxies.txt', u'r').readlines()))
Author: 535521469, Project: crawl_secondhandcar, Lines: 24, Source: start.py

Example 8: process_mongo

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
def process_mongo(args, output_path):
    # connects to the MongoDB server
    if args.port:
        connection = Connection(args.address, args.port)
    else:
        connection = Connection(args.address)
    
    # gets the DB
    db = connection[args.db_name]
    
    # gets the collection
    collec = db[args.collection]
    
    # sets the number of documents to be processed by each thread
    docs_num = collec.count()
    slice_size = docs_num / args.threads
    print "Threads:", args.threads
    print "Documents number:", docs_num
    print "Documents per thread:", slice_size

    # initiates a thread for each slice of documents
    # the slices are controlled using the base and offset variables
    base = 0
    offset = slice_size
    jobs = []
    for thread_num in range(args.threads):
        print "Initializing process", thread_num
        p = Process(target=lemmatize_slice, args=(collec, base, offset, args, output_path))
        jobs.append(p)
        p.start()
        base += offset
    
    for p in jobs:
        p.join()
    
    if (docs_num % 2) == 1:
        lemmatize_slice(collec, base, offset, args, output_path)
Author: jsouza, Project: text_utils, Lines: 39, Source: corpora_lemmatizer.py

Example 9: WorkerThread

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]

# ......... some code omitted here .........
                    self._delete_count += 1
            except MemcachedError as error:
                if not self.moxi:
                    awareness.done()
                    try:
                        awareness = VBucketAwareMemcached(RestConnection(self.serverInfo), self.name)
                    except Exception:
                        # vbucket map is changing . sleep 5 seconds
                        time.sleep(5)
                        awareness = VBucketAwareMemcached(RestConnection(self.serverInfo), self.name)
                    self.log.info("now connected to {0} memcacheds".format(len(awareness.memcacheds)))
                    if isinstance(self.serverInfo, dict):
                        self.log.error(
                            "memcached error {0} {1} from {2}".format(error.status, error.msg, self.serverInfo["ip"])
                        )
                    else:
                        self.log.error(
                            "memcached error {0} {1} from {2}".format(error.status, error.msg, self.serverInfo.ip)
                        )
                if error.status == 134:
                    backoff_count += 1
                    if backoff_count < 5:
                        backoff_seconds = 15 * backoff_count
                    else:
                        backoff_seconds = 2 * backoff_count
                    self.log.info("received error # 134. backing off for {0} sec".format(backoff_seconds))
                    time.sleep(backoff_seconds)

                self._rejected_keys_count += 1
                self._rejected_keys.append({"key": key, "value": value})
                if len(self._rejected_keys) > self.ignore_how_many_errors:
                    break
            except Exception as ex:
                if not self.moxi:
                    awareness.done()
                    try:
                        awareness = VBucketAwareMemcached(RestConnection(self.serverInfo), self.name)
                    except Exception:
                        awareness = VBucketAwareMemcached(RestConnection(self.serverInfo), self.name)
                    self.log.info("now connected to {0} memcacheds".format(len(awareness.memcacheds)))
                if isinstance(self.serverInfo, dict):
                    self.log.error("error {0} from {1}".format(ex, self.serverInfo["ip"]))
                    import traceback

                    traceback.print_exc()
                else:
                    self.log.error("error {0} from {1}".format(ex, self.serverInfo.ip))
                self._rejected_keys_count += 1
                self._rejected_keys.append({"key": key, "value": value})
                if len(self._rejected_keys) > self.ignore_how_many_errors:
                    break

                    # before closing the session let's try sending those items again
        retry = 3
        while retry > 0 and self._rejected_keys_count > 0:
            rejected_after_retry = []
            self._rejected_keys_count = 0
            for item in self._rejected_keys:
                try:
                    if self.override_vBucketId >= 0:
                        client.vbucketId = self.override_vBucketId
                    if self.async_write:
                        client.send_set(item["key"], 0, 0, item["value"])
                    else:
                        client.set(item["key"], 0, 0, item["value"])
                    self._inserted_keys_count += 1
                except MemcachedError:
                    self._rejected_keys_count += 1
                    rejected_after_retry.append({"key": item["key"], "value": item["value"]})
                    if len(rejected_after_retry) > self.ignore_how_many_errors:
                        break
            self._rejected_keys = rejected_after_retry
            retry = -1
            # clean up the rest of the deleted keys
            if len(self._delete) > 0:
                #                self.log.info("deleting {0} keys".format(len(self._delete)))
                for key_del in self._delete:
                    client.delete(key_del)
                self._delete = []

            self.log.info("deleted {0} keys".format(self._delete_count))
            self.log.info("expiry {0} keys".format(self._expiry_count))
            #        client.close()
        awareness.done()
        if not self.write_only:
            self.queue.put_nowait("stop")
            self.reader.join()

    def _initialize_memcached(self):
        pass

    def _set(self):
        pass

    def _handle_error(self):
        pass
        # if error is memcached error oom related let's do a sleep

    def _time_to_stop(self):
        return self.aborted or len(self._rejected_keys) > self.ignore_how_many_errors
Author: steveyen, Project: testrunner, Lines: 104, Source: data_helper.py

Example 10: len

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
                toBeAdded.append(server)
            if len(toBeAdded) == how_many:
                break

        for server in toBeAdded:
            rest.add_node('Administrator', 'password', server.ip)
            #check if its added ?
        nodes = rest.node_statuses()
        otpNodes = [node.id for node in nodes]
        started = rest.rebalance(otpNodes, [])
        msg = "rebalance operation started ? {0}"
        self.log.info(msg.format(started))
        if started:
            result = rest.monitorRebalance()
            msg = "successfully rebalanced out selected nodes from the cluster ? {0}"
            self.log.info(msg.format(result))
            return result
        return False


if __name__ == "__main__":
    process1 = Process(target=start_load, args=(sys.argv,))
    process1.start()
    process2 = Process(target=start_combo, args=(sys.argv,))
    process2.start()
    process3 = Process(target=start_backup, args=(sys.argv,))
    process3.start()
    process1.join()
    process2.join()
    process3.join()
Author: DavidAlphaFox, Project: couchbase, Lines: 32, Source: longevity.py

Example 11: AuthorizationCodeTestCase

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]

# ......... some code omitted here .........
                stores = store_factory(client_identifier="abc",
                                       client_secret="xyz",
                                       redirect_uris=[redirect_uri])

                provider = Provider(access_token_store=stores["access_token_store"],
                                    auth_code_store=stores["auth_code_store"],
                                    client_store=stores["client_store"],
                                    site_adapter=TestSiteAdapter(),
                                    token_generator=Uuid4())

                provider.add_grant(AuthorizationCodeGrant(expires_in=120))
                provider.add_grant(RefreshToken(expires_in=60))

                app = Wsgi(server=provider)

                httpd = make_server('', 15486, app,
                                    handler_class=NoLoggingHandler)

                queue.put({"result": 0})

                httpd.serve_forever()
            except Exception as e:
                queue.put({"result": 1, "error_message": str(e)})

        def run_client(queue):
            try:
                app = ClientApplication(
                    callback_url="http://127.0.0.1:15487/callback",
                    client_id="abc",
                    client_secret="xyz",
                    provider_url="http://127.0.0.1:15486")

                httpd = make_server('', 15487, app,
                                    handler_class=NoLoggingHandler)

                queue.put({"result": 0})

                httpd.serve_forever()
            except Exception as e:
                queue.put({"result": 1, "error_message": str(e)})

        uuid_regex = "^[a-z0-9]{8}\-[a-z0-9]{4}\-[a-z0-9]{4}\-[a-z0-9]{4}-[a-z0-9]{12}$"

        ready_queue = Queue()

        self.provider = Process(target=run_provider, args=(ready_queue,))
        self.provider.start()

        provider_started = ready_queue.get()

        if provider_started["result"] != 0:
            raise Exception("Error starting Provider process with message"
                            "'{0}'".format(provider_started["error_message"]))

        self.client = Process(target=run_client, args=(ready_queue,))
        self.client.start()

        client_started = ready_queue.get()

        if client_started["result"] != 0:
            raise Exception("Error starting Client Application process with "
                            "message '{0}'"
                            .format(client_started["error_message"]))

        access_token_result = urlopen("http://127.0.0.1:15487/app").read()

        access_token_data = json.loads(access_token_result.decode('utf-8'))

        self.assertEqual(access_token_data["token_type"], "Bearer")
        self.assertEqual(access_token_data["expires_in"], 120)
        self.assertRegexpMatches(access_token_data["access_token"],
                                 uuid_regex)
        self.assertRegexpMatches(access_token_data["refresh_token"],
                                 uuid_regex)

        request_data = {"grant_type": "refresh_token",
                        "refresh_token": access_token_data["refresh_token"],
                        "client_id": "abc",
                        "client_secret": "xyz"}

        refresh_token_result = urlopen(
            "http://127.0.0.1:15486/token",
            urlencode(request_data).encode('utf-8')
        )

        refresh_token_data = json.loads(refresh_token_result.read().decode('utf-8'))

        self.assertEqual(refresh_token_data["token_type"], "Bearer")
        self.assertEqual(refresh_token_data["expires_in"], 120)
        self.assertRegexpMatches(refresh_token_data["access_token"],
                                 uuid_regex)

    def tearDown(self):
        if self.client is not None:
            self.client.terminate()
            self.client.join()

        if self.provider is not None:
            self.provider.terminate()
            self.provider.join()
Author: LejoFlores, Project: python-oauth2-1, Lines: 104, Source: test_authorization_code.py

Example 12: Worker

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
class Worker(_th.Thread):
    def __new__(cls,name=None):
        if name is None: name='default'
        if name in _workers.keys():
            return _workers[name]
        return super(Worker,cls).__new__(cls)

    def __init__(self,name=None):
        if name is None: name='default'
        if name in _workers.keys():
            return
        _workers[name] = self
        super(Worker,self).__init__()
        self.daemon = True
        self.name = name
        self._queue = _ver.queue.Queue(1)
        self.last_exception = None
        self._pon = _mp.Value('b',True)
        tsk,self.task = _mp.Pipe(False)
        self.out,res = _mp.Pipe(False)
        self.process = Process(target=process,args=(self._pon,tsk,res),name=name)
        self.process.start()
        self._on = True
        self.start()
        _time.sleep(1)

    def run(self):
        _sup.debug('%s started' % (str(self.name),))
        while self._on or not self._queue.empty():
            try:
                result,target,args,kwargs = self._queue.get(True,.1)
                _sup.debug('%s: %s-task received' % (str(self.name),target.__name__))
                self.task.send((target,args,kwargs))
                res = self.out.recv()
                del(result[self.name])
                _sup.debug(res)
                result[target.__name__] = res
                _sup.debug('%s: %s-task done' % (str(self.name),target.__name__))
                self._queue.task_done()
            except _ver.queue.Empty: continue
            except KeyboardInterrupt as ki: raise ki
            except Exception as exc:
                _sup.debug('%s: %s' % (str(self.name),str(exc)),0)
                if result is not None:
                    result[self.name] = exc
                self.last_exception = exc
        _sup.debug('%s: done!' % (str(self.name),))
        self._pon.value = False
        del(_workers[self.name])

    def join(self):
        self._on = False
        self._queue.join()
        super(Worker,self).join()
        self._pon.value = False
        self.process.join()

    def put(self,target,*args,**kwargs):
        result = {self.name:target.__name__}
        self._queue.put((result,target,args,kwargs))
        _time.sleep(.1)
        return result
Author: zack-vii, Project: archive, Lines: 64, Source: process.py

Example 13: join

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
    def join(self, timeout=None, close=True):
        """Close the file and join the thread."""
        if close:
            self._queue.put(StopIteration)
            self._fd.close()
        Process.join(self, timeout)
Author: YunoHost, Project: moulinette, Lines: 8, Source: stream.py
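
Example 13 overrides join() in a Process subclass so that a sentinel is pushed onto the queue and the file is closed before delegating to Process.join. Below is a minimal self-contained sketch of that pattern (Python 3; the LogWriter class and its queue protocol are illustrative, not the moulinette implementation).

from multiprocessing import Process, Queue

class LogWriter(Process):
    """Illustrative Process subclass whose join() also shuts down its worker loop."""

    def __init__(self):
        super().__init__()
        self.queue = Queue()

    def run(self):
        while True:
            item = self.queue.get()
            if item is StopIteration:   # same sentinel idea as in Example 13
                break
            print("log:", item)

    def join(self, timeout=None):
        # Tell run() to stop, then delegate to the normal Process.join().
        self.queue.put(StopIteration)
        super().join(timeout)

if __name__ == "__main__":
    w = LogWriter()
    w.start()
    w.queue.put("hello")
    w.join()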

Example 14: AuthorizationCodeTestCase

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]

# ......... some code omitted here .........
                queue.put({"result": 1, "error_message": str(e)})

        ready_queue = Queue()

        self.server = Process(target=run_provider, args=(ready_queue,))
        self.server.start()

        provider_started = ready_queue.get()

        if provider_started["result"] != 0:
            raise Exception("Error starting Provider process with message"
                            "'{0}'".format(provider_started["error_message"]))

        self.client = Process(target=run_client, args=(ready_queue,))
        self.client.start()

        client_started = ready_queue.get()

        if client_started["result"] != 0:
            raise Exception("Error starting Client Application process with "
                            "message '{0}'"
                            .format(client_started["error_message"]))

        self.access_token()

    def test_wsgi_404(self):
        def run_provider(queue):
            try:
                provider = create_provider()

                app = Application(provider=provider)

                httpd = make_server('', 15486, app,
                                    handler_class=NoLoggingHandler)

                queue.put({"result": 0})

                httpd.serve_forever()
            except Exception as e:
                queue.put({"result": 1, "error_message": str(e)})

        ready_queue = Queue()

        self.server = Process(target=run_provider, args=(ready_queue,))
        self.server.start()

        provider_started = ready_queue.get()

        if provider_started["result"] != 0:
            raise Exception("Error starting Provider process with message"
                            "'{0}'".format(provider_started["error_message"]))

        try:
            urlopen("http://127.0.0.1:15486/invalid-path").read()
        except HTTPError as e:
            self.assertEqual(404, e.code)

    def access_token(self):
        uuid_regex = "^[a-z0-9]{8}\-[a-z0-9]{4}\-[a-z0-9]{4}\-[a-z0-9]{4}-[a-z0-9]{12}$"

        try:
            access_token_result = urlopen("http://127.0.0.1:15487/app").read()
        except HTTPError as e:
            print(e.read())
            exit(1)

        access_token_data = json.loads(access_token_result.decode('utf-8'))

        self.assertEqual(access_token_data["token_type"], "Bearer")
        self.assertEqual(access_token_data["expires_in"], 120)
        self.assertRegexpMatches(access_token_data["access_token"],
                                 uuid_regex)
        self.assertRegexpMatches(access_token_data["refresh_token"],
                                 uuid_regex)

        request_data = {"grant_type": "refresh_token",
                        "refresh_token": access_token_data["refresh_token"],
                        "client_id": "abc",
                        "client_secret": "xyz"}

        refresh_token_result = urlopen(
            "http://127.0.0.1:15486/token",
            urlencode(request_data).encode('utf-8')
        )

        refresh_token_data = json.loads(refresh_token_result.read().decode('utf-8'))

        self.assertEqual(refresh_token_data["token_type"], "Bearer")
        self.assertEqual(refresh_token_data["expires_in"], 120)
        self.assertRegexpMatches(refresh_token_data["access_token"],
                                 uuid_regex)

    def tearDown(self):
        if self.client is not None:
            self.client.terminate()
            self.client.join()

        if self.server is not None:
            self.server.terminate()
            self.server.join()
Author: wndhydrnt, Project: python-oauth2, Lines: 104, Source: test_authorization_code.py

Example 15: execute

# Required import: from multiprocessing.process import Process [as alias]
# Or: from multiprocessing.process.Process import join [as alias]
    def execute():
        # Eventually consistent trick
        p = Process(target=_update_library_keywords, args=(path, args))
        p.start()
        p.join()
        print 'should refresh database'
Author: pxnj63, Project: RIDE, Lines: 8, Source: libraryfetcher.py


Note: The multiprocessing.process.Process.join examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and the source code remains copyrighted by its original authors. For distribution and use, please refer to each project's license; do not reproduce without permission.