本文整理汇总了Python中threadpool.ThreadPool.wait_completion方法的典型用法代码示例。如果您正苦于以下问题:Python ThreadPool.wait_completion方法的具体用法?Python ThreadPool.wait_completion怎么用?Python ThreadPool.wait_completion使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类threadpool.ThreadPool的用法示例。
在下文中一共展示了ThreadPool.wait_completion方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: from_file
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def from_file(m163,option):
""" download objects (songs, albums...) from an input file. """
urls = []
with open(option.inFile) as f:
urls = f.readlines()
global total, done, xiami_obj
total = len(urls)
print border
LOG.info(msgTxt.fmt_links_in_file % total)
print border
pool = ThreadPool(config.THREAD_POOL_SIZE)
for link in [u for u in urls if u]:
link = link.rstrip('\n')
#if it is a xiami link, init xiami object
if re.match(pat_xm, link):
__init_xiami_obj(option)
pool.add_task(from_url_xm, xiami_obj,link, verbose=False)
elif re.match(pat_163, link):
pool.add_task(from_url_163, m163,link, verbose=False)
else:
LOG.warning(msgTxt.fmt_skip_unknown_url % link)
pool.wait_completion()
示例2: pickle_all_companies
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def pickle_all_companies():
    """Serialise every known company, fanning the work out across a
    50-thread pool and blocking until all of it has finished."""
    pool = ThreadPool(50)
    for company in Company.objects.all():
        pool.add_task(pickle_company, company.symbol)
    pool.wait_completion()
    return None
示例3: prime_cache
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def prime_cache(self):
"""Ensures that the webpage cache is filled in the
quickest time possible by making many requests in
parallel"""
print "Getting data for parts from suppliers' websites"
pool = ThreadPool(NUM_THREADS)
for srcode, pg in self.iteritems():
print srcode
pool.add_task(pg.get_price)
pool.wait_completion()
示例4: from_file
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def from_file(xm_obj, infile):
""" download objects (songs, albums...) from an input file. """
urls = []
with open(infile) as f:
urls = f.readlines()
global total, done
total = len(urls)
print border
LOG.info(u' 文件包含链接总数: %d' % total)
print border
pool = ThreadPool(config.THREAD_POOL_SIZE)
for link in [u for u in urls if u]:
pool.add_task(from_url, xm_obj,link.rstrip('\n'), verbose=False)
pool.wait_completion()
示例5: from_file
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def from_file(xm_obj,m163, infile):
""" download objects (songs, albums...) from an input file. """
urls = []
with open(infile) as f:
urls = f.readlines()
global total, done
total = len(urls)
print border
LOG.info(u' 文件包含链接总数: %d' % total)
print border
pool = ThreadPool(config.THREAD_POOL_SIZE)
for link in [u for u in urls if u]:
link = link.rstrip('\n')
if re.match(pat_xm, link):
pool.add_task(from_url_xm, xm_obj,link, verbose=False)
elif re.match(pat_163, link):
pool.add_task(from_url_163, m163,link, verbose=False)
else:
LOG.warning(u' 略过不能识别的url [%s].' % link)
pool.wait_completion()
示例6: make_all_pairs
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def make_all_pairs(use_celery=False, skip_update=False, skip_pickle=False, skip_worker_update=False):
    """
    This will check all of the pairs, either threaded
    or via celery (i.e. local v cloud)
    """
    logger.info(colored('Collecting companies', 'white', attrs=['bold']))
    companies = Company.objects.all()
    pool = ThreadPool(50)
    if not skip_update:
        logger.info(colored('Updating prices', 'white', attrs=['bold']))
        for company in companies:
            pool.add_task(company.update_prices)
        pool.wait_completion()
        logger.info(colored('Prices updated', 'white', attrs=['bold']))
    symbols = [company.symbol for company in companies]
    if not skip_pickle:
        logger.info(colored('Pickling companies', 'white', attrs=['bold']))
        pickle_all_companies()
    if not skip_worker_update:
        logger.info(colored('Updating workers', 'white', attrs=['bold']))
        update_workers()
    # Every unordered pair of symbols gets one task, either shipped to
    # celery workers or run on the local thread pool.
    pairs = itertools.combinations(symbols, 2)
    if use_celery:
        for first, second in pairs:
            make_pair.delay(first, second)
    else:
        for first, second in pairs:
            pool.add_task(make_pair, first, second)
        pool.wait_completion()
    return
示例7: run
# 需要导入模块: from threadpool import ThreadPool [as 别名]
# 或者: from threadpool.ThreadPool import wait_completion [as 别名]
def run(self, symnames, init_symbols=None, N=4):
    """Execute the dependency graph for the requested symbols.

    Topologically sorts the subgraph needed to produce *symnames*, then
    runs each node on an N-thread pool as soon as all of its
    dependencies have completed.  Returns a dict mapping each requested
    symbol name to the value produced by its supplier node.

    symnames     -- a single symbol name or a list of them
    init_symbols -- optional starting SymbolTable
                    (NOTE(review): assigned to ``syms`` below but never
                    read afterwards in this method — confirm intent)
    N            -- number of worker threads in the pool
    """
    if isinstance(symnames, (str, unicode)):
        symnames = [ symnames ]
    self.finalize()
    # Only the subgraph feeding the requested symbols, in dependency order.
    toporder = self._topsort_subgraph(symnames)
    # Source nodes have no dependencies and can be queued immediately.
    sources = ( v for v in toporder if len(self.depends[v]) == 0 )
    # Count of graph nodes not yet submitted to the pool; when it hits
    # zero the submitting side is done.
    remain_to_submit = SynchronizedCounter(len(toporder))
    # Per-node count of how many of its dependencies have finished.
    finished_deps = defaultdict(SynchronizedCounter)
    p = ThreadPool(N)
    if init_symbols is None:
        syms = SymbolTable()
    else:
        syms = init_symbols
    # Guards self.results, which worker-thread callbacks write into.
    parentlock = RLock()
    done_submitting = Condition()
    # If the child thread notifies before the parent thread reaches the
    # wait statement, then the parent will never receive the notification
    # and will block forever. To fix this, the child will decrement this
    # counter to zero, and the parent will check this before waiting.
    done_submitting_helper = SynchronizedCounter(1)
    # The callback runs within the thread. Don't know how to fix.
    def make_apply_callback(gf):
        # Build the completion callback for graph function *gf*: record
        # its result, then queue any successor whose dependencies have
        # all completed.
        def finished(new_syms):
            parentlock.acquire()
            self.results[gf] = new_syms
            parentlock.release()
            parentlock.acquire()
            # print "%s finished! printing state"%(gf.name)
            # print "finished_deps", finished_deps
            # print >> sys.stderr, "%s completed. new_syms = %s"%(gf.name, new_syms)
            # print "self.depends", self.depends
            parentlock.release()
            # Update the functions which we precede
            for next_gf in self.preceded_by[gf]:
                finished_deps_next_gf = finished_deps[next_gf].inc()
                if finished_deps_next_gf == len(self.depends[next_gf]):
                    # All dependencies satisfied; we can run!
                    # This may take a bit of time, but we want to do
                    # all data manipulation in this process.
                    print >> sys.stderr, "Dependencies for %s satisfied. Queueing."%next_gf.name
                    # The successor reads its dependencies' results
                    # through a layered symbol table.
                    symtable = SymbolTable(parents=[self.results[r] for r in self.depends[next_gf]])
                    # Queue doesn't need to be locked
                    p.add_task(next_gf, args=(symtable,), callback=make_apply_callback(next_gf))
            if remain_to_submit.dec() == 0:
                print >> sys.stderr, "All jobs have been submitted. Waiting for parent thread to be ready to receive done_submitting"
                done_submitting.acquire()
                done_submitting.notify()
                done_submitting.release()
                done_submitting_helper.dec()
        return finished
    # Kick off the source nodes; every other node is queued by the
    # callbacks above as its dependencies complete.
    for s in sources:
        remain_to_submit.dec()
        p.add_task(s, args=(SymbolTable(),), callback=make_apply_callback(s))
    # Skip waiting entirely if the workers already finished submitting
    # (see done_submitting_helper above for the race this avoids).
    if done_submitting_helper.get() > 0:
        done_submitting.acquire()
        print >> sys.stderr, "PARENT THREAD: Awaiting condition variable"
        done_submitting.wait()
        done_submitting.release()
    print >> sys.stderr, "PARENT THREAD: Joining the thread pool"
    p.wait_completion()
    # Map each requested symbol to the value its supplier node produced.
    ret = dict((sym, self.results[self.supplier[sym]][sym]) for sym in symnames)
    return ret