本文整理汇总了Python中multiprocessing.pool.map方法的典型用法代码示例。如果您正苦于以下问题:Python pool.map方法的具体用法?Python pool.map怎么用?Python pool.map使用的例子?那么,恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类multiprocessing.pool的用法示例。
在下文中一共展示了pool.map方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_imap_unordered_handle_iterable_exception
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_imap_unordered_handle_iterable_exception(self):
    """imap_unordered must propagate an exception raised by its input iterable."""
    if self.TYPE == 'manager':
        self.skipTest('test not appropriate for {}'.format(self.TYPE))

    it = self.pool.imap_unordered(sqr,
                                  exception_throwing_generator(10, 3),
                                  1)
    # list() the map object: in Python 3 map() returns an iterator, which
    # has no .remove() and cannot be consumed by repeated membership tests.
    expected_values = list(map(sqr, range(10)))
    with self.assertRaises(SayWhenError):
        # imap_unordered makes it difficult to anticipate the SayWhenError
        for i in range(10):
            value = next(it)
            self.assertIn(value, expected_values)
            expected_values.remove(value)

    it = self.pool.imap_unordered(sqr,
                                  exception_throwing_generator(20, 7),
                                  2)
    expected_values = list(map(sqr, range(20)))
    with self.assertRaises(SayWhenError):
        for i in range(20):
            value = next(it)
            self.assertIn(value, expected_values)
            expected_values.remove(value)
示例2: main
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def main(n_workers=args.n_workers):
    """Download every (name, url) pair listed by the configured input source.

    :param n_workers: number of pool worker processes
        (defaults to the command-line value at definition time).
    :raises ValueError: if no input source option was given.
    """
    if args.input_dir:
        files = [os.path.join(args.input_dir, fi) for fi in os.listdir(args.input_dir)]
    elif args.input_file_list:
        files = np.squeeze(np.asarray(pd.read_csv(args.input_file_list, header=None, sep="\n"))).tolist()
    elif args.input_file:
        files = [args.input_file]
    else:
        # Previously fell through with `pass`, producing a confusing
        # NameError on `files` below; fail loudly instead.
        raise ValueError("one of input_dir, input_file_list or input_file must be provided")

    for file in files:
        save_dir = os.path.join(args.output_dir, os.path.splitext(os.path.basename(file))[0])
        df = pd.read_csv(file, header=None, sep=" ")
        # .ix was deprecated and then removed from pandas; .iloc is the
        # positional-indexing equivalent.
        name_url_zip = zip(np.squeeze(np.asarray(df.iloc[:, 0])).tolist(),
                           np.squeeze(np.asarray(df.iloc[:, 1])).tolist())
        dn = partial(download, tarDir=save_dir)
        pool = Pool(processes=n_workers)
        try:
            pool.map(dn, name_url_zip)
        except Exception as e:
            logger.error("Error occurred during multiprocessing thread: %s @ time %s" %(str(e), time.strftime("%Y%m%d-%H%M%S")))
        finally:
            # The pool was leaked before: release the worker processes.
            pool.close()
            pool.join()
示例3: map
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def map(self, func, iterable, chunksize=None):
    """
    Interrupt-friendly drop-in for the `map()` built-in.

    :param func:
        The function to apply to the items.
    :param iterable:
        An iterable of items that will have `func` applied to them.
    """
    # Waiting on the async result with a timeout is the key trick: a
    # Condition.wait() with no timeout swallows KeyboardInterrupt.
    result = self.map_async(func, iterable, chunksize)
    while True:
        try:
            return result.get(self.wait_timeout)
        except KeyboardInterrupt:
            # Tear the pool down before re-raising so Ctrl-C is honored.
            self.terminate()
            self.join()
            raise
        except multiprocessing.TimeoutError:
            # Not done yet; keep polling.
            continue
示例4: test_map_handle_iterable_exception
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_map_handle_iterable_exception(self):
    """map() must surface an exception raised while consuming its iterable."""
    if self.TYPE == 'manager':
        self.skipTest('test not appropriate for {}'.format(self.TYPE))

    # SayWhenError seen at the very first of the iterable
    with self.assertRaises(SayWhenError):
        self.pool.map(sqr, exception_throwing_generator(1, -1), 1)
    # again, make sure it's reentrant
    with self.assertRaises(SayWhenError):
        self.pool.map(sqr, exception_throwing_generator(1, -1), 1)

    with self.assertRaises(SayWhenError):
        self.pool.map(sqr, exception_throwing_generator(10, 3), 1)

    class BrokenIterable:
        # Advertises a length of one but raises as soon as it is iterated.
        def __iter__(self):
            return self

        def __next__(self):
            raise SayWhenError

        def __len__(self):
            return 1

    # Run twice to confirm the pool stays usable after the failure.
    for _ in range(2):
        with self.assertRaises(SayWhenError):
            self.pool.map(sqr, BrokenIterable(), 1)
示例5: scrape_pages
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def scrape_pages(pages):
    # Scrape each page in a worker pool, then bulk-save the results.
    # NOTE(review): make_transient / _sa_instance_state suggest `pages`
    # are SQLAlchemy ORM objects — confirm against the caller.
    for page in pages:
        make_transient(page)
    # free up the connection while doing net IO
    db.session.close()
    db.engine.dispose()
    pool = get_worker_pool()
    map_results = pool.map(scrape_with_timeout, pages, chunksize=1)
    # Keep only truthy results (pages whose scrape produced something).
    scraped_pages = [p for p in map_results if p]
    logger.info(u'finished scraping all pages')
    pool.close()
    pool.join()
    logger.info(u'preparing update records')
    row_dicts = [x.__dict__ for x in scraped_pages]
    for row_dict in row_dicts:
        # __dict__ carries ORM-internal state that the bulk update must not see.
        row_dict.pop('_sa_instance_state')
    logger.info(u'saving update records')
    db.session.bulk_update_mappings(PageNew, row_dicts)
    scraped_page_ids = [p.id for p in scraped_pages]
    return scraped_page_ids
# need to spawn processes from workers but can't do that if worker is daemonized
示例6: get_worker_pool
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def get_worker_pool():
    """Build a non-daemonic worker pool sized by GREEN_SCRAPE_PROCS_PER_WORKER (default 10)."""
    worker_count = int(os.getenv('GREEN_SCRAPE_PROCS_PER_WORKER', 10))
    # maxtasksperchild recycles workers periodically to cap leaked resources.
    return NDPool(processes=worker_count, maxtasksperchild=10)
# Pool.map hangs if a worker process dies, so wrap the scrape in a new process and watch that
示例7: test_map
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_map(self):
    """Pool.map must agree with the map() built-in over the same inputs."""
    pmap = self.pool.map
    # list() the map object: in Python 3, map() returns an iterator which
    # never compares equal to the list Pool.map returns.
    self.assertEqual(pmap(sqr, range(10)), list(map(sqr, range(10))))
    self.assertEqual(pmap(sqr, range(100), chunksize=20),
                     list(map(sqr, range(100))))
示例8: test_map_unplicklable
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_map_unplicklable(self):
    """A pickling failure inside map() must raise instead of hanging the pool."""
    # Issue #19425 -- failure to pickle should not cause a hang
    if self.TYPE == 'threads':
        # Thread pools never pickle task arguments, so skip there.
        self.skipTest('test not appropriate for {}'.format(self.TYPE))
    class A(object):
        # pickle calls __reduce__; raising here simulates an unpicklable argument.
        def __reduce__(self):
            raise RuntimeError('cannot pickle')
    with self.assertRaises(RuntimeError):
        self.pool.map(sqr, [A()]*10)
示例9: test_imap
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_imap(self):
    """Pool.imap yields results lazily, in order, then raises StopIteration."""
    it = self.pool.imap(sqr, range(10))
    # list() the map object so the comparison works under Python 3.
    self.assertEqual(list(it), list(map(sqr, range(10))))

    it = self.pool.imap(sqr, range(10))
    for i in range(10):
        # next(it) instead of it.next(): iterators lost .next() in Python 3.
        self.assertEqual(next(it), i*i)
    self.assertRaises(StopIteration, next, it)

    it = self.pool.imap(sqr, range(1000), chunksize=100)
    for i in range(1000):
        self.assertEqual(next(it), i*i)
    self.assertRaises(StopIteration, next, it)
示例10: test_imap_unordered
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_imap_unordered(self):
    """imap_unordered returns every result, though possibly out of order."""
    it = self.pool.imap_unordered(sqr, range(100))
    # sorted() restores a deterministic order; list() the Python 3 map object
    # so the equality check is list-vs-list.
    self.assertEqual(sorted(it), list(map(sqr, range(100))))
    it = self.pool.imap_unordered(sqr, range(1000), chunksize=100)
    self.assertEqual(sorted(it), list(map(sqr, range(1000))))
示例11: test_release_task_refs
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_release_task_refs(self):
    # Issue #29861: task arguments and results should not be kept
    # alive after we are done with them.
    objs = list(CountedObject() for i in range(10))
    refs = list(weakref.ref(o) for o in objs)
    self.pool.map(identity, objs)
    del objs
    time.sleep(DELTA)  # let threaded cleanup code run
    # Every weakref must now be dead: the pool dropped its task references.
    self.assertEqual(set(wr() for wr in refs), {None})
    # With a process pool, copies of the objects are returned, check
    # they were released too.
    self.assertEqual(CountedObject.n_instances, 0)
示例12: pool_in_process
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def pool_in_process():
    """Run _afunc over a small workload in a 4-process pool.

    The pool is now closed deterministically: the previous version leaked
    the pool (never closed/terminated it) and bound the result to an
    unused local.
    """
    with multiprocessing.Pool(processes=4) as pool:
        pool.map(_afunc, [1, 2, 3, 4, 5, 6, 7])
示例13: test_imap_unordered
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_imap_unordered(self):
    """imap_unordered over large ranges returns the full result set."""
    it = self.pool.imap_unordered(sqr, range(1000))
    # list() the Python 3 map object so sorted-results compare list-vs-list.
    self.assertEqual(sorted(it), list(map(sqr, range(1000))))
    it = self.pool.imap_unordered(sqr, range(1000), chunksize=53)
    self.assertEqual(sorted(it), list(map(sqr, range(1000))))
示例14: test_empty_iterable
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def test_empty_iterable(self):
    """Every Pool map variant returns an empty result for empty input (Issue 12157)."""
    worker = self.Pool(1)
    for outcome in (worker.map(sqr, []),
                    list(worker.imap(sqr, [])),
                    list(worker.imap_unordered(sqr, [])),
                    worker.map_async(sqr, []).get()):
        self.assertEqual(outcome, [])
    worker.close()
    worker.join()
示例15: _fetch_stock_data
# 需要导入模块: from multiprocessing import pool [as 别名]
# 或者: from multiprocessing.pool import map [as 别名]
def _fetch_stock_data(self, stock_list):
"""获取股票信息"""
pool = multiprocessing.pool.ThreadPool(len(stock_list))
try:
res = pool.map(self.get_stocks_by_range, stock_list)
finally:
pool.close()
return [d for d in res if d is not None]