本文整理汇总了Python中multiprocessing.dummy.Process.join方法的典型用法代码示例。如果您正苦于以下问题:Python Process.join方法的具体用法?Python Process.join怎么用?Python Process.join使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类multiprocessing.dummy.Process
的用法示例。
在下文中一共展示了Process.join方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: Ticker
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
class Ticker(object):
    """Poll the Poloniex ticker on a background thread and mirror it into
    the MongoDB collection ``poloniex.ticker``.

    Relies on module-level ``MongoClient``, ``Thread``, ``sleep`` and
    ``logger`` imported elsewhere in the file.
    """

    def __init__(self, api, interval=1):
        # api: object exposing returnTicker(); interval: poll period (seconds).
        self.api = api
        self.db = MongoClient().poloniex['ticker']
        self.interval = interval
        # Initialised here so stop() is safe even when start() was never
        # called (the original raised AttributeError on self._thread).
        self._running = False
        self._thread = None

    def updateTicker(self):
        """Fetch the current ticker and upsert one document per market."""
        tick = self.api.returnTicker()
        for market in tick:
            self.db.update_one({'_id': market},
                               {'$set': tick[market]},
                               upsert=True)
        logger.info('Ticker updated')

    def __call__(self):
        """Return the cached ticker as a list of documents."""
        return list(self.db.find())

    def run(self):
        # Poll loop executed on the worker thread created by start().
        self._running = True
        while self._running:
            self.updateTicker()
            sleep(self.interval)

    def start(self):
        """Launch the polling loop on a daemon thread."""
        self._thread = Thread(target=self.run)
        self._thread.daemon = True
        self._thread.start()
        logger.info('Ticker started')

    def stop(self):
        """Signal the poll loop to exit and wait for the thread to finish."""
        self._running = False
        if self._thread is not None:
            self._thread.join()
        logger.info('Ticker stopped')
示例2: test_stop
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_stop(self):
    """
    Verify that the stop method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we
    stop().
    This test seems to be failing @ CircleCI because of a test dependency
    issue. If run alone in your workstation it will PASS, but if run at
    CircleCI the count plugin doesn't seem to start.
    """
    runner = Process(target=self.w3afcore.start, name='TestRunner')
    runner.daemon = True
    runner.start()

    # Give the core time to boot and let the count plugin issue requests.
    time.sleep(5)

    requests_before_stop = self.count_plugin.count
    self.assertGreater(requests_before_stop, 0)

    # Stop the scan and wait for the runner process to finish.
    self.w3afcore.stop()
    runner.join()

    # The counter must not have advanced after stop().
    self.assertEqual(self.count_plugin.count, requests_before_stop)
示例3: main
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def main():
    """
    Creates instances of the above methods and occasionally checks for crashed
    worker processes & relaunches.

    Runs until RUNNING.value becomes falsy, then joins every child.
    """
    worker_processes = []

    get_update_process = Process(target=get_updates)
    get_update_process.start()

    for _ in range(int(CONFIG['BOT_CONFIG']['workers'])):
        worker = Process(target=process_updates)
        worker.start()
        worker_processes.append(worker)

    time_worker = ThreadProcess(target=check_time_args)
    time_worker.start()

    while RUNNING.value:
        time.sleep(30)

        # Rebuild the worker list instead of `del`-ing entries while
        # enumerate() iterates it -- the original skipped the element that
        # followed every dead worker, so some crashes went unnoticed.
        alive = [w for w in worker_processes if w.is_alive()]
        dead_count = len(worker_processes) - len(alive)
        for _ in range(dead_count):
            replacement = Process(target=process_updates)
            replacement.start()
            alive.append(replacement)
        worker_processes = alive

        if not time_worker.is_alive():
            time_worker = ThreadProcess(target=check_time_args)
            time_worker.start()

        if not get_update_process.is_alive():
            get_update_process = Process(target=get_updates)
            get_update_process.start()

    # Shutdown: wait for every child to exit cleanly.
    get_update_process.join()
    time_worker.join()
    for worker in worker_processes:
        worker.join()
示例4: process_updates
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def process_updates():
    """
    Decides which type the update is and routes it to the appropriate route_updates
    method and launches a thread for the run_extensions method.
    """
    # Workers ignore SIGINT; shutdown is coordinated via the RUNNING flag.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Per-worker HTTP pool with certificate verification, 1s connect timeout.
    plugin_http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    plugin_http.timeout = urllib3.Timeout(connect=1.0)
    plugin_http.retries = 3
    update_router = RouteMessage(PLUGINS, plugin_http, GET_ME, CONFIG)
    while RUNNING.value:
        try:
            update = MESSAGE_QUEUE.get_nowait()
        except queue.Empty:
            # Nothing queued; back off briefly before polling again.
            time.sleep(SLEEP_TIME)
            continue
        # Extensions run concurrently with the routing below.
        extension_thread = ThreadProcess(target=run_extensions, args=(update, ))
        extension_thread.start()
        # Dispatch on the update payload type.
        if 'message' in update:
            update_router.route_update(update['message'])
        elif 'edited_message' in update:
            update_router.route_update(update['edited_message'])
        elif 'callback_query' in update:
            route_callback_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['callback_query'])
        elif 'inline_query' in update:
            route_inline_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['inline_query'])
        # NOTE(review): indentation was lost in this listing; the join is
        # placed inside the loop so each update's extensions finish before
        # the next update is taken -- confirm against the original project.
        extension_thread.join()
示例5: test_pause_stop
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_pause_stop(self):
    """
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we,
    pause and that stop works when paused.
    """
    runner = Process(target=self.w3afcore.start, name="TestRunner")
    runner.daemon = True
    runner.start()

    # Give the core a head start so the count plugin issues some requests.
    time.sleep(5)

    before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # Pause, then sample the counter twice to prove it is frozen.
    self.w3afcore.pause(True)
    after_pause = self.count_plugin.count
    time.sleep(2)
    after_sleep = self.count_plugin.count
    self.assertTrue(before_pause == after_pause == after_sleep)

    # Stopping while paused must also work.
    self.w3afcore.stop()
    runner.join()

    # No more requests sent after pause
    self.assertEqual(self.count_plugin.count, after_sleep)
示例6: test_pause_unpause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_pause_unpause(self):
    """
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests, fact
    that is verified with the "fake" count plugin.
    """
    runner = Process(target=self.w3afcore.start, name='TestRunner')
    runner.daemon = True
    runner.start()

    # Give the core a head start so the count plugin issues some requests.
    time.sleep(5)

    before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # While paused the counter must not advance, even after sleeping.
    self.w3afcore.pause(True)
    after_pause = self.count_plugin.count
    time.sleep(2)
    after_sleep = self.count_plugin.count
    self.assertTrue(before_pause == after_pause == after_sleep)

    # Unpause and let the scan run to completion.
    self.w3afcore.pause(False)
    runner.join()

    # Every planned request must have been sent by the end of the scan.
    self.assertEqual(self.count_plugin.count, self.count_plugin.loops)
示例7: test_pause_unpause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_pause_unpause(self):
    """While the opener is paused a GET must block; after unpausing it
    must complete and return the expected 200 response."""
    output = Queue.Queue()
    self.uri_opener.pause(True)

    def send(uri_opener, output):
        # Helper-thread body: perform the GET and hand the response
        # (or None on failure) back through the queue.
        url = URL(get_moth_http())
        try:
            http_response = uri_opener.GET(url)
            output.put(http_response)
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt raised in this thread.
            output.put(None)

    th = Process(target=send, args=(self.uri_opener, output))
    th.daemon = True
    th.start()

    # While paused, nothing may arrive within the two second timeout.
    self.assertRaises(Queue.Empty, output.get, True, 2)

    self.uri_opener.pause(False)
    http_response = output.get()
    self.assertNotIsInstance(http_response, types.NoneType,
                             'Error in send thread.')
    th.join()

    self.assertEqual(http_response.get_code(), 200)
    self.assertIn(self.MOTH_MESSAGE, http_response.body)
示例8: scoreDuplicates
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    """Score blocked record pairs in parallel.

    Fans ``records`` out to ``n_map_processes`` ScoreDupes workers via
    ``record_pairs_queue``, merges their output with ``mergeScores`` and
    returns the scored pairs as a numpy array (memory-mapped when the
    reducer produced a file).

    Raises:
        BlockingError: when no records were blocked together.
        ChildProcessError: when a worker reports a failure.
    """
    if num_cores < 2:
        # multiprocessing.dummy backs Process with threads; no real fork.
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue

    first, records = peek(records)
    if first is None:
        raise BlockingError("No records have been blocked together. "
                            "Is the data you are trying to match like "
                            "the data you trained on?")

    record_pairs_queue = Queue(2)
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()
    n_map_processes = max(num_cores, 1)

    score_records = ScoreDupes(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    # Plain loop for the side effect (the original abused a comprehension).
    for process in map_processes:
        process.start()

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        # Preserve the worker's message instead of raising an empty error.
        raise ChildProcessError(str(result))

    if result:
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype,
                                    shape=(size,))
    else:
        # NOTE(review): the trailing `1` in ('score', 'f4', 1) is a
        # deprecated numpy sub-array shape; kept for on-disk parity.
        dtype = numpy.dtype([('pairs', object, 2),
                             ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)

    reduce_process.join()
    for process in map_processes:
        process.join()

    return scored_pairs
示例9: get_stats
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def get_stats():
    # Scrape NBA player stats with a pool of worker threads and append the
    # rows to a pipe-delimited CSV.  Python 2 code (print statements,
    # binary 'wb' file mode for the csv module).
    print 'Fetching NBA player stats...'
    stats_outfile = RUNDAY+'_nba_stats.csv'
    csvout = open(stats_outfile, 'wb')

    NUM_THREADS = 8
    in_queue = Queue()
    out_queue = Queue()
    # Fill in_queue with the players to fetch (helper defined elsewhere).
    queue_players(in_queue)

    while not in_queue.empty():
        # Launch up to NUM_THREADS workers per batch and wait for them all.
        jobs = []
        for i in range(NUM_THREADS):
            if not in_queue.empty():
                thread = Process(target=get_stats_helper, args=(in_queue, out_queue))
                jobs.append(thread)
                thread.start()
        for thread in jobs:
            thread.join()

        # Drain the results produced by this batch.
        while not out_queue.empty():
            player = out_queue.get()
            del player['SUCCESS']
            try:
                name = player['NAME']
            except KeyError as e:
                # Skip records that came back without a name.
                continue
            player['TIME'] = RUNDAY
            fieldnames = [
                'TIME',
                'NAME',
                'JERSEY',
                'SPORT',
                'TEAM',
                'POSITION',
                'PTS',
                'REB',
                'AST',
                'URL'
            ]
            # NOTE(review): a DictWriter is rebuilt for every row and no
            # header row is ever written -- possibly intentional for
            # headerless appends, but worth confirming.
            csvwriter = csv.DictWriter(csvout, delimiter='|', fieldnames=fieldnames)
            csvwriter.writerow(player)

    csvout.close()
    print 'Finished fetching NBA player stats.'
    print 'Ouput saved in %s' % stats_outfile
示例10: __init__
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def __init__(self):
    """Cache every discovered file through a small process pool while a
    separate reporter process prints progress."""
    pool = Pool(processes=2)
    self.graph = getGraph()
    # `opts` is read from module scope -- presumably parsed CLI options.
    files = findFiles(opts)

    # The reporter consumes progress messages from this queue until it
    # receives the 'END' sentinel pushed below.
    self.progressQueue = Queue()
    reporter = Process(target=ProgressReport,
                       args=(self.progressQueue, len(files)))
    reporter.start()

    # cacheFile receives (index, path) tuples; chunksize batches the work.
    result = pool.map(self.cacheFile, enumerate(files), chunksize=5)

    self.progressQueue.put('END')
    log.info("finished, %s results", len(result))
    reporter.join()
示例11: Ticker
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
class Ticker(object):
    """Controls the ticker subprocess and the thread that consumes it.

    The live poloniex ticker dict is held under ``self.markets``.
    """

    def __init__(self):
        # Subprocess / thread handles, populated by startTicker().
        self._tickerP, self._tickerT = [None, None]
        self.markets = poloniex.Poloniex().marketTicker()

    def startTicker(self):
        """Start the 'tickcatcher' subprocess and the 'tickCatcher' thread."""
        self._tickerP = Popen(["python", "tickcatcher.py"], stdout=PIPE, bufsize=1)
        print('TICKER: tickcatcher subprocess started')

        self._tickerT = Thread(target=self.tickCatcher)
        self._tickerT.daemon = True
        self._tickerT.start()
        print('TICKER: tickCatcher thread started')

    def stopTicker(self):
        """Stop the ticker subprocess and join the consumer thread."""
        self._tickerP.terminate()
        self._tickerP.kill()
        print('TICKER: Ticker subprocess stopped')
        self._tickerT.join()
        print('TICKER: Ticker thread joined')

    def tickCatcher(self):
        # Consume the subprocess stdout line by line until EOF.
        with self._tickerP.stdout:
            for line in iter(self._tickerP.stdout.readline, b''):
                try:
                    # Shave off the twisted timestamp prefix before parsing
                    # the JSON payload (probably a better way to do this).
                    tick = json.loads(line[25:])
                    self.markets[tick[0]] = {
                        'last': tick[1],
                        'lowestAsk': tick[2],
                        'highestBid': tick[3],
                        'percentChange': tick[4],
                        'baseVolume': tick[5],
                        'quoteVolume': tick[6],
                        'isFrozen': tick[7],
                        'high24hr': tick[8],
                        'low24hr': tick[9],
                        'id': self.markets[tick[0]]['id']
                    }
                except Exception as e:
                    print(e)
        # Reap the subprocess once its stdout is closed.
        self._tickerP.wait()
示例12: scoreDuplicates
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    """Score candidate record pairs in parallel.

    Fans ``records`` out to ``n_map_processes`` ScoreRecords workers,
    merges their output with ``mergeScores`` and returns the scored pairs
    (a numpy memmap when the reducer produced a file, otherwise the raw
    falsy result).

    Raises:
        ChildProcessError: when a worker reports a failure.
    """
    if num_cores < 2:
        # multiprocessing.dummy backs Process with threads; no real fork.
        from multiprocessing.dummy import Process, Pool, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, Pool, SimpleQueue

    record_pairs_queue = SimpleQueue()
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()
    # One process is reserved for the reducer, hence num_cores - 1 mappers.
    n_map_processes = max(num_cores - 1, 1)

    score_records = ScoreRecords(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    # Plain loop for the side effect (the original abused a comprehension).
    for process in map_processes:
        process.start()

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        # Preserve the worker's message instead of raising an empty error.
        raise ChildProcessError(str(result))

    if result:
        scored_pairs_file, dtype = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype)
    else:
        scored_pairs = result

    reduce_process.join()
    for process in map_processes:
        process.join()

    return scored_pairs
示例13: stat_files
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def stat_files():
all_files = []
for root, dirs, files in os.walk('/home/gzguoyubo/mf/tw2/res/entities/custom_type'):
ignore = False
for ig_path in ignore_paths:
if ig_path in root:
ignore = True
if ignore:
continue
for fname in files:
if not fname.endswith('.py'):
continue
abs_file_path = join(root, fname)
all_files.append(abs_file_path)
file_sections = []
file_total_nums = len(all_files)
for i in xrange(P_NUM):
start = i * file_total_nums / P_NUM
stop = start + file_total_nums / P_NUM
if i == P_NUM - 1:
stop = -1
file_sections.append(all_files[start : stop])
res_queue = Queue()
processes = []
for section in file_sections:
p = Process(target=stat_file, args=(section, res_queue))
p.start()
processes.append(p)
for p in processes:
p.join()
total_stats = defaultdict(int)
while not res_queue.empty():
stat = res_queue.get()
for author, cnt in stat.iteritems():
total_stats[author] += cnt
print total_stats
示例14: test_stop
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_stop(self):
    """
    Verify that the stop method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we
    stop().
    """
    runner = Process(target=self.w3afcore.start, name="TestRunner")
    runner.daemon = True
    runner.start()

    # Give the core time to boot and let the count plugin issue requests.
    time.sleep(5)

    requests_before_stop = self.count_plugin.count
    self.assertGreater(requests_before_stop, 0)

    # Stop the scan and wait for the runner process to finish.
    self.w3afcore.stop()
    runner.join()

    # The counter must not have advanced after stop().
    self.assertEqual(self.count_plugin.count, requests_before_stop)
示例15: test_pause_unpause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import join [as 别名]
def test_pause_unpause(self):
    """A GET must block while the opener is paused and succeed once
    unpaused."""
    responses = Queue.Queue()
    self.uri_opener.pause(True)

    def send(uri_opener, output):
        # Helper-thread body: perform the GET, report via the queue.
        url = URL('http://moth/')
        http_response = uri_opener.GET(url)
        output.put(http_response)

    sender = Process(target=send, args=(self.uri_opener, responses))
    sender.daemon = True
    sender.start()

    # While paused, nothing may arrive within the two second timeout.
    self.assertRaises(Queue.Empty, responses.get, True, 2)

    self.uri_opener.pause(False)
    http_response = responses.get()
    sender.join()

    self.assertEqual(http_response.get_code(), 200)
    self.assertIn(self.MOTH_MESSAGE, http_response.body)