本文整理汇总了Python中multiprocessing.dummy.Process.start方法的典型用法代码示例。如果您正苦于以下问题:Python Process.start方法的具体用法?Python Process.start怎么用?Python Process.start使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类multiprocessing.dummy.Process的用法示例。
在下文中一共展示了Process.start方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_pause_unpause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def test_pause_unpause(self):
    """Pausing the opener must delay responses until unpause.

    While paused, the GET issued from a worker thread must not complete
    (the output queue stays empty for 2 seconds); after unpausing, the
    response must arrive with HTTP 200 and the expected body.
    """
    output = Queue.Queue()
    self.uri_opener.pause(True)

    def send(uri_opener, output):
        url = URL(get_moth_http())
        try:
            http_response = uri_opener.GET(url)
            output.put(http_response)
        except Exception:
            # Signal failure to the main thread instead of dying silently.
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            output.put(None)

    th = Process(target=send, args=(self.uri_opener, output))
    th.daemon = True
    th.start()

    # While paused, nothing should reach the queue within the timeout.
    self.assertRaises(Queue.Empty, output.get, True, 2)

    self.uri_opener.pause(False)
    http_response = output.get()
    self.assertNotIsInstance(http_response, types.NoneType,
                             'Error in send thread.')
    th.join()

    self.assertEqual(http_response.get_code(), 200)
    self.assertIn(self.MOTH_MESSAGE, http_response.body)
示例2: start
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def start(self):
    """Handle Kodi's onPlayBackStarted for items served by this addon.

    Ignores playback of files outside the addon's library/resource
    paths; otherwise wires up the external player, the optional remote,
    and (for episodes) a background watched-state monitor thread.
    """
    playingfile = self.get_playing_file()
    # Only react to items that live under this addon's own paths.
    if not (playingfile["file"].startswith(uni_join(const.libpath, const.provider)) or
            playingfile["file"].startswith(uni_join(const.addonpath, "resources"))):
        return
    kodi.log("start onPlayBackStarted")
    self.koala_playing = True

    self.player = Player()

    # Remote control is optional and driven by the addon settings.
    self.remote = None
    if kodi.settings["remote"]:
        self.remote = remote.Remote()
        self.remote.run(player=self.player)

    self.player.connect()

    # NOTE(review): the NRK static page appears to be special-cased so no
    # player-window handling is done for it -- confirm against provider.
    if "NRK nett-TV.htm" not in playingfile["file"]:
        self.player.get_player_coord()
        self.player.wait_player_start()
        self.player.toggle_fullscreen()

    if playingfile["type"] == "episode":
        # Track progress in the background so the item can be marked watched.
        thread = Thread(target=self.monitor_watched, args=[playingfile])
        thread.start()
    kodi.log("finished onPlayBackStarted")
示例3: main
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def main():
    """
    Creates instances of the above methods and occassionally checks for crashed
    worker processes & relaunches.

    Spawns one update-fetcher, N update-workers (from config) and a timer
    thread; every 30 seconds any crashed child is replaced. Once RUNNING
    is cleared, all children are joined before returning.
    """
    get_update_process = Process(target=get_updates)
    get_update_process.start()

    worker_process = list()
    for i in range(0, int(CONFIG['BOT_CONFIG']['workers'])):
        worker_process.append(Process(target=process_updates))
        worker_process[i].start()

    time_worker = ThreadProcess(target=check_time_args)
    time_worker.start()

    while RUNNING.value:
        time.sleep(30)
        # Replace crashed workers. Build a fresh list instead of calling
        # `del worker_process[index]` inside enumerate() -- deleting while
        # iterating shifts the indices and skips the element right after
        # each removal (bug in the original).
        alive = [worker for worker in worker_process if worker.is_alive()]
        for _ in range(len(worker_process) - len(alive)):
            replacement = Process(target=process_updates)
            replacement.start()
            alive.append(replacement)
        worker_process = alive

        if not time_worker.is_alive():
            time_worker = ThreadProcess(target=check_time_args)
            time_worker.start()
        if not get_update_process.is_alive():
            get_update_process = Process(target=get_updates)
            get_update_process.start()

    # Shutdown: wait for every child to finish.
    get_update_process.join()
    time_worker.join()
    for worker in worker_process:
        worker.join()
示例4: Ticker
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
class Ticker(object):
    """Periodically mirrors the Poloniex ticker into a MongoDB collection.

    Polling runs on a daemonized background thread controlled by
    start()/stop(); calling the instance returns the stored ticker rows.
    """

    def __init__(self, api, interval=1):
        self.api = api
        self.db = MongoClient().poloniex['ticker']
        self.interval = interval

    def updateTicker(self):
        # Upsert one document per market, keyed by the market name.
        snapshot = self.api.returnTicker()
        for name in snapshot:
            self.db.update_one({'_id': name},
                               {'$set': snapshot[name]},
                               upsert=True)
        logger.info('Ticker updated')

    def __call__(self):
        # Materialize the whole collection as a plain list of documents.
        return list(self.db.find())

    def run(self):
        # Poll until stop() clears the flag.
        self._running = True
        while self._running:
            self.updateTicker()
            sleep(self.interval)

    def start(self):
        self._thread = Thread(target=self.run)
        self._thread.daemon = True
        self._thread.start()
        logger.info('Ticker started')

    def stop(self):
        # Flag the loop to exit, then wait for the final iteration.
        self._running = False
        self._thread.join()
        logger.info('Ticker stopped')
示例5: test_stop
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def test_stop(self):
    """
    Verify that the stop method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we
    stop().

    This test seems to be failing @ CircleCI because of a test dependency
    issue. If run alone in your workstation it will PASS, but if run at
    CircleCI the count plugin doesn't seem to start.
    """
    core_start = Process(target=self.w3afcore.start, name='TestRunner')
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_stop = self.count_plugin.count
    self.assertGreater(count_before_stop, 0)

    # Stop now,
    self.w3afcore.stop()
    core_start.join()

    # ...and verify the request counter did not move after the join.
    count_after_stop = self.count_plugin.count
    self.assertEqual(count_after_stop, count_before_stop)
示例6: process_updates
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def process_updates():
    """
    Decides which type the update is and routes it to the appropriate route_updates
    method and launches a thread for the run_extensions method.
    """
    # Workers ignore SIGINT; the parent process owns Ctrl-C handling.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Dedicated HTTPS pool for plugins: verified certs, 1s connect, 3 retries.
    plugin_http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    plugin_http.timeout = urllib3.Timeout(connect=1.0)
    plugin_http.retries = 3
    update_router = RouteMessage(PLUGINS, plugin_http, GET_ME, CONFIG)
    while RUNNING.value:
        try:
            update = MESSAGE_QUEUE.get_nowait()
        except queue.Empty:
            # Nothing queued; back off briefly before polling again.
            time.sleep(SLEEP_TIME)
            continue
        # Extensions run concurrently with the routing below and are
        # joined before the next update is taken.
        extension_thread = ThreadProcess(target=run_extensions, args=(update, ))
        extension_thread.start()
        # Route by Telegram update type; exactly one branch applies.
        if 'message' in update:
            update_router.route_update(update['message'])
        elif 'edited_message' in update:
            update_router.route_update(update['edited_message'])
        elif 'callback_query' in update:
            route_callback_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['callback_query'])
        elif 'inline_query' in update:
            route_inline_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['inline_query'])
        extension_thread.join()
示例7: test_pause_unpause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def test_pause_unpause(self):
    """
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests, fact
    that is verified with the "fake" count plugin.
    """
    core_start = Process(target=self.w3afcore.start, name='TestRunner')
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # Pause and measure
    self.w3afcore.pause(True)
    count_after_pause = self.count_plugin.count

    time.sleep(2)
    count_after_sleep = self.count_plugin.count

    # The counter must not move at any point while paused.
    all_equal = count_before_pause == count_after_pause == count_after_sleep
    self.assertTrue(all_equal)

    # Unpause and verify that all requests were sent
    self.w3afcore.pause(False)
    core_start.join()

    self.assertEqual(self.count_plugin.count, self.count_plugin.loops)
示例8: test_pause_stop
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def test_pause_stop(self):
    """
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we,
    pause and that stop works when paused.
    """
    core_start = Process(target=self.w3afcore.start, name="TestRunner")
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # Pause and measure
    self.w3afcore.pause(True)
    count_after_pause = self.count_plugin.count

    time.sleep(2)
    count_after_sleep = self.count_plugin.count

    # The counter must not move at any point while paused.
    all_equal = count_before_pause == count_after_pause == count_after_sleep
    self.assertTrue(all_equal)

    # Stop while still paused and wait for the core to shut down.
    self.w3afcore.stop()
    core_start.join()

    # No more requests sent after pause
    self.assertEqual(self.count_plugin.count, count_after_sleep)
示例9: process_request
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def process_request(self, request, client_address):
    """Dispatch one incoming request onto a fresh worker thread.

    Overrides the server's request hook: the worker runs
    self.process_request_thread and inherits self.daemon_threads.
    """
    worker = Process(target=self.process_request_thread,
                     args=(request, client_address))
    worker.daemon = self.daemon_threads
    worker.start()
示例10: _draw_start
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def _draw_start(self):
    """Hand graph drawing off to a worker thread when there is new data.

    Returns True to keep the calling gobject timer alive (nothing new to
    draw yet) and False once drawing has been handed off -- from then on
    the _draw_end callback polls the queue on its own 500 ms timer.
    """
    if not self._somethingnew:
        return True

    # let's draw!
    q = Queue.Queue()
    evt = Event()
    th = Process(target=self._draw_real, args=(q, evt), name='GTKDraw')
    th.start()

    # Poll for the worker's result from the GTK main loop.
    gobject.timeout_add(500, self._draw_end, q, evt)
    return False
示例11: scoreDuplicates
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    """Score blocked record pairs in parallel; return a numpy array/memmap.

    Fans the candidate pairs out to `num_cores` ScoreDupes map workers
    (threads via multiprocessing.dummy when num_cores < 2, real processes
    otherwise) and funnels their scores through one reduce worker.

    Raises BlockingError when `records` is empty and ChildProcessError
    when the reduce step reports an exception.
    """
    if num_cores < 2:
        # dummy.Process is a thread, so no pickling/IPC overhead.
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue

    # Peek so we can fail fast on an empty stream without consuming it.
    first, records = peek(records)
    if first is None:
        raise BlockingError("No records have been blocked together. "
                            "Is the data you are trying to match like "
                            "the data you trained on?")

    # Bounded queue applies back-pressure on the pair producer.
    record_pairs_queue = Queue(2)
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()
    n_map_processes = max(num_cores, 1)
    score_records = ScoreDupes(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    [process.start() for process in map_processes]

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    # Feed the workers; blocks until all pairs are enqueued.
    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        raise ChildProcessError

    if result:
        # Workers spilled scores to a memory-mapped file on disk.
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype,
                                    shape=(size,))
    else:
        # Nothing scored above threshold: return an empty, typed array.
        dtype = numpy.dtype([('pairs', object, 2),
                             ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)

    reduce_process.join()
    [process.join() for process in map_processes]

    return scored_pairs
示例12: test_pause
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def test_pause(self):
    """While the opener is paused, a GET from another thread must block."""
    output = Queue.Queue()
    self.uri_opener.pause(True)

    def send(uri_opener, output):
        url = URL('http://moth/')
        http_response = uri_opener.GET(url)
        output.put(http_response)

    th = Process(target=send, args=(self.uri_opener, output))
    th.daemon = True
    th.start()

    # Nothing may reach the queue within the 2 second timeout.
    self.assertRaises(Queue.Empty, output.get, True, 2)
示例13: get_stats
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def get_stats():
print 'Fetching NBA player stats...'
stats_outfile = RUNDAY+'_nba_stats.csv'
csvout = open(stats_outfile, 'wb')
NUM_THREADS = 8
in_queue = Queue()
out_queue = Queue()
queue_players(in_queue)
while not in_queue.empty():
jobs = []
for i in range(NUM_THREADS):
if not in_queue.empty():
thread = Process(target=get_stats_helper, args=(in_queue, out_queue))
jobs.append(thread)
thread.start()
for thread in jobs:
thread.join()
while not out_queue.empty():
player = out_queue.get()
del player['SUCCESS']
try:
name = player['NAME']
except KeyError as e:
continue
player['TIME'] = RUNDAY
fieldnames = [
'TIME',
'NAME',
'JERSEY',
'SPORT',
'TEAM',
'POSITION',
'PTS',
'REB',
'AST',
'URL'
]
csvwriter = csv.DictWriter(csvout, delimiter='|', fieldnames=fieldnames)
csvwriter.writerow(player)
csvout.close()
print 'Finished fetching NBA player stats.'
print 'Ouput saved in %s' % stats_outfile
示例14: __init__
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
def __init__(self):
    # Two pool workers cache the files while a separate reporter
    # (a thread, via multiprocessing.dummy) prints progress read from
    # the shared queue.
    pool = Pool(processes=2)
    self.graph = getGraph()
    files = findFiles(opts)

    self.progressQueue = Queue()
    reporter = Process(target=ProgressReport,
                       args=(self.progressQueue, len(files)))
    reporter.start()

    # Blocks until every file has been processed.
    result = pool.map(self.cacheFile, enumerate(files), chunksize=5)
    # Sentinel tells the reporter to finish before we join it.
    self.progressQueue.put('END')
    log.info("finished, %s results", len(result))
    reporter.join()
示例15: Ticker
# 需要导入模块: from multiprocessing.dummy import Process [as 别名]
# 或者: from multiprocessing.dummy.Process import start [as 别名]
class Ticker(object):
    """ Ticker object for controlling the ticker thread and subprocess
    Holds poloniex ticker dict under self.markets"""

    def __init__(self):
        # Subprocess handle and reader thread; both created by startTicker().
        self._tickerP, self._tickerT = [None, None]
        # Seed with a full snapshot so every market already has its 'id'.
        self.markets = poloniex.Poloniex().marketTicker()

    def startTicker(self):
        """ Starts the 'tickcatcher' subprocess and 'tickCatcher' thread"""
        # Line-buffered pipe: tickcatcher.py streams one tick per line.
        self._tickerP = Popen(["python", "tickcatcher.py"], stdout=PIPE, bufsize=1)
        print('TICKER: tickcatcher subprocess started')

        self._tickerT = Thread(target=self.tickCatcher);self._tickerT.daemon = True
        self._tickerT.start()
        print('TICKER: tickCatcher thread started')

    def stopTicker(self):
        """ Stops the ticker subprocess"""
        self._tickerP.terminate();self._tickerP.kill()
        print('TICKER: Ticker subprocess stopped')
        self._tickerT.join()
        print('TICKER: Ticker thread joined')

    def tickCatcher(self):
        """Read ticks from the subprocess stdout until the pipe closes."""
        with self._tickerP.stdout:
            for line in iter(self._tickerP.stdout.readline, b''):
                try:
                    # Positional tick fields follow poloniex's push-API order;
                    # 'id' is preserved from the seeded snapshot.
                    tick = json.loads(line[25:]) # shave off twisted timestamp (probably a better way to remove the timestamp...)
                    self.markets[tick[0]] = {
                        'last':tick[1],
                        'lowestAsk':tick[2],
                        'highestBid':tick[3],
                        'percentChange':tick[4],
                        'baseVolume':tick[5],
                        'quoteVolume':tick[6],
                        'isFrozen':tick[7],
                        'high24hr':tick[8],
                        'low24hr':tick[9],
                        'id':self.markets[tick[0]]['id']
                    }
                except Exception as e:
                    # Malformed line: report it and keep consuming the stream.
                    print(e)
        # Reap the subprocess once its stdout is exhausted.
        self._tickerP.wait()