This article collects typical usage examples of Python's multiprocessing.Process. If you have been wondering what multiprocessing.Process does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore other usage examples from the multiprocessing module.
The following presents 15 code examples of multiprocessing.Process, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
Example 1: main
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def main():
    # A Manager-backed queue can be shared safely between processes
    m = multiprocessing.Manager()
    sharedQueue = m.Queue()
    sharedQueue.put(2)
    sharedQueue.put(3)
    sharedQueue.put(4)

    process1 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
    process1.start()
    process2 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
    process2.start()
    process3 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
    process3.start()

    process2.join()
    process1.join()
    process3.join()
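This snippet assumes a `myTask` function defined elsewhere in the same file. A minimal sketch of what such a consumer could look like (the body here is an assumption, not part of the original example):

def myTask(queue):
    # Hypothetical consumer: pull one value off the shared queue and report it.
    value = queue.get()
    print("Got value: {}".format(value))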
Example 2: parallel_download_all_section
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def parallel_download_all_section(*arg):
    if len(arg) == 1:
        k = arg[0]
        pro = []
        for key in decode.keys():
            pro.append(multiprocessing.Process(target=download, args=(key, k)))
            # th.append(threading.Thread(target=download, args=(key, k)))
        for p in pro:
            p.start()
        for p in pro:
            p.join()
    elif len(arg) == 2:
        From = arg[0]
        To = arg[1]
        pro = []
        for key in decode.keys():
            pro.append(multiprocessing.Process(target=download, args=(key, From, To)))
            # th.append(threading.Thread(target=download, args=(key, From, To)))
        for p in pro:
            p.start()
        for p in pro:
            p.join()
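Both branches rely on a module-level `decode` mapping and a `download` function that are not shown. Hypothetical stand-ins, purely for illustration:

# Hypothetical stand-ins for the module-level names the snippet relies on.
decode = {"section1": "id1", "section2": "id2"}  # section key -> identifier

def download(key, *span):
    # Placeholder: fetch the section for `key`; `span` is either (k,) or
    # (From, To), matching the two calling branches above.
    print("downloading", key, span)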
Example 3: main
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def main():
    print("Starting number crunching")
    t0 = time.time()
    procs = []
    # Here we create our processes and kick them off
    for i in range(10):
        proc = Process(target=executeProc, args=())
        procs.append(proc)
        proc.start()
    # Again we use the .join() method in order to wait for
    # execution to finish for all of our processes
    for proc in procs:
        proc.join()
    t1 = time.time()
    totalTime = t1 - t0
    # We print out the total execution time for our 10 procs
    print("Execution Time: {}".format(totalTime))
Example 4: add_step
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def add_step(self, module_name_and_params, extra_args):
    config = module_name_and_params.split()
    module_name = config[0]
    params = config[1:]
    # Collect extra arguments from the command line meant for this particular module
    if extra_args is not None:
        for _name, _value in extra_args.__dict__.items():
            if _name.startswith(module_name):
                _modname, _argname = _name.split(".", 1)  # for example lemmatizer_mod.gpu
                params.append("--" + _argname)
                params.append(str(_value))
    mod = importlib.import_module(module_name)
    step_in = self.q_out
    self.q_out = Queue(self.max_q_size)  # new pipeline end
    args = mod.argparser.parse_args(params)
    process = Process(target=mod.launch, args=(args, step_in, self.q_out))
    process.daemon = True
    process.start()
    self.processes.append(process)
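Each pipeline module is expected to expose an `argparser` and a `launch(args, in_queue, out_queue)` entry point, judging by how `add_step` calls into it. A skeletal module satisfying that contract (all names here are inferred assumptions) might look like:

import argparse

argparser = argparse.ArgumentParser()
argparser.add_argument("--gpu", type=int, default=-1)

def launch(args, q_in, q_out):
    # Hypothetical pass-through stage: read items from the previous step's
    # queue and forward them to the next one.
    while True:
        item = q_in.get()
        q_out.put(item)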
Example 5: test_multiprocessing_download_successful
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def test_multiprocessing_download_successful():
    """Test download with multiprocessing."""
    tmp = tempfile.mkdtemp()
    tmpfile = os.path.join(tmp, 'README.md')
    process_list = []
    # Exercise the download with 10 concurrent processes
    for i in range(10):
        process_list.append(mp.Process(
            target=_download_successful, args=(tmpfile,)))
        process_list[i].start()
    for i in range(10):
        process_list[i].join()
    assert os.path.getsize(tmpfile) > 100, os.path.getsize(tmpfile)
    # Check that only the one file we want is left
    pattern = os.path.join(tmp, 'README.md*')
    assert len(glob.glob(pattern)) == 1, glob.glob(pattern)
    # Delete the temp dir
    shutil.rmtree(tmp)
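The `_download_successful` helper is defined elsewhere in the test module; in the real suite it presumably calls the library's own download utility. A generic stand-in (URL and body are assumptions) for trying the test locally:

import urllib.request

def _download_successful(tmpfile):
    # Hypothetical stand-in: each process fetches the same file to the same
    # path; the utility under test is expected to handle the race safely.
    urllib.request.urlretrieve("https://example.com/README.md", tmpfile)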
Example 6: __init__
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def __init__(self, constructor):
    """Step environment in a separate process for lock-free parallelism.

    The environment will be created in the external process by calling the
    specified callable. This can be an environment class, or a function
    creating the environment and potentially wrapping it. The returned
    environment should not access global variables.

    Args:
      constructor: Callable that creates and returns an OpenAI gym environment.

    Attributes:
      observation_space: The cached observation space of the environment.
      action_space: The cached action space of the environment.
    """
    self._conn, conn = multiprocessing.Pipe()
    self._process = multiprocessing.Process(
        target=self._worker, args=(constructor, conn))
    atexit.register(self.close)
    self._process.start()
    self._observ_space = None
    self._action_space = None
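The heavy lifting happens in `self._worker`, which runs in the child process. A simplified sketch of such a worker loop (the message names are assumptions, not the original implementation):

def _worker(self, constructor, conn):
    # Hypothetical worker loop: build the environment inside the child
    # process and answer requests arriving over the pipe until told to close.
    env = constructor()
    while True:
        name, payload = conn.recv()
        if name == 'step':
            conn.send(env.step(payload))
        elif name == 'reset':
            conn.send(env.reset())
        elif name == 'close':
            conn.close()
            break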
Example 7: attach
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def attach(self):
    """
    Start listening for log messages.

    Log messages in the queue will appear like the following:

        {
            'service': 'main',
            'timestamp': '2017-01-30T15:46:23.009397536Z',
            'message': 'Something happened'
        }
    """
    if not self.listening:
        for service in self.chute.get_services():
            process = Process(target=monitor_logs,
                              args=(service.name, service.get_container_name(), self.queue))
            process.start()
            self.processes.append(process)
        self.listening = True
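Each service gets its own process running `monitor_logs`, which presumably tails the container's log stream and pushes parsed entries onto the shared queue. A rough sketch using the Docker SDK for Python (the implementation details are assumptions):

import docker

def monitor_logs(service_name, container_name, queue):
    # Hypothetical implementation: follow the container's log stream and
    # push one dict per line onto the shared queue.
    client = docker.from_env()
    container = client.containers.get(container_name)
    for line in container.logs(stream=True, timestamps=True):
        timestamp, _, message = line.decode('utf-8').partition(' ')
        queue.put({
            'service': service_name,
            'timestamp': timestamp,
            'message': message.rstrip(),
        })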
Example 8: __init__
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def __init__(self, env_fns, spaces=None):
    """
    envs: list of gym environments to run in subprocesses
    """
    self.waiting = False
    self.closed = False
    nenvs = len(env_fns)
    self.remotes, self.work_remotes = zip(*[Pipe() for _ in range(nenvs)])
    self.ps = [Process(target=worker, args=(work_remote, remote, CloudpickleWrapper(env_fn)))
               for (work_remote, remote, env_fn) in zip(self.work_remotes, self.remotes, env_fns)]
    for p in self.ps:
        p.daemon = True  # if the main process crashes, we should not cause things to hang
        p.start()
    for remote in self.work_remotes:
        remote.close()
    self.remotes[0].send(('get_spaces', None))
    observation_space, action_space = self.remotes[0].recv()
    VecEnv.__init__(self, len(env_fns), observation_space, action_space)
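This is the SubprocVecEnv pattern popularized by OpenAI Baselines. The `worker` it targets typically looks roughly like the sketch below (simplified; the exact command set varies between versions, and the `.x` attribute follows the common CloudpickleWrapper convention):

def worker(remote, parent_remote, env_fn_wrapper):
    # Each subprocess closes the parent's end of the pipe, builds its own
    # environment, and then serves commands sent from the main process.
    parent_remote.close()
    env = env_fn_wrapper.x()
    while True:
        cmd, data = remote.recv()
        if cmd == 'step':
            ob, reward, done, info = env.step(data)
            if done:
                ob = env.reset()
            remote.send((ob, reward, done, info))
        elif cmd == 'reset':
            remote.send(env.reset())
        elif cmd == 'get_spaces':
            remote.send((env.observation_space, env.action_space))
        elif cmd == 'close':
            remote.close()
            break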
Example 9: prepare_processes
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def prepare_processes():
    global processes
    proxies = get_proxies()
    n = 0
    if len(proxies) < 1:
        print("An error has occurred while preparing the process: "
              "Not enough proxy servers. Need at least 1 to function.")
        sys.exit(1)
    for proxy in proxies:
        # Prepare each process and give it its own proxy
        processes.append(
            multiprocessing.Process(
                target=open_url, kwargs={
                    "url": get_url(), "proxy": {"http": proxy}}))
        print('.', end='')
    print('')
# 需要導入模塊: import multiprocessing [as 別名]
# 或者: from multiprocessing import Process [as 別名]
def __init__(self, env_fns, render_interval):
""" Minor addition to SubprocVecEnv, automatically renders environments
envs: list of gym environments to run in subprocesses
"""
self.closed = False
nenvs = len(env_fns)
self.remotes, self.work_remotes = zip(*[Pipe() for _ in range(nenvs)])
self.ps = [Process(target=worker, args=(work_remote, remote, CloudpickleWrapper(env_fn)))
for (work_remote, remote, env_fn) in zip(self.work_remotes, self.remotes, env_fns)]
for p in self.ps:
p.daemon = True # if the main process crashes, we should not cause things to hang
p.start()
for remote in self.work_remotes:
remote.close()
self.remotes[0].send(('get_spaces', None))
self.action_space, self.observation_space = self.remotes[0].recv()
self.render_interval = render_interval
self.render_timer = 0
Example 11: _start_invoker_process
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def _start_invoker_process(self):
    """
    Starts the invoker processes responsible for spawning pending calls in the background.
    """
    if self.is_pywren_function or not is_unix_system():
        # Fall back to threads when real processes are unavailable or unsuitable
        for inv_id in range(INVOKER_PROCESSES):
            p = Thread(target=self._run_invoker_process, args=(inv_id,))
            self.invokers.append(p)
            p.daemon = True
            p.start()
    else:
        for inv_id in range(INVOKER_PROCESSES):
            p = Process(target=self._run_invoker_process, args=(inv_id,))
            self.invokers.append(p)
            p.daemon = True
            p.start()
Example 12: trigger_request_process_and_return_response
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def trigger_request_process_and_return_response(rows_to_request):
    process_manager = Manager()
    shared_queue = process_manager.Queue()
    shared_queue_list = []
    list_process = []
    # Trigger one process per row
    for index, row in rows_to_request.iterrows():
        token, account = get_token_and_account_number_or_wait()
        p = Process(target=trigger_facebook_call, args=(index, row, token, account, shared_queue))
        list_process.append(p)
    # Start, join, and check each process with explicit loops
    # (map() is lazy in Python 3 and would never execute the calls)
    for p in list_process:
        p.start()
    for p in list_process:
        p.join()
    for p in list_process:
        check_exception(p)
    # Put things from the shared queue into a normal list
    while shared_queue.qsize() != 0:
        shared_queue_list.append(shared_queue.get())
    return shared_queue_list
Example 13: ensure_proc_terminate
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def ensure_proc_terminate(proc):
    """
    Make sure processes terminate when the main process exits.

    Args:
        proc (multiprocessing.Process or list)
    """
    if isinstance(proc, list):
        for p in proc:
            ensure_proc_terminate(p)
        return

    def stop_proc_by_weak_ref(ref):
        proc = ref()
        if proc is None:
            return
        if not proc.is_alive():
            return
        proc.terminate()
        proc.join()

    assert isinstance(proc, mp.Process)
    atexit.register(stop_proc_by_weak_ref, weakref.ref(proc))
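The weak reference keeps the atexit hook from itself pinning the process object in memory. Typical usage might look like this (`some_worker` is a hypothetical target function):

import multiprocessing as mp

p = mp.Process(target=some_worker)  # some_worker: any target function of your choosing
p.start()
ensure_proc_terminate(p)            # p is terminated at interpreter exit if still alive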
Example 14: start_proc_mask_signal
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def start_proc_mask_signal(proc):
    """
    Start process(es) with SIGINT ignored.

    Args:
        proc: (mp.Process or list)

    Note:
        The signal mask is only applied when called from the main thread.
    """
    if not isinstance(proc, list):
        proc = [proc]

    with mask_sigint():
        for p in proc:
            if isinstance(p, mp.Process):
                if sys.version_info < (3, 4) or mp.get_start_method() == 'fork':
                    log_once(
                        "Starting a process with 'fork' method is efficient but not safe and "
                        "may cause deadlock or crash. "
                        "Use 'forkserver' or 'spawn' method instead if you run into such issues. "
                        "See https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods "
                        "on how to set them.",
                        'warn')  # noqa
            p.start()
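`mask_sigint` is a helper from the same codebase. A minimal context manager with a similar effect could be written as follows (a sketch under that assumption, not the library's exact implementation):

import signal
from contextlib import contextmanager

@contextmanager
def mask_sigint():
    # Temporarily ignore Ctrl-C so freshly started children inherit the
    # ignored handler; restore the previous handler on exit.
    old_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
    try:
        yield
    finally:
        signal.signal(signal.SIGINT, old_handler)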
Example 15: __init__
# Required module: import multiprocessing [as alias]
# Or: from multiprocessing import Process [as alias]
def __init__(self, env_fns):
    if np.__version__ == '1.16.0':
        warnings.warn("""
NumPy 1.16.0 can cause severe memory leak in chainerrl.envs.MultiprocessVectorEnv.
We recommend using other versions of NumPy.
See https://github.com/numpy/numpy/issues/12793 for details.
""")  # NOQA
    nenvs = len(env_fns)
    self.remotes, self.work_remotes = zip(*[Pipe() for _ in range(nenvs)])
    self.ps = [
        Process(target=worker, args=(work_remote, env_fn))
        for (work_remote, env_fn) in zip(self.work_remotes, env_fns)
    ]
    for p in self.ps:
        p.start()
    self.last_obs = [None] * self.num_envs
    self.remotes[0].send(('get_spaces', None))
    self.action_space, self.observation_space = self.remotes[0].recv()
    self.closed = False