

Python multiprocessing.Manager Method Code Examples

This article collects typical usage examples of the multiprocessing.Manager method in Python. If you are wondering how multiprocessing.Manager is used in practice, what it is for, or what real calls to it look like, the curated code examples below should help. You can also explore further usage examples from the multiprocessing module that the method belongs to.


The following presents 15 code examples of the multiprocessing.Manager method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
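
Before the numbered examples, here is a minimal, self-contained sketch of the pattern most of them share: multiprocessing.Manager() starts a server process and hands out proxy objects (Queue, dict, Lock, ...); the proxies can be passed to worker processes, and every process then operates on the same underlying object. The worker function below is purely illustrative and not taken from any of the projects quoted later.

import multiprocessing

def square_worker(shared_dict, shared_queue, key):
    # Each process writes through its proxy; the Manager's server process
    # applies the change to the single shared object.
    shared_dict[key] = key * key
    shared_queue.put(key)

if __name__ == '__main__':
    manager = multiprocessing.Manager()
    shared_dict = manager.dict()    # proxy to a dict held by the manager
    shared_queue = manager.Queue()  # proxy to a queue held by the manager

    procs = [multiprocessing.Process(target=square_worker,
                                     args=(shared_dict, shared_queue, i))
             for i in range(3)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()

    print(dict(shared_dict))  # {0: 0, 1: 1, 2: 4}
    print([shared_queue.get() for _ in range(3)])  # order depends on scheduling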

Example 1: main

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def main():
  m = multiprocessing.Manager()
  sharedQueue = m.Queue()
  sharedQueue.put(2)
  sharedQueue.put(3)
  sharedQueue.put(4)

  process1 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
  process1.start()

  process2 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
  process2.start()
  
  process3 = multiprocessing.Process(target=myTask, args=(sharedQueue,))
  process3.start()
  
  process2.join()
  process1.join()
  process3.join() 
Developer: PacktPublishing, Project: Learning-Concurrency-in-Python, Lines of code: 21, Source: mpQueue.py
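
The myTask worker is defined elsewhere in mpQueue.py and is not part of the excerpt above. A hypothetical version that matches how the shared queue is filled might simply take one value and report it (this sketch is an assumption, not the book's original code):

def myTask(queue):
    # Hypothetical worker: pull one value from the shared Manager queue.
    value = queue.get()
    print("{} got {}".format(multiprocessing.current_process().name, value))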

Example 2: add_cmd_tasks

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def add_cmd_tasks(cmd_task_list, identifier=None, stdin_error_lock=mp.Manager().Lock()):
		"""
			Run several command line commands in parallel.

			@attention: use the Manager to get the lock as in this function definition !!!

			@type cmd_task_list: list of TaskCmd
			@param stdin_error_lock: acquiring the lock enables writing to the stdout and stderr

			@return: list of failed commands, dictionary (cmd, task process)
		"""
		assert isinstance(cmd_task_list, list)

		thread_task_list = []
		for cmdTask in cmd_task_list:
			assert isinstance(cmdTask, TaskCmd)
			thread_task_list.append(TaskThread(_runCmd, (cmdTask, stdin_error_lock)))

		return AsyncParallel.add_tasks(thread_task_list, identifier) 
Developer: CAMI-challenge, Project: CAMISIM, Lines of code: 21, Source: parallel.py
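
TaskCmd, TaskThread, AsyncParallel and _runCmd belong to the surrounding CAMISIM module and are not shown here. The point of the docstring's warning is that every task serializes its writes to stdout/stderr by acquiring the shared Manager lock. A hedged sketch of that acquire/write pattern, assuming TaskCmd exposes a cmd string (names and attributes are assumptions):

import subprocess
import sys

def _run_cmd_sketch(cmd_task, stdin_error_lock):
    # Run the external command and capture its output.
    proc = subprocess.Popen(cmd_task.cmd, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    # Hold the shared lock while writing so output from parallel tasks
    # does not interleave.
    with stdin_error_lock:
        sys.stdout.write(out.decode())
        sys.stderr.write(err.decode())
    return proc.returncode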

Example 3: trigger_request_process_and_return_response

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def trigger_request_process_and_return_response(rows_to_request):
    process_manager = Manager()
    shared_queue = process_manager.Queue()
    shared_queue_list = []
    list_process = []

    # Trigger Process in rows
    for index, row in rows_to_request.iterrows():
        token, account = get_token_and_account_number_or_wait()
        p = Process(target=trigger_facebook_call, args=(index, row, token, account, shared_queue))
        list_process.append(p)

    # Start the processes. Explicit loops are used because in Python 3 map()
    # is lazy and would never actually call start()/join().
    for p in list_process:
        p.start()
    # Wait for every process to finish
    for p in list_process:
        p.join()
    # Check for exceptions
    for p in list_process:
        check_exception(p)

    # Put things from shared list to normal list
    while shared_queue.qsize() != 0:
        shared_queue_list.append(shared_queue.get())
    return shared_queue_list 
Developer: maraujo, Project: pySocialWatcher, Lines of code: 25, Source: utils.py

Example 4: use_virustotal

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def use_virustotal(args):
    """
    Use Virustotal to download the environment malware
    """
    m = multiprocessing.Manager()
    download_queue = m.JoinableQueue(args.nconcurrent)

    archive_procs = [
        multiprocessing.Process(
            target=download_worker_function,
            args=(download_queue, args.vtapikey))
        for i in range(args.nconcurrent)
    ]
    for w in archive_procs:
        w.start()

    for row in get_sample_hashes():
        download_queue.put(row["sha256"])

    for i in range(args.narchiveprocs):
        download_queue.put("STOP")

    download_queue.join()
    for w in archive_procs:
        w.join() 
Developer: endgameinc, Project: gym-malware, Lines of code: 27, Source: download_samples.py
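
download_worker_function and get_sample_hashes are defined elsewhere in download_samples.py. The JoinableQueue protocol used above only terminates cleanly if each worker calls task_done() for every item it takes and exits when it reads the "STOP" sentinel; a hypothetical worker loop under those assumptions (the actual download logic is omitted):

def download_worker_sketch(download_queue, vtapikey):
    # Consume sha256 hashes until the "STOP" sentinel arrives.
    for sha256 in iter(download_queue.get, "STOP"):
        try:
            pass  # download the sample from VirusTotal with vtapikey (omitted)
        finally:
            download_queue.task_done()  # lets download_queue.join() return
    download_queue.task_done()  # account for the sentinel itself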

Example 5: run_in_separate_process

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def run_in_separate_process(func, *args, **kwargs):
    """Runs function in separate process.

    This function is used instead of a decorator, since Python multiprocessing
    module can't serialize decorated function on all platforms.
    """
    manager = multiprocessing.Manager()
    manager_dict = manager.dict()
    process = ProcessWithException(
        manager_dict, target=func, args=args, kwargs=kwargs)
    process.start()
    process.join()
    exc = process.exception
    if exc:
        raise exc
    return process.output 
Developer: nvdv, Project: vprof, Lines of code: 18, Source: base_profiler.py
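
ProcessWithException is defined elsewhere in vprof's base_profiler.py. As the function above implies, it runs the target, records the result or the raised exception in the Manager dict, and exposes them as output/exception attributes; a hypothetical, reduced version under those assumptions (import multiprocessing as in the example above):

class ProcessWithExceptionSketch(multiprocessing.Process):
    """Hypothetical process reporting its result or exception via a Manager dict."""

    def __init__(self, result_dict, target=None, args=(), kwargs=None):
        super().__init__()
        self._result_dict = result_dict
        self._target_func = target
        self._args = args
        self._kwargs = kwargs or {}

    def run(self):
        try:
            self._result_dict['output'] = self._target_func(*self._args, **self._kwargs)
            self._result_dict['exception'] = None
        except Exception as exc:  # the real class may be narrower than this
            self._result_dict['exception'] = exc

    @property
    def exception(self):
        return self._result_dict.get('exception')

    @property
    def output(self):
        return self._result_dict.get('output')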

Example 6: test_init

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def test_init():
    manager = Manager()
    return_dict = manager.dict()

    # make server init before worker
    server_init = Value('i', False)
    serv_p = Process(target=server_func, args=(2, 'test_graph1', server_init))
    serv_p.start()
    while server_init.value == 0:
        time.sleep(1)
    work_p1 = Process(target=check_init_func, args=(0, 'test_graph1', return_dict))
    work_p2 = Process(target=check_init_func, args=(1, 'test_graph1', return_dict))
    work_p1.start()
    work_p2.start()
    serv_p.join()
    work_p1.join()
    work_p2.join()
    for worker_id in return_dict.keys():
        assert return_dict[worker_id] == 0, "worker %d fails" % worker_id 
Developer: dmlc, Project: dgl, Lines of code: 21, Source: test_shared_mem_store.py
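
server_func and check_init_func come from DGL's shared-memory store tests and are not reproduced here. The part that matters for the Manager pattern is that each worker records a status code under its own key in the shared dict, which the parent asserts on after join(); a hypothetical worker showing only that bookkeeping (the graph-store checks themselves are omitted):

def check_init_sketch(worker_id, graph_name, return_dict):
    try:
        # ... connect to the shared-memory graph store graph_name and
        # verify its initial state (omitted) ...
        return_dict[worker_id] = 0   # success code the parent asserts on
    except Exception:
        return_dict[worker_id] = 1   # any non-zero value fails the assertion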

Example 7: test_compute

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def test_compute():
    manager = Manager()
    return_dict = manager.dict()

    # make server init before worker
    server_init = Value('i', 0)
    serv_p = Process(target=server_func, args=(2, 'test_graph3', server_init))
    serv_p.start()
    while server_init.value == 0:
        time.sleep(1)
    work_p1 = Process(target=check_compute_func, args=(0, 'test_graph3', return_dict))
    work_p2 = Process(target=check_compute_func, args=(1, 'test_graph3', return_dict))
    work_p1.start()
    work_p2.start()
    serv_p.join()
    work_p1.join()
    work_p2.join()
    for worker_id in return_dict.keys():
        assert return_dict[worker_id] == 0, "worker %d fails" % worker_id 
Developer: dmlc, Project: dgl, Lines of code: 21, Source: test_shared_mem_store.py

Example 8: test_sync_barrier

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def test_sync_barrier():
    manager = Manager()
    return_dict = manager.dict()

    # make server init before worker
    server_init = Value('i', 0)
    serv_p = Process(target=server_func, args=(2, 'test_graph4', server_init))
    serv_p.start()
    while server_init.value == 0:
        time.sleep(1)
    work_p1 = Process(target=check_sync_barrier, args=(0, 'test_graph4', return_dict))
    work_p2 = Process(target=check_sync_barrier, args=(1, 'test_graph4', return_dict))
    work_p1.start()
    work_p2.start()
    serv_p.join()
    work_p1.join()
    work_p2.join()
    for worker_id in return_dict.keys():
        assert return_dict[worker_id] == 0, "worker %d fails" % worker_id 
Developer: dmlc, Project: dgl, Lines of code: 21, Source: test_shared_mem_store.py

Example 9: test_answer_challenge_auth_failure

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def test_answer_challenge_auth_failure(self):
        class _FakeConnection(object):
            def __init__(self):
                self.count = 0
            def recv_bytes(self, size):
                self.count += 1
                if self.count == 1:
                    return multiprocessing.connection.CHALLENGE
                elif self.count == 2:
                    return b'something bogus'
                return b''
            def send_bytes(self, data):
                pass
        self.assertRaises(multiprocessing.AuthenticationError,
                          multiprocessing.connection.answer_challenge,
                          _FakeConnection(), b'abc')

#
# Test Manager.start()/Pool.__init__() initializer feature - see issue 5585
# 
Developer: IronLanguages, Project: ironpython2, Lines of code: 22, Source: test_multiprocessing.py
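
The trailing comment is a section header for the initializer tests that follow it in the original test file: unlike the multiprocessing.Manager() shortcut, starting a manager explicitly lets you run a function inside the manager's server process before it begins serving proxies. A minimal sketch of that feature (the initializer body is only an illustration):

from multiprocessing.managers import SyncManager

def server_side_init():
    # Runs once inside the manager's server process.
    print('manager server process initialized')

if __name__ == '__main__':
    manager = SyncManager()
    manager.start(initializer=server_side_init)
    shared_list = manager.list()   # proxies work as with multiprocessing.Manager()
    manager.shutdown()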

Example 10: initialize_unittest_logging

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def initialize_unittest_logging():
    # ACE is multi-process multi-threaded
    # so we use this special logging mechanism to keep a central repository of the log events generated
    # that the original process can access

    global test_log_manager
    global test_log_sync
    global test_log_messages
    global memory_log_handler

    test_log_manager = Manager()
    atexit.register(_atexit_callback)
    test_log_sync = RLock()
    test_log_messages = test_log_manager.list()

    log_format = logging.Formatter(datefmt='%(asctime)s')

    memory_log_handler = MemoryLogHandler()
    memory_log_handler.setLevel(logging.DEBUG)
    memory_log_handler.setFormatter(log_format)
    logging.getLogger().addHandler(memory_log_handler) 
Developer: IntegralDefense, Project: ACE, Lines of code: 23, Source: test.py
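
MemoryLogHandler and _atexit_callback are defined elsewhere in ACE's test.py. The comments explain the intent: log records produced anywhere end up in the Manager-backed list so the original process can inspect them. A hypothetical, reduced handler under that assumption (the real class takes no constructor arguments and may do more):

import logging

class MemoryLogHandlerSketch(logging.Handler):
    """Hypothetical handler appending formatted records to a shared Manager list."""

    def __init__(self, shared_messages, shared_lock):
        super().__init__()
        self._messages = shared_messages   # e.g. test_log_messages above
        self._lock = shared_lock           # e.g. test_log_sync above

    def emit(self, record):
        try:
            with self._lock:
                self._messages.append(self.format(record))
        except Exception:
            self.handleError(record)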

Example 11: _import_mp

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def _import_mp():
    global Process, Queue, Pool, Event, Value, Array
    try:
        from multiprocessing import Manager, Process
        # Prevent the manager's server process (which holds the shared Python
        # objects and lets other processes manipulate them through proxies)
        # from being interrupted by SIGINT (KeyboardInterrupt), so that the
        # communication channel between the subprocesses and the main process
        # is still usable after Ctrl+C is received in the main process.
        old = signal.signal(signal.SIGINT, signal.SIG_IGN)
        m = Manager()
        # Restore the previous handler so the main process still receives a
        # KeyboardInterrupt exception on Ctrl+C.
        signal.signal(signal.SIGINT, old)
        Queue, Pool, Event, Value, Array = (
                m.Queue, m.Pool, m.Event, m.Value, m.Array
        )
    except ImportError:
        warn("multiprocessing module is not available, multiprocess plugin "
             "cannot be used", RuntimeWarning) 
Developer: singhj, Project: locality-sensitive-hashing, Lines of code: 22, Source: multiprocess.py

Example 12: __init__

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def __init__(self, _id):
        self.manager = Manager()

        self.event_loop_id = _id
        self.target = Target.new_target(self.event_loop_id, self.__class__.__name__)
        self.__subscribers = {}
        self.__subscribers_lock = self.manager.Lock()
        self.__publishers = {}
        self.__client = None
        self.__main_loop = None
        self.__pid = os.getpid()

        self.__topicPub = Topic()
        self.__topicPub.set_targets(Target.new_target(self.event_loop_id, EventLoop.__name__))
        self.__topicPub.set_categories(EVENT_LOOP.TOPIC.CATEGORIES.RESPONSE)

        self.__topicSub = Topic()
        self.__topicSub.set_targets(None, Target.new_target(self.event_loop_id, EventLoop.__name__))
        self.__topicSub.set_categories(EVENT_LOOP.TOPIC.CATEGORIES.REQUEST)
        self.__topicSub.set_message(EventLoopMessage)
        self.set_subscriber(self.__topicSub, self.on_event_loop_message)

        self.__user_data = None
        self.__user_will = None 
Developer: CPFL, Project: AMS, Lines of code: 26, Source: event_loop.py

Example 13: summary_multi_preprocess

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def summary_multi_preprocess(doc, num=None, fs=[text_pronouns, text_teaser, mmr, text_rank, lead3, lda, lsi, nmf]):
    """
        Run len(fs) processes, one per summarization function in fs.
    :param doc: str
    :return: list
    """
    manager = Manager()
    return_dict = manager.dict()
    jobs = []
    for i in range(len(fs)):
        p = Process(target=worker, args=(i, doc, num, fs, return_dict))
        jobs.append(p)
        p.start()
    for proc in jobs:
        proc.join()
    return list(return_dict.values()) 
Developer: yongzhuo, Project: nlg-yongzhuo, Lines of code: 18, Source: text_summary_merge.py
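
The worker function is defined elsewhere in text_summary_merge.py. For the Manager pattern, what matters is that process i runs its own summarizer fs[i] and stores the result under its index in the shared dict so the parent can collect everything after join(); a hypothetical version under that assumption (the summarizer call signature is a guess):

def worker_sketch(i, doc, num, fs, return_dict):
    # Hypothetical: apply the i-th summarization function to the document and
    # record the result under this process's index.
    return_dict[i] = fs[i](doc)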

Example 14: main

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def main():
    try:
        parser = get_parser()
        args = parser.parse_args()
        if args.media_type == 'both':
            args.media_type = 'audio/video'
        globals()['media_type'] = REGEX_MAP[args.media_type]
        cache_ob = CacheUtil(args.path, args.media_type)
        manager = multiprocessing.Manager()
        queue = manager.Queue()
        consumer = multiprocessing.Process(target=store_in_cache, args=(queue, cache_ob))
        consumer.start()
        result = calculate_length(
            args.path, args.no_subdir, args.media_type, queue, cache_ob
        )
        consumer.join()
    except KeyboardInterrupt:
        sys.stdout.write('\nPlease wait... exiting gracefully!\n')
    else:
        sys.stdout.write('\n{}\n\n'.format(result))
    finally:
        sys.exit() 
Developer: karansthr, Project: Playlist-Length, Lines of code: 24, Source: main.py
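
CacheUtil, calculate_length and store_in_cache live elsewhere in the project. For consumer.join() to return, the producer side has to tell the consumer that no more items are coming; a hypothetical consumer that persists (path, duration) pairs from the Manager queue until a None sentinel arrives (the sentinel, the item shape and the CacheUtil method are all assumptions):

def store_in_cache_sketch(queue, cache_ob):
    # Hypothetical consumer: save results pushed by the worker side until a
    # None sentinel signals end of input.
    for path, duration in iter(queue.get, None):
        cache_ob.save(path, duration)   # assumed CacheUtil method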

Example 15: __init__

# Required import: import multiprocessing [as alias]
# Or: from multiprocessing import Manager [as alias]
def __init__(self,
                 kube_config: Any,
                 task_queue: 'Queue[KubernetesJobType]',
                 result_queue: 'Queue[KubernetesResultsType]',
                 kube_client: client.CoreV1Api,
                 worker_uuid: str):
        super().__init__()
        self.log.debug("Creating Kubernetes executor")
        self.kube_config = kube_config
        self.task_queue = task_queue
        self.result_queue = result_queue
        self.namespace = self.kube_config.kube_namespace
        self.log.debug("Kubernetes using namespace %s", self.namespace)
        self.kube_client = kube_client
        self.launcher = PodLauncher(kube_client=self.kube_client)
        self.worker_configuration_pod = WorkerConfiguration(kube_config=self.kube_config).as_pod()
        self._manager = multiprocessing.Manager()
        self.watcher_queue = self._manager.Queue()
        self.worker_uuid = worker_uuid
        self.kube_watcher = self._make_kube_watcher() 
Developer: apache, Project: airflow, Lines of code: 22, Source: kubernetes_executor.py


Note: The multiprocessing.Manager examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Please consult each project's License before redistributing or using the code; do not reproduce this article without permission.