

Python JoinableQueue.join Method Code Examples

This article collects typical usage examples of the Python multiprocessing.JoinableQueue.join method, gathered from open-source projects. If you are wondering what JoinableQueue.join does, how to call it, or want to see it used in real code, the curated examples below should help. You can also explore further usage of the enclosing class, multiprocessing.JoinableQueue.


The following presents 15 code examples of JoinableQueue.join, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python samples.
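
Before the examples, a minimal sketch of the contract behind JoinableQueue.join (this sketch is ours, not taken from any of the projects below): every item fetched with get() must be acknowledged with task_done(), and join() blocks until every item that was put() has been acknowledged.

from multiprocessing import JoinableQueue, Process

def worker(queue):
    while True:
        item = queue.get()
        try:
            if item is None:  # poison pill: stop consuming
                break
            print('processed', item)
        finally:
            # every get() must be paired with task_done(),
            # otherwise queue.join() blocks forever
            queue.task_done()

if __name__ == '__main__':
    queue = JoinableQueue()
    p = Process(target=worker, args=(queue,))
    p.start()
    for i in range(5):
        queue.put(i)
    queue.put(None)   # poison pill
    queue.join()      # returns once task_done() has matched every put()
    p.join()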

Example 1: Mothership

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
class Mothership(object):

    """ Monitor of producer and consumers """

    def __init__(self, producer, consumers):
        self._queue = JoinableQueue()

        self._producer_proxy = ProducerProxy(self._queue, producer)
        self._consumer_pool = [ConsumerProxy(self._queue, consumer)
                               for consumer in consumers]

    def start(self):
        """ Start working """
        logger.info('Starting Producers'.center(20, '='))
        self._producer_proxy.start()

        logger.info('Starting Consumers'.center(20, '='))
        for consumer in self._consumer_pool:
            consumer.start()

        self._producer_proxy.join()
        self._queue.join()

    def __enter__(self):
        return self

    def __exit__(self, types, value, tb):
        return
Developer: Kotaimen | Project: mason | Lines: 30 | Source: workq.py
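
ProducerProxy and ConsumerProxy are not shown in this snippet. For self._queue.join() in start() to return, each consumer must acknowledge every item it takes; a hypothetical ConsumerProxy might look like the sketch below (the class body and the consume() interface are our assumptions, not the mason project's actual code).

from multiprocessing import Process

class ConsumerProxy(Process):
    """Hypothetical sketch: drains the shared queue and acknowledges items."""

    def __init__(self, queue, consumer):
        super(ConsumerProxy, self).__init__()
        self.daemon = True        # let queue.join() decide when the work is done
        self._queue = queue
        self._consumer = consumer

    def run(self):
        while True:
            task = self._queue.get()
            try:
                self._consumer.consume(task)   # assumed consumer interface
            finally:
                self._queue.task_done()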

Example 2: emailSubsystem

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
class emailSubsystem(object):
    def __init__(self):
        ### will move to Celery eventually; with Celery, the app would be able to periodically
        # wake up and check replyQueue to see which emails were sent, which were not, and
        # what to do ...

        self.emailQueue = JoinableQueue()
        self.replyQueue = JoinableQueue()

        self.worker = Process(target=sendEmailWorker, args=(self.emailQueue, self.replyQueue))

    def start(self):
        # Starting a new process is temporarily disabled: it seems to leave zombies
        # behind, and the app then fails to start once the max process limit is reached.
        #self.worker.start()
        return

    def shutdown(self):
        # post poison pill
        # wait on the queue to be done; ie join on emailQueue
        # wait on the worker process to die; ie join on worker

        self.emailQueue.put(None)
        self.emailQueue.join()
        self.worker.join()
Developer: haribcva | Project: the_library_app | Lines: 27 | Source: library.py
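
sendEmailWorker is defined elsewhere in the project. For the shutdown() sequence above to terminate, it must exit on the None pill and acknowledge every item, including the pill, with task_done(). A plausible sketch, with the actual sending logic stubbed out (the reply format is our assumption):

def sendEmailWorker(emailQueue, replyQueue):
    """Hypothetical sketch of the worker loop assumed by shutdown()."""
    while True:
        message = emailQueue.get()
        try:
            if message is None:        # poison pill: stop the worker
                break
            # real code would send the email here
            replyQueue.put(('sent', message))
        finally:
            emailQueue.task_done()     # required for emailQueue.join() to return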

Example 3: main

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def main(multiplier):
    # Establish communication queues
    tasks = JoinableQueue()
    results = Queue()

    # Start consumers
    num_consumers = cpu_count() * multiplier
    print('Creating %d consumers' % num_consumers)
    consumers = [Consumer(tasks, results) for i in range(num_consumers)]
    for w in consumers:
        w.start()
    
    fout = open(os.path.join(settings.PERSIST_DIR, 'doc_matrix_comparison.csv'), 'w', 1)  # line-buffered; unbuffered text mode is not allowed in Python 3
    rw = ResultWriter(results, csv.writer(fout))
    rw.start()

    #num_docs = 801781
    num_docs = 25
    for i in range(num_docs):
        tasks.put(Task(i))


    # Add a poison pill for each consumer
    for i in range(num_consumers):
        tasks.put(None)

    # Wait for all of the tasks to finish
    tasks.join()
    results.put('STOP')
Developer: digideskio | Project: fcc-net-neutrality-comments | Lines: 31 | Source: sfm_docsim_matrix.py
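
Consumer, Task, and ResultWriter are defined elsewhere in the project. A Consumer compatible with the poison-pill protocol above could look roughly like this (our sketch, not the project's code; it assumes Task instances are callable):

from multiprocessing import Process

class Consumer(Process):
    """Hypothetical sketch: pulls tasks until it sees a poison pill."""

    def __init__(self, tasks, results):
        super(Consumer, self).__init__()
        self.tasks = tasks
        self.results = results

    def run(self):
        while True:
            task = self.tasks.get()
            if task is None:
                self.tasks.task_done()   # acknowledge the pill too
                break
            self.results.put(task())     # assumes Task instances are callable
            self.tasks.task_done()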

Example 4: main

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def main(workers=10):
    """
    Executes main function of mini-framework's Control thread.
    :param workers: Integer detailing number of worker FIFO threads to employ
    """
    start_logging()
    log_info("New multiprocessing session with {} workers".format(workers))
    
    # Input JoinableQueue and Output Queue
    inq = JoinableQueue(maxsize=int(workers*1.5))
    outq = Queue(maxsize=int(workers*1.5))
    
    ot = OutThread(workers, outq)
    ot.start()
    
    for _ in range(workers):
        w = WorkerThread(inq, outq)
        w.start()
    
    # Create a sequence of 1000 random alphabetic characters
    random_chars = (ascii_letters[randint(0, 51)] for _ in range(1000))
    
    # Keep input queue loaded for as long as possible
    # Feed the process pool with work units
    for work in enumerate(random_chars):
        inq.put(work)
    
    # Fill the input queue with Nones to shut the worker threads down
    # which terminates the process pool
    for _ in range(workers):
        inq.put(None)
        
    inq.join()
    print("Control process terminating")
Developer: Astrocesped | Project: OST_Homework | Lines: 36 | Source: control.py

Example 5: init

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def init(opts):

    tasks = JoinableQueue()
    manager = Manager()
    stats = manager.dict()
    stats['processed'] = 0
    stats['missing'] = 0
    
    # start up our worker processes
    log.info("Creating %d workers" % opts.threads)

    workers = [Worker(tasks, opts, stats) for i in range(opts.threads)]
    for w in workers:
        w.start()
        
    # queue up the bibcodes
    for doc in get_docs(opts):
        tasks.put(doc)
    
    # add some poison pills to the end of the queue
    log.info("poisoning our task threads")
    for i in range(opts.threads):
        tasks.put(None)
    
    # join the task queue. this should
    # block until all tasks in the task queue are completed
    log.info("Joining the task queue")
    tasks.join()
    
    log.info("Joining the task threads")
    for w in workers:
        w.join()
        
    log.info("processed: %d" % stats['processed'])
    log.info("records with no existing extract dir: %d" % stats['missing'])        
Developer: adsabs | Project: adsdata | Lines: 37 | Source: init_fulltext_meta.py

Example 6: ProcessPool

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
class ProcessPool(object):

    def __init__(self, size=1):
        self.size = size
        self.jobs = Queue()      # a JoinableQueue under an alias: jobs.join() below requires it
        self.results = Queue()
        self.processes = []

    def start(self):
        '''start all processes'''

        for i in range(self.size):
            self.processes.append(ProcessWorker(self))

        for process in self.processes:
            process.start()

    def append_job(self, job, *args, **kwargs):
        self.jobs.put((job, args, kwargs))

    def join(self):
        '''wait until all queued jobs are done'''
        self.jobs.join()

    def stop(self):
        '''kill all processes'''
        for process in self.processes:
            process.stop()

        for process in self.processes:  # wait for each process to finish
            if process.is_alive():
                process.join()

        del self.processes[:]  # reset processes to empty
Developer: hit9 | Project: Simple-Pools | Lines: 36 | Source: processpool.py
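
ProcessWorker is not shown. A worker compatible with this pool might look like the following sketch; the class body and the terminate()-based stop() are our assumptions, not the Simple-Pools project's actual code:

from multiprocessing import Process

class ProcessWorker(Process):
    """Hypothetical sketch of a worker bound to a ProcessPool."""

    def __init__(self, pool):
        super(ProcessWorker, self).__init__()
        self.pool = pool

    def stop(self):
        self.terminate()   # hard-kill; safe once pool.join() has drained the queue

    def run(self):
        while True:
            job, args, kwargs = self.pool.jobs.get()
            try:
                self.pool.results.put(job(*args, **kwargs))
            finally:
                self.pool.jobs.task_done()  # lets ProcessPool.join() return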

Example 7: run

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
    def run(self):

        # Change the process name shown by ps, for instance
        setProcTitle("agentcluster master [version: %s] [monitoring: %d seconds]" % (__version__, self.monitoring_period))

        try:
            logger.info('Agent cluster server starting')

            logger.info('Configurations will be scanned in directories:')
            for directory in confdir.data:
                logger.info('  o %s', os.path.abspath(directory))

            self.watchdog = Watchdog(self.monitoring_period)
            self.watchdog.start()

            # Generate a deadlock to enter sleep mode:
            # only an external signal can break it
            logger.info('Agent cluster server started')
            queue = JoinableQueue()
            queue.put(object())
            queue.join()

        except KeyboardInterrupt:
            logger.info('Agent cluster server interrupted')
        except Exception:
            logger.error('Exception caught in main process: %s', sys.exc_info()[1])
            logger.debug("", exc_info=True)
        finally:
            # First stop the monitoring to avoid restarting killed agents
            if self.watchdog is not None:
                self.watchdog.shutdown = True
                self.watchdog.join()
            logger.info('Agent cluster server end')
            logging.shutdown()
Developer: GillesBouissac | Project: agentcluster | Lines: 36 | Source: agentclusterd.py
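
The put()/join() pair above is an intentional, permanent block: task_done() is never called, so join() can only be interrupted by a signal (surfacing here as KeyboardInterrupt). Reduced to its essence:

from multiprocessing import JoinableQueue

queue = JoinableQueue()
queue.put(object())   # one item that will never be acknowledged ...
queue.join()          # ... so join() blocks until a signal interrupts the process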

Example 8: evaluate

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def evaluate(points, meshToBasis, kernel, quadRule, coeffs, nprocs=None):
    """Evaluate a kernel using the given coefficients"""

    if nprocs is None:
        nprocs = cpu_count()

    inputQueue = JoinableQueue()

    nelements = meshToBasis.nelements

    for elem in meshToBasis:
        inputQueue.put(elem)

    buf = sharedctypes.RawArray('b', len(points[0]) * numpy.dtype(numpy.complex128).itemsize)
    result = numpy.frombuffer(buf, dtype=numpy.complex128)
    result[:] = numpy.zeros(1, dtype=numpy.complex128)

    time.sleep(.5)
    workers = []

    for id in range(nprocs):
        worker = EvaluationWorker(points, kernel, quadRule, coeffs, inputQueue, result)
        worker.start()
        workers.append(worker)

    inputQueue.join()
    for worker in workers:
        worker.join()

    return result.copy()
Developer: tbetcke | Project: PyBEM2D | Lines: 31 | Source: evaluation.py

Example 9: main

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def main():

    fetch_queue = JoinableQueue()
    reject_queue = JoinableQueue(maxsize=1000)

    log_processor = Process(target=job_creator, args=(fetch_queue, './search_log_valid_2010_06_17'), name='log-processor')
    
    writers = []
    write_queues = []

    for num in DATA_SETS:
        queue, writer = create_writer(reject_queue, num) 
        writers.append(writer)
        write_queues.append(queue)

    fetchers = [create_fetcher(fetch_queue, write_queues, reject_queue, num) for num in range(NUM_FETCHERS)]
    reject_writer = Process(target=reject, args=(reject_queue, './rejected-lines'), name='related-search-reject-writer')

    log_processor.start()
    reject_writer.start()
    start_processes(writers)
    start_processes(fetchers)

    log_processor.join()
    print('DONE? ')
    fetch_queue.join()
    for queue in write_queues:
        queue.join()
    reject_writer.join()
Developer: GregBowyer | Project: random-junk | Lines: 30 | Source: construct-test-dataset.py

Example 10: setup_queue

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def setup_queue(options):
    probe_servers = Queue()      # a JoinableQueue under an alias: join() is called below
    progress_queue = Queue()

    run = Probe.ProbeRun.objects.get(id=options.run_id)

    summary_top = Results.ResultSummaryList.objects.get(part_of_run=run)
    summary_top.setup()

    connection.close()

    threads = []
    for i in range(options.threads):
        new_thread = Process(target=SetupQueueThread, args=(i, run, probe_servers, progress_queue))
        new_thread.daemon = True
        new_thread.start()
        threads.append(new_thread)

    progress_thread = threading.Thread(target=__ProgressCounter, args=(run, progress_queue, threads, options))
    progress_thread.daemon = True
    progress_thread.start()

    i = 0
    if options.input_filename and (not options.count or i < options.count):
        for hostname_line in fileinput.input(options.input_filename, openhook=fileinput.hook_compressed):
            probe_servers.put(hostname_line)
            i += 1
            if options.count and i >= options.count:
                break

    probe_servers.join()
    progress_queue.join()

    return run
Developer: AbhinavBansal | Project: tlsprober | Lines: 36 | Source: cluster_setup_add.py

Example 11: getdata_multiprocess

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
    def getdata_multiprocess(self, task_funcsiter=None, task_funcsiterparas={},
                             task_funcsconst=None, task_funcsconstparas={},
                             processnum=None, threadnum=2):
        def _start_processes(taskqueue, resultqueue, taskqueue_lk, task_funcsconst,
                             task_funcsconstparas, processnum, threadnum):
            for i in range(processnum):
                p = Process(target=self.multiprocess_task,
                            args=(taskqueue, resultqueue, taskqueue_lk, threadnum,
                                  task_funcsconst, task_funcsconstparas),
                            name='P' + str(i))
                p.daemon = True
                p.start()

        processnum = processnum if processnum else multiprocessing.cpu_count()
        # queue for handing tasks to the workers
        taskqueue = JoinableQueue()
        # lock/condition used to write tasks and wake up the workers
        taskqueue_lk = multiprocessing.Condition(multiprocessing.Lock())
        # queue for returning results
        resultqueue = Queue()

        _start_processes(taskqueue, resultqueue, taskqueue_lk, task_funcsconst,
                         task_funcsconstparas, processnum, threadnum)
        # enqueue the tasks and wake up the worker processes
        if task_funcsconst is None:
            self._put_tasks(zip(task_funcsiter, task_funcsiterparas), taskqueue, taskqueue_lk)
        else:
            self._put_tasks(task_funcsiterparas, taskqueue, taskqueue_lk)
        logger.info('main join!')
        taskqueue.join()
        logger.info('main end!')
        return self._get_results(resultqueue)
Developer: rainwu | Project: stockdata | Lines: 34 | Source: StockDataProc1.py

Example 12: build

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def build(opts):
    tasks = JoinableQueue()
    results = JoinableQueue()
    
    if opts.remove:
        log.info("Removing existing docs collection")
        session = utils.get_session(config)
        session.docs.drop()
        
    # start up our builder threads
    log.info("Creating %d Builder processes" % opts.threads)
    builders = [ Builder(tasks, results) for i in xrange(opts.threads)]
    for b in builders:
        b.start()
        
    # queue up the bibcodes
    for bib in get_bibcodes(opts):
        tasks.put(bib)
    
    # add some poison pills to the end of the queue
    log.info("poisoning our task threads")
    for i in range(opts.threads):
        tasks.put(None)
    
    # join the task queue. this should
    # block until all tasks in the task queue are completed
    log.info("Joining the task queue")
    tasks.join()
    log.info("Joining the task threads")
    for b in builders:
        b.join()
    
    log.info("All work complete")
Developer: aaccomazzi | Project: adsdata | Lines: 35 | Source: build_docs.py

Example 13: FlightProducer

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
class FlightProducer(Process):

    def __init__(self, options={}, date_group=[]):
        super(FlightProducer, self).__init__()   # a Process subclass must initialize its base
        self.options    = options
        self.date_group = date_group
        self.date_queue = JoinableQueue()

    def start(self):
        consumers_list = []
        consumers_num  = cpu_count() * 2

        # Consumers
        for i in range(consumers_num):
            consumers_list.append(FlightConsumer(self.options, self.date_queue))

        for consumer in consumers_list:
            consumer.start()

        # Put each date group to queue
        for date_item in self.date_group:
            self.date_queue.put(date_item)

        # Tell the consumers they can exit
        for i in range(consumers_num):
            self.date_queue.put(None)

        # Wait for all of the consumers to finish
        self.date_queue.join()

        print('Done')
Developer: zeuxisoo | Project: web-cupels | Lines: 32 | Source: producer.py

Example 14: main

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def main(max):
	rand_word = ''
	filename_tmp = "file_tmp.txt"
	final_file = "dataset.txt"
	q = JoinableQueue()
	_worker = Process(target=tuan, args=(q, filename_tmp,))
	_worker.start()
	alphanum = printable[0:62]
	alpha = printable[10:62]
	with open(final_file, "w") as f:
		print("Writing in {}".format(final_file))
		for x in range(max):
			rand_word = random_word(64, alpha)
			if randint(0, 100) <= 50:
				q.put(rand_word)
			else:
				q.put(random_word(64, alphanum))
			print(rand_word, random_word(64, alphanum),
					sep='\n', file=f)
		q.put(None)      # poison pill for the worker
		q.join()         # wait until the worker has drained the queue
		_worker.join()
		print(file=f)
		with open(filename_tmp, "r") as f2:
			copyfileobj(f2, f)
		print("Removing {}".format(filename_tmp))
		remove(filename_tmp)
	print("Work is done.")
Developer: darnuria | Project: libhashtable | Lines: 31 | Source: gen_test.py

Example 15: main

# Required import: from multiprocessing import JoinableQueue [as alias]
# Or: from multiprocessing.JoinableQueue import join [as alias]
def main(opts, files):
    
    if opts.threads == 1:
        log.info("running synchronously")
        run_syncronous(opts, files)
    else:
        Q = JoinableQueue()
        workers = [Worker(Q, opts) for i in range(opts.threads)]
        
        log.info("initializing %d threads" % opts.threads)
        for w in workers:
            w.start()
            
        # push log events onto the queue
        events_iter = events(files, opts)
        if opts.limit:
            events_iter = itertools.islice(events_iter, opts.limit)
            
        for event in events_iter:
            Q.put(event)
        
        # add poison pills
        for i in range(opts.threads):
            Q.put(None)
            
        Q.join()
        log.info("work complete. shutting down threads.")
        for w in workers:
            w.join()   
Developer: lbjay | Project: skidder | Lines: 31 | Source: skidder.py


Note: The multiprocessing.JoinableQueue.join examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors, and distribution and use are governed by each project's License. Do not reproduce without permission.