

Python Queue.empty Method Code Examples

This article collects typical usage examples of the Python method multiprocessing.queues.Queue.empty. If you are wondering what Queue.empty does or how to use it in practice, the curated examples below should help. You can also explore further usage examples of the enclosing class, multiprocessing.queues.Queue.


Four code examples of the Queue.empty method are shown below, ordered by popularity.
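
Before turning to the collected examples, here is a minimal, self-contained sketch of the method in use (the producer name and the three queued items are illustrative, not taken from any example below). Note that empty() on a multiprocessing queue only reports a snapshot of the queue's state; it is not a synchronization primitive:

import multiprocessing as mp

def producer(q):
    for i in range(3):
        q.put(i)

if __name__ == "__main__":
    q = mp.Queue()  # mp.Queue() returns a multiprocessing.queues.Queue instance
    p = mp.Process(target=producer, args=(q,))
    p.start()
    p.join()  # by default a child flushes its queue buffer before exiting
    while not q.empty():  # safe to trust here because the producer has finished
        print(q.get())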

Example 1: test_multiprocess_tasks

# Required module: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
# Imports assumed by this excerpt (the original test.py defines them at module
# level, along with `q`, the py-q4pg queue manager under test, and the
# wait_until_convenient() helper):
import os
import sys
from random import uniform
from time import sleep
from timeit import default_timer as timer
from multiprocessing import Process, Queue

def test_multiprocess_tasks():
    wait_until_convenient()
    TAG = "message_q"
    def fetch_task(queue):
        pid = os.getpid()
        count = 0
        for dq in q.listen(TAG, timeout=1):
            s = { 'pid': pid, 'data': dq }
            if dq:
                count += 1
                queue.put(s)
                sleep(uniform(0.1, 0.5)) # sleep 0.1~0.5 seconds randomly
            elif q.count(TAG) == 0:
                return count # the number of tasks done by this process


    test_items = range(0, 10000) # enqueue 10000 tasks
    for i in test_items:
        q.enqueue(TAG, i + 1)

    while q.count(TAG) != len(test_items): # wait until test data is ready
        wait_until_convenient()

    jobs = []
    wait_until_convenient()
    queue = Queue()
    start = timer()
    num_p = 30 # the number of processes to use
    for i in range(0, num_p):
        job = Process(target=fetch_task, args=(queue,))
        jobs.append(job)
        job.start() # start task process

    remaining = q.count(TAG)
    while remaining > 0: # wait until the queue is consumed completely
        remaining = q.count(TAG)
        sys.stdout.write('\rRunning test_multiprocess_tasks - remaining %5d/%5d' % (remaining, len(test_items),))
        sys.stdout.flush()
        wait_until_convenient()

    processed_data = set()
    qsize = 0
    # drain the result queue before joining the workers; a child process can
    # otherwise block on exit while flushing its queue buffer
    while not queue.empty():
        item = queue.get()
        data = item.get('data')
        qsize += 1
        assert data not in processed_data, "failed test_multiprocess_tasks - data %s has been processed already" % (data, )
        processed_data.add(item.get('data'))

    queue.close()
    queue.join_thread()
    for j in jobs:
        j.join()

    assert qsize == len(test_items), "failed test_multiprocess_tasks - tasks are not complete %d/%d" % (qsize, len(test_items), )
    end = timer()
    print("\rOK test_multiprocess_tasks - %d done in %5d seconds" % (qsize, end - start))
Developer: smihica, Project: py-q4pg, Lines: 59, Source: test.py

Example 2: test_spy

# Required module: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
# Imports assumed by this excerpt (the original test_utils.py provides Queue
# along with TaskDatabase and MeasureSpy from the ecpy package):
def test_spy():
    """Test that the measure spy works."""
    q = Queue()
    data = TaskDatabase()
    spy = MeasureSpy(queue=q, observed_database=data,
                     observed_entries=('test',))

    data.notifier(('test', 1))
    assert q.get()

    data.notifier(('test2', 1))
    assert q.empty()

    spy.close()
    assert q.get() == ('', '')
Developer: MatthieuDartiailh, Project: ecpy, Lines: 19, Source: test_utils.py

Example 3: empty

# Required module: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
# This is a method of a Queue subclass (SafeQueue in the original
# commonmultiprocess.py); `logger` and `time` are module-level names there.
def empty(self):
    '''
    Returns True if the Queue is empty, False otherwise.
    Raises an exception if too many errors are encountered.
    '''
    dt = 1e-3
    while dt < 1:
        try:
            return Queue.empty(self)
        except IOError:
            logger.warning('IOError encountered in SafeQueue empty()')
            try:
                time.sleep(dt)
            except Exception:
                pass
            dt *= 2  # exponential backoff, roughly ten retries before giving up

    raise IOError('Unrecoverable error')
Developer: BioinformaticsArchive, Project: DuctApe, Lines: 21, Source: commonmultiprocess.py
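
For context, here is a minimal sketch of how the enclosing subclass could be declared on Python 3, where multiprocessing.queues.Queue requires an explicit context. This reconstruction is an assumption for illustration only, not code from the DuctApe project:

import logging
import time
from multiprocessing import get_context
from multiprocessing.queues import Queue

logger = logging.getLogger(__name__)

class SafeQueue(Queue):
    def __init__(self, maxsize=0):
        # multiprocessing.queues.Queue needs a context argument on Python 3
        super().__init__(maxsize, ctx=get_context())

    # the retrying empty() shown above would be defined here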

Example 4: MultiCoreEngine

# Required module: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
# Imports assumed by this excerpt (the original smp.py also defines the
# project helpers q_run_mapper, q_run_reducer and merge_kv_dict used below):
import math
from multiprocessing import Process, Queue

class MultiCoreEngine():

    _mapred = None

    _out_queue = None
    _in_queue = None
    _log_queue = None

    _processes = None

    def __init__(self, mapred):
        self._mapred = mapred
            
    def _start(self, name, cpu, module_name, class_name, params):
        fn = None
        
        self._processes = []
        self._in_queue = Queue()
        self._out_queue = Queue()
        self._log_queue = Queue()
        
        if name == "mapper":
            fn = q_run_mapper
        elif name == "reducer":
            fn = q_run_reducer
        
        for i in range(cpu):
            process = Process(target=fn, args=(module_name, class_name, params, self._in_queue, self._out_queue, self._log_queue))
            self._processes.append(process)
            process.start()
    
    def _stop(self):
        for process in self._processes:
            self._in_queue.put("STOP")

        while not self._log_queue.empty():
            print(self._log_queue.get())
    
    def _get_data_chunks(self):
        chunks = []
        for process in self._processes:
            chunks.append(self._out_queue.get())

        return chunks

    def _set_data_chunks(self, chunks):
        # an explicit loop instead of map(), which is lazy on Python 3
        for chunk in chunks:
            self._in_queue.put(chunk)

    def _send_lines(self, lines, cpu, lines_len):
        # integer division keeps the slice bounds ints on Python 3
        line_splits = [lines[i * lines_len // cpu: (i + 1) * lines_len // cpu]
                       for i in range(cpu)]

        for i in range(cpu):
            self._in_queue.put(line_splits[i])
    
    def _terminate(self):
        for process in self._processes:
            process.join()
            process.terminate()

        self._in_queue.close()
        self._out_queue.close()
        self._processes = None

    def _force_terminate(self):
        for process in self._processes:
            process.terminate()

    def _merge_data(self, data):
        self._mapred.data = merge_kv_dict(self._mapred.data, data)

    def _merge_reduced_data(self, data):
        self._mapred.data_reduced = merge_kv_dict(self._mapred.data_reduced, data)
                
    def _split_data(self, num_splits):
        splits = []
        index = 0

        len_data = len(self._mapred.data)

        chunk_len = int(math.ceil(len_data / float(num_splits)))

        if chunk_len == 0:
            splits.append(self._mapred.data)
        else:
            for i in range(int(math.ceil(len_data / float(chunk_len)))):
                splits.append({})

            for (key, value) in self._mapred.data.items():
                i = int(math.floor(index / float(chunk_len)))
                splits[i][key] = value
                index = index + 1
#......... some code omitted here .........
Developer: jucabot, Project: polymr, Lines: 103, Source: smp.py
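
As a usage note, one plausible sequence for driving the private methods above might look like the following. This is an assumption for illustration only, not code from the polymr project; mapred, lines, "mymodule" and "MyMapper" are placeholders, and the exact worker protocol of q_run_mapper is not shown in the excerpt:

engine = MultiCoreEngine(mapred)                          # mapred: placeholder job object
engine._start("mapper", 4, "mymodule", "MyMapper", None)  # spawn 4 worker processes
engine._send_lines(lines, 4, len(lines))                  # lines: placeholder input list
for chunk in engine._get_data_chunks():                   # one result chunk per worker
    engine._merge_data(chunk)
engine._stop()                                            # send "STOP", drain the log queue
engine._terminate()                                       # join workers, close queues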

