This article collects typical usage examples of the Queue.empty method from Python's multiprocessing.queues module. If you are wondering what Queue.empty does, how to call it, or what real-world Queue.empty code looks like, the curated examples below should help. You can also explore further usage examples of its enclosing class, multiprocessing.queues.Queue.
Four code examples of Queue.empty are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code samples.
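Before the examples, here is a minimal sketch of my own (not one of the examples below) showing what Queue.empty reports. Two caveats worth knowing: on Python 3 the multiprocessing.queues.Queue class takes a keyword-only ctx argument, so most code obtains instances through the multiprocessing.Queue() factory instead; and empty() is only a point-in-time snapshot, which the standard library documents as unreliable for cross-process synchronization.

import multiprocessing

def main():
    queue = multiprocessing.Queue()  # factory returning a multiprocessing.queues.Queue
    print(queue.empty())             # True: nothing has been enqueued yet
    queue.put("task")
    # empty() may briefly still report True here, because a feeder thread
    # delivers items to the underlying pipe asynchronously.
    item = queue.get()               # blocks until the item actually arrives
    print(item, queue.empty())       # task True

if __name__ == "__main__":
    main()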
Example 1: test_multiprocess_tasks
# Required import: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
def test_multiprocess_tasks():
    wait_until_convenient()
    TAG = "message_q"

    def fetch_task(queue):
        pid = os.getpid()
        count = 0
        for dq in q.listen(TAG, timeout=1):
            s = {'pid': pid, 'data': dq}
            if dq:
                count += 1
                queue.put(s)
                sleep(uniform(0.1, 0.5))  # sleep 0.1~0.5 seconds randomly
            elif q.count(TAG) == 0:
                return count  # the number of tasks done by this process

    test_items = range(0, 10000)  # enqueue 10000 tasks
    for i in test_items:
        q.enqueue(TAG, i + 1)
    while q.count(TAG) != len(test_items):  # wait until test data is ready
        wait_until_convenient()

    jobs = []
    wait_until_convenient()
    queue = Queue()
    start = timer()
    num_p = 30  # the number of processes to use
    for i in range(0, num_p):
        job = Process(target=fetch_task, args=(queue,))
        jobs.append(job)
        job.start()  # start task process

    remaining = q.count(TAG)
    while remaining > 0:  # wait until the queue is consumed completely
        remaining = q.count(TAG)
        sys.stdout.write('\rRunning test_multiprocess_tasks - remaining %5d/%5d' % (remaining, len(test_items),))
        sys.stdout.flush()
        wait_until_convenient()

    processed_data = set()
    qsize = 0
    while not queue.empty():
        item = queue.get()
        data = item.get('data')
        qsize += 1
        assert data not in processed_data, "failed test_multiprocess_tasks - data %s has been processed already" % (data,)
        processed_data.add(data)
    queue.close()
    queue.join_thread()
    for j in jobs:
        j.join()

    assert qsize == len(test_items), "failed test_multiprocess_tasks - tasks are not complete %d/%d" % (qsize, len(test_items),)
    end = timer()
    print("\rOK test_multiprocess_tasks - %d done in %5d seconds" % (qsize, end - start))
Example 2: test_spy
# Required import: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
def test_spy():
    """Test the measure spy working."""
    q = Queue()
    data = TaskDatabase()
    spy = MeasureSpy(queue=q, observed_database=data,
                     observed_entries=('test',))

    data.notifier(('test', 1))
    assert q.get()

    data.notifier(('test2', 1))
    assert q.empty()

    spy.close()
    assert q.get() == ('', '')
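For context, the behavior this test exercises can be sketched as follows. MiniSpy is a hypothetical stand-in, not the real MeasureSpy API: it forwards a change to the queue only when the changed entry is observed, which is why q.empty() holds after the 'test2' notification above.

import multiprocessing

class MiniSpy:
    """Hypothetical stand-in: forward only observed entries to the queue."""

    def __init__(self, queue, observed_entries):
        self.queue = queue
        self.observed = set(observed_entries)

    def notifier(self, change):
        name, _value = change
        if name in self.observed:
            self.queue.put(change)

if __name__ == "__main__":
    q = multiprocessing.Queue()
    MiniSpy(q, observed_entries=('test',)).notifier(('test2', 1))
    print(q.empty())  # True: the unobserved entry was never enqueued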
Example 3: empty
# Required import: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
def empty(self):
    '''
    Returns True if the Queue is empty, False otherwise.
    Raises an exception if too many errors are encountered.
    '''
    dt = 1e-3
    while dt < 1:
        try:
            return Queue.empty(self)
        except IOError:
            logger.warning('IOError encountered in SafeQueue empty()')
            try:
                time.sleep(dt)
            except:
                pass
            dt *= 2
    raise IOError('Unrecoverable error')
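If you reuse this retry pattern on Python 3, note that multiprocessing.queues.Queue (the class subclassed here) takes a keyword-only ctx argument in its constructor. A minimal sketch of the constructor such a SafeQueue subclass would need, assuming the same class name as in the example:

import multiprocessing
import multiprocessing.queues

class SafeQueue(multiprocessing.queues.Queue):
    def __init__(self, maxsize=0):
        # Supply the context explicitly; the multiprocessing.Queue() factory
        # normally does this for you.
        super().__init__(maxsize, ctx=multiprocessing.get_context())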
Example 4: MultiCoreEngine
# Required import: from multiprocessing.queues import Queue [as alias]
# Or: from multiprocessing.queues.Queue import empty [as alias]
class MultiCoreEngine():

    _mapred = None
    _out_queue = None
    _in_queue = None
    _log_queue = None
    _processes = None

    def __init__(self, mapred):
        self._mapred = mapred

    def _start(self, name, cpu, module_name, class_name, params):
        fn = None
        self._processes = []
        self._in_queue = Queue()
        self._out_queue = Queue()
        self._log_queue = Queue()
        if name == "mapper":
            fn = q_run_mapper
        elif name == "reducer":
            fn = q_run_reducer
        for i in range(cpu):
            process = Process(target=fn, args=(module_name, class_name, params,
                                               self._in_queue, self._out_queue,
                                               self._log_queue))
            self._processes.append(process)
            process.start()

    def _stop(self):
        for process in self._processes:
            self._in_queue.put("STOP")
        while not self._log_queue.empty():
            print(self._log_queue.get())

    def _get_data_chunks(self):
        chunks = []
        for process in self._processes:
            chunks.append(self._out_queue.get())
        return chunks

    def _set_data_chunks(self, chunks):
        for chunk in chunks:
            self._in_queue.put(chunk)

    def _send_lines(self, lines, cpu, lines_len):
        line_splits = [lines[i * lines_len // cpu: (i + 1) * lines_len // cpu]
                       for i in range(cpu)]
        for i in range(cpu):
            self._in_queue.put(line_splits[i])

    def _terminate(self):
        for process in self._processes:
            process.join()
            process.terminate()
        self._in_queue.close()
        self._out_queue.close()
        self._processes = None

    def _force_terminate(self):
        for process in self._processes:
            process.terminate()

    def _merge_data(self, data):
        self._mapred.data = merge_kv_dict(self._mapred.data, data)

    def _merge_reduced_data(self, data):
        self._mapred.data_reduced = merge_kv_dict(self._mapred.data_reduced, data)

    def _split_data(self, num_splits):
        splits = []
        index = 0
        len_data = len(self._mapred.data)
        chunk_len = int(math.ceil(len_data / float(num_splits)))
        if chunk_len == 0:
            splits.append(self._mapred.data)
        else:
            for i in range(int(math.ceil(len_data / float(chunk_len)))):
                splits.append({})
            for (key, value) in self._mapred.data.items():
                i = int(math.floor(index / float(chunk_len)))
                splits[i][key] = value
                index = index + 1
#......... some code omitted here .........
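_stop relies on each worker treating the string "STOP" as a poison pill. q_run_mapper and q_run_reducer are not shown in this excerpt, so the loop below is only an assumed sketch of their shape, not the project's actual code:

def worker_loop(in_queue, out_queue, log_queue):
    # Assumed worker shape: consume chunks until the "STOP" sentinel sent by
    # _stop arrives, then fall through so join() in _terminate can return.
    for chunk in iter(in_queue.get, "STOP"):
        out_queue.put(chunk)  # a real worker would run the map/reduce step here
    log_queue.put("worker finished")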