This article collects typical usage examples of Python's eventlet.queue.Queue.empty method. If you are wondering what Queue.empty does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the containing class, eventlet.queue.Queue.
The listing below shows 2 code examples for Queue.empty, sorted by popularity by default.
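Before diving into the examples, here is a minimal, self-contained sketch (not taken from either example) of what Queue.empty reports in an eventlet program: it returns True while no items are queued, which makes it a convenient condition for polling loops in green threads. All names in this sketch are illustrative.

# Illustrative sketch: polling Queue.empty from a consumer green thread.
import eventlet
from eventlet.queue import Queue

q = Queue()

def producer():
    for i in range(3):
        eventlet.sleep(0.1)   # simulate some work before each item
        q.put(i)

def consumer():
    eventlet.spawn(producer)
    while q.empty():          # True until the producer puts its first item
        eventlet.sleep(0.01)  # yield so other green threads can run
    print("first item:", q.get())

eventlet.spawn(consumer).wait()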
Example 1: MethodReader
# Required import: from eventlet.queue import Queue [as alias]
# Or: from eventlet.queue.Queue import empty [as alias]
# This example also relies on: from collections import defaultdict
class MethodReader(object):
    """
    Helper class to receive frames from the broker, combine them if
    necessary with content-headers and content-bodies into complete methods.
    Normally a method is represented as a tuple containing
    (channel, method_sig, args, content).
    In the case of a framing error, an AMQPConnectionException is placed
    in the queue.
    In the case of unexpected frames, a tuple made up of
    (channel, AMQPChannelException) is placed in the queue.
    """
    def __init__(self, source):
        self.source = source
        self.queue = Queue()
        self.running = False
        self.partial_messages = {}
        # For each channel, which type is expected next
        self.expected_types = defaultdict(lambda: 1)
    def _next_method(self):
        """
        Read the next method from the source; once one complete method has
        been assembled it is placed in the internal queue.
        """
        while self.queue.empty():
            try:
                frame_type, channel, payload = self.source.read_frame()
            except Exception as e:
                # Connection was closed? Framing error?
                self.queue.put(e)
                break

            if self.expected_types[channel] != frame_type:
                self.queue.put((
                    channel,
                    Exception('Received frame type %s while expecting type: %s' %
                              (frame_type, self.expected_types[channel]))
                ))
            elif frame_type == 1:
                self._process_method_frame(channel, payload)
            elif frame_type == 2:
                self._process_content_header(channel, payload)
            elif frame_type == 3:
                self._process_content_body(channel, payload)
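Here Queue.empty drives the assembly loop: _next_method keeps reading frames until at least one complete method, or an error, has been placed in the internal queue. A caller would typically trigger _next_method and then drain the queue, roughly along the lines of the sketch below; read_method is a hypothetical name for that caller and is not part of the excerpt above.

    # Hedged sketch of a consumer of MethodReader's internal queue;
    # `read_method` is a hypothetical method name, not shown in the excerpt.
    def read_method(self):
        """Block until one complete method (or a queued error) is available."""
        self._next_method()      # loops while self.queue.empty(), filling the queue
        m = self.queue.get()     # a (channel, method_sig, args, content) tuple...
        if isinstance(m, Exception):
            raise m              # ...or a framing/connection error that was queued
        return m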
Example 2: Crawler
# Required import: from eventlet.queue import Queue [as alias]
# Or: from eventlet.queue.Queue import empty [as alias]
class Crawler(object):
    def __init__(self, max_connections, input_is_plain):
        self.max_connections = max_connections
        self.input_is_plain = input_is_plain
        self.queue = Queue(1)
        self.closed = False
        self._handler_pool = GreenPool(self.max_connections)
        self._robots_cache = PoolMap(self.get_robots_checker, pool_max_size=1, timeout=600)
        # Start the IO worker and die if it does.
        self.io_worker = io.Worker(lambda: self.closed)
        t = spawn(self.io_worker.run_loop)
        t.link(reraise_errors, greenthread.getcurrent())
        log.debug(u"Crawler started. Max connections: %d.", self.max_connections)
    def crawl(self, forever=True):
        # TODO: do something special about signals?
        if forever:
            self.start_queue_updater()
        while not self.closed:
            # `get_nowait` only works together with sleep() here because we
            # need a greenlet switch to re-raise exceptions from `do_process`.
            sleep()
            try:
                item = self.queue.get_nowait()
            except Empty:
                if not forever:
                    self.graceful_stop()
                sleep(0.01)
                continue
            t = self._handler_pool.spawn(self.do_process, item)
            t.link(reraise_errors, greenthread.getcurrent())
    def stop(self):
        self.closed = True

    def graceful_stop(self, timeout=None):
        """Stops the crawler and waits for all crawling requests that have
        already started to finish.

        If `timeout` is supplied, waits at most `timeout` seconds and returns
        True if that was enough time to finish, False otherwise.
        """
        self.closed = True
        if timeout is not None:
            with eventlet.Timeout(timeout, False):
                if hasattr(self, "_queue_updater_thread"):
                    self._queue_updater_thread.kill()
                self._handler_pool.waitall()
                return True
            return False
        else:
            if hasattr(self, "_queue_updater_thread"):
                self._queue_updater_thread.kill()
            self._handler_pool.waitall()
    def start_queue_updater(self):
        self._queue_updater_thread = spawn(self.queue_updater)
        self._queue_updater_thread.link(reraise_errors, greenthread.getcurrent())

    def queue_updater(self):
        log.debug("Waiting for crawl jobs on stdin.")
        for line in sys.stdin:
            if self.closed:
                break
            line = line.strip()
            if self.input_is_plain:
                job = {"url": line}
            else:
                try:
                    job = json.loads(line)
                except ValueError:
                    log.error(u"Failed to decode input line: %s", line)
                    continue
            # Extend the worker queue:
            # 1. skip duplicate URLs
            for queue_item in self.queue.queue:
                if queue_item["url"] == job["url"]:  # compare URLs
                    break
            else:
                # 2. extend queue with new items
                # May block here, when queue is full. This is a feature.
                self.queue.put(job)
        # Stdin exhausted -> stop.
        while not self.queue.empty():
            sleep(0.01)
        sleep(2)  # FIXME: Crutch to prevent stopping too early.
        self.graceful_stop()
    def get_robots_checker(self, scheme, authority):
        # ......... part of the code is omitted here .........
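In queue_updater above, Queue.empty supports the shutdown path: once stdin is exhausted, the updater polls the queue until the crawl loop has taken every pending job, waits a short grace period, and only then calls graceful_stop (which in turn uses eventlet.Timeout(timeout, False), so an expired deadline silently falls through to return False). A stripped-down sketch of that drain-then-stop pattern, with purely illustrative names, might look like this:

# Illustrative sketch of the drain-then-stop pattern used in queue_updater;
# `jobs` and `shutdown` are made-up names, not part of the example above.
import eventlet
from eventlet.queue import Queue

jobs = Queue(1)

def drain_then_stop(shutdown):
    while not jobs.empty():   # wait until the worker loop has taken every job
        eventlet.sleep(0.01)
    eventlet.sleep(2)         # small grace period for in-flight handlers
    shutdown()                # e.g. something like Crawler.graceful_stop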