This article collects typical usage examples of the Python asyncio.Queue.put method. If you have been wondering what exactly Queue.put does, or how to use it, the curated examples below may help. You can also explore the other methods of its containing class, asyncio.Queue.
The following presents 10 code examples of Queue.put, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: AverageMessageHandlerForTest
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class AverageMessageHandlerForTest(AverageMemoryMessageHandler):
    def __init__(self, keys, average_period_minutes=0):
        super().__init__(keys, average_period_minutes)
        self.queue = Queue()

    @asyncio.coroutine
    def save(self, average_message):
        yield from self.queue.put(average_message)
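The put/get round trip this test handler relies on can be exercised on its own. Below is a minimal, runnable sketch in the same legacy @asyncio.coroutine style used throughout these examples (on Python 3.5+ you would write async def / await instead); the message payload is made up for illustration:

import asyncio
from asyncio import Queue

@asyncio.coroutine
def roundtrip():
    queue = Queue()
    # Mirror what save() does: enqueue a message...
    yield from queue.put({'avg': 42})
    # ...then read it back, as a test assertion would.
    item = yield from queue.get()
    assert item == {'avg': 42}

asyncio.get_event_loop().run_until_complete(roundtrip())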
Example 2: test_nonmatching
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
def test_nonmatching():
    i_queue = Queue()
    o_queue = find_events(i_queue)
    for in_string in NONMATCHING_TESTS:
        yield from i_queue.put(in_string)
        yield from o_queue.get()
Example 3: test_matching
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
def test_matching():
    i_queue = Queue()
    o_queue = find_events(i_queue)
    for in_string, event in MATCHING_TESTS:
        yield from i_queue.put(in_string)
        ev = yield from o_queue.get()
        eq_(ev, event)
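Both tests drive the same fixture: find_events() consumes strings from the input queue and delivers results on an output queue that it returns (eq_ is presumably the assertion helper from nose.tools). Since find_events() itself is not shown in the source, the skeleton below only illustrates the queue wiring the tests depend on, with a pass-through body standing in for the real matching logic:

import asyncio
from asyncio import Queue

def find_events(i_queue):
    o_queue = Queue()

    @asyncio.coroutine
    def worker():
        while True:
            in_string = yield from i_queue.get()
            # The real implementation would scan in_string and emit matched
            # events; passing the string through is only a placeholder.
            yield from o_queue.put(in_string)

    asyncio.ensure_future(worker())
    return o_queue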
Example 4: MessageHandler
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class MessageHandler(ws.WS):
    def __init__(self):
        self.queue = Queue()

    def get(self):
        return self.queue.get()

    def on_message(self, websocket, message):
        return self.queue.put(message)
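get() and on_message() are plain methods that return the coroutine objects produced by Queue.get() and Queue.put(); the surrounding websocket framework and test code are expected to schedule them. A hedged consumer sketch (the handler wiring is an assumption):

import asyncio

@asyncio.coroutine
def read_one(handler):
    # Resolves once the framework has delivered a message via on_message().
    message = yield from handler.get()
    return message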
Example 5: new_queue
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
def new_queue():
    global _main_loop
    queue = Queue(loop=_main_loop)

    def putter(item):
        _main_loop.call_soon_threadsafe(queue.put_nowait, item)

    queue.put = putter
    return queue
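Because asyncio queues are not thread-safe, the replacement putter routes every item through call_soon_threadsafe, which executes put_nowait on the loop's own thread. Note that the patched queue.put is now a plain function rather than a coroutine, so callers must not yield from it. A sketch of use from a worker thread (the module-level _main_loop is assumed to be set and running elsewhere):

import threading

def producer():
    # Safe from any thread: schedules put_nowait on the loop thread.
    queue.put("item-from-thread")

queue = new_queue()
threading.Thread(target=producer).start()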
Example 6: BrokerProtocolHandler
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class BrokerProtocolHandler(ProtocolHandler):
    def __init__(self, plugins_manager: PluginManager, session: Session=None, loop=None):
        super().__init__(plugins_manager, session, loop)
        self._disconnect_waiter = None
        self._pending_subscriptions = Queue(loop=self._loop)
        self._pending_unsubscriptions = Queue(loop=self._loop)

    @asyncio.coroutine
    def start(self):
        yield from super().start()
        if self._disconnect_waiter is None:
            self._disconnect_waiter = futures.Future(loop=self._loop)

    @asyncio.coroutine
    def stop(self):
        yield from super().stop()
        if self._disconnect_waiter is not None and not self._disconnect_waiter.done():
            self._disconnect_waiter.set_result(None)

    @asyncio.coroutine
    def wait_disconnect(self):
        return (yield from self._disconnect_waiter)

    def handle_write_timeout(self):
        pass

    def handle_read_timeout(self):
        if self._disconnect_waiter is not None and not self._disconnect_waiter.done():
            self._disconnect_waiter.set_result(None)

    @asyncio.coroutine
    def handle_disconnect(self, disconnect):
        self.logger.debug("Client disconnecting")
        if self._disconnect_waiter and not self._disconnect_waiter.done():
            self.logger.debug("Setting waiter result to %r" % disconnect)
            self._disconnect_waiter.set_result(disconnect)

    @asyncio.coroutine
    def handle_connection_closed(self):
        yield from self.handle_disconnect(None)

    @asyncio.coroutine
    def handle_connect(self, connect: ConnectPacket):
        # The broker handler shouldn't receive a CONNECT message during message
        # handling, as CONNECT messages are managed by the broker on client connection.
        self.logger.error('%s [MQTT-3.1.0-2] %s : CONNECT message received during messages handling' %
                          (self.session.client_id, format_client_message(self.session)))
        if self._disconnect_waiter is not None and not self._disconnect_waiter.done():
            self._disconnect_waiter.set_result(None)

    @asyncio.coroutine
    def handle_pingreq(self, pingreq: PingReqPacket):
        yield from self._send_packet(PingRespPacket.build())

    @asyncio.coroutine
    def handle_subscribe(self, subscribe: SubscribePacket):
        subscription = {'packet_id': subscribe.variable_header.packet_id, 'topics': subscribe.payload.topics}
        yield from self._pending_subscriptions.put(subscription)

    @asyncio.coroutine
    def handle_unsubscribe(self, unsubscribe: UnsubscribePacket):
        unsubscription = {'packet_id': unsubscribe.variable_header.packet_id, 'topics': unsubscribe.payload.topics}
        yield from self._pending_unsubscriptions.put(unsubscription)

    @asyncio.coroutine
    def get_next_pending_subscription(self):
        subscription = yield from self._pending_subscriptions.get()
        return subscription

    @asyncio.coroutine
    def get_next_pending_unsubscription(self):
        unsubscription = yield from self._pending_unsubscriptions.get()
        return unsubscription

    @asyncio.coroutine
    def mqtt_acknowledge_subscription(self, packet_id, return_codes):
        suback = SubackPacket.build(packet_id, return_codes)
        yield from self._send_packet(suback)

    @asyncio.coroutine
    def mqtt_acknowledge_unsubscription(self, packet_id):
        unsuback = UnsubackPacket.build(packet_id)
        yield from self._send_packet(unsuback)

    @asyncio.coroutine
    def mqtt_connack_authorize(self, authorize: bool):
        if authorize:
            connack = ConnackPacket.build(self.session.parent, CONNECTION_ACCEPTED)
        else:
            connack = ConnackPacket.build(self.session.parent, NOT_AUTHORIZED)
        yield from self._send_packet(connack)

    @classmethod
    @asyncio.coroutine
    def init_from_connect(cls, reader: ReaderAdapter, writer: WriterAdapter, plugins_manager, loop=None):
        """
        :param reader:
        :param writer:
        :param plugins_manager:
#......... (remaining code omitted) .........
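The two pending queues decouple packet reception from broker-side processing: handle_subscribe() and handle_unsubscribe() only enqueue requests, and the broker drains them elsewhere before acknowledging. A sketch of such a consumer loop follows (hbmqtt's actual broker code is not shown here; granting QoS 0 for every topic is a placeholder):

import asyncio

@asyncio.coroutine
def subscription_loop(handler):
    while True:
        subscription = yield from handler.get_next_pending_subscription()
        # A real broker would register each topic and compute per-topic results.
        return_codes = [0x00 for _ in subscription['topics']]
        yield from handler.mqtt_acknowledge_subscription(
            subscription['packet_id'], return_codes)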
Example 7: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class Crawler:
    def __init__(self, root_url, max_redirect):
        self.max_tasks = 10
        self.max_redirect = max_redirect
        self.q = Queue()
        self.seen_urls = set()
        # aiohttp's ClientSession does connection pooling and
        # HTTP keep-alives for us.
        self.session = aiohttp.ClientSession(loop=loop)
        # Put (URL, max_redirect) in the queue. Queue.put() is a coroutine and
        # cannot be awaited inside __init__, so use put_nowait() here.
        self.q.put_nowait((root_url, self.max_redirect))

    @asyncio.coroutine
    def crawl(self):
        '''Run the crawler until all work is done.'''
        workers = [asyncio.Task(self.work())
                   for _ in range(self.max_tasks)]
        # When all work is done, exit.
        yield from self.q.join()
        for w in workers:
            w.cancel()

    @asyncio.coroutine
    def work(self):
        while True:
            url, max_redirect = yield from self.q.get()
            # Download page and add new links to self.q.
            yield from self.fetch(url, max_redirect)
            self.q.task_done()

    @asyncio.coroutine
    def fetch(self, url, max_redirect):
        # Handle redirects ourselves.
        response = yield from self.session.get(
            url, allow_redirects=False)
        try:
            if is_redirect(response):
                if max_redirect > 0:
                    next_url = response.headers['location']
                    if next_url in self.seen_urls:
                        # We have done this before.
                        return
                    # Remember we have seen this URL.
                    self.seen_urls.add(next_url)
                    # Follow the redirect. One less redirect remains.
                    self.q.put_nowait((next_url, max_redirect - 1))
            else:
                links = yield from self.parse_links(response)
                # Python set-logic:
                for link in links.difference(self.seen_urls):
                    self.q.put_nowait((link, self.max_redirect))
                self.seen_urls.update(links)
        finally:
            # Return connection to pool.
            yield from response.release()
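A minimal launcher for this crawler might look like the following sketch (is_redirect() and parse_links() are defined elsewhere in the original source, and the module-level loop referenced by the constructor is assumed to exist):

import asyncio

loop = asyncio.get_event_loop()
crawler = Crawler('http://example.com/', max_redirect=10)
loop.run_until_complete(crawler.crawl())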
Example 8: Cloner
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class Cloner(object):
    def __init__(self, root):
        self.visited_urls = []
        self.root = self.add_scheme(root)
        if len(self.root.host) < 4:
            sys.exit('invalid target {}'.format(self.root.host))
        self.target_path = '/opt/snare/pages/{}'.format(self.root.host)
        if not os.path.exists(self.target_path):
            os.mkdir(self.target_path)
        self.new_urls = Queue()

    @staticmethod
    def add_scheme(url):
        if yarl.URL(url).scheme:
            new_url = yarl.URL(url)
        else:
            new_url = yarl.URL('http://' + url)
        return new_url

    @asyncio.coroutine
    def process_link(self, url, check_host=False):
        url = yarl.URL(url)
        if check_host:
            if (url.host != self.root.host or url.fragment
                    or url in self.visited_urls):
                return None
        if not url.is_absolute():
            url = self.root.join(url)
        yield from self.new_urls.put(url)
        return url.relative().human_repr()

    @asyncio.coroutine
    def replace_links(self, data):
        soup = BeautifulSoup(data, 'html.parser')
        # Find all relative links.
        for link in soup.findAll(href=True):
            res = yield from self.process_link(link['href'], check_host=True)
            if res is not None:
                link['href'] = res
        # Find all images and scripts.
        for elem in soup.findAll(src=True):
            res = yield from self.process_link(elem['src'])
            if res is not None:
                elem['src'] = res
        # Find all action elements.
        for act_link in soup.findAll(action=True):
            res = yield from self.process_link(act_link['action'])
            if res is not None:
                act_link['action'] = res
        # Prevent redirects.
        for redir in soup.findAll(True, attrs={'name': re.compile('redirect.*')}):
            redir['value'] = yarl.URL(redir['value']).relative().human_repr()
        return soup

    @asyncio.coroutine
    def get_body(self):
        while not self.new_urls.empty():
            current_url = yield from self.new_urls.get()
            if current_url in self.visited_urls:
                continue
            self.visited_urls.append(current_url)
            if current_url.name:
                file_name = current_url.name
            elif current_url.raw_path != '/':
                file_name = current_url.path.rsplit('/')[1]
            else:
                file_name = 'index.html'
            file_path = os.path.dirname(current_url.path)
            if file_path == '/':
                file_path = self.target_path
            else:
                file_path = os.path.join(self.target_path, file_path[1:])
            print('path: ', file_path, 'name: ', file_name)
            if file_path and not os.path.exists(file_path):
                os.makedirs(file_path)
            data = None
            try:
                with aiohttp.Timeout(10.0):
                    with aiohttp.ClientSession() as session:
                        response = yield from session.get(current_url)
                        data = yield from response.read()
            except aiohttp.ClientError as client_error:
                print(client_error)
            else:
                response.release()
                session.close()
            if data is not None:
                if re.match(re.compile(r'.*\.(html|php)'), file_name):
                    soup = yield from self.replace_links(data)
#......... (remaining code omitted) .........
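Here the queue serves as the crawl frontier: replace_links() feeds URLs in through process_link(), and get_body() drains the queue until it is empty. A hedged launch sequence (the truncated tail of get_body() presumably writes the fetched files to disk; seeding the frontier with the root URL is an assumption about the omitted driver code):

import asyncio

loop = asyncio.get_event_loop()
cloner = Cloner('example.com')
# Seed the frontier, then let get_body() drain it.
loop.run_until_complete(cloner.new_urls.put(cloner.root))
loop.run_until_complete(cloner.get_body())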
Example 9: BasePlugin
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class BasePlugin(metaclass=ABCMeta):
    '''Core plug-in functionality

    A Sphinx plug-in needs to provide a minimum set of services in order to be
    useful. Those are defined here, with default implementations where it
    makes sense.
    '''
    # This is a handle to the data bus. It's set when we are registered.
    _databus = None
    # Type manager handle
    _tm = None

    def __init__(self, runner, plugins, source=None):
        '''Constructor

        This is how our plugin pipeline is constructed. Each plugin instance
        is created when the input script is read, and they are chained together,
        from source to sink, here.

        This method _must_ be called with the event loop from which it will be
        called in the future, e.g., asyncio.get_event_loop().
        '''
        # A dict that maps each destination for our data to the type that the
        # destination can consume.
        self._sinks = {}
        # Retain a pointer to our source, and add ourselves to its list of sinks.
        self._source = source
        if source:
            # Validate that we can process data from this source.
            sink_types = set(source.sources()).intersection(self.sinks())
            if len(sink_types):
                source._set_sink(self, sink_types.pop())
            else:
                err = "{} cannot sink '{}'".format(self, source.sources())
                _log.error(err)
                raise ImpedenceMismatchError(err)
        # Our input queue
        self._queue = Queue()
        self.runner = runner
        self._plugins = plugins
        # create_task schedules the execution of the coroutine "run", wrapped
        # in a future.
        self._task = self.runner.create_task(self.run())

    def __getattr__(self, name):
        '''Plugin pipeline building

        This method is called when Python can't find a requested attribute. We
        use it to create a new plugin instance to add to the pipeline.
        '''
        if name in self._plugins:
            return partial(self._plugins[name], source=self)
        else:
            raise AttributeError

    def _set_sink(self, sink, data_type):
        '''Register a sink

        Called during initialization to register a sink (destination for our
        output).
        '''
        self._sinks[sink] = data_type

    @coroutine
    def publish(self, data):
        '''Publish data

        Called by a plugin to publish data to its sinks.
        '''
        for sink, data_type in self._sinks.items():
            # Special-case 'None', since that's our 'eof'. See the 'done'
            # method below.
            if data:
                data = self.xform_data(data, data_type)
            yield from self._databus.publish(data, sink)

    @coroutine
    def write_data(self, data):
        '''Write data to the queue

        Called by the databus controller to enqueue data from our source.
        '''
        yield from self._queue.put(data)

    @coroutine
    def read_data(self):
#......... (remaining code omitted) .........
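The truncated read_data() is presumably the consumer counterpart of write_data(), pulling the next item off the plugin's input queue. Since its body is omitted in the source, the following is only a plausible sketch, to be read as a continuation of the class above:

    @coroutine
    def read_data(self):
        '''Read the next item from our input queue (sketch; original body omitted).'''
        data = yield from self._queue.get()
        return data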
Example 10: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put [as alias]
class Messagedispatcher:
    def __init__(self, communicator):
        self.communicator = communicator
        self.messages = {
            "direct": {
                "status": {
                    "class": messages.StatusDirect,
                    "queue": Queue()
                },
                "pinor": {
                    "class": messages.PinorDirect,
                    "queue": Queue()
                }
            },
            "mesh": {
                "status": {
                    "class": messages.StatusMesh,
                    "queue": Queue()
                },
                "pinor": {
                    "class": messages.PinorMesh,
                    "queue": Queue()
                },
                "return": {
                    "class": messages.ReturnMesh,
                    "queue": Queue()
                },
                "deploy": {
                    "class": messages.DeployMesh,
                    "queue": Queue()
                },
                "grid": {
                    "class": messages.GridMesh,
                    "queue": Queue()
                }
            }
        }
        self.mesh_queue = Queue()

    @coroutine
    def wait_for_message(self, *types):
        x = self.messages
        for i in types:
            x = x[i]
        q = x["queue"]
        return (yield from q.get())

    @coroutine
    def get_mesh_message(self):
        return (yield from self.mesh_queue.get())

    @coroutine
    def startup(self):
        while True:
            meshput = False
            msg = yield from self.communicator.receive()
            if msg["type"] == "mesh":
                meshput = True
            x = self.messages
            x = x[msg["type"]]
            x = x[msg["data"]["datatype"]]
            q = x["queue"]
            c = x["class"]
            emsg = c.from_json(msg)
            yield from q.put(emsg)
            if meshput:
                # print("RECEIVE: " + str(msg) + "\n")
                yield from self.mesh_queue.put(emsg)
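startup() is meant to run as a long-lived dispatcher task: it routes every received message into the queue for its type and mirrors mesh messages onto mesh_queue, while consumers block on wait_for_message(). A hedged usage sketch (the communicator and the messages module are external to this excerpt):

from asyncio import coroutine

@coroutine
def consume_status(dispatcher):
    # Blocks until startup() routes a ("direct", "status") message into its queue.
    msg = yield from dispatcher.wait_for_message("direct", "status")
    return msg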