This article collects typical usage examples of the Python method asyncio.Queue.put_nowait. If you have been wondering what Queue.put_nowait does, how to call it, or what real code using it looks like, the curated examples below should help. You can also explore further usage examples of the class the method belongs to, asyncio.Queue.
The sections below present 15 code examples of Queue.put_nowait, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
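Before the examples, here is a minimal orientation sketch (not taken from the examples below) contrasting the coroutine Queue.put with the non-blocking Queue.put_nowait, which raises QueueFull when a bounded queue has no room:
import asyncio
from asyncio import Queue, QueueFull

async def main():
    q = Queue(maxsize=1)
    q.put_nowait('first')       # succeeds: the queue has room
    try:
        q.put_nowait('second')  # the queue is full, so this raises immediately
    except QueueFull:
        print('queue full, item not enqueued')
    print(await q.get())        # -> 'first'

asyncio.run(main())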
Example 1: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
import asyncio

import numpy as np

# sensor_bits and MagicPacket are assumed to be defined elsewhere in the
# original project; they are not part of this excerpt.
class MagicEmotiv:
    def __init__(self, ptr, upd_interval):
        self.running = True  # the read_data/update_console loops check this flag
        self.data_to_send = Queue()
self.battery = 40
self.packets = Queue()
self.ptr = ptr
self.poll_interval = upd_interval
def set_filter(self, value):
self.poll_interval = 1 / value
async def setup(self):
print("creating magic emotiv...")
async def read_data(self):
while self.running:
s = {}
for shift, sensor in enumerate(sorted(sensor_bits, reverse=True)):
s[sensor] = {'quality': 0.0}
s[sensor]['value'] = np.random.normal() + shift * 5
packet = MagicPacket(
b'Py2\x18\xe7\xb7\xdf\x8e\x86n;g\xbd\xc0\x00\x00\x02\x11(!`' +
b'=\x80\x15\xecX\xc6 \xd9ii\x9e',
s, False)
self.packets.put_nowait(packet)
self.data_to_send.put_nowait(packet)
self.ptr += 1
await asyncio.sleep(self.poll_interval)
async def update_console(self):
while self.running:
packet = await self.packets.get()
print(packet)
await asyncio.sleep(self.poll_interval)
Example 2: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class Listener:
def __init__(self):
self._messages = Queue()
def __call__(self, channel, message):
self._messages.put_nowait((channel, message))
def get(self):
return self._messages.get()
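A hypothetical way to exercise this listener (the driver code below is an assumption; only __call__ and get come from the example):
import asyncio

async def main():
    listener = Listener()
    listener('news', 'hello')  # simulate a pub/sub callback firing
    channel, message = await listener.get()
    print(channel, message)    # -> news hello

asyncio.run(main())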
Example 3: Message
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class Message(ws.WS):
def __init__(self, loop):
        self.queue = Queue(loop=loop)  # note: Queue's loop argument was removed in Python 3.10
def get(self):
return self.queue.get()
def on_message(self, websocket, message):
self.queue.put_nowait(message)
Example 4: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class ConnectionPool:
def __init__(self):
self._config_dict = None
self._queue = Queue()
self._outstanding_connections = WeakSet()
async def get_conn(self):
self._check_config()
try:
while True:
conn = self._queue.get_nowait()
if conn.is_open():
break
try:
await conn.close()
except Exception:
                    l.debug('Exception while closing RethinkDB connection', exc_info=True)
except QueueEmpty:
conn = await r.connect(**self._config_dict)
self._outstanding_connections.add(conn)
return conn
async def put_conn(self, conn):
self._queue.put_nowait(conn)
self._outstanding_connections.remove(conn)
def set_config(self, config):
self._config_dict = config
def get_config(self):
self._check_config()
return self._config_dict
async def teardown(self):
while True:
try:
conn = self._queue.get_nowait()
except QueueEmpty:
break
self._outstanding_connections.add(conn)
for conn in self._outstanding_connections:
try:
await conn.close()
except Exception:
                l.debug('Exception while closing RethinkDB connection', exc_info=True)
def _check_config(self):
assert self._config_dict is not None, "Did you remember to run resync.setup()?"
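A hypothetical usage sketch for this pool (the config keys are assumptions; only set_config, get_conn, and put_conn come from the example):
pool = ConnectionPool()
pool.set_config({'host': 'localhost', 'port': 28015})

async def run_query():
    conn = await pool.get_conn()
    try:
        ...  # run RethinkDB queries against conn here
    finally:
        await pool.put_conn(conn)  # return the connection to the pool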
Example 5: Echo
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class Echo(WS):
def __init__(self, loop=None):
self.queue = Queue(loop=loop)
def get(self):
return self.queue.get()
def on_message(self, ws, message):
self.queue.put_nowait(message)
def on_ping(self, ws, body):
ws.pong(body)
self.queue.put_nowait('PING: %s' % body.decode('utf-8'))
def on_pong(self, ws, body):
self.queue.put_nowait('PONG: %s' % body.decode('utf-8'))
def on_close(self, ws):
self.queue.put_nowait('CLOSE')
Example 6: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class Crawler:
def __init__(self, root_url, max_redirect):
self.max_tasks = 10
self.max_redirect = max_redirect
self.q = Queue()
self.seen_urls = set()
# aiohttp's ClientSession does connection pooling and
# HTTP keep-alives for us.
self.session = aiohttp.ClientSession(loop=loop)
        # Put (URL, max_redirect) in the queue. Queue.put is a coroutine,
        # so the non-blocking put_nowait is the correct call here.
        self.q.put_nowait((root_url, self.max_redirect))
@asyncio.coroutine
def crawl(self):
        '''Run the crawler until all work is done.'''
workers = [asyncio.Task(self.work())
for _ in range(self.max_tasks)]
# When all work is done, exit.
yield from self.q.join()
for w in workers:
w.cancel()
@asyncio.coroutine
def work(self):
while True:
url, max_redirect = yield from self.q.get()
# Download page and add new links to self.q
yield from self.fetch(url, max_redirect)
self.q.task_done()
@asyncio.coroutine
def fetch(self, url, max_redirect):
# Handle redirects ourselves.
response = yield from self.session.get(
url, allow_redirects=False)
try:
if is_redirect(response):
if max_redirect > 0:
next_url = response.headers['location']
if next_url in self.seen_urls:
# We have done this before.
return
# Remember we have seen this url.
self.seen_urls.add(next_url)
# Follow the redirect. One less redirect remains.
                    self.q.put_nowait((next_url, max_redirect - 1))
else:
links = yield from self.parse_links(response)
# Python set-logic:
for link in links.difference(self.seen_urls):
self.q.put_nowait((link, self.max_redirect))
self.seen_urls.update(links)
finally:
# Return connection to pool.
yield from response.release()
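Example 6 is written in the legacy @asyncio.coroutine / yield from style. As a rough sketch (hypothetical names, not part of the original example), the same queue-driven worker pattern in modern async/await syntax looks like this:
import asyncio

async def worker(q):
    while True:
        url, max_redirect = await q.get()
        try:
            ...  # fetch url; q.put_nowait() any newly discovered links
        finally:
            q.task_done()

async def crawl(root_url, max_tasks=10, max_redirect=10):
    q = asyncio.Queue()
    q.put_nowait((root_url, max_redirect))
    workers = [asyncio.create_task(worker(q)) for _ in range(max_tasks)]
    await q.join()  # wait until every queued URL has been processed
    for w in workers:
        w.cancel()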
Example 7: ProxyResponse
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class ProxyResponse(object):
'''Asynchronous wsgi response.
'''
_started = False
_headers = None
_done = False
def __init__(self, environ, start_response):
self._loop = environ['pulsar.connection']._loop
self.environ = environ
self.start_response = start_response
self.queue = Queue()
def __iter__(self):
while True:
if self._done:
try:
yield self.queue.get_nowait()
except QueueEmpty:
break
else:
                yield async(self.queue.get(), loop=self._loop)  # legacy asyncio.async() (now ensure_future); 'async' became a keyword in Python 3.7
def pre_request(self, response, exc=None):
self._started = True
response.bind_event('data_processed', self.data_processed)
return response
def error(self, exc):
if not self._started:
request = wsgi.WsgiRequest(self.environ)
content_type = request.content_types.best_match(
('text/html', 'text/plain'))
uri = self.environ['RAW_URI']
msg = 'Could not find %s' % uri
            logger.info(msg)
if content_type == 'text/html':
html = wsgi.HtmlDocument(title=msg)
html.body.append('<h1>%s</h1>' % msg)
data = html.render()
resp = wsgi.WsgiResponse(504, data, content_type='text/html')
elif content_type == 'text/plain':
                resp = wsgi.WsgiResponse(504, msg, content_type='text/plain')
else:
resp = wsgi.WsgiResponse(504, '')
self.start_response(resp.status, resp.get_headers())
self._done = True
self.queue.put_nowait(resp.content[0])
def data_processed(self, response, exc=None, **kw):
'''Receive data from the requesting HTTP client.'''
status = response.get_status()
if status == '100 Continue':
stream = self.environ.get('wsgi.input') or io.BytesIO()
body = yield stream.read()
response.transport.write(body)
if response.parser.is_headers_complete():
if self._headers is None:
headers = self.remove_hop_headers(response.headers)
self._headers = Headers(headers, kind='server')
# start the response
self.start_response(status, list(self._headers))
body = response.recv_body()
if response.parser.is_message_complete():
self._done = True
self.queue.put_nowait(body)
def remove_hop_headers(self, headers):
for header, value in headers:
if header.lower() not in wsgi.HOP_HEADERS:
yield header, value
Example 8: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
#......... part of the code omitted here .........
break
except aiohttp.ClientError as client_error:
LOGGER.info('try %r for %r raised %r',
tries, url, client_error)
exception = client_error
tries += 1
else:
# We never broke out of the loop: all tries failed.
LOGGER.error('%r failed after %r tries',
url, self.max_tries)
self.record_statistic(FetchStatistic(url=url,
next_url=None,
status=None,
exception=exception,
size=0,
content_type=None,
encoding=None,
num_urls=0,
num_new_urls=0))
return
try:
if is_redirect(response):
location = response.headers['location']
next_url = urllib.parse.urljoin(url, location)
self.record_statistic(FetchStatistic(url=url,
next_url=next_url,
status=response.status,
exception=None,
size=0,
content_type=None,
encoding=None,
num_urls=0,
num_new_urls=0))
if next_url in self.seen_urls:
return
if max_redirect > 0:
LOGGER.info('redirect to %r from %r', next_url, url)
self.add_url(next_url, max_redirect - 1)
else:
LOGGER.error('redirect limit reached for %r from %r',
next_url, url)
else:
stat, links = await self.parse_links(response)
self.record_statistic(stat)
                for link in utils.difference(links, self.seen_urls):
                    self.q.put_nowait((link, self.max_redirect))
                self.seen_urls.update(links)
finally:
await response.release()
async def work(self):
"""Process queue items forever."""
try:
while True:
url, max_redirect = await self.q.get()
assert url in self.seen_urls
LOGGER.info("url:%s", url)
LOGGER.info("max_redirect:%s", max_redirect)
await self.fetch(url, max_redirect)
self.q.task_done()
except asyncio.CancelledError:
pass
def url_allowed(self, url):
if self.exclude and re.search(self.exclude, url):
return False
parts = urllib.parse.urlparse(url)
if parts.scheme not in ('http', 'https'):
LOGGER.debug('skipping non-http scheme in %r', url)
return False
host, port = urllib.parse.splitport(parts.netloc)
if not self.host_okay(host):
LOGGER.debug('skipping non-root host in %r', url)
return False
return True
def add_url(self, url, max_redirect=None):
"""Add a URL to the queue if not seen before."""
if max_redirect is None:
max_redirect = self.max_redirect
LOGGER.debug('adding %r %r', url, max_redirect)
self.seen_urls.add(url)
self.q.put_nowait((url, max_redirect))
async def crawl(self):
"""Run the crawler until all finished."""
workers = [asyncio.Task(self.work(), loop=self.loop)
for _ in range(self.max_tasks)]
self.t0 = time.time()
        await self.q.join()
self.t1 = time.time()
for w in workers:
w.cancel()
Example 9: Crawler
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
#......... part of the code omitted here .........
self.record_statistic(url=url, next_url=next_url, status=response.status)
if next_url in self.seen_urls:
return
if max_redirect > 0:
LOGGER.info("redirect to %r from %r max_redir: %i", next_url, url, max_redirect - 1)
self.add_urls(next_url, max_redirect - 1)
else:
LOGGER.error("redirect limit reached for %r from %r", next_url, url)
return
@asyncio.coroutine
def fetch(self, url, max_redirect, sem):
"""Fetch one URL."""
tries = 0
web_page = None
exception = None
_url = None
_encoding = None
_content_type = None
sleep_time = 0
while tries < self.max_tries:
try:
with (yield from sem):
response = yield from asyncio.wait_for(
self.session.get(url, allow_redirects=False), 10, loop=self.loop
)
if tries > 1:
LOGGER.debug("try %r for %r success", tries, url)
break
except Exception as client_error:
sleep_time += 5
yield from asyncio.sleep(sleep_time)
LOGGER.error("try %r for %r raised %r", tries, url, client_error)
exception = client_error
tries += 1
else:
# We never broke out of the loop: all tries failed.
LOGGER.error("%r failed after %r tries", url, self.max_tries)
self.record_statistic(url=url, exception=exception)
return (web_page, _url, _content_type, _encoding)
try:
_url, _content_type, _encoding = get_content_type_and_encoding(response)
if is_redirect(response):
self.handle_redirect(response, url, max_redirect)
web_page = "redirect"
elif response.status == 200 and _content_type in ("text/html", "application/xml"):
web_page = yield from response.text()
else:
self.record_statistic(
url=response.url, status=response.status, content_type=_content_type, encoding=_encoding
)
        except Exception as e:
            LOGGER.error("error while fetching %r: %r", url, e)
finally:
yield from response.release()
return (web_page, _url, _content_type, _encoding)
def add_urls(self, urls, max_redirect=None):
"""Add a URL to the queue if not seen before."""
if max_redirect is None:
max_redirect = self.max_redirect
if not isinstance(urls, str):
urls = set(urls)
for link in urls.difference(self.seen_urls):
self.q.put_nowait((link, max_redirect))
self.seen_urls.update(urls)
elif urls not in self.seen_urls:
self.q.put_nowait((urls, max_redirect))
self.seen_urls.add(urls)
@asyncio.coroutine
def work(self, sem):
"""Process queue items forever."""
try:
while True:
url, max_redirect = yield from self.q.get()
# assert url in self.seen_urls
web_page, url, content_type, encoding = yield from self.fetch(url, max_redirect, sem)
if web_page and web_page != "redirect":
new_links = yield from self.parse_links(web_page, url, content_type, encoding)
if self.scraper:
data = self.scraper.scrape(url, web_page)
if self.data_handler:
self.data_handler.handle(data)
self.add_urls(new_links)
self.q.task_done()
        except asyncio.CancelledError:
            pass  # worker cancelled during shutdown
@asyncio.coroutine
    def crawl(self):
        """Run the crawler until all finished."""
        sem = asyncio.Semaphore(value=self.max_connections_per_host, loop=self.loop)
        LOGGER.info("Starting crawl...")
        workers = [asyncio.Task(self.work(sem), loop=self.loop) for _ in range(self.max_tasks)]
self.t0 = time.time()
yield from self.q.join()
self.t1 = time.time()
for w in workers:
w.cancel()
Example 10: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
class Emotiv:
"""
Receives, decrypts and stores packets received from Emotiv Headsets.
"""
def __init__(
self, display_output=False, serial_number="",
is_research=False, filter_hz=25, pointer=0):
"""
Sets up initial values.
"""
self.running = True
self.packets = Queue()
self.data_to_send = Queue()
self.battery = 0
self.display_output = display_output
self.poll_interval = 1 / filter_hz
self.is_research = is_research
self.ptr = pointer
self.sensors = {
'F3': {'value': 0, 'quality': 0},
'FC6': {'value': 0, 'quality': 0},
'P7': {'value': 0, 'quality': 0},
'T8': {'value': 0, 'quality': 0},
'F7': {'value': 0, 'quality': 0},
'F8': {'value': 0, 'quality': 0},
'T7': {'value': 0, 'quality': 0},
'P8': {'value': 0, 'quality': 0},
'AF4': {'value': 0, 'quality': 0},
'F4': {'value': 0, 'quality': 0},
'AF3': {'value': 0, 'quality': 0},
'O2': {'value': 0, 'quality': 0},
'O1': {'value': 0, 'quality': 0},
'FC5': {'value': 0, 'quality': 0},
'X': {'value': 0, 'quality': 0},
'Y': {'value': 0, 'quality': 0},
'Unknown': {'value': 0, 'quality': 0}
}
self.serial_number = serial_number
self.old_model = False
def set_filter(self, value):
self.poll_interval = 1 / value
async def setup(self):
self._os_decryption = False
if os.path.exists('/dev/eeg/raw'):
self._os_decryption = True
path = "/dev/eeg/raw"
else:
serial, hidraw_filename = get_linux_setup()
self.serial_number = serial
if os.path.exists("/dev/" + hidraw_filename):
path = "/dev/" + hidraw_filename
else:
path = "/dev/hidraw4"
self.device_path = path
self.setup_crypto(self.serial_number)
async def read_data(self):
self.running = True
with open(self.device_path, 'rb') as hidraw:
while self.running:
try:
data = hidraw.read(32)
                    if data:  # hidraw.read() returns bytes; the original compared against the str ""
if self._os_decryption:
self.packets.put_nowait(EmotivPacket(data))
else:
                            # 'tasks' is assumed to be a module-level Queue defined
                            # elsewhere in the original project (not in this excerpt).
                            tasks.put_nowait(data)
self.ptr += 1
await self.process_tasks()
await sleep(self.poll_interval)
except KeyboardInterrupt:
self.running = False
hidraw.close()
def setup_crypto(self, sn):
"""
Performs decryption of packets received.
Stores decrypted packets in a Queue for use.
"""
if is_old_model(sn):
self.old_model = True
print("Old model:", self.old_model)
k = ['\0'] * 16
k[0] = sn[-1]
k[1] = '\0'
k[2] = sn[-2]
if self.is_research:
k[3] = 'H'
k[4] = sn[-1]
k[5] = '\0'
k[6] = sn[-2]
k[7] = 'T'
k[8] = sn[-3]
k[9] = '\x10'
k[10] = sn[-4]
k[11] = 'B'
else:
#......... part of the code omitted here .........
Example 11: __init__
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
#......... part of the code omitted here .........
content_type, pdict = cgi.parse_header(content_type)
encoding = pdict.get('charset', 'utf-8')
if content_type in ('text/html', 'application/xml'):
text = yield from response.text()
# Replace href with (?:href|src) to follow image links.
                urls = set(re.findall(r'''(?i)href=["']([^\s"'<>]+)''', text))
                if urls:
                    LOGGER.info('got %r distinct urls from %r', len(urls), response.url)
for url in urls:
normalized = urllib.parse.urljoin(response.url, url)
defragmented, frag = urllib.parse.urldefrag(normalized)
if self.url_allowed(defragmented):
links.add(defragmented)
stat = FetchStatistic(
url=response.url,
next_url=None,
status=response.status,
exception=None,
size=len(body),
content_type=content_type,
encoding=encoding,
num_urls=len(links),
num_new_urls=len(links - self.seen_urls))
return stat, links
@asyncio.coroutine
def fetch(self, url, max_redirect):
"""Fetch one URL."""
tries = 0
exception = None
        while tries < self.max_tries:
            try:
                response = yield from self.session.get(url, allow_redirects=False)  #1
                break  #2
            except aiohttp.ClientError as client_error:
                LOGGER.info('try %r for %r raised %r', tries, url, client_error)
                exception = client_error
            tries += 1  # restored: without this the retry loop never advances
else:
return
try:
            if is_redirect(response):
                location = response.headers['location']
                # (the rest of the redirect handling is elided in this excerpt)
            else:  #4
stat, links = yield from self.parse_links(response)
self.record_statistic(stat)
for link in links.difference(self.seen_urls):
self.q.put_nowait((link, self.max_redirect))
self.seen_urls.update(links)
finally:
yield from response.release()
@asyncio.coroutine
def work(self):
"""Process queue items forever."""
try:
while True:
                url, max_redirect = yield from self.q.get()  # q.get() removes and returns an item, waiting if the queue is empty
                # print('url', url, 'max_redirect', max_redirect)
                assert url in self.seen_urls  # assert raises immediately on failure
                yield from self.fetch(url, max_redirect)
                self.q.task_done()  # indicate that a formerly enqueued task is complete
except asyncio.CancelledError:
pass
def url_allowed(self, url):
if self.exclude and re.search(self.exclude, url):
return False
parts = urllib.parse.urlparse(url)
if parts.scheme not in ('http', 'https'):
LOGGER.debug('skipping non-http scheme in %r', url)
return False
host, port = urllib.parse.splitport(parts.netloc)
if not self.host_okay(host):
LOGGER.debug('skipping non-root host in %r', url)
return False
return True
def add_url(self, url, max_redirect=None):
"""Add a URL to the queue if not seen before."""
if max_redirect is None:
max_redirect = self.max_redirect
LOGGER.debug('adding %r %r', url, max_redirect)
self.seen_urls.add(url)
        self.q.put_nowait((url, max_redirect))  # non-blocking put; in practice this line runs before the workers start
@asyncio.coroutine
def crawl(self):
"""Run the crawler until all finished."""
workers = [asyncio.Task(self.work(), loop=self.loop) for _ in range(self.max_tasks)]
self.t0 = time.time()
        yield from self.q.join()  # block until every queued item has been fetched and processed
self.t1 = time.time()
for w in workers:
w.cancel()
Example 12: Queue
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
__author__ = 'zhangxa'
"""
Example of how to use tornado's gen.coroutine
"""
from tornado import ioloop
from tornado import gen
from asyncio import Queue, QueueEmpty
from tornado.concurrent import Future
import functools
queue = Queue()
for i in range(10):
queue.put_nowait(i)
def queue_get():
    future = Future()
    try:
        future.set_result(queue.get_nowait())
    except QueueEmpty:
        # Note: when the queue is empty the Future is never resolved,
        # so a caller yielding on it would hang forever.
        pass
    return future
@gen.coroutine
def yield_func(n):
print("here")
for i in range(n):
x = yield queue_get()
print(x)
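The snippet above defines yield_func but never starts the IOLoop; a minimal way to drive it (a sketch, assuming the queue holds at least n items, since queue_get returns a Future that is never resolved when the queue is empty) is:
if __name__ == '__main__':
    ioloop.IOLoop.current().run_sync(functools.partial(yield_func, 10))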
Example 13: Pool
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
#......... part of the code omitted here .........
'''The maximum number of open connections allowed.
If more connections are requested, the request
is queued and a connection returned as soon as one becomes
available.
'''
return self._queue._maxsize
@property
def in_use(self):
'''The number of connections in use.
These connections are not available until they are released back
to the pool.
'''
return len(self._in_use_connections)
@property
def available(self):
'''Number of available connections in the pool.
'''
return reduce(self._count_connections, self._queue._queue, 0)
def __contains__(self, connection):
if connection not in self._in_use_connections:
return connection in self._queue._queue
return True
def connect(self):
'''Get a connection from the pool.
The connection is either a new one or retrieved from the
:attr:`available` connections in the pool.
:return: a :class:`.Future` resulting in the connection.
'''
assert not self._closed
return PoolConnection.checkout(self)
def close(self):
'''Close all :attr:`available` and :attr:`in_use` connections.
'''
self._closed = True
queue = self._queue
while queue.qsize():
connection = queue.get_nowait()
connection.close()
in_use = self._in_use_connections
self._in_use_connections = set()
for connection in in_use:
connection.close()
def _get(self):
queue = self._queue
# grab the connection without waiting, important!
if queue.qsize():
connection = queue.get_nowait()
# wait for one to be available
elif self.in_use + self._connecting >= queue._maxsize:
if self._timeout:
connection = yield future_timeout(queue.get(), self._timeout)
else:
connection = yield queue.get()
else: # must create a new connection
self._connecting += 1
try:
connection = yield self._creator()
finally:
self._connecting -= 1
        # None signals that a connection was removed from the queue.
        # Go again.
if connection is None:
connection = yield self._get()
else:
if is_socket_closed(connection.sock):
connection.close()
connection = yield self._get()
else:
self._in_use_connections.add(connection)
coroutine_return(connection)
def _put(self, conn, discard=False):
if not self._closed:
try:
self._queue.put_nowait(None if discard else conn)
except QueueFull:
conn.close()
self._in_use_connections.discard(conn)
def info(self, message=None, level=None): # pragma nocover
if self._queue._maxsize != 2:
return
message = '%s: ' % message if message else ''
self.logger.log(level or 10,
'%smax size %s, in_use %s, available %s',
message, self._queue._maxsize, self.in_use,
self.available)
def _count_connections(self, x, y):
return x + int(y is not None)
Example 14: workerTask
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
# Note: this is a threaded example, for which the thread-safe queue.Queue
# is the appropriate class. With asyncio.Queue, the bare q.join() call
# below returns an un-awaited coroutine and does not actually block.
# processImage, convertPdfs, and pdfList are defined elsewhere in the
# original script.
def workerTask(q):
while not q.empty():
processImage(q.get_nowait()[0])
q.task_done()
if not os.path.exists("__working"):
os.mkdir("__working")
convertPdfs(pdfList)
q = Queue(maxsize=0)
num_threads = 4
#put files in queue
for fileName in os.listdir("__working"):
if fileName.endswith(".pbm"):
q.put_nowait(("__working/" + fileName,))
threads = []
for i in range(num_threads):
worker = Thread(target=workerTask, args=(q,))
worker.start()
threads.append(worker)
q.join()
for thread in threads:
thread.join()
subprocess.run("rm -r __working", shell=True)
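Since the pattern above really wants a thread-safe queue, here is a minimal corrected sketch using queue.Queue instead of asyncio.Queue (the print is a stand-in for processImage):
import queue
from threading import Thread

def worker_task(q):
    while True:
        try:
            item = q.get_nowait()
        except queue.Empty:
            break
        print("processing", item[0])  # stand-in for processImage
        q.task_done()

q = queue.Queue()
for name in ("a.pbm", "b.pbm"):
    q.put_nowait((name,))
threads = [Thread(target=worker_task, args=(q,)) for _ in range(4)]
for t in threads:
    t.start()
q.join()  # blocks until task_done() has been called for every item
for t in threads:
    t.join()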
Example 15: udp_reader
# Required import: from asyncio import Queue [as alias]
# Or: from asyncio.Queue import put_nowait [as alias]
def udp_reader(s: socket, iqueue: Queue, size: int) -> None:
"""Read one or more packets from an UDP socket."""
data, peer = s.recvfrom(size)
iqueue.put_nowait((peer, data))
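One hedged way to drive udp_reader from an event loop (a sketch: the address, buffer size, and wiring are assumptions) is to register it as a readiness callback with loop.add_reader:
import asyncio
import functools
from asyncio import Queue
from socket import AF_INET, SOCK_DGRAM, socket

async def main():
    s = socket(AF_INET, SOCK_DGRAM)
    s.bind(('127.0.0.1', 9999))
    s.setblocking(False)
    iqueue = Queue()
    loop = asyncio.get_running_loop()
    # Call udp_reader whenever the socket has data to read.
    loop.add_reader(s.fileno(), functools.partial(udp_reader, s, iqueue, 4096))
    peer, data = await iqueue.get()
    print('received %r from %s' % (data, peer))

asyncio.run(main())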