This article collects typical usage examples of Python's msgpack.Unpacker. If you are unsure what msgpack.Unpacker does or how to use it, the curated examples below should help. You can also explore further usage examples from the containing msgpack module.
The following shows 15 code examples of msgpack.Unpacker, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
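All of the examples rely on two basic patterns for msgpack.Unpacker: wrapping a file-like object and iterating over it, or feeding raw bytes into the unpacker and draining the complete messages. A minimal sketch of both patterns, not taken from any of the projects below and assuming msgpack-python >= 1.0 (where raw=False controls string decoding):

import io
import msgpack

# Stream pattern: wrap a file-like object and iterate over unpacked objects.
buf = io.BytesIO(msgpack.packb({'a': 1}) + msgpack.packb([1, 2, 3]))
for obj in msgpack.Unpacker(buf, raw=False):
    print(obj)              # {'a': 1}, then [1, 2, 3]

# Feed pattern: push bytes in as they arrive (e.g. from a socket) and drain
# only the messages that are already complete.
unpacker = msgpack.Unpacker(raw=False)
unpacker.feed(msgpack.packb('hello'))
for obj in unpacker:
    print(obj)              # 'hello'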
Example 1: msgpack_appendable_pack
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def msgpack_appendable_pack(o, path):
    open(path, 'a+').close()  # touch the file so it exists
    with open(path, mode='r+b') as f:
        packer = msgpack.Packer()
        unpacker = msgpack.Unpacker(f)

        if type(o) == list:
            try:
                previous_len = unpacker.read_array_header()
            except msgpack.OutOfData:
                previous_len = 0

            # calculate and replace the fixed-width array header
            header = packer.pack_array_header(previous_len + len(o))
            f.seek(0)
            f.write(header)
            f.write(bytes(1) * (MAX_MSGPACK_ARRAY_HEADER_LEN - len(header)))

            # append the new elements at the end of the file
            f.seek(0, 2)
            for element in o:
                f.write(packer.pack(element))
        else:
            f.write(packer.pack(o))
Example 2: make_post_request
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def make_post_request(self, msg, expected_status=200):
    request = self.encode(msg)
    response = self.fetch('/v1/source', method='POST', body=request,
                          headers={'Content-type': 'application/vnd.msgpack'})
    self.assertEqual(response.code, expected_status)

    responses = []
    if expected_status < 400:
        unpacker = msgpack.Unpacker(**unpack_kwargs)
        unpacker.feed(response.body)
        for msg in unpacker:
            responses.append(msg)
    else:
        responses = [{'error': str(response.error)}]
    return responses
Example 3: _messages_generator
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def _messages_generator(self):
    unpacker = msgpack.Unpacker()
    while True:
        # f.read(n) on sys.stdin blocks until n bytes are read, causing the
        # serializer to hang.
        # os.read(fileno, n) will block if there is nothing to read, but will
        # return as soon as it is able to read at most n bytes.
        with self._reader_lock:
            try:
                line = os.read(self.input_stream.fileno(), self.CHUNK_SIZE)
            except io.UnsupportedOperation:
                line = self.input_stream.read(self.CHUNK_SIZE)
        if not line:
            # Handle EOF, which usually means Storm went away
            raise StormWentAwayError()
        # As the msgpack-python docs suggest, we feed data into the unpacker's
        # internal buffer so that it handles message boundary detection and
        # incomplete messages. If the input ends with a partial message, the
        # unpacker raises StopIteration and can continue once it is fed the
        # rest of the message.
        unpacker.feed(line)
        for i in unpacker:
            yield i
Example 4: handle_stream
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def handle_stream(self, stream, address):
    unpacker = msgpack.Unpacker(raw=False)
    while True:
        try:
            wire_bytes = yield stream.read_bytes(1024, partial=True)
            if not wire_bytes:
                break
            try:
                unpacker.feed(wire_bytes)
            except msgpack.exceptions.BufferFull:
                # Start over with a fresh unpacker, losing whatever was buffered
                unpacker = msgpack.Unpacker(raw=False)
                unpacker.feed(wire_bytes)
            for record_dict in unpacker:
                record = logging.makeLogRecord(record_dict)
                logger = logging.getLogger(record.name)
                logger.handle(record)
        except (EOFError, KeyboardInterrupt, SystemExit, StreamClosedError):
            break
        except Exception as exc:  # pylint: disable=broad-except
            log.exception(exc)
Example 5: bulk_import_error_records
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def bulk_import_error_records(self, name, params=None):
    """List the records that have errors under the specified bulk import name.

    Args:
        name (str): Bulk import name.
        params (dict, optional): Extra parameters.

    Yields:
        Row of the data
    """
    params = {} if params is None else params
    with self.get(
        create_url("/v3/bulk_import/error_records/{name}", name=name), params
    ) as res:
        code = res.status
        if code != 200:
            body = res.read()
            self.raise_error("Failed to get bulk import error records", res, body)

        body = io.BytesIO(res.read())
        decompressor = gzip.GzipFile(fileobj=body)

        unpacker = msgpack.Unpacker(decompressor, raw=False)
        for row in unpacker:
            yield row
Example 6: job_result_format_each
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def job_result_format_each(self, job_id, format):
    """Yield rows of the job result in the specified format.

    Args:
        job_id (int): Job ID.
        format (str): Output format of the job result: "json" or "msgpack".

    Yields:
        The query result of the specified job, row by row.
    """
    with self.get(
        create_url("/v3/job/result/{job_id}", job_id=job_id), {"format": format}
    ) as res:
        code = res.status
        if code != 200:
            self.raise_error("Get job result failed", res, "")
        if format == "msgpack":
            unpacker = msgpack.Unpacker(res, raw=False)
            for row in unpacker:
                yield row
        elif format == "json":
            for row in codecs.getreader("utf-8")(res):
                yield json.loads(row)
        else:
            yield res.read()
Example 7: test_msgpack_large_data
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def test_msgpack_large_data(self):
    big_string = s_const.mebibyte * 129 * 'V'
    struct = ('test', {'key': big_string})

    buf = s_msgpack.en(struct)
    unpacked_struct = s_msgpack.un(buf)
    self.eq(struct, unpacked_struct)

    # Ensure our use of msgpack.Unpacker can also handle this data
    with self.getTestDir() as dirn:
        with s_common.genfile(dirn, 'test.mpk') as fd:
            fd.write(buf)
        with s_common.genfile(dirn, 'test.mpk') as fd:
            genr = s_msgpack.iterfd(fd)
            objs = list(genr)
            self.len(1, objs)
            self.eq(objs[0], struct)

    # Ensure that our streaming Unpk object can also handle this data
    unpk = s_msgpack.Unpk()
    objs = unpk.feed(buf)
    self.len(1, objs)
    self.eq(objs[0], (135266320, struct))
Example 8: iterfd
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def iterfd(fd):
    '''
    Generator which unpacks a file object of msgpacked content.

    Args:
        fd: File object to consume data from.

    Notes:
        String objects are decoded using utf8 encoding. In order to handle
        potentially malformed input, ``unicode_errors='surrogatepass'`` is set
        to allow decoding bad input strings.

    Yields:
        Objects from a msgpack stream.
    '''
    unpk = msgpack.Unpacker(fd, **unpacker_kwargs)
    for mesg in unpk:
        yield mesg
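The module-level unpacker_kwargs used here (and by iterfile in the next example) is not part of the snippet. Based on the docstring, a plausible reconstruction is the following; this is an assumption, not the project's actual definition:

# Hypothetical reconstruction of unpacker_kwargs, inferred from the docstring
# above; the real module may set additional options (e.g. use_list).
unpacker_kwargs = {
    'raw': False,                       # decode msgpack strings using utf8
    'unicode_errors': 'surrogatepass',  # tolerate malformed input strings
}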
Example 9: iterfile
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def iterfile(path, since=-1):
    '''
    Generator which yields msgpack objects from a file path.

    Args:
        path: File path to open and consume data from.

    Notes:
        String objects are decoded using utf8 encoding. In order to handle
        potentially malformed input, ``unicode_errors='surrogatepass'`` is set
        to allow decoding bad input strings.

    Yields:
        Objects from a msgpack stream.
    '''
    with io.open(path, 'rb') as fd:
        unpk = msgpack.Unpacker(fd, **unpacker_kwargs)
        for i, mesg in enumerate(unpk):
            if i <= since:
                continue
            yield mesg
Example 10: unpack_rawr_zip_payload
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def unpack_rawr_zip_payload(table_sources, payload):
    """Unpack a zipfile and turn it into a callable "tables" object."""
    # the io we get from S3 is streaming, so we can't seek on it, but zipfile
    # seems to require that. so we buffer it all in memory. RAWR tiles are
    # generally up to around 100MB in size, which should be safe to store in
    # RAM.
    from tilequeue.query.common import Table
    from io import BytesIO

    zfh = zipfile.ZipFile(BytesIO(payload), 'r')

    def get_table(table_name):
        # need to extract the whole compressed file from the zip reader, as it
        # doesn't support .tell() on the file-like object, which gzip requires.
        data = zfh.open(table_name, 'r').read()
        unpacker = Unpacker(file_like=BytesIO(data))
        source = table_sources[table_name]
        return Table(source, unpacker)

    return get_table
Example 11: wait_for_at_least_one_message
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def wait_for_at_least_one_message(self, channel):
    """
    Reads until we receive at least one message we can unpack. Returns all messages found.
    """
    unpacker = msgpack.Unpacker(encoding='utf-8')

    while True:
        try:
            start = time.time()
            chunk = self.ssh_channel[channel].recv(1024)
            end = time.time()
            self.read_speeds.append(len(chunk) / (end - start))
            if len(self.read_speeds) > 20:
                self.read_speeds = self.read_speeds[10:]

            if chunk == b'':
                # happens only when the connection broke; if there is simply
                # nothing to receive, recv() blocks instead
                self.connection_error(channel, 'Connection broken w')
                return False
        except Exception as error:
            self.connection_error(channel, error)
            raise

        unpacker.feed(chunk)

        messages = [m for m in unpacker]
        if messages:
            return messages
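Note that this example and the three that follow pass an encoding= keyword to msgpack.Unpacker. That keyword was deprecated and then removed in msgpack-python 1.0; on current releases the closest equivalent is raw=False (together with unicode_errors where needed). A minimal sketch of the modern spelling, shown as an assumption rather than a drop-in patch to these projects:

import msgpack

# On msgpack-python >= 1.0 there is no encoding= parameter any more;
# raw=False makes the Unpacker decode msgpack strings to Python str.
unpacker = msgpack.Unpacker(raw=False)          # instead of encoding='utf-8'
unpacker.feed(msgpack.packb({'status': 'ok'}))
print(list(unpacker))                           # [{'status': 'ok'}]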
Example 12: stream_unpacker
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def stream_unpacker(fp, object_hook=None):
    return msgpack.Unpacker(fp, object_hook=object_hook, encoding='utf8')
Example 13: get_lines
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def get_lines(self, stream):
    return msgpack.Unpacker(stream, encoding='utf-8', object_pairs_hook=decode_to_sorted)
Example 14: msgpack_appendable_unpack
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def msgpack_appendable_unpack(path):
    # if the content is not a list?
    #     return msgpack.unpackb(f.read())
    with open(path, 'rb') as f:
        packer = msgpack.Packer()
        unpacker = msgpack.Unpacker(f, encoding='utf-8')
        length = unpacker.read_array_header()

        header_length = len(packer.pack_array_header(length))
        unpacker.read_bytes(MAX_MSGPACK_ARRAY_HEADER_LEN - header_length)
        f.seek(MAX_MSGPACK_ARRAY_HEADER_LEN)

        return [unpacker.unpack() for _ in range(length)]
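Example 14 is the reader for the append-to-file scheme of Example 1: the writer keeps a fixed-width, zero-padded array header at the start of the file, and the reader skips the padding before unpacking. A minimal usage sketch, assuming a hypothetical MAX_MSGPACK_ARRAY_HEADER_LEN of 5 bytes (the widest possible msgpack array header); the constant is not defined in either snippet and the original project may use a different value:

import msgpack

MAX_MSGPACK_ARRAY_HEADER_LEN = 5  # assumption: 0xdd marker + 4-byte length

msgpack_appendable_pack([1, 2], '/tmp/data.mpk')    # creates the file
msgpack_appendable_pack([3], '/tmp/data.mpk')       # appends and patches the header
print(msgpack_appendable_unpack('/tmp/data.mpk'))   # [1, 2, 3]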
Example 15: load_levels
# Required import: import msgpack [as alias]
# Or: from msgpack import Unpacker [as alias]
def load_levels(col, path):
    with open(path, 'rb') as fp:
        unpacker = msgpack.Unpacker(fp, raw=False)
        for i, level in enumerate(unpacker, start=1):
            col.objects(id=level['id']).modify(
                upsert=True,
                set__name=level['label'],
                set__parents=[level_ref(p) for p in level['parents']],
                set__admin_level=level.get('admin_level')
            )
    return i