This article collects typical usage examples of Python's io.BufferedReader. If you have been wondering what io.BufferedReader does, how to call it, or what real-world code built on it looks like, the curated examples below should help. You can also browse further usage examples from the io module itself.
The sections below present 15 code examples of io.BufferedReader, sorted by popularity by default.
Example 1: read_fastq
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def read_fastq(filename):
    """
    return a stream of FASTQ entries, handling gzipped and empty files
    """
    if not filename:
        return itertools.cycle((None,))
    if filename == "-":
        filename_fh = sys.stdin
    elif filename.endswith('gz'):
        if is_python3():
            filename_fh = gzip.open(filename, mode='rt')
        else:
            filename_fh = BufferedReader(gzip.open(filename, mode='rt'))
    else:
        filename_fh = open(filename)
    return stream_fastq(filename_fh)
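The Python 2 branch above wraps the GzipFile in a BufferedReader so that repeated small reads (line iteration over a FASTQ file) go through a buffer rather than hitting the decompressor each time. A minimal, self-contained sketch of that pattern; the file name and contents are throwaway placeholders, not part of the original example:

import gzip
import io

path = "sample.fastq.gz"  # throwaway gzipped file created just for the sketch
with gzip.open(path, "wt") as out:
    out.write("@read1\nACGT\n+\nIIII\n")

# io.BufferedReader buffers the decompressed bytes coming out of GzipFile,
# which makes line-by-line iteration noticeably cheaper.
with io.BufferedReader(gzip.open(path, mode="rb")) as handle:
    for line in handle:
        print(line.rstrip())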
Example 2: test_fetch_files_function
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def test_fetch_files_function(tmpdir):
    # Setup and run three experiments
    root = tmpdir.strpath
    run_test_experiment(exp_name="experiment 1 alpha", exp_id="1234", root_dir=root)
    run_test_experiment(exp_name="experiment 2 beta", exp_id="5678", root_dir=root)
    run_test_experiment(
        exp_name="experiment 3 alpha beta", exp_id="9990", root_dir=root
    )

    tinydb_reader = TinyDbReader(root)
    res = tinydb_reader.fetch_files(indices=0)

    assert len(res) == 1
    assert list(res[0]["artifacts"].keys()) == ["about"]
    assert isinstance(res[0]["artifacts"]["about"], io.BufferedReader)
    assert res[0]["date"] == datetime.datetime(1999, 5, 4, 3, 2, 1)
    assert res[0]["exp_id"] == "1234"
    assert res[0]["exp_name"] == "experiment 1 alpha"
    assert list(res[0]["resources"].keys()) == ["sacred/__init__.py"]
    assert isinstance(res[0]["resources"]["sacred/__init__.py"], io.BufferedReader)
    assert list(res[0]["sources"].keys()) == ["setup.py"]
    assert isinstance(res[0]["sources"]["setup.py"], io.BufferedReader)
Example 3: _body_file__get
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def _body_file__get(self):
    """
    Input stream of the request (wsgi.input).

    Setting this property resets the content_length and seekable flag
    (unlike setting req.body_file_raw).
    """
    if not self.is_body_readable:
        return io.BytesIO()
    r = self.body_file_raw
    clen = self.content_length
    if not self.is_body_seekable and clen is not None:
        # we need to wrap input in LimitedLengthFile
        # but we have to cache the instance as well
        # otherwise this would stop working
        # (.remaining counter would reset between calls):
        #   req.body_file.read(100)
        #   req.body_file.read(100)
        env = self.environ
        wrapped, raw = env.get('webob._body_file', (0, 0))
        if raw is not r:
            wrapped = LimitedLengthFile(r, clen)
            wrapped = io.BufferedReader(wrapped)
            env['webob._body_file'] = wrapped, r
        r = wrapped
    return r
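Wrapping the raw, non-seekable wsgi.input in io.BufferedReader is what gives callers peek() and cheap incremental reads; the caching in the environ just ensures the same wrapper (and its remaining-byte counter) is reused across property accesses. A small sketch of the buffering layer alone, using an in-memory stand-in for the raw stream (LimitedLengthFile is not reproduced here):

import io

raw_body = io.BytesIO(b"field=value&flag=1")  # stand-in for a non-seekable request body

body = io.BufferedReader(raw_body)
print(body.peek(5))   # look ahead without consuming any bytes
print(body.read(5))   # b"field"
print(body.read())    # the rest of the body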
Example 4: test_invalid_schema_resource
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def test_invalid_schema_resource(self, tmpdir, caplog, schema):
    class FakeProvider(object):
        def get_resource_stream(self, pkg, rsc):
            return io.BufferedReader(io.BytesIO(schema))

    # pkg_resources.resource_stream() cannot be mocked directly
    # Instead mock the module-level function it calls.
    (flexmock(pkg_resources)
        .should_receive('get_provider')
        .and_return(FakeProvider()))

    filename = os.path.join(str(tmpdir), 'config.yaml')
    with open(filename, 'w'):
        pass

    tasker, workflow = self.prepare()
    plugin = ReactorConfigPlugin(tasker, workflow, config_path=str(tmpdir))

    with caplog.at_level(logging.ERROR), pytest.raises(Exception):
        plugin.run()

    captured_errs = [x.message for x in caplog.records]
    assert any("cannot validate" in x for x in captured_errs)
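The FakeProvider works because io.BufferedReader(io.BytesIO(...)) makes a chunk of in-memory bytes behave like the stream pkg_resources would return for a packaged file. The same trick in isolation, with invented schema bytes:

import io

schema = b'{"not": "a valid json-schema"}'  # hypothetical payload for the fake resource

stream = io.BufferedReader(io.BytesIO(schema))
# Code under test can consume this as if it came from pkg_resources.resource_stream().
print(stream.read())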
Example 5: from_fileobj
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def from_fileobj(cls, fileobj, byteorder="big"):
    """Load an nbt file from a proper file object.

    The method is used by the :func:`load` helper function when the
    ``gzipped`` keyword-only argument is not specified explicitly.

    Arguments:
        fileobj:
            Can be either a standard ``io.BufferedReader`` for
            uncompressed nbt or a ``gzip.GzipFile`` for gzipped nbt
            data. The function simply calls the inherited
            :meth:`nbtlib.tag.Compound.parse` classmethod and sets the
            :attr:`filename` and :attr:`gzipped` attributes depending
            on the argument.
        byteorder:
            Can be either ``"big"`` or ``"little"``. The argument is
            forwarded to :meth:`nbtlib.tag.Compound.parse`.
    """
    self = cls.parse(fileobj, byteorder)
    self.filename = getattr(fileobj, "name", self.filename)
    self.gzipped = isinstance(fileobj, gzip.GzipFile)
    self.byteorder = byteorder
    return self
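The gzipped attribute is derived purely from the type of the file object handed in: a plain binary file gives io.BufferedReader, while gzip.open gives gzip.GzipFile. A sketch of that distinction on its own; the file names and contents are throwaway placeholders and nbtlib is not needed for the check:

import gzip
import io

# Create throwaway stand-ins for an uncompressed and a gzipped input file.
with open("level.dat", "wb") as fh:
    fh.write(b"\x0a\x00\x00")
with gzip.open("level.dat.gz", "wb") as fh:
    fh.write(b"\x0a\x00\x00")

def describe(fileobj):
    # Mirrors the isinstance check in from_fileobj above.
    return "gzipped" if isinstance(fileobj, gzip.GzipFile) else "uncompressed"

with open("level.dat", "rb") as plain, gzip.open("level.dat.gz", "rb") as packed:
    print(describe(plain))    # "uncompressed" -- open() in 'rb' mode returns io.BufferedReader
    print(describe(packed))   # "gzipped"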
Example 6: get_SHA1
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def get_SHA1(file_value: io.BufferedReader):
    """
    Get SHA1 hash of the file, directly in memory
    TODO : Fix the input type
    :param file_value: A file to compute the SHA-1
    :return: the SHA-1 of the file in memory
    """
    h = hashlib.sha1()
    b = bytearray(128 * 1024)
    mv = memoryview(b)
    for n in iter(lambda: file_value.readinto(mv), 0):
        h.update(mv[:n])
    return h.hexdigest()
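A hedged usage sketch for the function above: any file opened in binary read mode is an io.BufferedReader and therefore supports the readinto() calls the hashing loop relies on. The file name and contents are placeholders:

# Assumes the get_SHA1 definition above (and its hashlib/io imports) is in scope.
with open("archive.bin", "wb") as fh:          # throwaway input file
    fh.write(b"some bytes to hash")

with open("archive.bin", "rb") as file_value:  # io.BufferedReader
    print(get_SHA1(file_value))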
Example 7: __init__
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def __init__(self, course_file: BufferedReader = None, dryrun=False, debug=False, helm_args=None, continue_on_error=False, create_namespace=True):
    self.config = Config()
    self.results = ReckonerInstallResults()

    self.config.dryrun = dryrun
    self.config.debug = debug
    self.config.helm_args = helm_args
    self.config.continue_on_error = continue_on_error
    self.config.create_namespace = create_namespace
    if course_file:
        self.config.course_path = course_file.name

    if self.config.debug:
        logging.warn("The --debug flag will be deprecated. Please use --helm-args or --dry-run instead.")
    if self.config.helm_args:
        logging.warn("Specifying --helm-args on the cli will override helm_args in the course file.")
    self.course = Course(course_file)
Example 8: idzip_compression
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def idzip_compression(path, output):
    '''Compress a file using idzip, a gzip-compatible format with random access support.
    '''
    if output is None:
        output = '-'
    with click.open_file(output, mode='wb') as outfh:
        writer = _compression.GzipFile(fileobj=outfh, mode='wb')
        with click.open_file(path, 'rb') as infh:
            try:
                infh_wrap = io.BufferedReader(infh)
                header = infh_wrap.peek(2)
                if _compression.starts_with_gz_magic(header):
                    click.echo("Detected gzip input file", err=True)
                    infh_wrap = _compression.GzipFile(fileobj=infh_wrap)
            except AttributeError:
                infh_wrap = infh

            buffer_size = _compression.WRITE_BUFFER_SIZE
            chunk = infh_wrap.read(buffer_size)
            while chunk:
                writer.write(chunk)
                chunk = infh_wrap.read(buffer_size)
        writer.close()
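What io.BufferedReader contributes here is peek(): the first two bytes can be inspected for the gzip magic number without consuming them, so the subsequent read() still starts at byte zero. A self-contained sketch of that detection step; the input file is a throwaway placeholder and the magic constant is the standard gzip header:

import io

GZIP_MAGIC = b"\x1f\x8b"  # the first two bytes of every gzip stream

with open("input.dat", "wb") as fh:            # throwaway input; plain bytes, not gzipped
    fh.write(b"plain data, not compressed")

with open("input.dat", "rb", buffering=0) as raw:   # raw, unbuffered FileIO
    reader = io.BufferedReader(raw)
    header = reader.peek(2)      # look ahead; the stream position does not move
    if header[:2] == GZIP_MAGIC:
        print("Detected gzip input file")
    data = reader.read()         # still reads from the very beginning
    print(data)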
Example 9: get_pricing_data
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def get_pricing_data(service_code, filters=None, max_cache_age_days=30):
    from ... import config

    if filters is None:
        filters = [("location", region_name(clients.ec2.meta.region_name))]

    get_products_args = dict(ServiceCode=service_code,
                             Filters=[dict(Type="TERM_MATCH", Field=k, Value=v) for k, v in filters])
    cache_key = hashlib.sha256(json.dumps(get_products_args, sort_keys=True).encode()).hexdigest()[:32]
    service_code_filename = os.path.join(config.user_config_dir, "pricing_cache_{}.json.gz".format(cache_key))
    try:
        cache_date = datetime.fromtimestamp(os.path.getmtime(service_code_filename))
        if cache_date < datetime.now() - timedelta(days=max_cache_age_days):
            raise Exception("Cache is too old, discard")
        with gzip.open(service_code_filename) as gz_fh:
            with io.BufferedReader(gz_fh) as buf_fh:
                pricing_data = json.loads(buf_fh.read().decode())
    except Exception:
        logger.info("Fetching pricing data for %s", service_code)
        client = boto3.client("pricing", region_name="us-east-1")
        pricing_data = [json.loads(p) for p in paginate(client.get_paginator("get_products"), **get_products_args)]
        try:
            with gzip.open(service_code_filename, "w") as fh:
                fh.write(json.dumps(pricing_data).encode())
        except Exception as e:
            print(e, file=sys.stderr)
    return pricing_data
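The cache-hit branch layers io.BufferedReader over the GzipFile before reading and decoding the whole JSON payload. That read path in isolation, with a throwaway cache file created first so the sketch runs on its own:

import gzip
import io
import json

cache_file = "pricing_cache_example.json.gz"   # placeholder name

# Write a tiny gzipped JSON cache so the read below has something to load.
with gzip.open(cache_file, "w") as fh:
    fh.write(json.dumps([{"sku": "example"}]).encode())

with gzip.open(cache_file) as gz_fh:
    with io.BufferedReader(gz_fh) as buf_fh:
        pricing_data = json.loads(buf_fh.read().decode())
print(pricing_data)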
Example 10: test_read_byte_file
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def test_read_byte_file(smb_share):
    file_path = "%s\\%s" % (smb_share, "file.txt")
    file_contents = b"\x00\x01\x02\x03"

    expected = "[NtStatus 0xc0000034] No such file or directory"
    with pytest.raises(SMBOSError, match=re.escape(expected)):
        smbclient.open_file(file_path, mode='rb')

    with smbclient.open_file(file_path, mode='wb') as fd:
        fd.write(file_contents)

    with smbclient.open_file(file_path, mode='rb') as fd:
        assert isinstance(fd, io.BufferedReader)
        assert fd.closed is False
        assert fd.name == file_path

        actual = fd.read()
        assert actual == file_contents

        actual = fd.read()
        assert actual == b""

        fd.seek(0)
        actual = fd.read()
        assert actual == file_contents

        with pytest.raises(IOError):
            fd.write(b"Fail")

    assert fd.closed
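smbclient.open_file mirrors the behaviour of the built-in open(): binary read mode hands back an io.BufferedReader, reads at EOF return b"", and seek(0) rewinds. The same expectations checked against a plain local file, so the sketch runs without an SMB share:

import io
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"\x00\x01\x02\x03")
    path = tmp.name

fd = open(path, "rb")               # built-in open() in 'rb' mode -> io.BufferedReader
assert isinstance(fd, io.BufferedReader)
assert fd.read() == b"\x00\x01\x02\x03"
assert fd.read() == b""             # a second read at EOF returns empty bytes
fd.seek(0)
assert fd.read() == b"\x00\x01\x02\x03"
fd.close()
assert fd.closed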
Example 11: extractfile
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def extractfile(self, member):
    """Extract a member from the archive as a file object. `member' may be
       a filename or a TarInfo object. If `member' is a regular file or a
       link, an io.BufferedReader object is returned. Otherwise, None is
       returned.
    """
    self._check("r")

    if isinstance(member, str):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member

    if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
        # Members with unknown types are treated as regular files.
        return self.fileobject(self, tarinfo)

    elif tarinfo.islnk() or tarinfo.issym():
        if isinstance(self.fileobj, _Stream):
            # A small but ugly workaround for the case that someone tries
            # to extract a (sym)link as a file-object from a non-seekable
            # stream of tar blocks.
            raise StreamError("cannot extract (sym)link as file object")
        else:
            # A (sym)link's file object is its target's file object.
            return self.extractfile(self._find_link_target(tarinfo))
    else:
        # If there's no data associated with the member (directory, chrdev,
        # blkdev, etc.), return None instead of a file object.
        return None
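This is the standard-library tarfile method, and its docstring spells out the contract: regular members come back as io.BufferedReader objects (tarfile's ExFileObject subclasses io.BufferedReader on CPython 3). A small round-trip sketch that builds an archive in memory and checks that claim:

import io
import tarfile

# Build a tiny archive in memory so the example is self-contained.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    payload = io.BytesIO(b"hello tar")
    info = tarfile.TarInfo(name="greeting.txt")
    info.size = len(payload.getvalue())
    tf.addfile(info, payload)

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tf:
    member = tf.extractfile("greeting.txt")
    # For regular members, extractfile() returns a buffered, read-only file object.
    print(isinstance(member, io.BufferedReader), member.read())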
Example 12: backport_makefile
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= {"r", "w", "b"}:
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
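The core of this backport is a mode-to-class mapping: read-only gives io.BufferedReader, write-only io.BufferedWriter, read-write io.BufferedRWPair, with io.TextIOWrapper layered on top for text modes. On Python 3 the real socket.makefile already behaves this way, which the following sketch demonstrates with a local socket pair (it assumes an OS where socket.socketpair is available):

import io
import socket

a, b = socket.socketpair()
reader = a.makefile("rb")   # read side -> io.BufferedReader over a raw SocketIO stream
writer = b.makefile("wb")   # write side -> io.BufferedWriter

writer.write(b"ping")
writer.flush()
print(isinstance(reader, io.BufferedReader), reader.read(4))

for f in (reader, writer):
    f.close()
a.close()
b.close()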
Example 13: backport_makefile
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def backport_makefile(
    self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= {"r", "w", "b"}:
        raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
Example 14: open
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def open(self, mode):
    """
    Open the FileSystem target.

    This method returns a file-like object which can either be read from or written to depending
    on the specified mode.

    :param mode: the mode `r` opens the FileSystemTarget in read-only mode, whereas `w` will
                 open the FileSystemTarget in write mode. Subclasses can implement
                 additional options.
    :type mode: str
    """
    if mode == 'w':
        return self.format.pipe_writer(AtomicFtpFile(self._fs, self.path))

    elif mode == 'r':
        temp_dir = os.path.join(tempfile.gettempdir(), 'luigi-contrib-ftp')
        self.__tmp_path = temp_dir + '/' + self.path.lstrip('/') + '-luigi-tmp-%09d' % random.randrange(0, 1e10)
        # download file to local
        self._fs.get(self.path, self.__tmp_path)

        return self.format.pipe_reader(
            FileWrapper(io.BufferedReader(io.FileIO(self.__tmp_path, 'r')))
        )
    else:
        raise Exception("mode must be 'r' or 'w' (got: %s)" % mode)
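For reads, the FTP target first downloads the file to a local temp path and then layers io.BufferedReader over a raw io.FileIO handle. That layering on its own, with a placeholder file created on the spot; it is essentially what the built-in open(path, "rb") would construct for you:

import io

path = "downloaded-copy.tmp"          # stand-in for the temporary local download
with open(path, "wb") as fh:
    fh.write(b"remote file contents")

reader = io.BufferedReader(io.FileIO(path, "r"))   # raw FileIO + buffering layer
print(reader.read())
reader.close()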
Example 15: backport_makefile
# Required import: import io [as alias]
# Or: from io import BufferedReader [as alias]
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """Backport of socket.makefile from Python 3.5."""
    if not set(mode) <= {"r", "w", "b"}:
        raise ValueError(
            "invalid mode {!r} (only r, w, b allowed)".format(mode)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text