This article collects typical usage examples of the Python method storages.compat.BytesIO.seek. If you are unsure what BytesIO.seek does, how to call it, or what real-world usage looks like, the curated examples below may help. You can also look further into usage examples of the containing class, storages.compat.BytesIO.
Four code examples of the BytesIO.seek method are shown below, ordered by popularity by default.
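Before the examples, here is a minimal self-contained sketch of the pattern they all share: write into an in-memory buffer, then call seek(0) so the buffer can be read back from the start. It uses io.BytesIO as a stand-in, on the assumption that storages.compat.BytesIO is simply a compatibility alias for it.

from io import BytesIO  # stand-in; assumes storages.compat.BytesIO aliases io.BytesIO

buf = BytesIO()
buf.write(b"hello world")      # writing leaves the cursor at the end of the buffer
buf.seek(0)                    # rewind so reads start at the beginning
assert buf.read() == b"hello world"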
Example 1: _read
# Required imports: os, ftplib, and from storages.compat import BytesIO
# (FTPStorageException is defined alongside the FTP storage backend.)
def _read(self, name):
    memory_file = BytesIO()
    try:
        # Remember the working directory, stream the remote file's bytes
        # into the in-memory buffer, then restore the directory.
        pwd = self._connection.pwd()
        self._connection.cwd(os.path.dirname(name))
        self._connection.retrbinary('RETR ' + os.path.basename(name),
                                    memory_file.write)
        self._connection.cwd(pwd)
        # Rewind so callers can read the file from the beginning.
        memory_file.seek(0)
        return memory_file
    except ftplib.all_errors:
        raise FTPStorageException('Error reading file %s' % name)
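The memory_file.seek(0) before returning matters because retrbinary leaves the write cursor at the end of the buffer; without the rewind, a caller's first read() would return empty bytes. A minimal sketch demonstrating that behaviour with plain io.BytesIO (the chunk values are illustrative):

from io import BytesIO  # assumes storages.compat.BytesIO aliases io.BytesIO

buf = BytesIO()
# Simulate retrbinary streaming chunks into the buffer via its write method.
for chunk in (b"first chunk ", b"second chunk"):
    buf.write(chunk)

assert buf.read() == b""        # cursor sits at the end, so nothing is read
buf.seek(0)                     # this is what _read() does before returning
assert buf.read() == b"first chunk second chunk"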
Example 2: _compress_content
# Required imports: from storages.compat import BytesIO
# Also uses GzipFile (gzip module) and force_bytes (django.utils.encoding)
def _compress_content(self, content):
    """Gzip a given string content."""
    zbuf = BytesIO()
    zfile = GzipFile(mode="wb", compresslevel=6, fileobj=zbuf)
    try:
        zfile.write(force_bytes(content.read()))
    finally:
        zfile.close()
    zbuf.seek(0)
    # Swap the gzipped buffer into the incoming file object and rewind it.
    content.file = zbuf
    content.seek(0)
    return content
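This version mutates the incoming file object (content.file = zbuf) and rewinds it, so the caller gets back the same object it passed in, now holding gzipped bytes positioned at offset 0. A stand-alone sketch of the round trip, assuming a Django ContentFile as the input and standard-library gzip for verification; the free function compress_content is an illustrative adaptation of the method above, not part of django-storages:

from gzip import GzipFile, decompress
from io import BytesIO

from django.core.files.base import ContentFile


def compress_content(content):
    """Stand-alone adaptation of _compress_content for illustration."""
    zbuf = BytesIO()
    zfile = GzipFile(mode="wb", compresslevel=6, fileobj=zbuf)
    try:
        zfile.write(content.read())
    finally:
        zfile.close()
    zbuf.seek(0)
    content.file = zbuf   # swap the underlying buffer for the gzipped one
    content.seek(0)       # rewind so the storage backend reads from the start
    return content


original = ContentFile(b"hello " * 100, name="greeting.txt")
compressed = compress_content(original)
assert compressed is original                        # same object, new buffer
assert decompress(compressed.read()) == b"hello " * 100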
Example 3: _compress_content
# Required imports: from storages.compat import BytesIO
# Also uses GzipFile (gzip module) and force_bytes (django.utils.encoding)
def _compress_content(self, content):
    """Gzip a given string content."""
    zbuf = BytesIO()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    try:
        zfile.write(force_bytes(content.read()))
    finally:
        zfile.close()
    zbuf.seek(0)
    # Boto 2 returned the InMemoryUploadedFile with the file pointer replaced,
    # but Boto 3 seems to have issues with that. There is no need for fp.name
    # in Boto 3, so the BytesIO buffer is returned directly.
    return zbuf
Example 4: _compress_content
# Required imports: from storages.compat import BytesIO
# Also uses GzipFile (gzip module) and force_bytes (django.utils.encoding)
def _compress_content(self, content):
    """Gzip a given string content."""
    zbuf = BytesIO()
    # The gzip header has a modification-time field (see http://www.zlib.org/rfc-gzip.html),
    # so each compression of a file yields different bytes even when the contents
    # are unchanged. For S3 this defeats change detection via MD5 sums on gzipped
    # files; fixing the mtime at 0.0 at compression time avoids the problem.
    zfile = GzipFile(mode="wb", compresslevel=6, fileobj=zbuf, mtime=0.0)
    try:
        zfile.write(force_bytes(content.read()))
    finally:
        zfile.close()
    zbuf.seek(0)
    content.file = zbuf
    content.seek(0)
    return content
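The comment block above can be verified directly: the gzip container stores a modification time in its header, so compressing identical data with different mtimes yields different bytes (and different MD5 digests), while pinning the mtime makes the output reproducible. A minimal standard-library sketch; the helper name gzip_bytes is illustrative:

from gzip import GzipFile
from hashlib import md5
from io import BytesIO


def gzip_bytes(data, mtime):
    buf = BytesIO()
    with GzipFile(mode="wb", compresslevel=6, fileobj=buf, mtime=mtime) as zfile:
        zfile.write(data)
    return buf.getvalue()


payload = b"same content, compressed twice"

# Different MTIME values change the 4-byte field in the gzip header, so the
# MD5 changes even though the underlying content is identical.
assert md5(gzip_bytes(payload, mtime=1)).digest() != md5(gzip_bytes(payload, mtime=2)).digest()

# Pinning mtime to 0 (as the example above does) makes the output deterministic,
# so MD5/ETag comparison on S3 can detect real content changes.
assert gzip_bytes(payload, mtime=0) == gzip_bytes(payload, mtime=0)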