本文整理汇总了Python中tarfile.TarFile.open方法的典型用法代码示例。如果您正苦于以下问题:Python TarFile.open方法的具体用法?Python TarFile.open怎么用?Python TarFile.open使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tarfile.TarFile的用法示例。
在下文中一共展示了TarFile.open方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def __init__(self, zonefile_stream=None):
if zonefile_stream is not None:
with TarFile.open(fileobj=zonefile_stream) as tf:
self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name)
for zf in tf.getmembers()
if zf.isfile() and zf.name != METADATA_FN}
# deal with links: They'll point to their parent object. Less
# waste of memory
links = {zl.name: self.zones[zl.linkname]
for zl in tf.getmembers() if
zl.islnk() or zl.issym()}
self.zones.update(links)
try:
metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
metadata_str = metadata_json.read().decode('UTF-8')
self.metadata = json.loads(metadata_str)
except KeyError:
# no metadata in tar file
self.metadata = None
else:
self.zones = {}
self.metadata = None
示例2: test_download_bucket_with_prefix
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_download_bucket_with_prefix(self):
    """Test to verify that basic report downloading works."""
    fake_bucket = tempfile.mkdtemp()
    # Use a context manager so the tar handle is closed deterministically
    # instead of being leaked for the rest of the test run.
    with TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz") as mytar:
        # NOTE(review): extractall on a repo-local trusted fixture archive.
        mytar.extractall(fake_bucket)
    test_report_date = datetime(year=2018, month=8, day=7)
    with patch.object(DateAccessor, "today", return_value=test_report_date):
        report_downloader = ReportDownloader(
            self.mock_task,
            self.fake_customer_name,
            self.fake_auth_credential,
            fake_bucket,
            Provider.PROVIDER_AWS_LOCAL,
            self.aws_provider_uuid,
            cache_key=self.fake.word(),
        )
        # Names from test report .gz file
        report_downloader.download_report(test_report_date)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertTrue(os.path.isdir(expected_path))
    shutil.rmtree(fake_bucket)
示例3: test_download_missing_month
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_download_missing_month(self):
    """Test that downloading a month with no data creates no report directory."""
    fake_bucket = tempfile.mkdtemp()
    # Use a context manager so the tar handle is closed deterministically
    # instead of being leaked for the rest of the test run.
    with TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz") as mytar:
        # NOTE(review): extractall on a repo-local trusted fixture archive.
        mytar.extractall(fake_bucket)
    # July has no data in the fixture archive (it only covers August).
    test_report_date = datetime(year=2018, month=7, day=7)
    with patch.object(DateAccessor, "today", return_value=test_report_date):
        report_downloader = ReportDownloader(
            self.mock_task,
            self.fake_customer_name,
            self.fake_auth_credential,
            fake_bucket,
            Provider.PROVIDER_AWS_LOCAL,
            1,
            cache_key=self.fake.word(),
        )
        # Names from test report .gz file
        report_downloader.download_report(test_report_date)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertFalse(os.path.isdir(expected_path))
示例4: _create_model_blob_details_with_custom_setup
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def _create_model_blob_details_with_custom_setup(self, model_blob, custom_package_name, custom_package_version,
custom_package_path):
if not os.path.exists(custom_package_path) or not custom_package_path.endswith('.tar.gz'):
raise Exception("Tar file doesn't exit at: " + custom_package_path)
with open(custom_package_path) as fp:
custom_package_blob = fp.read()
model_blob_with_custom_code = {
'custom_package_version': custom_package_version,
'custom_package_name': custom_package_name,
'custom_package_blob': custom_package_blob
}
if model_blob is not None:
model_blob_with_custom_code['model_blob'] = model_blob
return model_blob_with_custom_code
示例5: _get_prediction_module_tar_byte_buffer
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def _get_prediction_module_tar_byte_buffer(self, path_to_prediction_module):
"""
Returns a Byte buffer of a tar file containing the prediction module. The tar file is compressed using bz2
:param path_to_prediction_module: Path to prediction module file
:return: Byte buffer with the tar data
"""
prediction_module_stat = os.stat(path_to_prediction_module)
if stat.S_ISDIR(prediction_module_stat.st_mode):
raise Exception("Expected a file but got a directory for arg 'path_to_prediction_module' = '{}'".format(
path_to_prediction_module))
file_out = BytesIO()
with TarFile.open(mode="w:bz2", fileobj=file_out) as tar:
tar.add(name=path_to_prediction_module, arcname='model.py')
return file_out
示例6: _get_model_resources_tar_byte_buffer
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def _get_model_resources_tar_byte_buffer(self, path_to_model_resources_dir):
"""
Returns a byte buffer of a tar file containing the model resources. The tar file is compressed using bz2.
The topmost folder is named 'model_resource'
:param path_to_model_resources_dir: Path to resources directory
:return: Byte buffer with the tar data
"""
model_resources_stat = os.stat(path_to_model_resources_dir)
if not stat.S_ISDIR(model_resources_stat.st_mode):
raise Exception(
"Expected a directory for arg 'path_to_model_resources_dir' = {}".format(path_to_model_resources_dir))
file_out = BytesIO()
with TarFile.open(mode="w:bz2", fileobj=file_out) as tar:
tar.add(name=path_to_model_resources_dir, recursive=True, arcname='')
return file_out
示例7: gettz
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def gettz(name):
    """Return the tzfile for *name* from the zoneinfo tarball, or None.

    Lookups are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries); misses are cached as None too.
    """
    if not ZONEINFOFILE:
        return None
    # Serve from the cache when the name was looked up recently.
    for cached_name, cached_tzinfo in CACHE:
        if cached_name == name:
            return cached_tzinfo
    tf = TarFile.open(ZONEINFOFILE)
    try:
        zone_stream = tf.extractfile(name)
    except KeyError:
        # Unknown zone name: remember the miss as None.
        tzinfo = None
    else:
        tzinfo = tzfile(zone_stream)
    tf.close()
    CACHE.insert(0, (name, tzinfo))
    del CACHE[CACHESIZE:]
    return tzinfo
示例8: test_workspace_read_only_path
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_workspace_read_only_path(self):
    """Finding jobs must raise WorkspaceError when the workspace
    directory cannot be created because a file occupies its path."""
    # Create file where workspace would be, thus preventing the creation
    # of the workspace directory.
    if os.path.exists(self.project.workspace()):
        os.rmdir(self.project.workspace())
    with open(os.path.join(self.project.workspace()), 'w'):
        pass
    with pytest.raises(OSError):  # Ensure that the file is in place.
        os.mkdir(self.project.workspace())
    assert issubclass(WorkspaceError, OSError)
    try:
        # Silence the expected error log while provoking the failure.
        logging.disable(logging.ERROR)
        with pytest.raises(WorkspaceError):
            list(self.project.find_jobs())
    finally:
        # Restore normal logging regardless of the outcome.
        logging.disable(logging.NOTSET)
    # Neither the temporary workdir nor the workspace may have been created.
    assert not os.path.isdir(self._tmp_wd)
    assert not os.path.isdir(self.project.workspace())
示例9: test_corrupted_statepoint_file
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_corrupted_statepoint_file(self):
    """Opening a job whose state point manifest has been emptied must
    raise JobsCorruptedError once all caches are cleared."""
    job = self.project.open_job(dict(a=0))
    job.init()
    # overwrite state point manifest file
    with open(job.fn(job.FN_MANIFEST), 'w'):
        pass
    # Drop the in-memory and persistent caches so the corrupted
    # manifest is actually re-read from disk on the next lookup.
    self.project._sp_cache.clear()
    self.project._remove_persistent_cache_file()
    try:
        # Silence the expected critical log while provoking the error.
        logging.disable(logging.CRITICAL)
        with pytest.raises(JobsCorruptedError):
            self.project.open_job(id=job.id)
    finally:
        # Restore normal logging regardless of the outcome.
        logging.disable(logging.NOTSET)
示例10: test_index
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_index(self):
    """Index the project and verify document ids match the job ids,
    with and without job documents and with a format mapping."""
    # An empty project yields an empty index either way.
    assert len(list(self.project.index(include_job_document=True))) == 0
    assert len(list(self.project.index(include_job_document=False))) == 0
    statepoints = [{'a': value} for value in range(5)]
    for statepoint in statepoints:
        self.project.open_job(statepoint).document['test'] = True
    expected_ids = {job.id for job in self.project.find_jobs()}
    documents = list(self.project.index())
    assert expected_ids == {doc['_id'] for doc in documents}
    assert len(documents) == len(statepoints)
    # Add a data file to every job; indexing it via a filename-to-format
    # mapping doubles the number of index documents.
    for statepoint in statepoints:
        with self.project.open_job(statepoint):
            with open('test.txt', 'w'):
                pass
    pattern = '.*' + re.escape(os.path.sep) + r'test\.txt'
    documents = list(self.project.index({pattern: 'TextFile'}))
    assert len(documents) == 2 * len(statepoints)
    assert len({doc['_id'] for doc in documents}) == len(documents)
示例11: test_export_import_tarfile_zipped
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_export_import_tarfile_zipped(self):
    """Round-trip: export all jobs to a gzipped tarball, wipe the
    workspace, re-import, and verify ids and job files survive."""
    target = os.path.join(self._tmp_dir.name, 'data.tar.gz')
    for i in range(10):
        with self.project.open_job(dict(a=i)) as job:
            os.makedirs(job.fn('sub-dir'))
            with open(job.fn(os.path.join('sub-dir', 'signac_statepoint.json')), 'w') as file:
                file.write(json.dumps({"foo": 0}))
    # find_job_ids is deprecated; the call must emit a deprecation warning.
    with pytest.deprecated_call():
        ids_before_export = list(sorted(self.project.find_job_ids()))
    self.project.export_to(target=target)
    assert len(self.project) == 10
    # The archive layout mirrors the state point schema: a/<value>/...
    with TarFile.open(name=target, mode='r:gz') as tarfile:
        for i in range(10):
            assert 'a/{}'.format(i) in tarfile.getnames()
            assert 'a/{}/sub-dir/signac_statepoint.json'.format(i) in tarfile.getnames()
    # Move the workspace aside to simulate a fresh, empty project.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == list(sorted(self.project.find_job_ids()))
    for job in self.project:
        assert job.isfile(os.path.join('sub-dir', 'signac_statepoint.json'))
示例12: test_export_import_zipfile
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def test_export_import_zipfile(self):
    """Round-trip: export all jobs to a zip archive, wipe the
    workspace, re-import, and verify ids and job files survive."""
    target = os.path.join(self._tmp_dir.name, 'data.zip')
    for i in range(10):
        with self.project.open_job(dict(a=i)) as job:
            os.makedirs(job.fn('sub-dir'))
            with open(job.fn(os.path.join('sub-dir', 'signac_statepoint.json')), 'w') as file:
                file.write(json.dumps({"foo": 0}))
    # find_job_ids is deprecated; the call must emit a deprecation warning.
    with pytest.deprecated_call():
        ids_before_export = list(sorted(self.project.find_job_ids()))
    self.project.export_to(target=target)
    assert len(self.project) == 10
    # The archive layout mirrors the state point schema: a/<value>/...
    with ZipFile(target) as zipfile:
        for i in range(10):
            assert 'a/{}/signac_statepoint.json'.format(i) in zipfile.namelist()
            assert 'a/{}/sub-dir/signac_statepoint.json'.format(i) in zipfile.namelist()
    # Move the workspace aside to simulate a fresh, empty project.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == list(sorted(self.project.find_job_ids()))
    for job in self.project:
        assert job.isfile(os.path.join('sub-dir', 'signac_statepoint.json'))
示例13: _add_header
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def _add_header(original, target_file, authors):
    """Copy *original* to *target_file*, prefixed with a generated header.

    The header is produced from the HEADER template (filename, authors,
    timestamp); for Makefiles each header line is prefixed with ``# ``.
    """
    header_text = HEADER.format(
        basename(original),
        ', '.join(authors),
        datetime.now()
    )
    if basename(original) == 'Makefile':
        # Makefiles have no block-comment syntax; comment every line.
        header_text = '\n'.join('# {}'.format(line) for line in header_text.splitlines())
    with open(original) as source, open(target_file, 'w') as target:
        target.write('{}\n{}'.format(header_text, source.read()))
示例14: _extract_file
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def _extract_file(self, fname, extract_dir):
    """
    Extract a zip archive to the destination directory.

    When ``self.members`` is None the whole archive is unpacked;
    otherwise only the listed member names are extracted.
    """
    with ZipFile(fname, "r") as archive:
        if self.members is None:
            get_logger().info(
                "Unzipping contents of '%s' to '%s'", fname, extract_dir
            )
            # Unpack all files from the archive into our new folder
            archive.extractall(path=extract_dir)
            return
        for member_name in self.members:
            get_logger().info(
                "Extracting '%s' from '%s' to '%s'", member_name, fname, extract_dir
            )
            # Copy the member's bytes out of the archive into a file of
            # the same name under the destination directory.
            destination = os.path.join(extract_dir, member_name)
            with archive.open(member_name) as data_file:
                with open(destination, "wb") as output:
                    output.write(data_file.read())
示例15: save
# 需要导入模块: from tarfile import TarFile [as 别名]
# 或者: from tarfile.TarFile import open [as 别名]
def save(self, filename=None, compression=None):
    """Serialize this package into a tar archive.

    :param filename: target path; defaults to ``<CACHE_ROOT>/package.tar``
        (the compression suffix is appended to the default name only)
    :param compression: optional tarfile compression scheme (e.g. 'gz')
    """
    if filename is None:
        # Imported lazily so non-default paths need no django settings.
        from django.conf import settings
        filename = os.path.join(settings.CACHE_ROOT, 'package.tar')
        if compression is not None:
            filename += '.' + compression
    mode = 'w' if compression is None else 'w:' + compression
    with TarFile.open(filename, mode) as archive:
        # Bounds are stored as four little-endian ints, scaled by 100.
        packed_bounds = struct.pack('<iiii', *(int(coord * 100) for coord in self.bounds))
        self._add_bytesio(archive, 'bounds', BytesIO(packed_bounds))
        for level_id, level_data in self.levels.items():
            self._add_geometryindexed(archive, 'history_%d' % level_id, level_data.history)
            self._add_geometryindexed(archive, 'restrictions_%d' % level_id, level_data.restrictions)