本文整理汇总了Python中datalad.api.Dataset.export_archive方法的典型用法代码示例。如果您正苦于以下问题:Python Dataset.export_archive方法的具体用法?Python Dataset.export_archive怎么用?Python Dataset.export_archive使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类datalad.api.Dataset
的用法示例。
在下文中一共展示了Dataset.export_archive方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_archive
# Required import: from datalad.api import Dataset
# Method under test: datalad.api.Dataset.export_archive
def test_archive(path):
    """Exercise tar.gz export: default/custom names, checksum stability,
    archive contents, and behavior when annexed content is missing.

    ``path`` is a scratch directory supplied by the test fixture.
    """
    ds = Dataset(opj(path, 'ds')).create(force=True)
    ds.save()
    committed_date = ds.repo.get_commit_date()
    default_outname = opj(path, 'datalad_{}.tar.gz'.format(ds.id))
    with chpwd(path):
        # with no filename given, export lands in CWD under a default name
        res = list(ds.export_archive())
        assert_status('ok', res)
        assert_result_count(res, 1)
        assert(isabs(res[0]['path']))
    assert_true(os.path.exists(default_outname))
    custom_outname = opj(path, 'myexport.tar.gz')
    # feed in without extension; export_archive appends it
    ds.export_archive(filename=custom_outname[:-7])
    assert_true(os.path.exists(custom_outname))
    custom1_md5 = md5sum(custom_outname)
    # encodes the original archive filename -> different checksum, despite
    # same content
    assert_not_equal(md5sum(default_outname), custom1_md5)
    # should really sleep so if they stop using time.time - we know
    time.sleep(1.1)
    ds.export_archive(filename=custom_outname)
    # should not encode mtime, so should be identical
    assert_equal(md5sum(custom_outname), custom1_md5)

    def check_contents(outname, prefix):
        # Verify every member is a regular file under ``prefix`` with the
        # commit date as mtime, and count non-.datalad payload files.
        with tarfile.open(outname) as tf:
            nfiles = 0
            for ti in tf:
                # any annex links resolved
                assert_false(ti.issym())
                ok_startswith(ti.name, prefix + '/')
                assert_equal(ti.mtime, committed_date)
                if '.datalad' not in ti.name:
                    # ignore any files in .datalad for this test to not be
                    # susceptible to changes in how much meta info we generate
                    nfiles += 1
            # we have exactly four files (includes .gitattributes for default
            # MD5E backend), and expect no content for any directory
            assert_equal(nfiles, 4)

    check_contents(default_outname, 'datalad_%s' % ds.id)
    check_contents(custom_outname, 'myexport')
    # now lose some content
    ds.drop('file_up', check=False)
    # default missing_content behavior raises on dropped content ...
    assert_raises(IOError, ds.export_archive, filename=opj(path, 'my'))
    # ... but 'ignore' lets a partial archive be produced
    ds.export_archive(filename=opj(path, 'partial'), missing_content='ignore')
    assert_true(os.path.exists(opj(path, 'partial.tar.gz')))
示例2: test_zip_archive
# Required import: from datalad.api import Dataset
# Method under test: datalad.api.Dataset.export_archive
def test_zip_archive(path):
    """Exercise zip export: checksum reproducibility across time and
    exporting without changing into the dataset directory.

    ``path`` is a scratch directory supplied by the test fixture.
    """
    ds = Dataset(opj(path, 'ds')).create(force=True, no_annex=True)
    ds.save()
    with chpwd(path):
        ds.export_archive(filename='my', archivetype='zip')
        assert_true(os.path.exists('my.zip'))
        custom1_md5 = md5sum('my.zip')
        # re-export after a delay; archive must not encode wall-clock time
        time.sleep(1.1)
        ds.export_archive(filename='my', archivetype='zip')
        assert_equal(md5sum('my.zip'), custom1_md5)
    # should be able to export without us cd'ing to that ds directory
    ds.export_archive(filename=ds.path, archivetype='zip')
    default_name = 'datalad_{}.zip'.format(ds.id)
    assert_true(os.path.exists(os.path.join(ds.path, default_name)))