This article collects typical usage examples of the Python method datalad.api.Dataset.addurls. If you are wondering what Dataset.addurls does, or how and where to use it, the curated code examples below may help. You can also explore the containing class, datalad.api.Dataset, for further context.
The following presents 11 code examples of Dataset.addurls, ordered by popularity.
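As a quick orientation before the test examples: addurls takes a table of records (e.g. a CSV or JSON file), a format string that extracts each record's URL, and a format string that builds each record's file name. Here is a minimal, hypothetical sketch of that call pattern; the dataset path, record file, and field values are placeholders, not taken from the examples below:

    from datalad.api import Dataset

    # Hypothetical sketch: "records.json" is a placeholder file holding
    # entries such as [{"url": "http://example.com/a.dat", "name": "a"}, ...]
    ds = Dataset("/tmp/demo").create()

    # "{url}" selects each record's download URL; "{name}" names the
    # resulting file inside the dataset.
    ds.addurls("records.json", "{url}", "{name}")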

Example 1: test_addurls_url_parts
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_url_parts(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        ds.addurls(self.json_file, "{url}", "{_url0}/{_url_basename}")
        for fname in ["a.dat", "b.dat", "c.dat"]:
            ok_exists(op.join("udir", fname))

Example 2: test_addurls_dry_run
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_dry_run(path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        json_file = "links.json"
        with open(json_file, "w") as jfh:
            json.dump([{"url": "URL/a.dat", "name": "a", "subdir": "foo"},
                       {"url": "URL/b.dat", "name": "b", "subdir": "bar"},
                       {"url": "URL/c.dat", "name": "c", "subdir": "foo"}],
                      jfh)
        ds.save(message="setup")
        with swallow_logs(new_level=logging.INFO) as cml:
            ds.addurls(json_file,
                       "{url}",
                       "{subdir}//{_url_filename_root}",
                       dry_run=True)
            for dir_ in ["foo", "bar"]:
                assert_in("Would create a subdataset at {}".format(dir_),
                          cml.out)
            assert_in(
                "Would download URL/a.dat to {}".format(
                    os.path.join(path, "foo", "BASE")),
                cml.out)
            assert_in("Metadata: {}".format([u"name=a", u"subdir=foo"]),
                      cml.out)

Example 3: test_addurls_subdataset
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_subdataset(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        for save in True, False:
            label = "save" if save else "nosave"
            hexsha_before = ds.repo.get_hexsha()
            ds.addurls(self.json_file, "{url}",
                       "{subdir}-" + label + "//{name}",
                       save=save)
            hexsha_after = ds.repo.get_hexsha()
            for fname in ["foo-{}/a", "bar-{}/b", "foo-{}/c"]:
                ok_exists(fname.format(label))
            assert_true(save ^ (hexsha_before == hexsha_after))
            assert_true(save ^ ds.repo.dirty)
        # Now save the "--nosave" changes and check that we have
        # all the subdatasets.
        ds.add(".")
        eq_(set(subdatasets(ds, recursive=True,
                            result_xfm="relpaths")),
            {"foo-save", "bar-save", "foo-nosave", "bar-nosave"})
        # We don't try to recreate existing subdatasets.
        with swallow_logs(new_level=logging.DEBUG) as cml:
            ds.addurls(self.json_file, "{url}", "{subdir}-nosave//{name}")
            assert_in("Not creating subdataset at existing path", cml.out)

Example 4: test_addurls_repindex
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_repindex(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        with assert_raises(IncompleteResultsError) as raised:
            ds.addurls(self.json_file, "{url}", "{subdir}")
        assert_in("There are file name collisions", str(raised.exception))
        ds.addurls(self.json_file, "{url}", "{subdir}-{_repindex}")
        for fname in ["foo-0", "bar-0", "foo-1"]:
            ok_exists(fname)

Example 5: test_addurls_metafail
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_metafail(self, path):
    ds = Dataset(path).create(force=True)

    # Force failure by passing a non-existent file name to annex.
    fn = ds.repo.set_metadata_

    def set_meta(_, **kwargs):
        for i in fn("wreaking-havoc-and-such", **kwargs):
            yield i

    with chpwd(path), patch.object(ds.repo, 'set_metadata_', set_meta):
        with assert_raises(IncompleteResultsError):
            ds.addurls(self.json_file, "{url}", "{name}")

Example 6: test_addurls_version
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_version(self, path):
    ds = Dataset(path).create(force=True)

    def version_fn(url):
        if url.endswith("b.dat"):
            raise ValueError("Scheme error")
        return url + ".v1"

    with patch("datalad.plugin.addurls.get_versioned_url", version_fn):
        with swallow_logs(new_level=logging.WARNING) as cml:
            ds.addurls(self.json_file, "{url}", "{name}",
                       version_urls=True)
            assert_in("b.dat", str(cml.out))

    names = ["a", "c"]
    for fname in names:
        ok_exists(os.path.join(path, fname))

    whereis = ds.repo.whereis(names, output="full")
    for fname, info in whereis.items():
        eq_(info[ds.repo.WEB_UUID]['urls'],
            ["{}udir/{}.dat.v1".format(self.url, fname)])

Example 7: test_addurls
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls(self, path):
    ds = Dataset(path).create(force=True)

    def get_annex_commit_counts():
        return int(
            ds.repo.repo.git.rev_list("--count", "git-annex").strip())

    n_annex_commits = get_annex_commit_counts()
    with chpwd(path):
        ds.addurls(self.json_file, "{url}", "{name}")

        filenames = ["a", "b", "c"]
        for fname in filenames:
            ok_exists(fname)

        for (fname, meta), subdir in zip(ds.repo.get_metadata(filenames),
                                         ["foo", "bar", "foo"]):
            assert_dict_equal(meta,
                              {"subdir": [subdir], "name": [fname]})

        # Ignore this check if we're faking dates because that disables
        # batch mode.
        if not os.environ.get('DATALAD_FAKE__DATES'):
            # We should have two new commits on the git-annex branch: one
            # for the added urls and one for the added metadata.
            eq_(n_annex_commits + 2, get_annex_commit_counts())

        # Add to already existing links, overwriting.
        with swallow_logs(new_level=logging.DEBUG) as cml:
            ds.addurls(self.json_file, "{url}", "{name}",
                       ifexists="overwrite")
            for fname in filenames:
                assert_in("Removing {}".format(os.path.join(path, fname)),
                          cml.out)

        # Add to already existing links, skipping.
        assert_in_results(
            ds.addurls(self.json_file, "{url}", "{name}", ifexists="skip"),
            action="addurls",
            status="notneeded")

        # Adding to already existing links works, as long as the content
        # is the same.
        ds.addurls(self.json_file, "{url}", "{name}")

        # But it fails if something has changed.
        ds.unlock("a")
        with open("a", "w") as ofh:
            ofh.write("changed")
        ds.save("a")
        assert_raises(IncompleteResultsError,
                      ds.addurls,
                      self.json_file, "{url}", "{name}")

Example 8: test_addurls_subdataset
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_subdataset(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        for save in True, False:
            label = "save" if save else "nosave"
            ds.addurls(self.json_file, "{url}",
                       "{subdir}-" + label + "//{name}",
                       save=save)

            subdirs = ["{}-{}".format(d, label) for d in ["foo", "bar"]]
            subdir_files = dict(zip(subdirs, [["a", "c"], ["b"]]))

            for subds, fnames in subdir_files.items():
                for fname in fnames:
                    ok_exists(op.join(subds, fname))

            if save:
                assert_repo_status(path)
            else:
                # The datasets are created and saved ...
                assert_repo_status(path, modified=subdirs)
                # ... but the downloaded files aren't.
                for subds, fnames in subdir_files.items():
                    assert_repo_status(subds, added=fnames)

        # Now save the "--nosave" changes and check that we have
        # all the subdatasets.
        ds.save()
        eq_(set(subdatasets(dataset=ds, recursive=True,
                            result_xfm="relpaths")),
            {"foo-save", "bar-save", "foo-nosave", "bar-nosave"})

        # We don't try to recreate existing subdatasets.
        with swallow_logs(new_level=logging.DEBUG) as cml:
            ds.addurls(self.json_file, "{url}", "{subdir}-nosave//{name}")
            assert_in("Not creating subdataset at existing path", cml.out)

Example 9: test_addurls_dropped_urls
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_dropped_urls(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path), swallow_logs(new_level=logging.WARNING) as cml:
        ds.addurls(self.json_file, "", "{subdir}//{name}")
        assert_re_in(r".*Dropped [0-9]+ row\(s\) that had an empty URL",
                     str(cml.out))

Example 10: test_addurls_nonannex_repo
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_nonannex_repo(path):
    ds = Dataset(path).create(force=True, no_annex=True)
    with assert_raises(IncompleteResultsError) as raised:
        ds.addurls("dummy_arg0", "dummy_arg1", "dummy_arg2")
    assert_in("not an annex repo", str(raised.exception))

Example 11: test_addurls_url_filename
# Required import: from datalad.api import Dataset [as alias]
# Or: from datalad.api.Dataset import addurls [as alias]
def test_addurls_url_filename(self, path):
    ds = Dataset(path).create(force=True)
    with chpwd(path):
        ds.addurls(self.json_file, "{url}", "{_url0}/{_url_filename}")
        for fname in ["udir/a.dat", "udir/b.dat", "udir/c.dat"]:
            ok_exists(fname)