This page collects typical usage examples of the Python method datalad.distribution.dataset.Dataset.search. If you are wondering what exactly Dataset.search does and how it is used, the curated example below may help. You can also explore further uses of the containing class, datalad.distribution.dataset.Dataset.
One code example of Dataset.search is shown below.
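Before the full example, here is a minimal sketch of how Dataset.search is typically invoked from user code; the dataset path and the query term are hypothetical placeholders, not values from the example:

from datalad.distribution.dataset import Dataset

# Open an already-installed dataset; '/path/to/ds' is a placeholder path.
ds = Dataset('/path/to/ds')

# search() is a generator that yields one result per matching record;
# 'brain' is just a sample query term.
for result in ds.search('brain'):
    print(result)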
Example 1: __call__
# Required import: from datalad.distribution.dataset import Dataset [as alias]
# Or: from datalad.distribution.dataset.Dataset import search [as alias]
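# Note: this snippet is excerpted from DataLad's search command implementation;
# names such as sys, os, pickle, opj, exists, lgr, ui, reraise, require_dataset,
# NoDatasetArgumentFound, LOCAL_CENTRAL_PATH, get_git_dir, get_metadata and
# flatten_metadata_graph are provided by the surrounding module.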
def __call__(match,
             dataset=None,
             search=None,
             report=None,
             report_matched=False,
             format='custom',
             regex=False):
    lgr.debug("Initiating search for match=%r and dataset %r",
              match, dataset)
    try:
        ds = require_dataset(dataset, check_installed=True, purpose='dataset search')
        if ds.id is None:
            raise NoDatasetArgumentFound(
                "This does not seem to be a dataset (no DataLad dataset ID "
                "found). 'datalad create --force %s' can initialize "
                "this repository as a DataLad dataset" % ds.path)
    except NoDatasetArgumentFound:
        exc_info = sys.exc_info()
        if dataset is None:
            if not ui.is_interactive:
                raise NoDatasetArgumentFound(
                    "No DataLad dataset found. Specify a dataset to be "
                    "searched, or run interactively to get assistance "
                    "installing a queriable superdataset."
                )
            # No dataset was given, so ask the user whether they want to
            # search or install the central superdataset instead.
            # TODO: the following logic could possibly benefit other actions.
            if os.path.exists(LOCAL_CENTRAL_PATH):
                central_ds = Dataset(LOCAL_CENTRAL_PATH)
                if central_ds.is_installed():
                    if ui.yesno(
                            title="No DataLad dataset found at current location",
                            text="Would you like to search the DataLad "
                                 "superdataset at %r?"
                                 % LOCAL_CENTRAL_PATH):
                        pass
                    else:
                        reraise(*exc_info)
                else:
                    raise NoDatasetArgumentFound(
                        "No DataLad dataset found at current location. "
                        "The DataLad superdataset location %r exists, "
                        "but does not contain a dataset."
                        % LOCAL_CENTRAL_PATH)
            elif ui.yesno(
                    title="No DataLad dataset found at current location",
                    text="Would you like to install the DataLad "
                         "superdataset at %r?"
                         % LOCAL_CENTRAL_PATH):
                from datalad.api import install
                central_ds = install(LOCAL_CENTRAL_PATH, source='///')
                ui.message(
                    "From now on you can refer to this dataset using the "
                    "label '///'"
                )
            else:
                reraise(*exc_info)

            lgr.info(
                "Performing search using DataLad superdataset %r",
                central_ds.path
            )
            for res in central_ds.search(
                    match,
                    search=search, report=report,
                    report_matched=report_matched,
                    format=format, regex=regex):
                yield res
            return
        else:
            raise

    cache_dir = opj(ds.path, get_git_dir(ds.path), 'datalad', 'cache')
    mcache_fname = opj(cache_dir, 'metadata.p%d' % pickle.HIGHEST_PROTOCOL)

    meta = None
    if os.path.exists(mcache_fname):
        lgr.debug("use cached metadata of '{}' from {}".format(ds, mcache_fname))
        meta, checksum = pickle.load(open(mcache_fname, 'rb'))
        # TODO: add more sophisticated tests to decide when the cache is no longer valid
        if checksum != ds.repo.get_hexsha():
            # stale cache; regenerate below
            meta = None
    # do not put this in an 'else': the validity checks above may fail and
    # require regenerating the metadata
    if meta is None:
        lgr.info("Loading and caching local meta-data... might take a few seconds")
        if not exists(cache_dir):
            os.makedirs(cache_dir)
        meta = get_metadata(ds, guess_type=False, ignore_subdatasets=False,
                            ignore_cache=False)
        # merge all info on datasets into a single dict per dataset
        meta = flatten_metadata_graph(meta)
        # extract graph, if any
        meta = meta.get('@graph', meta)
        # build simple queriable representation
        # ... (remainder of the method omitted) ...
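The caching logic in the second half of the example is worth noting: the pickled metadata is stored together with the repository's HEAD commit hash, and the cache is discarded whenever that hash no longer matches. Below is a minimal, self-contained sketch of the same invalidation pattern; the function and parameter names are hypothetical, not part of DataLad's API:

import os
import pickle

def load_or_rebuild(cache_path, current_hexsha, rebuild):
    # Return cached data only if it was produced at the current commit.
    if os.path.exists(cache_path):
        with open(cache_path, 'rb') as f:
            data, checksum = pickle.load(f)
        if checksum == current_hexsha:
            return data
    # Cache is missing or stale: rebuild and store with the new checksum.
    data = rebuild()
    with open(cache_path, 'wb') as f:
        pickle.dump((data, current_hexsha), f, pickle.HIGHEST_PROTOCOL)
    return data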