This article collects typical usage examples of the Bunch.tag_using_filenames method from the Python module galaxy.util.bunch. If you are wondering what Bunch.tag_using_filenames does and how to use it, the curated example below may help. You can also read further about its containing class, galaxy.util.bunch.Bunch.
One code example of the Bunch.tag_using_filenames method is shown below.
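For orientation before the example: Bunch is Galaxy's lightweight attribute container, in which keyword arguments passed to the constructor become instance attributes. In practice, tag_using_filenames is therefore an attribute set on a Bunch rather than a conventional method. A minimal sketch of that behavior, assuming only that Bunch stores constructor keywords as attributes:

from galaxy.util.bunch import Bunch

# Keyword arguments become attributes on the Bunch instance.
dataset = Bunch(type='composite', warnings=[], metadata={})

# tag_using_filenames is read and written as a plain attribute.
dataset.tag_using_filenames = True
print(dataset.tag_using_filenames)  # True

This is why the example below can freely assign dataset.tag_using_filenames = None on a freshly created Bunch().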
Example 1: get_uploaded_datasets
# Required import: from galaxy.util.bunch import Bunch [as alias]
# Or: from galaxy.util.bunch.Bunch import tag_using_filenames [as alias]
#......... part of the code omitted here .........
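            # Walk the user's FTP directory and collect a whitelist of valid
            # (non-symlink) relative paths.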
            else:
                user_ftp_dir = trans.user_ftp_dir
                assert not os.path.islink(user_ftp_dir), "User FTP directory cannot be a symbolic link"
                for (dirpath, dirnames, filenames) in os.walk(user_ftp_dir):
                    for filename in filenames:
                        path = relpath(os.path.join(dirpath, filename), user_ftp_dir)
                        if not os.path.islink(os.path.join(dirpath, filename)):
                            # Normalize filesystem paths
                            if isinstance(path, text_type):
                                valid_files.append(unicodedata.normalize('NFC', path))
                            else:
                                valid_files.append(path)
        else:
            ftp_files = []
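        # Only whitelisted paths may be uploaded; anything else is logged and skipped.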
        for ftp_file in ftp_files:
            if ftp_file not in valid_files:
                log.warning('User passed an invalid file path in ftp_files: %s' % ftp_file)
                continue
                # TODO: warning to the user (could happen if file is already imported)
            ftp_data_file = {'local_filename': os.path.abspath(os.path.join(user_ftp_dir, ftp_file)),
                             'filename': os.path.basename(ftp_file)}
            purge = getattr(trans.app.config, 'ftp_upload_purge', True)
            file_bunch = get_data_file_filename(ftp_data_file, override_name=name, override_info=info, purge=purge)
            if file_bunch.path:
                file_bunch.to_posix_lines = to_posix_lines
                file_bunch.auto_decompress = auto_decompress
                file_bunch.space_to_tab = space_to_tab
                rval.append(file_bunch)
        return rval
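
    # Back in the enclosing method body: read per-upload settings from the
    # submitted form context.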
    file_type = self.get_file_type(context)
    d_type = self.get_datatype(trans, context)
    dbkey = context.get('dbkey', None)
    tag_using_filenames = context.get('tag_using_filenames', False)
    writable_files = d_type.writable_files
    writable_files_offset = 0
    groups_incoming = [None for _ in writable_files]
    for group_incoming in context.get(self.name, []):
        i = int(group_incoming['__index__'])
        groups_incoming[i] = group_incoming
    if d_type.composite_type is not None:
        # handle uploading of composite datatypes
        # Only one Dataset can be created
        dataset = Bunch()
        dataset.type = 'composite'
        dataset.file_type = file_type
        dataset.dbkey = dbkey
        dataset.datatype = d_type
        dataset.warnings = []
        dataset.metadata = {}
        dataset.composite_files = {}
        dataset.uuid = None
        dataset.tag_using_filenames = None
        # load metadata
        files_metadata = context.get(self.metadata_ref, {})
        metadata_name_substition_default_dict = dict(
            (composite_file.substitute_name_with_metadata,
             d_type.metadata_spec[composite_file.substitute_name_with_metadata].default)
            for composite_file in d_type.composite_files.values()
            if composite_file.substitute_name_with_metadata)
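        # Copy user-supplied metadata onto the dataset, sanitizing values that
        # will be substituted into composite file names.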
        for meta_name, meta_spec in d_type.metadata_spec.items():
            if meta_spec.set_in_upload:
                if meta_name in files_metadata:
                    meta_value = files_metadata[meta_name]
                    if meta_name in metadata_name_substition_default_dict:
                        meta_value = sanitize_for_filename(meta_value, default=metadata_name_substition_default_dict[meta_name])
                    dataset.metadata[meta_name] = meta_value
        dataset.precreated_name = dataset.name = self.get_composite_dataset_name(context)
        if dataset.datatype.composite_type == 'auto_primary_file':
            # replace sniff here with just creating an empty file
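The listing ends mid-method here, but the pattern is already visible: tag_using_filenames is read once from the form context via context.get('tag_using_filenames', False) and is later attached to each created dataset Bunch, with composite datasets explicitly opting out via None. Below is a minimal, self-contained sketch of what a downstream consumer of that flag could look like; the helper apply_filename_tag and the 'name:' tag format are illustrative assumptions, not Galaxy's actual API:

import os
from galaxy.util.bunch import Bunch

def apply_filename_tag(dataset):
    # Hypothetical consumer: when the upload asked for filename tagging,
    # derive a simple name tag from the dataset's source file name.
    if getattr(dataset, 'tag_using_filenames', False):
        dataset.tags = ['name:%s' % os.path.basename(dataset.path)]
    else:
        dataset.tags = []
    return dataset

uploaded = Bunch(path='/tmp/sample.fastq', tag_using_filenames=True)
print(apply_filename_tag(uploaded).tags)  # ['name:sample.fastq']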