This page collects typical usage examples of the Python method galaxy.util.bunch.Bunch.ext. If you are unsure what Bunch.ext does, how to use it, or simply want to see it in context, the curated code samples below may help. You can also read more about the class it belongs to, galaxy.util.bunch.Bunch.
Three code examples of the Bunch.ext method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
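For context before the examples: galaxy.util.bunch.Bunch is essentially a dictionary that exposes its keys as attributes, so "Bunch.ext" is just an attribute attached to such an object. The sketch below is an illustration of that idea, not the Galaxy implementation, and the 'fasta' value is hypothetical:

# Minimal attribute-style dictionary in the spirit of galaxy.util.bunch.Bunch
# (a sketch, not the Galaxy source):
class Bunch(object):
    def __init__(self, **kwds):
        self.__dict__.update(kwds)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)

dataset = Bunch(type='file', path='/tmp/upload.dat', name='upload.dat')
dataset.ext = 'fasta'  # hypothetical value; any attribute can be attached this way
print(dataset.ext)     # -> fasta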
Example 1: get_uploaded_datasets
# Required import: from galaxy.util.bunch import Bunch [as alias]
# or: from galaxy.util.bunch.Bunch import ext [as alias]
def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
    def get_data_file_filename( data_file, override_name = None, override_info = None ):
        dataset_name = override_name
        dataset_info = override_info
        def get_file_name( file_name ):
            # The browser may send a full client-side path; keep only the basename.
            file_name = file_name.split( '\\' )[-1]
            file_name = file_name.split( '/' )[-1]
            return file_name
        try:
            # Use the existing file
            if not dataset_name and 'filename' in data_file:
                dataset_name = get_file_name( data_file['filename'] )
            if not dataset_info:
                dataset_info = 'uploaded file'
            return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
            # return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
        except Exception:
            # The uploaded file should've been persisted by the upload tool action
            return Bunch( type=None, path=None, name=None )
            # return None, None, None, None, None
    def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
        filenames = []
        url_paste_file = group_incoming.get( 'url_paste', None )
        if url_paste_file is not None:
            url_paste = open( url_paste_file, 'r' ).read( 1024 )
            if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
                url_paste = url_paste.replace( '\r', '' ).split( '\n' )
                for line in url_paste:
                    line = line.strip()
                    if line:
                        if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
                            continue  # non-url line, ignore
                        precreated_name = line
                        dataset_name = override_name
                        if not dataset_name:
                            dataset_name = line
                        dataset_info = override_info
                        if not dataset_info:
                            dataset_info = 'uploaded url'
                        yield Bunch( type='url', path=line, name=precreated_name )
                        # yield ( 'url', line, precreated_name, dataset_name, dataset_info )
            else:
                dataset_name = dataset_info = precreated_name = 'Pasted Entry'  # we need to differentiate between various url pastes here
                if override_name:
                    dataset_name = override_name
                if override_info:
                    dataset_info = override_info
                yield Bunch( type='file', path=url_paste_file, name=precreated_name )
                # yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
    def get_one_filename( context ):
        data_file = context['file_data']
        url_paste = context['url_paste']
        name = context.get( 'NAME', None )
        info = context.get( 'INFO', None )
        warnings = []
        space_to_tab = False
        if context.get( 'space_to_tab', None ) not in ["None", None]:
            space_to_tab = True
        file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
        if file_bunch.path and url_paste:
            if url_paste.strip():
                warnings.append( "All file contents specified in the paste box were ignored." )
        else:  # we need to use url_paste
            for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
                if file_bunch.path:
                    break
        return file_bunch, warnings
    def get_filenames( context ):
        rval = []
        data_file = context['file_data']
        url_paste = context['url_paste']
        name = context.get( 'NAME', None )
        info = context.get( 'INFO', None )
        space_to_tab = False
        if context.get( 'space_to_tab', None ) not in ["None", None]:
            space_to_tab = True
        warnings = []
        file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
        if file_bunch.path:
            file_bunch.space_to_tab = space_to_tab
            rval.append( file_bunch )
        for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
            if file_bunch.path:
                file_bunch.space_to_tab = space_to_tab
                rval.append( file_bunch )
        return rval
    file_type = self.get_file_type( context )
    d_type = self.get_datatype( trans, context )
    dbkey = context.get( 'dbkey', None )
    writable_files = d_type.writable_files
    writable_files_offset = 0
    groups_incoming = [ None for filename in writable_files ]
    for group_incoming in context.get( self.name, [] ):
        i = int( group_incoming['__index__'] )
        groups_incoming[ i ] = group_incoming
    if d_type.composite_type is not None:
        # handle uploading of composite datatypes
        # Only one Dataset can be created
        dataset = Bunch()
        dataset.type = 'composite'
# ......... part of the code omitted here .........
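Note that the visible portion of this example never assigns .ext; that presumably happens in the omitted remainder, once the file type is known. A hedged sketch of the pattern in the non-composite branch (hypothetical, not this page's source): each Bunch produced by get_filenames could be tagged like so.

# Hypothetical continuation (not from this page's source): attach upload
# metadata, including the .ext attribute, to each collected Bunch.
for dataset in get_filenames( context ):
    dataset.file_type = file_type  # from self.get_file_type( context ) above
    dataset.dbkey = dbkey          # from context.get( 'dbkey', None ) above
    dataset.ext = file_type        # assumption: ext mirrors the chosen file type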
Example 2: get_uploaded_datasets
# Required import: from galaxy.util.bunch import Bunch [as alias]
# or: from galaxy.util.bunch.Bunch import ext [as alias]
# This example also relies on: import os; from os.path import relpath
def get_uploaded_datasets( self, trans, context, override_name=None, override_info=None ):
    def get_data_file_filename( data_file, override_name=None, override_info=None ):
        dataset_name = override_name
        dataset_info = override_info
        def get_file_name( file_name ):
            file_name = file_name.split( '\\' )[-1]
            file_name = file_name.split( '/' )[-1]
            return file_name
        try:
            # Use the existing file
            if not dataset_name and 'filename' in data_file:
                dataset_name = get_file_name( data_file['filename'] )
            if not dataset_info:
                dataset_info = 'uploaded file'
            return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
            # return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
        except Exception:
            # The uploaded file should've been persisted by the upload tool action
            return Bunch( type=None, path=None, name=None )
            # return None, None, None, None, None
    def get_url_paste_urls_or_filename( group_incoming, override_name=None, override_info=None ):
        url_paste_file = group_incoming.get( 'url_paste', None )
        if url_paste_file is not None:
            url_paste = open( url_paste_file, 'r' ).read( 1024 )
            if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ) or url_paste.lstrip().lower().startswith( 'https://' ):
                url_paste = url_paste.replace( '\r', '' ).split( '\n' )
                for line in url_paste:
                    line = line.strip()
                    if line:
                        if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
                            continue  # non-url line, ignore
                        dataset_name = override_name
                        if not dataset_name:
                            dataset_name = line
                        dataset_info = override_info
                        if not dataset_info:
                            dataset_info = 'uploaded url'
                        yield Bunch( type='url', path=line, name=dataset_name )
                        # yield ( 'url', line, precreated_name, dataset_name, dataset_info )
            else:
                dataset_name = dataset_info = precreated_name = 'Pasted Entry'  # we need to differentiate between various url pastes here
                if override_name:
                    dataset_name = override_name
                if override_info:
                    dataset_info = override_info
                yield Bunch( type='file', path=url_paste_file, name=precreated_name )
                # yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
    def get_one_filename( context ):
        data_file = context['file_data']
        url_paste = context['url_paste']
        ftp_files = context['ftp_files']
        name = context.get( 'NAME', None )
        info = context.get( 'INFO', None )
        uuid = context.get( 'uuid', None ) or None  # Turn '' to None
        warnings = []
        to_posix_lines = False
        if context.get( 'to_posix_lines', None ) not in [ "None", None, False ]:
            to_posix_lines = True
        space_to_tab = False
        if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
            space_to_tab = True
        file_bunch = get_data_file_filename( data_file, override_name=name, override_info=info )
        if file_bunch.path:
            if url_paste is not None and url_paste.strip():
                warnings.append( "All file contents specified in the paste box were ignored." )
            if ftp_files:
                warnings.append( "All FTP uploaded file selections were ignored." )
        elif url_paste is not None and url_paste.strip():  # we need to use url_paste
            for file_bunch in get_url_paste_urls_or_filename( context, override_name=name, override_info=info ):
                if file_bunch.path:
                    break
            if file_bunch.path and ftp_files is not None:
                warnings.append( "All FTP uploaded file selections were ignored." )
        elif ftp_files is not None and trans.user is not None:  # look for files uploaded via FTP
            user_ftp_dir = trans.user_ftp_dir
            for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
                for filename in filenames:
                    for ftp_filename in ftp_files:
                        if ftp_filename == filename:
                            path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
                            if not os.path.islink( os.path.join( dirpath, filename ) ):
                                ftp_data_file = { 'local_filename' : os.path.abspath( os.path.join( user_ftp_dir, path ) ),
                                                  'filename' : os.path.basename( path ) }
                                file_bunch = get_data_file_filename( ftp_data_file, override_name=name, override_info=info )
                                if file_bunch.path:
                                    break
                    if file_bunch.path:
                        break
                if file_bunch.path:
                    break
        file_bunch.to_posix_lines = to_posix_lines
        file_bunch.space_to_tab = space_to_tab
        file_bunch.uuid = uuid
        return file_bunch, warnings
    def get_filenames( context ):
        rval = []
# ......... part of the code omitted here .........
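Both this example and the next normalize HTML form checkbox values with the same repeated pattern: the incoming value may be the string "None", the constant None, or False, all of which mean the box was unchecked. The same logic can be written compactly as a hypothetical helper (not part of Galaxy):

# Hypothetical helper equivalent to the repeated flag pattern above:
# form values arrive as strings, so "None" must count as unchecked too.
def as_checkbox_flag( context, key ):
    return context.get( key, None ) not in [ "None", None, False ]

to_posix_lines = as_checkbox_flag( context, 'to_posix_lines' )
space_to_tab = as_checkbox_flag( context, 'space_to_tab' )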
Example 3: get_uploaded_datasets
# Required import: from galaxy.util.bunch import Bunch [as alias]
# or: from galaxy.util.bunch.Bunch import ext [as alias]
# This example also relies on: import os; from os.path import relpath; and, presumably,
# from galaxy.exceptions import AdminRequiredException, ConfigDoesNotAllowException
def get_uploaded_datasets(self, trans, context, override_name=None, override_info=None):
    def get_data_file_filename(data_file, override_name=None, override_info=None, purge=True):
        dataset_name = override_name

        def get_file_name(file_name):
            file_name = file_name.split('\\')[-1]
            file_name = file_name.split('/')[-1]
            return file_name
        try:
            # Use the existing file
            if not dataset_name and 'filename' in data_file:
                dataset_name = get_file_name(data_file['filename'])
            return Bunch(type='file', path=data_file['local_filename'], name=dataset_name, purge_source=purge)
        except Exception:
            # The uploaded file should've been persisted by the upload tool action
            return Bunch(type=None, path=None, name=None)

    def get_url_paste_urls_or_filename(group_incoming, override_name=None, override_info=None):
        url_paste_file = group_incoming.get('url_paste', None)
        if url_paste_file is not None:
            url_paste = open(url_paste_file, 'r').read()

            def start_of_url(content):
                start_of_url_paste = content.lstrip()[0:8].lower()
                looks_like_url = False
                for url_prefix in ["http://", "https://", "ftp://", "file://"]:
                    if start_of_url_paste.startswith(url_prefix):
                        looks_like_url = True
                        break
                return looks_like_url

            if start_of_url(url_paste):
                url_paste = url_paste.replace('\r', '').split('\n')
                for line in url_paste:
                    line = line.strip()
                    if line:
                        if not start_of_url(line):
                            continue  # non-url line, ignore
                        if "file://" in line:
                            if not trans.user_is_admin:
                                raise AdminRequiredException()
                            elif not trans.app.config.allow_path_paste:
                                raise ConfigDoesNotAllowException()
                            upload_path = line[len("file://"):]
                            dataset_name = os.path.basename(upload_path)
                        else:
                            dataset_name = line
                        if override_name:
                            dataset_name = override_name
                        yield Bunch(type='url', path=line, name=dataset_name)
            else:
                dataset_name = 'Pasted Entry'  # we need to differentiate between various url pastes here
                if override_name:
                    dataset_name = override_name
                yield Bunch(type='file', path=url_paste_file, name=dataset_name)

    def get_one_filename(context):
        data_file = context['file_data']
        url_paste = context['url_paste']
        ftp_files = context['ftp_files']
        name = context.get('NAME', None)
        info = context.get('INFO', None)
        uuid = context.get('uuid', None) or None  # Turn '' to None
        file_type = context.get('file_type', None)
        dbkey = self.get_dbkey(context)
        warnings = []
        to_posix_lines = False
        if context.get('to_posix_lines', None) not in ["None", None, False]:
            to_posix_lines = True
        auto_decompress = False
        if context.get('auto_decompress', None) not in ["None", None, False]:
            auto_decompress = True
        space_to_tab = False
        if context.get('space_to_tab', None) not in ["None", None, False]:
            space_to_tab = True
        file_bunch = get_data_file_filename(data_file, override_name=name, override_info=info)
        if file_bunch.path:
            if url_paste is not None and url_paste.strip():
                warnings.append("All file contents specified in the paste box were ignored.")
            if ftp_files:
                warnings.append("All FTP uploaded file selections were ignored.")
        elif url_paste is not None and url_paste.strip():  # we need to use url_paste
            for file_bunch in get_url_paste_urls_or_filename(context, override_name=name, override_info=info):
                if file_bunch.path:
                    break
            if file_bunch.path and ftp_files is not None:
                warnings.append("All FTP uploaded file selections were ignored.")
        elif ftp_files is not None and trans.user is not None:  # look for files uploaded via FTP
            user_ftp_dir = trans.user_ftp_dir
            assert not os.path.islink(user_ftp_dir), "User FTP directory cannot be a symbolic link"
            for (dirpath, dirnames, filenames) in os.walk(user_ftp_dir):
                for filename in filenames:
                    for ftp_filename in ftp_files:
                        if ftp_filename == filename:
                            path = relpath(os.path.join(dirpath, filename), user_ftp_dir)
                            if not os.path.islink(os.path.join(dirpath, filename)):
                                ftp_data_file = {'local_filename' : os.path.abspath(os.path.join(user_ftp_dir, path)),
# ......... part of the code omitted here .........
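The start_of_url helper in this example inspects only the first eight characters of the pasted content, lower-cased and left-stripped, which is exactly long enough to match the longest prefix, "https://". A few illustrative checks, assuming the helper were lifted out of its enclosing function for testing (the inputs are hypothetical):

# Illustrative checks (start_of_url is assumed to be extracted to module scope):
assert start_of_url("  HTTPS://example.org/data.fastq")  # whitespace and case tolerated
assert start_of_url("file:///data/shared/reads.fq")      # matches, but admin-only above
assert not start_of_url("just some pasted text")         # treated as a pasted file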