

Python Bunch.primary_file method code examples

This article collects typical usage examples of the Python method galaxy.util.bunch.Bunch.primary_file. If you are unsure what Bunch.primary_file does, how to call it, or what real-world usage looks like, the selected examples below should help. You can also explore further usage examples of galaxy.util.bunch.Bunch itself.


Three code examples of the Bunch.primary_file method are shown below, sorted by popularity by default.
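Before the examples, here is a minimal sketch, not taken from the Galaxy source, of what these excerpts rely on: Bunch is a simple attribute container, and primary_file is just an ordinary attribute that the upload code assigns to it. The fallback class and the file path below are assumptions for illustration only.

try:
    from galaxy.util.bunch import Bunch
except ImportError:
    # Stand-in with equivalent behaviour so the sketch runs without a Galaxy checkout.
    class Bunch(object):
        def __init__(self, **kwds):
            self.__dict__.update(kwds)

dataset = Bunch()
dataset.type = 'composite'
dataset.primary_file = '/tmp/primary_file_example.dat'  # hypothetical path
dataset.composite_files = {}
print(dataset.type, dataset.primary_file)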

Example 1: get_uploaded_datasets

# Required import [used as an alias]: from galaxy.util.bunch import Bunch
# Or [used as an alias]: from galaxy.util.bunch.Bunch import primary_file

#......... part of the code is omitted here .........
             space_to_tab = True
         file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
         if file_bunch.path and url_paste:
             if url_paste.strip():
                 warnings.append( "All file contents specified in the paste box were ignored." )
         else: #we need to use url_paste
             for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
                 if file_bunch.path:
                     break
         return file_bunch, warnings
     def get_filenames( context ):
         rval = []
         data_file = context['file_data']
         url_paste = context['url_paste']
         name = context.get( 'NAME', None )
         info = context.get( 'INFO', None )
         space_to_tab = False
         if context.get( 'space_to_tab', None ) not in ["None", None]:
             space_to_tab = True
         warnings = []
         file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
         if file_bunch.path:
             file_bunch.space_to_tab = space_to_tab
             rval.append( file_bunch )
         for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
             if file_bunch.path:
                 file_bunch.space_to_tab = space_to_tab
                 rval.append( file_bunch )
         return rval
     file_type = self.get_file_type( context )
     d_type = self.get_datatype( trans, context )
     dbkey = context.get( 'dbkey', None )
     writable_files = d_type.writable_files
     writable_files_offset = 0
     groups_incoming = [ None for filename in writable_files ]
     for group_incoming in context.get( self.name, [] ):
         i = int( group_incoming['__index__'] )
         groups_incoming[ i ] = group_incoming
     if d_type.composite_type is not None:
         #handle uploading of composite datatypes
         #Only one Dataset can be created
         dataset = Bunch()
         dataset.type = 'composite'
         dataset.file_type = file_type
         dataset.dbkey = dbkey
         dataset.datatype = d_type
         dataset.warnings = []
         dataset.metadata = {}
         dataset.composite_files = {}
         #load metadata
         files_metadata = context.get( self.metadata_ref, {} )
         for meta_name, meta_spec in d_type.metadata_spec.iteritems():
             if meta_spec.set_in_upload:
                 if meta_name in files_metadata:
                     dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
         dataset_name = None
         dataset_info = None
         if dataset.datatype.composite_type == 'auto_primary_file':
             #replace sniff here with just creating an empty file
             temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
             dataset.primary_file = temp_name
             dataset.space_to_tab = False
             dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
         else:
             file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
             if dataset.datatype.composite_type:
                 precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
             writable_files_offset = 1
             dataset.primary_file = file_bunch.path
             dataset.space_to_tab = file_bunch.space_to_tab
             dataset.precreated_name = file_bunch.precreated_name
             dataset.name = file_bunch.precreated_name
             dataset.warnings.extend( file_bunch.warnings )
         if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
             raise Exception( 'No primary dataset file was available for composite upload' )
         keys = [ value.name for value in writable_files.values() ]
         for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
             key = keys[ i + writable_files_offset ]
             if group_incoming is None and not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                 dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
                 dataset.composite_files[ key ] = None
             else:
                 file_bunch, warnings = get_one_filename( group_incoming )
                 if file_bunch.path:
                     dataset.composite_files[ key ] = file_bunch.__dict__
                 else:
                     dataset.composite_files[ key ] = None
                     if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                         dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
         return [ dataset ]
     else:
         datasets = get_filenames( context[ self.name ][0] )
         rval = []
         for dataset in datasets:
             dataset.file_type = file_type
             dataset.datatype = d_type
             dataset.ext = self.get_datatype_ext( trans, context )
             dataset.dbkey = dbkey
             rval.append( dataset )
         return rval
Developer: msGenDev; Project: Yeps-EURAC; Lines of code: 104; Source file: grouping.py
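For reference, a hedged sketch of the single composite dataset Bunch that this example returns (all values below are hypothetical, not from the source): primary_file holds the path of the main uploaded file, and composite_files maps each expected member name either to the corresponding file Bunch's __dict__ (a plain dict) or to None when that member was not supplied.

from galaxy.util.bunch import Bunch

dataset = Bunch()
dataset.type = 'composite'
dataset.file_type = 'html'                    # hypothetical composite file type
dataset.primary_file = '/tmp/primary.html'    # hypothetical path
dataset.composite_files = {
    'data.txt': {'path': '/tmp/data.txt', 'space_to_tab': False},  # member that was provided
    'extra.txt': None,                                             # optional member left empty
}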

Example 2: get_uploaded_datasets

# Required import [used as an alias]: from galaxy.util.bunch import Bunch
# Or [used as an alias]: from galaxy.util.bunch.Bunch import primary_file

#......... part of the code is omitted here .........
                else:
                    user_ftp_dir = trans.user_ftp_dir
                    for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
                        for filename in filenames:
                            path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
                            if not os.path.islink( os.path.join( dirpath, filename ) ):
                                # Normalize filesystem paths
                                if isinstance(path, unicode):
                                    valid_files.append(unicodedata.normalize('NFC', path ))
                                else:
                                    valid_files.append(path)

            else:
                ftp_files = []
            for ftp_file in ftp_files:
                if ftp_file not in valid_files:
                    log.warning( 'User passed an invalid file path in ftp_files: %s' % ftp_file )
                    continue
                    # TODO: warning to the user (could happen if file is already imported)
                ftp_data_file = { 'local_filename' : os.path.abspath( os.path.join( user_ftp_dir, ftp_file ) ),
                                  'filename' : os.path.basename( ftp_file ) }
                file_bunch = get_data_file_filename( ftp_data_file, override_name=name, override_info=info )
                if file_bunch.path:
                    file_bunch.to_posix_lines = to_posix_lines
                    file_bunch.space_to_tab = space_to_tab
                    rval.append( file_bunch )
            return rval
        file_type = self.get_file_type( context )
        d_type = self.get_datatype( trans, context )
        dbkey = context.get( 'dbkey', None )
        writable_files = d_type.writable_files
        writable_files_offset = 0
        groups_incoming = [ None for _ in writable_files ]
        for group_incoming in context.get( self.name, [] ):
            i = int( group_incoming['__index__'] )
            groups_incoming[ i ] = group_incoming
        if d_type.composite_type is not None:
            # handle uploading of composite datatypes
            # Only one Dataset can be created
            dataset = Bunch()
            dataset.type = 'composite'
            dataset.file_type = file_type
            dataset.dbkey = dbkey
            dataset.datatype = d_type
            dataset.warnings = []
            dataset.metadata = {}
            dataset.composite_files = {}
            dataset.uuid = None
            # load metadata
            files_metadata = context.get( self.metadata_ref, {} )
            metadata_name_substition_default_dict = dict( [ ( composite_file.substitute_name_with_metadata, d_type.metadata_spec[ composite_file.substitute_name_with_metadata ].default ) for composite_file in d_type.composite_files.values() if composite_file.substitute_name_with_metadata ] )
            for meta_name, meta_spec in d_type.metadata_spec.iteritems():
                if meta_spec.set_in_upload:
                    if meta_name in files_metadata:
                        meta_value = files_metadata[ meta_name ]
                        if meta_name in metadata_name_substition_default_dict:
                            meta_value = sanitize_for_filename( meta_value, default=metadata_name_substition_default_dict[ meta_name ] )
                        dataset.metadata[ meta_name ] = meta_value
            dataset.precreated_name = dataset.name = self.get_composite_dataset_name( context )
            if dataset.datatype.composite_type == 'auto_primary_file':
                # replace sniff here with just creating an empty file
                temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
                dataset.primary_file = temp_name
                dataset.to_posix_lines = True
                dataset.space_to_tab = False
            else:
                file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
                writable_files_offset = 1
                dataset.primary_file = file_bunch.path
                dataset.to_posix_lines = file_bunch.to_posix_lines
                dataset.space_to_tab = file_bunch.space_to_tab
                dataset.warnings.extend( warnings )
            if dataset.primary_file is None:  # remove this before finish, this should create an empty dataset
                raise Exception( 'No primary dataset file was available for composite upload' )
            keys = [ value.name for value in writable_files.values() ]
            for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
                key = keys[ i + writable_files_offset ]
                if group_incoming is None and not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                    dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
                    dataset.composite_files[ key ] = None
                else:
                    file_bunch, warnings = get_one_filename( group_incoming )
                    dataset.warnings.extend( warnings )
                    if file_bunch.path:
                        dataset.composite_files[ key ] = file_bunch.__dict__
                    else:
                        dataset.composite_files[ key ] = None
                        if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
                            dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
            return [ dataset ]
        else:
            datasets = get_filenames( context[ self.name ][0] )
            rval = []
            for dataset in datasets:
                dataset.file_type = file_type
                dataset.datatype = d_type
                dataset.ext = self.get_datatype_ext( trans, context )
                dataset.dbkey = dbkey
                rval.append( dataset )
            return rval
Developer: galaxyguardians; Project: galaxy; Lines of code: 104; Source file: grouping.py

Example 3: get_uploaded_datasets

# Required import [used as an alias]: from galaxy.util.bunch import Bunch
# Or [used as an alias]: from galaxy.util.bunch.Bunch import primary_file

#......... part of the code is omitted here .........
                    file_bunch.auto_decompress = auto_decompress
                    file_bunch.space_to_tab = space_to_tab
                    if file_type is not None:
                        file_bunch.file_type = file_type
                    if dbkey is not None:
                        file_bunch.dbkey = dbkey
                    rval.append(file_bunch)
            return rval
        file_type = self.get_file_type(context)
        file_count = self.get_file_count(trans, context)
        d_type = self.get_datatype(trans, context)
        dbkey = self.get_dbkey(context)
        tag_using_filenames = context.get('tag_using_filenames', False)
        force_composite = asbool(context.get('force_composite', 'False'))
        writable_files = d_type.writable_files
        writable_files_offset = 0
        groups_incoming = [None for _ in range(file_count)]
        for group_incoming in context.get(self.name, []):
            i = int(group_incoming['__index__'])
            groups_incoming[i] = group_incoming
        if d_type.composite_type is not None or force_composite:
            # handle uploading of composite datatypes
            # Only one Dataset can be created
            dataset = Bunch()
            dataset.type = 'composite'
            dataset.file_type = file_type
            dataset.dbkey = dbkey
            dataset.datatype = d_type
            dataset.warnings = []
            dataset.metadata = {}
            dataset.composite_files = {}
            dataset.uuid = None
            dataset.tag_using_filenames = None
            # load metadata
            files_metadata = context.get(self.metadata_ref, {})
            metadata_name_substition_default_dict = dict((composite_file.substitute_name_with_metadata, d_type.metadata_spec[composite_file.substitute_name_with_metadata].default) for composite_file in d_type.composite_files.values() if composite_file.substitute_name_with_metadata)
            for meta_name, meta_spec in d_type.metadata_spec.items():
                if meta_spec.set_in_upload:
                    if meta_name in files_metadata:
                        meta_value = files_metadata[meta_name]
                        if meta_name in metadata_name_substition_default_dict:
                            meta_value = sanitize_for_filename(meta_value, default=metadata_name_substition_default_dict[meta_name])
                        dataset.metadata[meta_name] = meta_value
            dataset.name = self.get_composite_dataset_name(context)
            if dataset.datatype.composite_type == 'auto_primary_file':
                # replace sniff here with just creating an empty file
                temp_name = sniff.stream_to_file(StringIO(d_type.generate_primary_file(dataset)), prefix='upload_auto_primary_file')
                dataset.primary_file = temp_name
                dataset.to_posix_lines = True
                dataset.auto_decompress = True
                dataset.space_to_tab = False
            else:
                file_bunch, warnings = get_one_filename(groups_incoming[0])
                writable_files_offset = 1
                dataset.primary_file = file_bunch.path
                dataset.to_posix_lines = file_bunch.to_posix_lines
                dataset.auto_decompress = file_bunch.auto_decompress
                dataset.space_to_tab = file_bunch.space_to_tab
                if file_bunch.file_type:
                    dataset.file_type = file_type
                if file_bunch.dbkey:
                    dataset.dbkey = dbkey
                dataset.warnings.extend(warnings)
            if dataset.primary_file is None:  # remove this before finish, this should create an empty dataset
                raise Exception('No primary dataset file was available for composite upload')
            if not force_composite:
                keys = [value.name for value in writable_files.values()]
            else:
                keys = [str(index) for index in range(file_count)]
            for i, group_incoming in enumerate(groups_incoming[writable_files_offset:]):
                key = keys[i + writable_files_offset]
                if not force_composite and group_incoming is None and not writable_files[list(writable_files.keys())[keys.index(key)]].optional:
                    dataset.warnings.append("A required composite file (%s) was not specified." % (key))
                    dataset.composite_files[key] = None
                else:
                    file_bunch, warnings = get_one_filename(group_incoming)
                    dataset.warnings.extend(warnings)
                    if file_bunch.path:
                        if force_composite:
                            key = group_incoming.get("NAME") or i
                        dataset.composite_files[key] = file_bunch.__dict__
                    elif not force_composite:
                        dataset.composite_files[key] = None
                        if not writable_files[list(writable_files.keys())[keys.index(key)]].optional:
                            dataset.warnings.append("A required composite file (%s) was not specified." % (key))
            return [dataset]
        else:
            rval = []
            for i, file_contexts in enumerate(context[self.name]):
                datasets = get_filenames(file_contexts)
                for dataset in datasets:
                    override_file_type = self.get_file_type(context[self.name][i], parent_context=context)
                    d_type = self.get_datatype(trans, context[self.name][i], parent_context=context)
                    dataset.file_type = override_file_type
                    dataset.datatype = d_type
                    dataset.ext = self.get_datatype_ext(trans, context[self.name][i], parent_context=context)
                    dataset.dbkey = self.get_dbkey(context[self.name][i], parent_context=context)
                    dataset.tag_using_filenames = tag_using_filenames
                    rval.append(dataset)
            return rval
Developer: msauria; Project: galaxy; Lines of code: 104; Source file: grouping.py
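As a usage sketch only (the upload_dataset parameter instance, trans and context are assumed here and are not shown in the excerpts above): every element returned by get_uploaded_datasets() is a Bunch, and for a composite upload the single returned Bunch exposes primary_file, composite_files and warnings.

datasets = upload_dataset.get_uploaded_datasets(trans, context)
for dataset in datasets:
    if getattr(dataset, 'type', None) == 'composite':
        print("primary file:", dataset.primary_file)
        print("composite members:", list(dataset.composite_files))
        print("warnings:", dataset.warnings)
    else:
        # Regular uploads: one Bunch per file, with path and file_type set.
        print("regular upload:", dataset.path, dataset.file_type)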


Note: The galaxy.util.bunch.Bunch.primary_file method examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors. Please follow the corresponding project's license when distributing or using the code, and do not reproduce this article without permission.