

Python DataMap.load Method Code Examples

This article collects typical usage examples of the Python method lofarpipe.support.data_map.DataMap.load. If you are wondering how DataMap.load is used in practice, the selected code examples below may help. You can also explore further usage examples of the containing class, lofarpipe.support.data_map.DataMap.


The following presents 15 code examples of the DataMap.load method, drawn from open-source projects and sorted by popularity by default.
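
For orientation, the short sketch below shows the core DataMap API that all of the examples rely on: building a map from DataProduct entries, saving it to a mapfile, loading it back with DataMap.load, and iterating with the SkipIterator. This is a minimal illustration only; the host names and file paths are hypothetical.

from lofarpipe.support.data_map import DataMap, DataProduct

# A mapfile is a small text file holding a list of (host, file, skip) entries.
dmap = DataMap([
    DataProduct('node01', '/data/L123456_SB000.MS', False),
    DataProduct('node02', '/data/L123456_SB001.MS', True),   # skip == True
])
dmap.save('example.mapfile')             # write the mapfile to disk

dmap = DataMap.load('example.mapfile')   # read it back
dmap.iterator = DataMap.SkipIterator     # iterate over non-skipped entries only
for item in dmap:
    print(item.host, item.file, item.skip)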

Example 1: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Matches the hosts in one datamap with those in another

    Parameters
    ----------
    mapfile_in : str, optional
        Filename of datamap to adjust
    mapfile_to_match : str, optional
        Filename of datamap to match

    """
    mapfile_in = kwargs['mapfile_in']
    mapfile_to_match = kwargs['mapfile_to_match']

    map_in = DataMap.load(mapfile_in)
    map_in.iterator = DataMap.SkipIterator
    map_to_match = DataMap.load(mapfile_to_match)
    map_to_match.iterator = DataMap.SkipIterator

    hosts_to_match = []
    for item in map_to_match:
        hosts_to_match.append(item.host)

    for item, host in zip(map_in, hosts_to_match):
        item.host = host

    map_in.save(mapfile_in)
Developer: lofar-astron, Project: factor, Lines: 30, Source: PipelineStep_matchHosts.py
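
plugin_main above is normally invoked by the LOFAR genericpipeline framework, which passes the plugin options as keyword arguments. The hypothetical call below sketches its effect: after it runs, the entries of input.mapfile carry the host assignments of reference.mapfile. The mapfile names and paths are made up for illustration.

from lofarpipe.support.data_map import DataMap, DataProduct

# Two small mapfiles whose entries differ only in host assignment (hypothetical paths).
DataMap([DataProduct('node01', '/data/obs1.ms', False)]).save('input.mapfile')
DataMap([DataProduct('node07', '/data/obs1.parmdb', False)]).save('reference.mapfile')

plugin_main([], mapfile_in='input.mapfile', mapfile_to_match='reference.mapfile')
# input.mapfile is rewritten in place; its single entry now lists host 'node07'.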

Example 2: _load_mapfiles

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
    def _load_mapfiles(self):
        """
        Load data map file, instrument map file, and sky map file.
        Update the 'skip' fields in these map files: if 'skip' is True in any
        of the maps, then 'skip' must be set to True in all maps.
        """
        self.logger.debug("Loading map files:"
            "\n\tdata map: %s\n\tinstrument map: %s\n\tsky map: %s" % (
                self.inputs['args'][0], 
                self.inputs['instrument_mapfile'],
                self.inputs['sky_mapfile']
            )
        )
        self.data_map = DataMap.load(self.inputs['args'][0])
        self.inst_map = DataMap.load(self.inputs['instrument_mapfile'])
        self.sky_map = DataMap.load(self.inputs['sky_mapfile'])

        if not validate_data_maps(self.data_map, self.inst_map, self.sky_map):
            self.logger.error("Validation of input data mapfiles failed")
            return False

        # Update the skip fields of the three maps. If 'skip' is True in any of
        # these maps, then 'skip' must be set to True in all maps.
        for x, y, z in zip(self.data_map, self.inst_map, self.sky_map):
            x.skip = y.skip = z.skip = (x.skip or y.skip or z.skip)
        
        return True
Developer: saiyanprince, Project: pyimager, Lines: 29, Source: bbs_reducer.py

Example 3: go

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
    def go(self):
        super(imager_create_dbs, self).go()

        # get assoc_theta, convert from empty string if needed 
        assoc_theta = self.inputs["assoc_theta"]
        if assoc_theta == "":
            assoc_theta = None

        # Load mapfile data from files
        self.logger.info(self.inputs["slice_paths_mapfile"])
        slice_paths_map = MultiDataMap.load(self.inputs["slice_paths_mapfile"])
        input_map = DataMap.load(self.inputs['args'][0])
        source_list_map = DataMap.load(self.inputs['source_list_map_path'])

        if self._validate_input_data(input_map, slice_paths_map):
            return 1

        # Run the nodes with now collected inputs
        jobs, output_map = self._run_create_dbs_node(
                 input_map, slice_paths_map, assoc_theta,
                 source_list_map)

        # Collect the output of the node scripts write to (map) files
        return self._collect_and_assign_outputs(jobs, output_map,
                                    slice_paths_map)
Developer: jjdmol, Project: LOFAR, Lines: 27, Source: imager_create_dbs.py

Example 4: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Prunes entries from a mapfile

    Parameters
    ----------
    mapfile_in : str
        Filename of datamap to trim
    prune_str : str
        Entries whose filenames start with this string will be removed.

    Returns
    -------
    result : dict
        New datamap filename

    """
    mapfile_in = kwargs['mapfile_in']
    prune_str = kwargs['prune_str'].lower()
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']
    prunelen = len(prune_str)

    map_out = DataMap([])
    map_in = DataMap.load(mapfile_in)

    for i, item in enumerate(map_in):
        if item.file[:prunelen].lower() != prune_str:
            map_out.data.append(DataProduct(item.host, item.file, item.skip))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: AHorneffer, Project: prefactor, Lines: 37, Source: PipelineStep_pruneMapfile.py

Example 5: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    fileid = kwargs['mapfile_in']
    datamap = DataMap.load(fileid)
    hdf5File = os.path.join(kwargs['hdf5_dir'],kwargs['hdf5file'])
    if kwargs.has_key('instrument'):
        instrument = kwargs['instrument']
    else:
        instrument = '/instrument'
    if kwargs.has_key('compression'):
        compression = int(kwargs['compression'])
    else:
        compression = 5
    if kwargs.has_key('solset'):
        solsetName = kwargs['solset']
    else:
        solsetName = None


    # Check if all the necessary files are available
    antennaFile = os.path.join(datamap[0].file,'ANTENNA')
    if not os.path.isdir(antennaFile):
        logging.critical('Missing ANTENNA table.')
        sys.exit(1)
    fieldFile = os.path.join(datamap[0].file,'FIELD')
    if not os.path.isdir(fieldFile):
        logging.critical('Missing FIELD table.')
        sys.exit(1)
    skydbFile = os.path.join(datamap[0].file,'sky')
    if not os.path.isdir(skydbFile):
        logging.critical('Missing sky table.')
        sys.exit(1)
        
    #generate list of parmDB-filenames
    parmDBnames = [ MS.file+instrument for MS in datamap ]

    #create and fill the hdf5-file:
    solset = parmDBs2h5parm(hdf5File, parmDBnames, antennaFile, fieldFile, skydbFile, compression=compression, solsetName=solsetName)

    # Add CREATE entry to history 
    h5parmDB = h5parm(hdf5File, readonly = False)
    soltabs = h5parmDB.getSoltabs(solset=solset)
    for st in soltabs:
        sw = solWriter(soltabs[st])
        sw.addHistory('CREATE (by PipelineStep_losotoImporter from %s / %s - %s)' % (os.path.abspath(''), 
                                   os.path.basename(parmDBnames[0]), os.path.basename(parmDBnames[-1]) ) )
    h5parmDB.close()

    #generate mapfile and wrap up
    mapfileentry = {}
    mapfileentry['host'] = 'localhost'
    mapfileentry['file'] = hdf5File
    mapfileentry['skip'] = False            
    outfileid = os.path.join(kwargs['mapfile_dir'], kwargs['filename'])
    outmap = open(outfileid, 'w')
    outmap.write(repr([mapfileentry]))
    outmap.close()
    result = {}
    result['mapfile'] = outfileid
    return result
Developer: Dzielak, Project: prefactor, Lines: 61, Source: losotoImporter.py

Example 6: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Copies each entry of mapfile_in as often as the length of the corresponding
    group into a new mapfile

    Parameters
    ----------
    mapfile_in : str
        Name of the input mapfile to be expanded. (E.g. with the skymodels for the 
        different groups.)
    mapfile_groups : str
        Name of the multi-mapfile with the given groups. The number of groups needs
        to be the same as the number of files in mapfile_in.
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile
    ignore_dummies: str (optional)
        If true, do not count dummy entries when expanding

    Returns
    -------
    result : dict
        Output datamap filename

    """
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']

    try:
        ignore_dummies = str(kwargs['ignore_dummies'])	# if the user has defined a dummy preference, follow it, otherwise count dummies as usual
        ignore_dummies = ignore_dummies in ['true', 'True', '1', 'T', 't']
    except:
        ignore_dummies = False

    inmap = DataMap.load(kwargs['mapfile_in'])
    groupmap = MultiDataMap.load(kwargs['mapfile_groups'])

    if len(inmap) != len(groupmap):
        raise ValueError('PipelineStep_mapfileSingleToGroup: length of {0} and {1} differ'.format(kwargs['mapfile_in'],kwargs['mapfile_groups']))

    map_out = DataMap([])
    inindex = 0

    if ignore_dummies:
        for groupID in xrange(len(groupmap)):
            for fileID in xrange(len(groupmap[groupID].file)):
                if (groupmap[groupID].file)[fileID] != 'dummy_entry':
                        map_out.data.append(DataProduct(inmap[groupID].host, inmap[groupID].file, (inmap[groupID].skip or groupmap[groupID].skip) ))
    else:
        for groupID in xrange(len(groupmap)):
            for fileID in xrange(len(groupmap[groupID].file)):
                map_out.data.append(DataProduct(inmap[groupID].host, inmap[groupID].file, (inmap[groupID].skip or groupmap[groupID].skip) ))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: varenius, Project: lofar-lb, Lines: 61, Source: PipelineStep_mapfileSingleToGroupX.py

Example 7: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Appends a string to filenames in a mapfile

    Parameters
    ----------
    mapfile_in : str
        Filename of datamap to append to
    append : str
        String to append
    append_index : bool
        If True, append a unique index to each file
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile

    Returns
    -------
    result : dict
        New datamap filename

    """
    mapfile_in = kwargs['mapfile_in']

    if 'append_index' in kwargs:
        append_index = kwargs['append_index']
        if type(append_index) is str:
            if append_index.lower() == 'true':
                append_index = True
            else:
                append_index = False
    else:
        append_index = False

    append_str = kwargs['append']
    if append_str == 'None':
        append_str = ''
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']

    map_out = DataMap([])
    map_in = DataMap.load(mapfile_in)

    for i, item in enumerate(map_in):
        if append_index:
            map_out.data.append(DataProduct(item.host, item.file+append_str+'_{}'.format(i), item.skip))
        else:
            map_out.data.append(DataProduct(item.host, item.file+append_str, item.skip))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: lofar-astron, Project: factor, Lines: 57, Source: PipelineStep_appendMapfile.py

Example 8: go

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
    def go(self):
        # TODO: Remove dependency on mapfile_dir 
        self.logger.info("Starting copier run")
        super(copier, self).go()

        # Load data from mapfiles
        self.source_map = DataMap.load(self.inputs['mapfile_source'])
        self.target_map = DataMap.load(self.inputs['mapfile_target'])

        # validate data in mapfiles
        if not self._validate_mapfiles(self.inputs['allow_rename']):
            return 1

        # Run the compute nodes with the node specific mapfiles
        for source, target in zip(self.source_map, self.target_map):
            args = [source.host, source.file, target.file]
            self.append_job(target.host, args)

        # start the jobs, return the exit status.
        return self.run_jobs()
Developer: saiyanprince, Project: pyimager, Lines: 22, Source: copier.py

Example 9: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Makes a mapfile for list of files

    Parameters
    ----------
    files : list or str
        List of files or mapfile with such a list as the only entry. May be
        given as a list of strings or as a string (e.g.,
        '[s1.skymodel, s2.skymodel]')
    hosts : list or str
        List of hosts/nodes. May be given as a list or as a string (e.g.,
        '[host1, host2]')
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile

    Returns
    -------
    result : dict
        Output datamap filename

    """
    if type(kwargs['files']) is str:
        try:
            # Check if input is mapfile containing list as a string
            map_in = DataMap.load(kwargs['files'])
            in_files = [item.file for item in map_in]
            files = []
            for f in in_files:
                files += f.strip('[]').split(',')
        except:
            files = kwargs['files']
            files = files.strip('[]').split(',')
        files = [f.strip() for f in files]
    else:
        # 'files' was already given as a list
        files = kwargs['files']
    if type(kwargs['hosts']) is str:
        hosts = kwargs['hosts'].strip('[]').split(',')
        hosts = [h.strip() for h in hosts]
    else:
        # 'hosts' was already given as a list
        hosts = kwargs['hosts']
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']

    for i in range(len(files)-len(hosts)):
        hosts.append(hosts[i])

    map_out = DataMap([])
    for h, f in zip(hosts, files):
        map_out.data.append(DataProduct(h, f, False))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: AHorneffer, Project: prefactor, Lines: 56, Source: PipelineStep_addListMapfile.py

Example 10: update_state

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def update_state(dir_input):
    """
    Updates the paths in mapfiles or state files

    Parameters
    ----------
    dir_input : str
        Directory containing files to update

    """
    file_list = glob.glob(os.path.join(dir_input, '*'))

    if dir_input.endswith('mapfiles'):
        # Assume path is a pipeline mapfiles directory. In this case, we can
        # simply substitute the new working_dir for the old one in each of the
        # mapfiles
        working_dir = dir_input.split('results/')[0]
        for f in file_list:
            map = DataMap.load(f)
            for item in map:
                if '/' in item.file:
                    old_working_dir = item.file.split('results/')[0]
                    item.file = item.file.replace(old_working_dir, working_dir)
            map.save(f)
    elif dir_input.endswith('state'):
        # Assume path is the Factor state directory. In this case, we can try to
        # load files as pickled state files and look for paths inside. If found,
        # substitute new working_dir for the old one
        working_dir = os.path.dirname(dir_input)
        for f in file_list:
            try:
                with open(f, "rb") as fp:
                    d = pickle.load(fp)
                    for k, v in d.iteritems():
                        if type(v) is str:
                            if k == 'working_dir':
                                d[k] = working_dir
                            if '/' in v:
                                for infix in ['results/', 'state/', 'chunks/']:
                                    parts = v.split(infix)
                                    if len(parts) > 1:
                                        d[k] = os.path.join(working_dir, infix, parts[-1])
                        elif type(v) is list:
                            for i, l in enumerate(v):
                                if '/' in l:
                                    for infix in ['results/', 'state/', 'chunks/']:
                                        parts = l.split(infix)
                                        if len(parts) > 1:
                                            v[i] = os.path.join(working_dir, infix, parts[-1])
                            d[k] = v
                with open(f, "w") as fp:
                    pickle.dump(d, fp)
            except:
                pass
Developer: lofar-astron, Project: factor, Lines: 56, Source: unarchive.py

Example 11: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Makes a mapfile by compressing input mapfile items into one item

    Parameters
    ----------
    mapfile_in : str
        Filename of datamap containing MS files
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile
    list_format : bool, optional
        If True, the compressed item will use a Python list format (e.g.,
        '[file1, file2, ...]'). If False, it will be a space-separated list (e.g.,
        'file1 file2 ...')

    Returns
    -------
    result : dict
        New parmdb datamap filename

    """
    mapfile_in = kwargs['mapfile_in']
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']
    if 'list_format' in kwargs:
        list_format = kwargs['list_format']
    else:
        list_format = True
    if type(list_format) is str:
        if list_format.lower() == 'true':
            list_format = True
        else:
            list_format = False

    map_in = DataMap.load(mapfile_in)
    map_out = DataMap([])
    map_in.iterator = DataMap.SkipIterator
    file_list = [item.file for item in map_in]
    if list_format:
        newlist = '[{0}]'.format(','.join(file_list))
    else:
        newlist = '{0}'.format(' '.join(file_list))

    # Just assign host of first file to compressed file
    hosts = [item.host for item in map_in]
    map_out.data.append(DataProduct(hosts[0], newlist, False))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: astrofle, Project: factor, Lines: 56, Source: PipelineStep_compressMapfile.py

Example 12: _create_mapfile_ato

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def _create_mapfile_ato(inmap):
    maps = DataMap([])
    mapsin = DataMap.load(inmap)
    mapsin.iterator = DataMap.SkipIterator
    newlist = ''
    for i, item in enumerate(mapsin):
        newlist = newlist + item.file + ','
    newlist = newlist.rstrip(',')
    newlist = '[' + newlist + ']'
    maps.data.append(DataProduct('localhost', newlist, False))
    return maps
Developer: jjdmol, Project: LOFAR, Lines: 13, Source: PipelineStep_addMapfile.py

Example 13: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Selects those files from mapfile_in that have the same filename-base as the one in
    mapfile_reference.

    Parameters
    ----------
    mapfile_in : str
        Name of the input mapfile from which to select files.
    mapfile_reference : str
        Name of the reference mapfile
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile

    Returns
    -------
    result : dict
        Output datamap filename

    """
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']

    inmap = DataMap.load(kwargs['mapfile_in'])
    refmap = DataMap.load(kwargs['mapfile_reference'])

    map_out = DataMap([])

    basenames = [ os.path.splitext(os.path.basename(item.file))[0] for item in inmap]
    for refitem in refmap:
        refbase = os.path.splitext(os.path.basename(refitem.file))[0]
        idx = basenames.index(refbase)
        map_out.append(inmap[idx])

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: Joshuaalbert, Project: factor, Lines: 43, Source: PipelineStep_selectMatching.py

Example 14: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Trims a string from filenames in a mapfile

    Note that everything from the last instance of the matching string to the
    end is trimmed.

    Parameters
    ----------
    mapfile_in : str
        Filename of datamap to trim
    trim_str : str
        String to remove
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile
    counter : int
        If counter is greater than 0, replace "image32" with "image42". This is
        a special argument for facetselfcal looping only

    Returns
    -------
    result : dict
        New datamap filename

    """
    mapfile_in = kwargs['mapfile_in']
    trim_str = kwargs['trim']
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']
    if 'counter' in kwargs:
        counter = int(kwargs['counter'])
    else:
        counter = 0

    map_out = DataMap([])
    map_in = DataMap.load(mapfile_in)

    for i, item in enumerate(map_in):
        index = item.file.rfind(trim_str)
        if index >= 0:
            item_trim = item.file[:index]
            if counter > 0:
                item_trim = item_trim.replace('image32', 'image42')
            map_out.data.append(DataProduct(item.host, item_trim,
                item.skip))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: AHorneffer, Project: prefactor, Lines: 55, Source: PipelineStep_trimMapfile.py

Example 15: plugin_main

# Required import: from lofarpipe.support.data_map import DataMap [as alias]
# Or: from lofarpipe.support.data_map.DataMap import load [as alias]
def plugin_main(args, **kwargs):
    """
    Makes a mapfile by expanding single input mapfile item into many items

    Parameters
    ----------
    mapfile_in : str
        Filename of datamap containing single item
    mapfile_to_match : str
        Filename of datamap containing multiple items
    mapfile_dir : str
        Directory for output mapfile
    filename: str
        Name of output mapfile

    Returns
    -------
    result : dict
        New parmdb datamap filename

    """
    mapfile_in = kwargs['mapfile_in']
    mapfile_to_match = kwargs['mapfile_to_match']
    mapfile_dir = kwargs['mapfile_dir']
    filename = kwargs['filename']

    map_in = DataMap.load(mapfile_in)
    map_match = DataMap.load(mapfile_to_match)
    map_out = DataMap([])

    map_match.iterator = DataMap.SkipIterator
    for item in map_match:
        map_out.data.append(DataProduct(item.host, map_in[0].file, item.skip))

    fileid = os.path.join(mapfile_dir, filename)
    map_out.save(fileid)
    result = {'mapfile': fileid}

    return result
Developer: astrofle, Project: factor, Lines: 41, Source: PipelineStep_expandMapfile.py


Note: The lofarpipe.support.data_map.DataMap.load method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.