This article collects typical usage examples of the Python class coverage_model.parameter.ParameterDictionary. If you have been wondering what the ParameterDictionary class is for, how to use it, or what working examples look like, the hand-picked class examples below should help.
The following section shows 15 code examples of the ParameterDictionary class, sorted by popularity by default.
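As a quick orientation before the examples: a ParameterDictionary is an ordered collection of ParameterContext objects, one of which can be flagged as the temporal parameter. The minimal sketch below shows the construction pattern most of the examples share; the import paths, units, and parameter names are assumptions based on the coverage_model package layout and the code below, not something stated on this page.

# Minimal sketch, assuming these coverage_model module paths.
import numpy as np
from coverage_model.parameter import ParameterDictionary, ParameterContext
from coverage_model.parameter_types import QuantityType

pdict = ParameterDictionary()

# Temporal parameter: is_temporal=True marks it as the timeline (see Example 13).
time_ctxt = ParameterContext('time', param_type=QuantityType(value_encoding=np.float64))
time_ctxt.uom = 'seconds since 1900-01-01'  # assumed unit string
pdict.add_context(time_ctxt, is_temporal=True)

# An ordinary data parameter.
temp_ctxt = ParameterContext('temp', param_type=QuantityType(value_encoding=np.float32))
temp_ctxt.uom = 'deg_C'
pdict.add_context(temp_ctxt)

# Contexts can be retrieved back by name, as Example 15 does.
assert pdict.get_context('temp').uom == 'deg_C'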
Example 1: cov_io
def cov_io(self, context, value_array, comp_val=None):
    pdict = ParameterDictionary()
    time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))
    pdict.add_context(context)
    pdict.add_context(time, True)

    # Construct temporal and spatial Coordinate Reference System objects
    tcrs = CRS([AxisTypeEnum.TIME])
    scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

    # Construct temporal and spatial Domain objects
    tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE)  # 1d (timeline)
    sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE)    # 0d spatial topology (station/trajectory)

    # Instantiate the SimplexCoverage providing the ParameterDictionary, spatial Domain and temporal Domain
    cov = SimplexCoverage('test_data', create_guid(), 'sample coverage_model', parameter_dictionary=pdict, temporal_domain=tdom, spatial_domain=sdom)
    cov.insert_timesteps(len(value_array))
    cov.set_parameter_values('test', tdoa=slice(0, len(value_array)), value=value_array)
    comp_val = comp_val if comp_val is not None else value_array
    testval = cov.get_parameter_values('test')
    try:
        np.testing.assert_array_equal(testval, comp_val)
    except:
        print repr(value_array)
        raise
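A helper like cov_io is normally driven from individual test methods. The hedged sketch below shows such a call inside the same TestCase; the parameter name 'test' matches what cov_io reads back, while the method name and sample data are purely illustrative.

def test_quantity_io(self):
    # Build a context for the 'test' parameter and push a small array through the coverage round trip.
    ctxt = ParameterContext('test', param_type=QuantityType(value_encoding=np.float64))
    self.cov_io(ctxt, np.arange(10, dtype=np.float64))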
Example 2: get_param_dict
def get_param_dict(param_dict_name=None):
    raise NotImplementedError('This method has been replaced by DatasetManagementService, please use read_parameter_dictionary_by_name instead')
    # read the file just once, not every time needed
    global _PARAMETER_DICTIONARIES
    global _PARAMETER_CONTEXTS
    if not _PARAMETER_DICTIONARIES:
        param_dict_defs_file = "res/config/param_dict_defs.yml"
        with open(param_dict_defs_file, "r") as f_dict:
            dict_string = f_dict.read()
        _PARAMETER_DICTIONARIES = yaml.load(dict_string)

        param_context_defs_file = "res/config/param_context_defs.yml"
        with open(param_context_defs_file, "r") as f_ctxt:
            ctxt_string = f_ctxt.read()
        _PARAMETER_CONTEXTS = yaml.load(ctxt_string)

    # make sure we have the one requested
    context_names = _PARAMETER_DICTIONARIES[param_dict_name]
    for name in context_names:
        if not _PARAMETER_CONTEXTS.has_key(name):
            raise AssertionError('The parameter dict has a context that does not exist in the parameter context defs specified in yml: %s' % name)

    # package and ship
    pdict = ParameterDictionary()
    for ctxt_name in context_names:
        param_context = ParameterContext.load(_PARAMETER_CONTEXTS[ctxt_name])
        pdict.add_context(param_context)
    return pdict
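Two things stand out in this example: everything after the initial raise NotImplementedError is unreachable and is kept only for reference, and a bare yaml.load(...) call is deprecated as of PyYAML 5.1, which expects an explicit loader. A hedged modernization of the loading step, reusing the same file path as above, might look like this:

import yaml

with open("res/config/param_dict_defs.yml", "r") as f_dict:
    # safe_load accepts the open file object directly and avoids arbitrary object construction.
    _PARAMETER_DICTIONARIES = yaml.safe_load(f_dict)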
Example 3: _new_coverage
def _new_coverage(self, root_dir, persistence_guid, name, reference_coverage_locs, parameter_dictionary, complex_type, reference_coverage_extents={}):
    reference_coverage_locs = reference_coverage_locs or []  # Can be empty

    # Coverage doesn't exist, make a new one
    if name is None:
        raise SystemError('\'reference_coverages\' and \'name\' cannot be None')
    if not isinstance(name, basestring):
        raise TypeError('\'name\' must be of type basestring')
    self.name = name
    if parameter_dictionary is None:
        parameter_dictionary = ParameterDictionary()

    # Must be in 'a' for a new coverage
    self.mode = 'a'

    self._reference_covs = collections.OrderedDict()

    if not hasattr(reference_coverage_locs, '__iter__'):
        reference_coverage_locs = [reference_coverage_locs]

    self._persistence_layer = PostgresPersistenceLayer(root_dir,
                                                       persistence_guid,
                                                       name=self.name,
                                                       mode=self.mode,
                                                       param_dict=parameter_dictionary,
                                                       rcov_locs=reference_coverage_locs,
                                                       rcov_extents=reference_coverage_extents,
                                                       complex_type=complex_type,
                                                       coverage_type='complex',
                                                       version=self.version)

    for pc in parameter_dictionary.itervalues():
        self.append_parameter(pc[1])
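One detail worth noting: iterating the ParameterDictionary with itervalues() evidently yields pairs rather than bare contexts, which is why the ParameterContext is pulled out with pc[1] before being appended. A sketch of that access pattern, where the pair layout is an assumption inferred from the loop above:

for pc in parameter_dictionary.itervalues():
    param_context = pc[1]  # assumed (ordinal, ParameterContext) pair
    # hand param_context to whatever consumes it, e.g. self.append_parameter(param_context)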
Example 4: __init__
def __init__(self, param_dictionary=None, stream_definition_id='', locator=None):
    """
    """
    if type(param_dictionary) == dict:
        self._pdict = ParameterDictionary.load(param_dictionary)
    elif isinstance(param_dictionary, ParameterDictionary):
        self._pdict = param_dictionary
    elif stream_definition_id:
        pdict = RecordDictionaryTool.pdict_from_stream_def(stream_definition_id)
        self._pdict = ParameterDictionary.load(pdict)
        self._stream_def = stream_definition_id
    else:
        raise BadRequest('Unable to create record dictionary with improper ParameterDictionary')

    if stream_definition_id:
        self._stream_def = stream_definition_id

    self._shp = None
    self._rd = {}
    self._locator = locator
    self._setup_params()
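As the branches above show, RecordDictionaryTool accepts its parameter dictionary in three forms: a plain dict (the serialized form consumed by ParameterDictionary.load), a live ParameterDictionary instance, or a stream definition id that is resolved through pdict_from_stream_def. A short sketch of the two most common call styles; stream_def_id is a hypothetical identifier, not something defined on this page:

rdt = RecordDictionaryTool(param_dictionary=pdict)              # live ParameterDictionary
rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)  # resolved via pdict_from_stream_def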
Example 5: sync_rdt_with_coverage
def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, stride_time=None, parameters=None):
    '''
    Builds a granule based on the coverage
    '''
    if coverage is None:
        coverage = self.coverage

    slice_ = slice(None)  # Defaults to all values

    if tdoa is not None and isinstance(tdoa, slice):
        slice_ = tdoa
    elif stride_time is not None:
        validate_is_instance(start_time, Number, 'start_time must be a number for striding.')
        validate_is_instance(end_time, Number, 'end_time must be a number for striding.')
        validate_is_instance(stride_time, Number, 'stride_time must be a number for striding.')
        ugly_range = np.arange(start_time, end_time, stride_time)
        idx_values = [TimeUtils.get_relative_time(coverage, i) for i in ugly_range]
        slice_ = [idx_values]
    elif not (start_time is None and end_time is None):
        time_var = coverage._temporal_param_name
        uom = coverage.get_parameter_context(time_var).uom
        if start_time is not None:
            start_units = TimeUtils.ts_to_units(uom, start_time)
            log.info('Units: %s', start_units)
            start_idx = TimeUtils.get_relative_time(coverage, start_units)
            log.info('Start Index: %s', start_idx)
            start_time = start_idx
        if end_time is not None:
            end_units = TimeUtils.ts_to_units(uom, end_time)
            log.info('End units: %s', end_units)
            end_idx = TimeUtils.get_relative_time(coverage, end_units)
            log.info('End index: %s', end_idx)
            end_time = end_idx
        slice_ = slice(start_time, end_time, stride_time)
        log.info('Slice: %s', slice_)

    if parameters is not None:
        pdict = ParameterDictionary()
        params = set(coverage.list_parameters()).intersection(parameters)
        for param in params:
            pdict.add_context(coverage.get_parameter_context(param))
        rdt = RecordDictionaryTool(param_dictionary=pdict)
        self.pdict = pdict
    else:
        rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)

    fields = coverage.list_parameters()
    if parameters is not None:
        fields = set(fields).intersection(parameters)

    for d in fields:
        rdt[d] = coverage.get_parameter_values(d, tdoa=slice_)

    self.rdt = rdt  # Sync
Example 6: _create_parameter_dictionary
def _create_parameter_dictionary(self):
    pdict = ParameterDictionary()

    lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    lat_ctxt.axis = AxisTypeEnum.LAT
    lat_ctxt.uom = 'degree_north'
    pdict.add_context(lat_ctxt)

    lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
    lon_ctxt.axis = AxisTypeEnum.LON
    lon_ctxt.uom = 'degree_east'
    pdict.add_context(lon_ctxt)

    return pdict
Example 7: _doload
def _doload(self):
    # Make sure the coverage directory exists
    if not os.path.exists(pth):
        raise SystemError('Cannot find specified coverage: {0}'.format(pth))

    # All appears well - load it up!
    self._persistence_layer = PersistenceLayer(root_dir, persistence_guid, mode=self.mode)

    self.name = self._persistence_layer.name
    self.spatial_domain = self._persistence_layer.sdom
    self.temporal_domain = self._persistence_layer.tdom

    self._range_dictionary = ParameterDictionary()
    self._range_value = RangeValues()

    self._bricking_scheme = self._persistence_layer.global_bricking_scheme

    self._in_memory_storage = False

    auto_flush_values = self._persistence_layer.auto_flush_values
    inline_data_writes = self._persistence_layer.inline_data_writes

    from coverage_model.persistence import PersistedStorage
    for parameter_name in self._persistence_layer.parameter_metadata.keys():
        md = self._persistence_layer.parameter_metadata[parameter_name]
        pc = md.parameter_context
        self._range_dictionary.add_context(pc)
        s = PersistedStorage(md, self._persistence_layer.brick_dispatcher, dtype=pc.param_type.storage_encoding, fill_value=pc.param_type.fill_value, mode=self.mode, inline_data_writes=inline_data_writes, auto_flush=auto_flush_values)
        self._range_value[parameter_name] = get_value_class(param_type=pc.param_type, domain_set=pc.dom, storage=s)
Example 8: _construct_stream_and_publisher
def _construct_stream_and_publisher(self, stream_name, stream_config):

    if log.isEnabledFor(logging.TRACE):  # pragma: no cover
        log.trace("%r: _construct_stream_and_publisher: "
                  "stream_name:%r, stream_config:\n%s",
                  self._platform_id, stream_name,
                  self._pp.pformat(stream_config))

    decoder = IonObjectDeserializer(obj_registry=get_obj_registry())

    if 'stream_def_dict' not in stream_config:
        # should not happen: PlatformAgent._validate_configuration validates this.
        log.error("'stream_def_dict' key not in configuration for stream %r" % stream_name)
        return

    stream_def_dict = stream_config['stream_def_dict']
    stream_def_dict['type_'] = 'StreamDefinition'
    stream_def_obj = decoder.deserialize(stream_def_dict)
    self._stream_defs[stream_name] = stream_def_obj

    routing_key = stream_config['routing_key']
    stream_id = stream_config['stream_id']
    exchange_point = stream_config['exchange_point']
    parameter_dictionary = stream_def_dict['parameter_dictionary']
    log.debug("%r: got parameter_dictionary from stream_def_dict", self._platform_id)

    self._data_streams[stream_name] = stream_id
    self._param_dicts[stream_name] = ParameterDictionary.load(parameter_dictionary)
    stream_route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
    publisher = self._create_publisher(stream_id, stream_route)
    self._data_publishers[stream_name] = publisher

    log.debug("%r: created publisher for stream_name=%r", self._platform_id, stream_name)
Example 9: _get_data
def _get_data(cls, config):
    new_flst = get_safe(config, 'constraints.new_files', [])
    hdr_cnt = get_safe(config, 'header_count', SlocumParser.DEFAULT_HEADER_SIZE)
    for f in new_flst:
        try:
            parser = SlocumParser(f[0], hdr_cnt)
            #CBM: Not in use yet...
            # ext_dset_res = get_safe(config, 'external_dataset_res', None)
            # t_vname = ext_dset_res.dataset_description.parameters['temporal_dimension']
            # x_vname = ext_dset_res.dataset_description.parameters['zonal_dimension']
            # y_vname = ext_dset_res.dataset_description.parameters['meridional_dimension']
            # z_vname = ext_dset_res.dataset_description.parameters['vertical_dimension']
            # var_lst = ext_dset_res.dataset_description.parameters['variables']

            max_rec = get_safe(config, 'max_records', 1)
            dprod_id = get_safe(config, 'data_producer_id', 'unknown data producer')
            #tx_yml = get_safe(config, 'taxonomy')
            #ttool = TaxyTool.load(tx_yml) #CBM: Assertion inside RDT.__setitem__ requires same instance of TaxyTool
            pdict = ParameterDictionary.load(get_safe(config, 'param_dictionary'))

            cnt = calculate_iteration_count(len(parser.sensor_map), max_rec)
            for x in xrange(cnt):
                #rdt = RecordDictionaryTool(taxonomy=ttool)
                rdt = RecordDictionaryTool(param_dictionary=pdict)

                for name in parser.sensor_map:
                    d = parser.data_map[name][x*max_rec:(x+1)*max_rec]
                    rdt[name] = d

                #g = build_granule(data_producer_id=dprod_id, taxonomy=ttool, record_dictionary=rdt)
                g = build_granule(data_producer_id=dprod_id, record_dictionary=rdt, param_dictionary=pdict)
                yield g
        except SlocumParseException as spe:
            # TODO: Decide what to do here, raise an exception or carry on
            log.error('Error parsing data file: \'{0}\''.format(f))
Example 10: sync_rdt_with_coverage
def sync_rdt_with_coverage(self, coverage=None, tdoa=None, start_time=None, end_time=None, parameters=None):
    '''
    Builds a granule based on the coverage
    '''
    if coverage is None:
        coverage = self.coverage

    slice_ = slice(None)  # Defaults to all values

    if tdoa is not None and isinstance(tdoa, slice):
        slice_ = tdoa
    elif not (start_time is None and end_time is None):
        uom = coverage.get_parameter_context('time').uom
        if start_time is not None:
            start_units = self.ts_to_units(uom, start_time)
            log.info('Units: %s', start_units)
            start_idx = self.get_relative_time(coverage, start_units)
            log.info('Start Index: %s', start_idx)
            start_time = start_idx
        if end_time is not None:
            end_units = self.ts_to_units(uom, end_time)
            log.info('End units: %s', end_units)
            end_idx = self.get_relative_time(coverage, end_units)
            log.info('End index: %s', end_idx)
            end_time = end_idx
        slice_ = slice(start_time, end_time)
        log.info('Slice: %s', slice_)

    if parameters is not None:
        pdict = ParameterDictionary()
        params = set(coverage.list_parameters()).intersection(parameters)
        for param in params:
            pdict.add_context(coverage.get_parameter_context(param))
        rdt = RecordDictionaryTool(param_dictionary=pdict)
        self.pdict = pdict
    else:
        rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)

    fields = coverage.list_parameters()
    if parameters is not None:
        fields = set(fields).intersection(parameters)

    for d in fields:
        rdt[d] = coverage.get_parameter_values(d, tdoa=slice_)

    self.rdt = rdt  # Sync
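This is essentially an earlier variant of the sync_rdt_with_coverage shown in Example 5: it has no stride_time branch and calls local ts_to_units/get_relative_time helpers instead of the TimeUtils equivalents, but it builds the RecordDictionaryTool from a ParameterDictionary (either a trimmed one or coverage.parameter_dictionary) in exactly the same way.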
Example 11: rdt_to_granule
def rdt_to_granule(self, context, value_array, comp_val=None):
    pdict = ParameterDictionary()
    pdict.add_context(context)

    rdt = RecordDictionaryTool(param_dictionary=pdict)
    rdt["test"] = value_array

    granule = rdt.to_granule()
    rdt2 = RecordDictionaryTool.load_from_granule(granule)

    testval = comp_val if comp_val is not None else value_array
    actual = rdt2["test"]

    if isinstance(testval, basestring):
        self.assertEquals(testval, actual)
    else:
        np.testing.assert_array_equal(testval, actual)
Example 12: _setup_resources
def _setup_resources(self):
    pdict = ParameterDictionary()

    t_ctxt = ParameterContext('data', param_type=QuantityType(value_encoding=numpy.dtype('int64')))
    t_ctxt.axis = AxisTypeEnum.TIME
    t_ctxt.uom = 'seconds since 01-01-1970'
    pdict.add_context(t_ctxt)

    stream_id, stream_route, stream_def = self.create_stream_and_logger(name='fibonacci_stream', pdict=pdict)
    # tx = TaxyTool()
    # tx.add_taxonomy_set('data', 'external_data')

    self.DVR_CONFIG['dh_cfg'] = {
        'TESTING': True,
        'stream_id': stream_id,
        'stream_route': stream_route,
        'stream_def': stream_def,
        'data_producer_id': 'fibonacci_data_producer_id',
        'max_records': 4,
    }
Example 13: rdt_to_granule
def rdt_to_granule(self, context, value_array, comp_val=None):
    time = ParameterContext(name='time', param_type=QuantityType(value_encoding=np.float64))

    pdict = ParameterDictionary()
    pdict.add_context(time, is_temporal=True)
    pdict.add_context(context)

    rdt = RecordDictionaryTool(param_dictionary=pdict)
    rdt['time'] = np.arange(len(value_array))
    rdt['test'] = value_array

    granule = rdt.to_granule()
    rdt2 = RecordDictionaryTool.load_from_granule(granule)

    testval = comp_val if comp_val is not None else value_array
    actual = rdt2['test']

    if isinstance(testval, basestring):
        self.assertEquals(testval, actual)
    else:
        np.testing.assert_array_equal(testval, actual)
Example 14: load_from_granule
def load_from_granule(cls, g):
    """
    @brief return an instance of Record Dictionary Tool from a granule. Used when a granule is received in a message
    """
    if g.param_dictionary:
        result = cls(param_dictionary=ParameterDictionary.load(g.param_dictionary))
    else:
        result = cls(TaxyTool(g.taxonomy))

    result._rd = g.record_dictionary
    if result._rd.has_key(0):
        result._shp = result._rd[0].shape

    return result
Example 15: load_from_granule
def load_from_granule(cls, g):
    if isinstance(g.param_dictionary, str):
        instance = cls(stream_definition_id=g.param_dictionary, locator=g.locator)
        pdict = RecordDictionaryTool.pdict_from_stream_def(g.param_dictionary)
        instance._pdict = ParameterDictionary.load(pdict)
    else:
        instance = cls(param_dictionary=g.param_dictionary, locator=g.locator)
        instance._pdict = ParameterDictionary.load(g.param_dictionary)

    if g.domain:
        instance._shp = (g.domain[0],)

    for k, v in g.record_dictionary.iteritems():
        if v is not None:
            ptype = instance._pdict.get_context(k).param_type
            paramval = get_value_class(ptype, domain_set=instance.domain)
            paramval[:] = v
            paramval.storage._storage.flags.writeable = False
            instance._rd[k] = paramval

    return instance
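A closing note: these examples are written for Python 2; they rely on basestring, dict.has_key, dict.iteritems, xrange, and the print statement. On Python 3 the closest equivalents are str, the in operator, dict.items, range, and the print() function, respectively.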