本文整理汇总了Python中ion.services.dm.utility.granule.RecordDictionaryTool.to_granule方法的典型用法代码示例。如果您正苦于以下问题:Python RecordDictionaryTool.to_granule方法的具体用法?Python RecordDictionaryTool.to_granule怎么用?Python RecordDictionaryTool.to_granule使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ion.services.dm.utility.granule.RecordDictionaryTool的用法示例。
在下文中一共展示了RecordDictionaryTool.to_granule方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_granule
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_granule(self):
    """End-to-end exercise of RecordDictionaryTool.to_granule over a live stream.

    Builds a stream definition from the 'ctd_parsed_param_dict' parameter
    dictionary, publishes a granule and verifies delivery, then checks the
    RDT's fill-value handling and granule round-trip serialization.
    """
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = self.pubsub_management.create_stream_definition('ctd', parameter_dictionary_id=pdict_id, stream_configuration={'reference_designator':"GA03FLMA-RI001-13-CTDMOG999"})
    pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
    self.addCleanup(self.pubsub_management.delete_stream_definition,stream_def_id)
    stream_id, route = self.pubsub_management.create_stream('ctd_stream', 'xp1', stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream,stream_id)
    publisher = StandaloneStreamPublisher(stream_id, route)
    # self.verify_incoming is expected to set self.event when the published
    # granule arrives (it compares against self.rdt and the metadata below).
    subscriber = StandaloneStreamSubscriber('sub', self.verify_incoming)
    subscriber.start()
    self.addCleanup(subscriber.stop)
    subscription_id = self.pubsub_management.create_subscription('sub', stream_ids=[stream_id])
    self.pubsub_management.activate_subscription(subscription_id)
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.arange(10)
    rdt['temp'] = np.random.randn(10) * 10 + 30
    rdt['pressure'] = [20] * 10
    # The RDT exposes every field of the parameter dictionary and the same
    # temporal parameter and stream configuration as the definition.
    self.assertEquals(set(pdict.keys()), set(rdt.fields))
    self.assertEquals(pdict.temporal_parameter_name, rdt.temporal_parameter)
    self.assertEquals(rdt._stream_config['reference_designator'],"GA03FLMA-RI001-13-CTDMOG999")
    # Stash what verify_incoming needs to compare against.
    self.rdt = rdt
    self.data_producer_id = 'data_producer'
    self.provider_metadata_update = {1:1}
    publisher.publish(rdt.to_granule(data_producer_id='data_producer', provider_metadata_update={1:1}))
    self.assertTrue(self.event.wait(10))
    self.pubsub_management.deactivate_subscription(subscription_id)
    self.pubsub_management.delete_subscription(subscription_id)
    # Fill-value semantics: an all-None assignment leaves the field unset...
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.array([None,None,None])
    self.assertTrue(rdt['time'] is None)
    # ...while a partially-None array substitutes the field's fill value.
    rdt['time'] = np.array([None, 1, 2])
    self.assertEquals(rdt['time'][0], rdt.fill_value('time'))
    # An RDT can also be built from a stream definition *object*, and its
    # granule reloads losslessly via load_from_granule.
    stream_def_obj = self.pubsub_management.read_stream_definition(stream_def_id)
    rdt = RecordDictionaryTool(stream_definition=stream_def_obj)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)
    granule = rdt.to_granule()
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    np.testing.assert_array_equal(rdt['temp'], np.arange(20))
示例2: get_last_values
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def get_last_values(cls, dataset_id, number_of_points):
    """Return a granule with the last ``number_of_points`` records of a dataset.

    :param dataset_id: id of the dataset whose coverage is read
    :param number_of_points: how many trailing records to return; clamped to
        the number of timesteps actually present in the coverage
    :return: a granule; empty (schema only) when the coverage has no timesteps

    Fix over the original: the coverage was leaked on the empty-coverage early
    return (and when ``_coverage_to_granule`` raised) — it is now always closed
    via try/finally.
    """
    coverage = DatasetManagementService._get_coverage(dataset_id, mode='r')
    try:
        if coverage.num_timesteps < number_of_points:
            if coverage.num_timesteps == 0:
                # Nothing stored yet: return an empty granule that still
                # carries the dataset's parameter dictionary.
                rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
                return rdt.to_granule()
            number_of_points = coverage.num_timesteps
        # Negative tdoa slice selects the trailing number_of_points records.
        rdt = cls._coverage_to_granule(coverage, tdoa=slice(-number_of_points, None))
    finally:
        coverage.close(timeout=5)
    return rdt.to_granule()
示例3: test_serialize_compatability
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_serialize_compatability(self):
    """Publish a granule built from extended parameter types and verify that
    every value the subscriber receives deserializes to a numpy ndarray,
    both in the raw granule payload and after load_from_granule."""
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('ctd extended', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    stream_id, route = self.pubsub_management.create_stream('ctd1', 'xp1', stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)
    sub_id = self.pubsub_management.create_subscription('sub1', stream_ids=[stream_id])
    self.addCleanup(self.pubsub_management.delete_subscription, sub_id)
    self.pubsub_management.activate_subscription(sub_id)
    self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id)
    verified = Event()
    def verifier(msg, route, stream_id):
        # Raw granule payload: every populated value must be an ndarray.
        for k,v in msg.record_dictionary.iteritems():
            if v is not None:
                self.assertIsInstance(v, np.ndarray)
        # Same guarantee after reloading the granule into an RDT.
        rdt = RecordDictionaryTool.load_from_granule(msg)
        for k,v in rdt.iteritems():
            self.assertIsInstance(rdt[k], np.ndarray)
            self.assertIsInstance(v, np.ndarray)
        # Signal the main test thread that verification completed.
        verified.set()
    subscriber = StandaloneStreamSubscriber('sub1', callback=verifier)
    subscriber.start()
    self.addCleanup(subscriber.stop)
    publisher = StandaloneStreamPublisher(stream_id,route)
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    ph.fill_rdt(rdt,10)
    publisher.publish(rdt.to_granule())
    self.assertTrue(verified.wait(60))
示例4: test_execute_transform
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_execute_transform(self):
    """Call TransformPrime's _execute_transform directly with a parsed L0
    granule and validate the derived L1/PRACSAL/DENSITY outputs."""
    available_fields_in = ['time', 'lat', 'lon', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
    available_fields_out = ['time', 'lat', 'lon', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0', 'TEMPWAT_L1','CONDWAT_L1','PRESWAT_L1','PRACSAL', 'DENSITY']
    exchange_pt1 = 'xp1'
    exchange_pt2 = 'xp2'
    stream_id_in,stream_id_out,stream_route_in,stream_route_out,stream_def_in_id,stream_def_out_id = self._setup_streams(exchange_pt1, exchange_pt2, available_fields_in, available_fields_out)
    # Build a 20-sample input granule with synthetic L0 values.
    rdt_in = RecordDictionaryTool(stream_definition_id=stream_def_in_id)
    dt = 20
    rdt_in['time'] = np.arange(dt)
    rdt_in['lat'] = [40.992469] * dt
    rdt_in['lon'] = [-71.727069] * dt
    rdt_in['TEMPWAT_L0'] = self._get_param_vals('TEMPWAT_L0', slice(None), (dt,))
    rdt_in['CONDWAT_L0'] = self._get_param_vals('CONDWAT_L0', slice(None), (dt,))
    rdt_in['PRESWAT_L0'] = self._get_param_vals('PRESWAT_L0', slice(None), (dt,))
    msg = rdt_in.to_granule()
    #pid = self.container.spawn_process('transform_stream','ion.processes.data.transforms.transform_prime','TransformPrime',{'process':{'routes':{(stream_id_in, stream_id_out):None},'stream_id':stream_id_out}})
    config = {'process':{'routes':{(stream_id_in, stream_id_out):None},'queue_name':exchange_pt1, 'publish_streams':{str(stream_id_out):stream_id_out}, 'process_type':'stream_process'}}
    pid = self.container.spawn_process('transform_stream','ion.processes.data.transforms.transform_prime','TransformPrime',config)
    # Invoke the transform synchronously instead of publishing over the exchange.
    rdt_out = self.container.proc_manager.procs[pid]._execute_transform(msg, (stream_id_in,stream_id_out))
    # Round-trip through a granule so the results are wrapped in param value objects.
    rdt_out = RecordDictionaryTool.load_from_granule(rdt_out.to_granule())
    for k,v in rdt_out.iteritems():
        self.assertEqual(len(v), dt)
    self._validate_transforms(rdt_in, rdt_out)
    self.container.proc_manager.terminate_process(pid)
示例5: test_execute_advanced_transform
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_execute_advanced_transform(self):
    """Run a transform across L0-L2 with stream definitions that include
    available fields, and verify the computed density on the output stream."""
    streams = self.setup_advanced_transform()
    in_stream_id, in_stream_def_id = streams[0]
    out_stream_id, out_stream_defs_id = streams[1]

    validation_event = Event()

    def validator(msg, route, stream_id):
        # Fire the event only when the computed density matches expectation.
        received = RecordDictionaryTool.load_from_granule(msg)
        if np.allclose(received['rho'], np.array([1001.0055034])):
            validation_event.set()

    self.setup_validator(validator)

    in_route = self.pubsub_management.read_stream_route(in_stream_id)
    stream_publisher = StandaloneStreamPublisher(in_stream_id, in_route)

    # Single L0 sample with a fixed location.
    sample = RecordDictionaryTool(stream_definition_id=in_stream_def_id)
    sample['time'] = [0]
    sample['lat'] = [45]
    sample['lon'] = [-71]
    sample['TEMPWAT_L0'] = [280000]
    sample['CONDWAT_L0'] = [100000]
    sample['PRESWAT_L0'] = [2789]

    stream_publisher.publish(sample.to_granule())
    self.assertTrue(validation_event.wait(2))
示例6: publish_to_data_product
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def publish_to_data_product(self, data_product_id):
    """Publish one synthetic CTD record onto the stream of the given data product."""
    stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True)
    self.assertTrue(len(stream_ids))
    stream_id = stream_ids.pop()

    stream_route = self.pubsub_management.read_stream_route(stream_id)
    stream_def_id = self.pubsub_management.read_stream_definition(stream_id=stream_id)._id
    stream_publisher = StandaloneStreamPublisher(stream_id, stream_route)

    record = RecordDictionaryTool(stream_definition_id=stream_def_id)
    # NTP epoch offset. Do not use in production, this is a loose translation.
    ntp_now = time.time() + 2208988800

    # Timestamps
    record['time'] = [ntp_now]
    record['internal_timestamp'] = [ntp_now]
    record['driver_timestamp'] = [ntp_now]
    record['port_timestamp'] = [ntp_now]
    record['preferred_timestamp'] = ['driver_timestamp']
    # Location
    record['lat'] = [45]
    record['lon'] = [-71]
    # Science values
    record['temp'] = [300000]
    record['conductivity'] = [4341400]
    record['pressure'] = [256.8]
    record['quality_flag'] = [None]

    stream_publisher.publish(record.to_granule())
示例7: execute_retrieve
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def execute_retrieve(self):
    """
    execute_retrieve Executes a retrieval and returns the result
    as a value in lieu of publishing it on a stream.

    :return: a granule with the retrieved records; empty (schema only)
        when the coverage holds no timesteps
    :raises BadRequest: when the coverage cannot be read

    Fixes over the original: ``coverage`` was unbound in the ``finally``
    clause whenever ``_get_coverage`` itself raised, turning the real error
    into a NameError; and the bare ``except:`` also swallowed
    SystemExit/KeyboardInterrupt.
    """
    coverage = None  # bound up front so the finally clause is always safe
    try:
        coverage = DatasetManagementService._get_coverage(self.dataset_id, mode="r")
        if coverage.num_timesteps == 0:
            log.info("Reading from an empty coverage")
            # Empty coverage: return an empty granule with the right schema.
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        else:
            rdt = self._coverage_to_granule(
                coverage=coverage,
                start_time=self.start_time,
                end_time=self.end_time,
                stride_time=self.stride_time,
                parameters=self.parameters,
                tdoa=self.tdoa,
            )
    except Exception:
        log.exception("Problems reading from the coverage")
        raise BadRequest("Problems reading from the coverage")
    finally:
        # Close only if the coverage was actually obtained.
        if coverage is not None:
            coverage.close(timeout=5)
    return rdt.to_granule()
示例8: retrieve_oob
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def retrieve_oob(cls, dataset_id='', query=None, delivery_format=''):
    """Out-of-band retrieval: read a dataset's coverage directly and return
    the data as a granule.

    :param dataset_id: id of the dataset whose coverage is read
    :param query: optional dict of retrieval bounds (start_time, end_time,
        stride_time, parameters, tdoa, sort_parameter)
    :param delivery_format: stream definition id used to shape the output
    :raises BadRequest: when the coverage is missing or cannot be read
    """
    query = query or {}
    coverage = None
    try:
        coverage = cls._get_coverage(dataset_id)
        if coverage is None:
            raise BadRequest('no such coverage')
        if isinstance(coverage, SimplexCoverage) and coverage.is_empty():
            log.info('Reading from an empty coverage')
            # Empty coverage: return an empty granule that still carries the schema.
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        else:
            args = {
                'start_time' : query.get('start_time', None),
                'end_time' : query.get('end_time', None),
                'stride_time' : query.get('stride_time', None),
                'parameters' : query.get('parameters', None),
                'stream_def_id' : delivery_format,
                'tdoa' : query.get('tdoa', None),
                'sort_parameter' : query.get('sort_parameter', None)
            }
            rdt = ReplayProcess._cov2granule(coverage=coverage, **args)
    except Exception as e:
        # NOTE(review): 'e' is unused; the traceback is logged via exc_info.
        # On any failure (including the BadRequest raised above), evict the
        # cached coverage and log every data product referencing this dataset.
        cls._eject_cache(dataset_id)
        data_products, _ = Container.instance.resource_registry.find_subjects(object=dataset_id, predicate=PRED.hasDataset, subject_type=RT.DataProduct)
        for data_product in data_products:
            log.error("Data Product %s (%s) had issues reading from the coverage model\nretrieve_oob(dataset_id='%s', query=%s, delivery_format=%s)", data_product.name, data_product._id, dataset_id, query, delivery_format)
        log.error("Problems reading from the coverage", exc_info=True)
        raise BadRequest('Problems reading from the coverage')
    # NOTE(review): the coverage appears to be cached by _get_coverage and is
    # deliberately left open here — confirm against the cache implementation.
    return rdt.to_granule()
示例9: test_derived_data_product
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_derived_data_product(self):
    """Create a parent CTD data product and a derived (filtered) TEMPWAT
    product, publish to the parent, and verify the derived dataset contains
    only the filtered fields with the published values."""
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)
    tdom, sdom = time_series_domain()
    dp = DataProduct(name='Instrument DP', temporal_domain=tdom.dump(), spatial_domain=sdom.dump())
    dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
    self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)
    dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
    if not dataset_ids:
        raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
    dataset_id = dataset_ids[0]
    # Make the derived data product: same parameter dictionary, but only
    # 'time' and 'temp' exposed via available_fields.
    simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
    tempwat_dp = DataProduct(name='TEMPWAT')
    tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
    self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
    self.dpsc_cli.activate_data_product_persistence(tempwat_dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, tempwat_dp_id)
    # Check that the streams associated with the data product are persisted.
    stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True)
    for stream_id in stream_ids:
        self.assertTrue(self.ingestclient.is_persisted(stream_id))
    stream_id = stream_ids[0]
    route = self.pubsubcli.read_stream_route(stream_id=stream_id)
    # Publish 20 samples of time/temp/pressure to the parent product's stream.
    rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)
    rdt['pressure'] = np.arange(20)
    publisher = StandaloneStreamPublisher(stream_id,route)
    # Wait for the ingestion pipeline to signal the dataset was modified.
    dataset_modified = Event()
    def cb(*args, **kwargs):
        dataset_modified.set()
    es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_modified.wait(30))
    # Retrieve from the derived dataset: only the filtered fields come back.
    tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
    tempwat_dataset_id = tempwat_dataset_ids[0]
    granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    self.assertEquals(set(rdt.fields), set(['time','temp']))
开发者ID:MauriceManning,项目名称:coi-services,代码行数:61,代码来源:test_data_product_management_service_integration.py
示例10: get_granule
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def get_granule(self, time=None, pd=None):
    """Build a granule of ``message_size`` identical samples for the given
    time, using this instance's simulated location and value.

    :param time: passed through to get_location/get_value
    :param pd: parameter dictionary used to construct the record dictionary
    :return: the serialized granule
    """
    lat, lon, _ = self.get_location(time)
    sample = self.get_value(time)
    size = self.message_size

    rdt = RecordDictionaryTool(pd)
    rdt['salinity'] = array([sample] * size)
    rdt['lat'] = array([lat] * size)
    rdt['lon'] = array([lon] * size)
    return rdt.to_granule()
示例11: test_array_visualization
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_array_visualization(self):
    """Publish a granule containing array-typed parameters, run it through
    the Google DataTable visualization transform, and compare the produced
    table against the exact expected rows and column descriptions."""
    data_product_id, stream_def_id = self.make_array_data_product()
    # Make a granule with an array type, give it a few values
    # Send it to google_dt transform, verify output
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.arange(2208988800, 2208988810)
    rdt['temp_sample'] = np.arange(10*4).reshape(10,4)
    rdt['cond_sample'] = np.arange(10*4).reshape(10,4)
    granule = rdt.to_granule()
    # Wait for the granule to be persisted before running the transform.
    dataset_monitor = DatasetMonitor(self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id))
    self.addCleanup(dataset_monitor.stop)
    self.ph.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)
    gdt_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('google_dt',id_only=True)
    gdt_stream_def = self.create_stream_definition('gdt', parameter_dictionary_id=gdt_pdict_id)
    gdt_data_granule = VizTransformGoogleDTAlgorithm.execute(granule, params=gdt_stream_def)
    rdt = RecordDictionaryTool.load_from_granule(gdt_data_granule)
    # Expected table: one row per timestep; columns are time followed by the
    # expanded array components (temp_sample, temp_offset, cond_sample).
    testval = {'data_content': [
        [0.0 , 0.0 , 1.0 , 2.0 , 3.0 , 0.0 , 2.0 , 4.0 , 6.0 , 0.0 , 1.0 , 2.0 , 3.0] ,
        [1.0 , 4.0 , 5.0 , 6.0 , 7.0 , 8.0 , 10.0 , 12.0 , 14.0 , 4.0 , 5.0 , 6.0 , 7.0] ,
        [2.0 , 8.0 , 9.0 , 10.0 , 11.0 , 16.0 , 18.0 , 20.0 , 22.0 , 8.0 , 9.0 , 10.0 , 11.0] ,
        [3.0 , 12.0 , 13.0 , 14.0 , 15.0 , 24.0 , 26.0 , 28.0 , 30.0 , 12.0 , 13.0 , 14.0 , 15.0] ,
        [4.0 , 16.0 , 17.0 , 18.0 , 19.0 , 32.0 , 34.0 , 36.0 , 38.0 , 16.0 , 17.0 , 18.0 , 19.0] ,
        [5.0 , 20.0 , 21.0 , 22.0 , 23.0 , 40.0 , 42.0 , 44.0 , 46.0 , 20.0 , 21.0 , 22.0 , 23.0] ,
        [6.0 , 24.0 , 25.0 , 26.0 , 27.0 , 48.0 , 50.0 , 52.0 , 54.0 , 24.0 , 25.0 , 26.0 , 27.0] ,
        [7.0 , 28.0 , 29.0 , 30.0 , 31.0 , 56.0 , 58.0 , 60.0 , 62.0 , 28.0 , 29.0 , 30.0 , 31.0] ,
        [8.0 , 32.0 , 33.0 , 34.0 , 35.0 , 64.0 , 66.0 , 68.0 , 70.0 , 32.0 , 33.0 , 34.0 , 35.0] ,
        [9.0 , 36.0 , 37.0 , 38.0 , 39.0 , 72.0 , 74.0 , 76.0 , 78.0 , 36.0 , 37.0 , 38.0 , 39.0]] ,
        'data_description': [('time', 'number', 'time'),
            ('temp_sample[0]', 'number', 'temp_sample[0]', {'precision': '5'}),
            ('temp_sample[1]', 'number', 'temp_sample[1]', {'precision': '5'}),
            ('temp_sample[2]', 'number', 'temp_sample[2]', {'precision': '5'}),
            ('temp_sample[3]', 'number', 'temp_sample[3]', {'precision': '5'}),
            ('temp_offset[0]', 'number', 'temp_offset[0]', {'precision': '5'}),
            ('temp_offset[1]', 'number', 'temp_offset[1]', {'precision': '5'}),
            ('temp_offset[2]', 'number', 'temp_offset[2]', {'precision': '5'}),
            ('temp_offset[3]', 'number', 'temp_offset[3]', {'precision': '5'}),
            ('cond_sample[0]', 'number', 'cond_sample[0]', {'precision': '5'}),
            ('cond_sample[1]', 'number', 'cond_sample[1]', {'precision': '5'}),
            ('cond_sample[2]', 'number', 'cond_sample[2]', {'precision': '5'}),
            ('cond_sample[3]', 'number', 'cond_sample[3]', {'precision': '5'})],
        'viz_product_type': 'google_dt'}
    self.assertEquals(rdt['google_dt_components'][0], testval)
示例12: test_transform_prime_no_available_fields
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_transform_prime_no_available_fields(self):
    """Run TransformPrime end-to-end over the exchange with empty
    available_fields on both streams and verify the transformed granule
    contains no populated values."""
    available_fields_in = []
    available_fields_out = []
    exchange_pt1 = 'xp1'
    exchange_pt2 = 'xp2'
    stream_id_in,stream_id_out,stream_route_in,stream_route_out,stream_def_in_id,stream_def_out_id = self._setup_streams(exchange_pt1, exchange_pt2, available_fields_in, available_fields_out)
    # Launch the transform process.
    config = {'process':{'routes':{(stream_id_in, stream_id_out):None},'queue_name':exchange_pt1, 'publish_streams':{str(stream_id_out):stream_id_out}, 'process_type':'stream_process'}}
    pid = self.container.spawn_process('transform_stream','ion.processes.data.transforms.transform_prime','TransformPrime',config)
    # Create a publisher bound to the transform's input queue.
    publisher = StandaloneStreamPublisher(stream_id_in, stream_route_in)
    self.container.proc_manager.procs[pid].subscriber.xn.bind(stream_route_in.routing_key, publisher.xp)
    # Build the input data (fields are assigned even though none are available).
    rdt_in = RecordDictionaryTool(stream_definition_id=stream_def_in_id)
    dt = 20
    rdt_in['time'] = np.arange(dt)
    rdt_in['lat'] = [40.992469] * dt
    rdt_in['lon'] = [-71.727069] * dt
    rdt_in['TEMPWAT_L0'] = self._get_param_vals('TEMPWAT_L0', slice(None), (dt,))
    rdt_in['CONDWAT_L0'] = self._get_param_vals('CONDWAT_L0', slice(None), (dt,))
    rdt_in['PRESWAT_L0'] = self._get_param_vals('PRESWAT_L0', slice(None), (dt,))
    msg = rdt_in.to_granule()
    # Publish the granule to the transform and have it forward to a subscriber,
    # which validates the transformed data.
    e = gevent.event.Event()
    def cb(msg, sr, sid):
        self.assertEqual(sid, stream_id_out)
        rdt_out = RecordDictionaryTool.load_from_granule(msg)
        # With no available fields, the output keys match and every value is None.
        self.assertEquals(set([k for k,v in rdt_out.iteritems()]), set(available_fields_out))
        for k,v in rdt_out.iteritems():
            self.assertEquals(rdt_out[k], None)
        e.set()
    sub = StandaloneStreamSubscriber('stream_subscriber', cb)
    sub.xn.bind(stream_route_out.routing_key, getattr(self.container.proc_manager.procs[pid], stream_id_out).xp)
    self.addCleanup(sub.stop)
    sub.start()
    # Publish the message to the transform.
    publisher.publish(msg)
    # Wait until the subscriber receives the transformed message.
    self.assertTrue(e.wait(4))
示例13: test_filter
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def test_filter(self):
    """Verify that available_fields restricts an RDT to the listed parameters
    and that the restriction survives a granule round trip."""
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    filtered_stream_def_id = self.pubsub_management.create_stream_definition('filtered', parameter_dictionary_id=pdict_id, available_fields=['time', 'temp'])
    self.addCleanup(self.pubsub_management.delete_stream_definition, filtered_stream_def_id)

    source = RecordDictionaryTool(stream_definition_id=filtered_stream_def_id)
    self.assertEquals(source._available_fields, ['time', 'temp'])

    source['time'] = np.arange(20)
    source['temp'] = np.arange(20)
    # A field outside available_fields must be rejected.
    with self.assertRaises(KeyError):
        source['pressure'] = np.arange(20)

    restored = RecordDictionaryTool.load_from_granule(source.to_granule())
    self.assertEquals(source._available_fields, restored._available_fields)
    self.assertEquals(source.fields, restored.fields)
    for field, value in source.iteritems():
        self.assertTrue(np.array_equal(restored[field], value))
示例14: retrieve_oob
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def retrieve_oob(cls, dataset_id='', query=None, delivery_format=''):
    """Out-of-band retrieval: read a dataset's coverage directly and return
    the data as a granule.

    :param dataset_id: id of the dataset whose coverage is read
    :param query: optional dict of retrieval bounds (start_time, end_time,
        stride_time, parameters, tdoa)
    :param delivery_format: stream definition id used to shape the output
    :raises BadRequest: when the coverage is missing or cannot be read

    Fix over the original: the bare ``except:`` also caught SystemExit and
    KeyboardInterrupt, ejecting the cache and masking shutdown signals as a
    BadRequest; it is narrowed to ``except Exception``.
    """
    query = query or {}
    coverage = None
    try:
        coverage = cls._get_coverage(dataset_id)
        if coverage is None:
            raise BadRequest('no such coverage')
        if coverage.num_timesteps == 0:
            log.info('Reading from an empty coverage')
            # Empty coverage: return an empty granule with the right schema.
            rdt = RecordDictionaryTool(param_dictionary=coverage.parameter_dictionary)
        else:
            rdt = ReplayProcess._coverage_to_granule(coverage=coverage,
                                                     start_time=query.get('start_time', None),
                                                     end_time=query.get('end_time', None),
                                                     stride_time=query.get('stride_time', None),
                                                     parameters=query.get('parameters', None),
                                                     stream_def_id=delivery_format,
                                                     tdoa=query.get('tdoa', None))
    except Exception:
        # Evict the (presumably cached) coverage and fail the request; the
        # BadRequest raised above is also funneled through this handler,
        # matching the original behavior.
        cls._eject_cache(dataset_id)
        log.exception('Problems reading from the coverage')
        raise BadRequest('Problems reading from the coverage')
    return rdt.to_granule()
示例15: rdt_to_granule
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import to_granule [as 别名]
def rdt_to_granule(self, context, value_array, comp_val=None):
    """Round-trip ``value_array`` through a granule under parameter ``context``
    and assert the reloaded value equals ``comp_val`` (or the input itself
    when ``comp_val`` is None)."""
    param_dict = ParameterDictionary()
    param_dict.add_context(context)

    source = RecordDictionaryTool(param_dictionary=param_dict)
    source["test"] = value_array

    restored = RecordDictionaryTool.load_from_granule(source.to_granule())
    result = restored["test"]
    expected = value_array if comp_val is None else comp_val

    if isinstance(expected, basestring):
        # Strings compare directly; arrays need element-wise comparison.
        self.assertEquals(expected, result)
    else:
        np.testing.assert_array_equal(expected, result)