本文整理汇总了Python中ion.services.dm.utility.granule.RecordDictionaryTool.load_from_granule方法的典型用法代码示例。如果您正苦于以下问题:Python RecordDictionaryTool.load_from_granule方法的具体用法?Python RecordDictionaryTool.load_from_granule怎么用?Python RecordDictionaryTool.load_from_granule使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ion.services.dm.utility.granule.RecordDictionaryTool的用法示例。
在下文中一共展示了RecordDictionaryTool.load_from_granule方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_array_flow_paths
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_array_flow_paths(self):
    """End-to-end check that ArrayType parameter values survive the
    publish -> ingest -> retrieve round trip for a data product.

    NOTE(review): relies on project fixtures (self.make_array_data_product,
    self.RR2, self.ph, self.data_retriever) defined elsewhere in the class.
    """
    data_product_id, stream_def_id = self.make_array_data_product()

    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
    dm = DatasetMonitor(dataset_id)
    self.addCleanup(dm.stop)

    # I need to make sure that we can fill the RDT with its values
    # Test for one timestep
    # Test for multiple timesteps
    # Publishes
    # Ingests correctly
    # Retrieves correctly

    #--------------------------------------------------------------------------------
    # Ensure that the RDT can be filled with ArrayType values
    #--------------------------------------------------------------------------------
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [0]
    rdt['temp_sample'] = [[0,1,2,3,4]]
    np.testing.assert_array_equal(rdt['temp_sample'], np.array([[0,1,2,3,4]]))

    self.ph.publish_rdt_to_data_product(data_product_id, rdt)
    # Wait (up to 10s) for ingestion to signal before retrieving.
    self.assertTrue(dm.event.wait(10))
    dm.event.clear()

    granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['temp_sample'], np.array([[0,1,2,3,4]]))

    #--------------------------------------------------------------------------------
    # Ensure that it deals with multiple values
    #--------------------------------------------------------------------------------
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [1,2,3]
    # Second row is ragged (length 1); the RDT is expected to pad it.
    rdt['temp_sample'] = [[0,1,2,3,4],[1],[5,5,5,5,5]]

    # Pad value: the parameter's fill value, falling back to float32 max
    # when no fill value is defined.
    m = rdt.fill_value('temp_sample') or np.finfo(np.float32).max
    np.testing.assert_equal(m,np.finfo(np.float32).max)
    np.testing.assert_array_equal(rdt['temp_sample'], [[0,1,2,3,4],[1,m,m,m,m],[5,5,5,5,5]])

    self.ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(dm.event.wait(10))
    dm.event.clear()

    #--------------------------------------------------------------------------------
    # Retrieve and Verify
    #--------------------------------------------------------------------------------
    # Retrieval returns the union of both publishes (4 timesteps total).
    retrieved_granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(retrieved_granule)
    np.testing.assert_array_equal(rdt['time'], np.array([0,1,2,3]))
    np.testing.assert_array_equal(rdt['temp_sample'], np.array([[0,1,2,3,4],[0,1,2,3,4],[1,m,m,m,m],[5,5,5,5,5]]))
示例2: verify_incoming
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def verify_incoming(self, m, r, s):
    """Subscriber callback: compare the incoming granule against the
    expectations stashed on ``self`` by the test, then signal completion.

    ``m`` is the granule message; ``r``/``s`` are the route and stream id
    (unused here).
    """
    incoming = RecordDictionaryTool.load_from_granule(m)
    # Each (actual, expected) pair must match what the test published.
    checks = [
        (incoming, self.rdt),
        (m.data_producer_id, self.data_producer_id),
        (m.provider_metadata_update, self.provider_metadata_update),
    ]
    for actual, expected in checks:
        self.assertEquals(actual, expected)
    self.assertNotEqual(m.creation_timestamp, None)
    # Unblock the test waiting on this event.
    self.event.set()
示例3: test_lctest_preload
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_lctest_preload(self):
    """Exercise the 'sparse_dict' parameter dictionary from the LC-test
    preload: publish one granule with sparse values set, then ten more
    carrying only time, and retrieve the dataset for manual inspection.
    """
    self.preload_lctest()

    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('sparse_dict', id_only=True)
    stream_def_id = self.create_stream_definition('sparse_example', parameter_dictionary_id=pdict_id)
    data_product_id = self.create_data_product('sparse_example', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)
    # 2208988800 shifts a Unix timestamp to the NTP epoch (1900-01-01).
    rdt['time'] = [time.time() + 2208988800]
    rdt['sparse_float'] = [3.14159265358979323]
    rdt['sparse_double'] = [2.7182818284590452353602874713526624977572470936999595]
    rdt['sparse_int'] = [131071] # 6th mersenne prime

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)

    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    # Ten more granules with only a timestamp; presumably the sparse
    # parameters carry forward — the test ends in a breakpoint rather
    # than assertions, so this is an interactive check.
    for i in xrange(10):
        dataset_monitor.event.clear()
        rdt = ParameterHelper.rdt_for_data_product(data_product_id)
        rdt['time'] = [time.time() + 2208988800]
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)

    # NOTE(review): breakpoint here appears to be a project debugging
    # helper (interactive shell), not the Python 3 builtin.
    breakpoint(locals())
示例4: test_example2_preload
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_example2_preload(self):
    """Preload example 2 (DPROD104 / vel3d), upload calibration
    coefficients, publish one velocity sample, and retrieve the dataset
    for manual inspection.
    """
    print 'preloading...'
    self.preload_example2()

    data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD104', alt_id_ns='PRE')
    data_product_id = data_product_ids[0]
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    with DirectCoverageAccess() as dca:
        dca.upload_calibration_coefficients(dataset_id, 'test_data/vel3d_coeff.csv', 'test_data/vel3d_coeff.yml')

    # Reference input vectors from ion-functions' own velocity tests.
    from ion_functions.data.test.test_vel_functions import TS, VE, VN, VU

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)
    # 2208988800 shifts a Unix timestamp to the NTP epoch (1900-01-01).
    rdt['time'] = [time.time() + 2208988800]
    rdt['velocity_east'] = [VE[0]]
    rdt['velocity_north'] = [VN[0]]
    rdt['velocity_up'] = [VU[0]]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)
    # NOTE(review): breakpoint appears to be a project debugging helper.
    breakpoint(locals())
示例5: test_example_preload
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_example_preload(self):
    """Preload example 1 (DPROD102 / SBE16), upload calibration
    coefficients, publish one raw sample, and retrieve the dataset for
    manual inspection.
    """
    print 'preloading...'
    self.preload_example1()

    data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD102', alt_id_ns='PRE')
    data_product_id = data_product_ids[0]
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    with DirectCoverageAccess() as dca:
        dca.upload_calibration_coefficients(dataset_id, 'test_data/sbe16coeffs.csv', 'test_data/sbe16coeffs.yml')

    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    rdt = ph.rdt_for_data_product(data_product_id)
    # 2208988800 shifts a Unix timestamp to the NTP epoch (1900-01-01).
    rdt['time'] = [time.time() + 2208988800]
    # Raw instrument counts — presumably converted via the calibration
    # coefficients uploaded above; confirm against the parameter functions.
    rdt['temperature'] = [248471]
    rdt['pressure'] = [528418]
    rdt['conductivity'] = [1673175]
    rdt['thermistor_temperature']=[24303]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)
    # NOTE(review): breakpoint appears to be a project debugging helper.
    breakpoint(locals())
示例6: test_execute_transform
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_execute_transform(self):
    """Drive TransformPrime._execute_transform directly with a synthetic
    L0 granule and verify every output field has one value per input
    record, then run the project's transform validation.
    """
    available_fields_in = ['time', 'lat', 'lon', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
    available_fields_out = ['time', 'lat', 'lon', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0', 'TEMPWAT_L1','CONDWAT_L1','PRESWAT_L1','PRACSAL', 'DENSITY']
    exchange_pt1 = 'xp1'
    exchange_pt2 = 'xp2'
    stream_id_in,stream_id_out,stream_route_in,stream_route_out,stream_def_in_id,stream_def_out_id = self._setup_streams(exchange_pt1, exchange_pt2, available_fields_in, available_fields_out)

    # Build a 20-record input granule: fixed lat/lon plus synthetic L0 values.
    rdt_in = RecordDictionaryTool(stream_definition_id=stream_def_in_id)
    dt = 20
    rdt_in['time'] = np.arange(dt)
    rdt_in['lat'] = [40.992469] * dt
    rdt_in['lon'] = [-71.727069] * dt
    rdt_in['TEMPWAT_L0'] = self._get_param_vals('TEMPWAT_L0', slice(None), (dt,))
    rdt_in['CONDWAT_L0'] = self._get_param_vals('CONDWAT_L0', slice(None), (dt,))
    rdt_in['PRESWAT_L0'] = self._get_param_vals('PRESWAT_L0', slice(None), (dt,))
    msg = rdt_in.to_granule()

    #pid = self.container.spawn_process('transform_stream','ion.processes.data.transforms.transform_prime','TransformPrime',{'process':{'routes':{(stream_id_in, stream_id_out):None},'stream_id':stream_id_out}})
    config = {'process':{'routes':{(stream_id_in, stream_id_out):None},'queue_name':exchange_pt1, 'publish_streams':{str(stream_id_out):stream_id_out}, 'process_type':'stream_process'}}
    pid = self.container.spawn_process('transform_stream','ion.processes.data.transforms.transform_prime','TransformPrime',config)

    # Call the transform synchronously instead of publishing to the queue.
    rdt_out = self.container.proc_manager.procs[pid]._execute_transform(msg, (stream_id_in,stream_id_out))
    #need below to wrap result in a param val object
    rdt_out = RecordDictionaryTool.load_from_granule(rdt_out.to_granule())

    # Every output parameter must have one value per input record.
    for k,v in rdt_out.iteritems():
        self.assertEqual(len(v), dt)

    self._validate_transforms(rdt_in, rdt_out)
    self.container.proc_manager.terminate_process(pid)
示例7: cb
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def cb(msg, sr, sid):
    """Subscriber callback: assert the granule arrived on the expected
    output stream with the full output field set but no values filled in,
    then signal the waiting test.

    NOTE(review): ``self``, ``stream_id_out``, ``available_fields_out``
    and ``e`` are closure variables from the enclosing test method.
    """
    self.assertEqual(sid, stream_id_out)
    rdt_out = RecordDictionaryTool.load_from_granule(msg)
    self.assertEquals(set([k for k,v in rdt_out.iteritems()]), set(available_fields_out))
    # No values were written to the output RDT, so every field is None.
    for k,v in rdt_out.iteritems():
        self.assertEquals(rdt_out[k], None)
    e.set()
示例8: test_instrument_simple
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_instrument_simple(self):
    """Instrument lifecycle smoke test: create model/agent/device, start
    the agent instance, step it to COMMAND state, acquire ten samples,
    and verify ten records were persisted in the parsed dataset.
    """
    instrument_model_id = self.create_instrument_model()
    instrument_agent_id = self.create_instrument_agent(instrument_model_id)
    instrument_device_id = self.create_instrument_device(instrument_model_id)
    instrument_agent_instance_id = self.create_instrument_agent_instance(instrument_agent_id, instrument_device_id)

    raw_dp_id, parsed_dp_id = self.create_instrument_data_products(instrument_device_id)

    self.start_instrument_agent_instance(instrument_agent_instance_id)

    agent_process_id = self.poll_instrument_agent_instance(instrument_agent_instance_id, instrument_device_id)

    agent_client = ResourceAgentClient(instrument_device_id,
                                       to_name=agent_process_id,
                                       process=FakeProcess())

    # Step the agent state machine up to COMMAND so it accepts commands.
    self.agent_state_transition(agent_client, ResourceAgentEvent.INITIALIZE, ResourceAgentState.INACTIVE)
    self.agent_state_transition(agent_client, ResourceAgentEvent.GO_ACTIVE, ResourceAgentState.IDLE)
    self.agent_state_transition(agent_client, ResourceAgentEvent.RUN, ResourceAgentState.COMMAND)

    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(parsed_dp_id)

    # Acquire ten samples, waiting for each to be ingested before the next.
    for i in xrange(10):
        monitor = DatasetMonitor(dataset_id=dataset_id)
        agent_client.execute_resource(AgentCommand(command=SBE37ProtocolEvent.ACQUIRE_SAMPLE))
        if not monitor.wait():
            raise AssertionError('Failed on the %ith granule' % i)
        monitor.stop()

    rdt = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(dataset_id))
    self.assertEquals(len(rdt), 10)
示例9: process
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def process(self, dataset_id, start_time=0, end_time=0):
    """Scan a dataset's QC-flag parameters over a time window and flag
    every timestamp whose QC value is 0.

    :param dataset_id: dataset to scan (required)
    :param start_time: Unix start of the window; defaults to
        ``run_interval + 1`` hours before now (one hour of overlap with
        the previous run)
    :param end_time: Unix end of the window; defaults to now
    :raises BadRequest: if no dataset id is given
    """
    if not dataset_id:
        raise BadRequest('No dataset id specified.')
    now = time.time()
    start_time = start_time or (now - (3600*(self.run_interval+1))) # Every N hours with 1 of overlap
    end_time = end_time or now

    # Restrict to the configured QC suffixes; fall back to all suffixes
    # when none of the configured params match.
    qc_params = [i for i in self.qc_params if i in self.qc_suffixes] or self.qc_suffixes

    self.qc_publisher = EventPublisher(event_type=OT.ParameterQCEvent)
    log.debug('Iterating over the data blocks')

    # Walk the window in chunks so each retrieval stays bounded.
    for st,et in self.chop(int(start_time),int(end_time)):
        log.debug('Chopping %s:%s', st, et)
        log.debug("Retrieving data: data_retriever.retrieve('%s', query={'start_time':%s, 'end_time':%s')", dataset_id, st, et)
        granule = self.data_retriever.retrieve(dataset_id, query={'start_time':st, 'end_time':et})
        log.debug('Retrieved Data')
        rdt = RecordDictionaryTool.load_from_granule(granule)
        # QC parameters are identified by field-name suffix.
        qc_fields = [i for i in rdt.fields if any([i.endswith(j) for j in qc_params])]
        log.debug('QC Fields: %s', qc_fields)
        for field in qc_fields:
            val = rdt[field]
            if val is None:
                continue
            if not np.all(val):
                # A zero anywhere in the flag array: flag the offending
                # timestamps via the temporal parameter.
                log.debug('Found QC Alerts')
                indexes = np.where(val==0)
                timestamps = rdt[rdt.temporal_parameter][indexes[0]]
                self.flag_qc_parameter(dataset_id, field, timestamps.tolist(),{})
示例10: check_tempsf_instrument_data_product
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def check_tempsf_instrument_data_product(self, reference_designator):
    """Publish one TMPSF-style temperature-array sample for the given
    reference designator and verify it round-trips through retrieval.

    :returns: True only if every check passed (accumulated in ``passing``
        so later checks still run after a soft failure)
    """
    passing = True
    info_list = []
    passing &= self.check_data_product_reference(reference_designator, info_list)
    if not passing: return passing

    data_product_id, stream_def_id, dataset_id = info_list.pop()

    now = time.time()
    ntp_now = now + 2208988800  # Unix -> NTP epoch (1900-01-01)

    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [ntp_now]
    # One record holding a 24-element temperature array.
    rdt['temperature'] = [[ 25.3884, 26.9384, 24.3394, 23.3401, 22.9832,
        29.4434, 26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
        15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444, 9.9876,
        10.9834, 11.0098, 5.3456, 4.2994, 4.3009]]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    passing &= self.assertTrue(dataset_monitor.event.wait(20))
    if not passing: return passing

    # Retrieve and verify the published values came back unchanged.
    granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
    passing &= self.assert_array_almost_equal(rdt['temperature'], [[
        25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
        15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
        10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
        5.3456, 4.2994, 4.3009]])
    return passing
示例11: test_derived_data_product
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_derived_data_product(self):
    """Create a parent CTD data product plus a derived (TEMPWAT) product
    sharing its parameter dictionary but restricted to time/temp; publish
    to the parent and verify the derived dataset serves only the
    restricted field set.
    """
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)

    tdom, sdom = time_series_domain()

    dp = DataProduct(name='Instrument DP', temporal_domain=tdom.dump(), spatial_domain=sdom.dump())

    dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
    self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

    self.dpsc_cli.activate_data_product_persistence(dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

    dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
    if not dataset_ids:
        raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
    dataset_id = dataset_ids[0]

    # Make the derived data product
    simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
    tempwat_dp = DataProduct(name='TEMPWAT')
    tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
    self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
    self.dpsc_cli.activate_data_product_persistence(tempwat_dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, tempwat_dp_id)

    # Check that the streams associated with the data product are persisted with
    stream_ids, _ = self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True)
    for stream_id in stream_ids:
        self.assertTrue(self.ingestclient.is_persisted(stream_id))

    stream_id = stream_ids[0]
    route = self.pubsubcli.read_stream_route(stream_id=stream_id)

    # Publish 20 records of time/temp/pressure to the parent product.
    rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)
    rdt['pressure'] = np.arange(20)

    publisher = StandaloneStreamPublisher(stream_id,route)

    # Wait for ingestion to report the dataset was modified.
    dataset_modified = Event()
    def cb(*args, **kwargs):
        dataset_modified.set()
    es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    publisher.publish(rdt.to_granule())

    self.assertTrue(dataset_modified.wait(30))

    tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
    tempwat_dataset_id = tempwat_dataset_ids[0]
    # Retrieving with the derived stream definition should expose only
    # the restricted field set (time, temp).
    granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    self.assertEquals(set(rdt.fields), set(['time','temp']))
开发者ID:MauriceManning,项目名称:coi-services,代码行数:61,代码来源:test_data_product_management_service_integration.py
示例12: verifier
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def verifier(msg, route, stream_id):
    """Subscriber callback: every non-None value in the raw record
    dictionary, and every field loaded through RecordDictionaryTool,
    must be a numpy ndarray; then signal the waiting test.

    NOTE(review): ``self`` and ``verified`` are closure variables from
    the enclosing test method.
    """
    # Raw granule payload first.
    for name, value in msg.record_dictionary.iteritems():
        if value is None:
            continue
        self.assertIsInstance(value, np.ndarray)
    # Then the values as exposed by the RDT wrapper.
    loaded = RecordDictionaryTool.load_from_granule(msg)
    for field_name in loaded.fields:
        self.assertIsInstance(loaded[field_name], np.ndarray)
    verified.set()
示例13: verify_incoming
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def verify_incoming(self, m, r, s):
    """Subscriber callback: every field of the incoming granule must match
    the RDT the test published, along with the producer id and metadata;
    then signal completion via ``self.event``.
    """
    received = RecordDictionaryTool.load_from_granule(m)
    # Field-by-field comparison against the stashed expected RDT.
    for field, values in received.iteritems():
        np.testing.assert_array_equal(values, self.rdt[field])
    self.assertEquals(m.data_producer_id, self.data_producer_id)
    self.assertEquals(m.provider_metadata_update, self.provider_metadata_update)
    self.assertNotEqual(m.creation_timestamp, None)
    self.event.set()
示例14: test_granule
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_granule(self):
    """Exercise RecordDictionaryTool round trips: publish/receive through
    pubsub, fill-value handling for partially-set arrays, and granule
    serialization/deserialization.
    """
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = self.pubsub_management.create_stream_definition('ctd', parameter_dictionary_id=pdict_id, stream_configuration={'reference_designator':"GA03FLMA-RI001-13-CTDMOG999"})
    pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
    self.addCleanup(self.pubsub_management.delete_stream_definition,stream_def_id)

    stream_id, route = self.pubsub_management.create_stream('ctd_stream', 'xp1', stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream,stream_id)
    publisher = StandaloneStreamPublisher(stream_id, route)

    # self.verify_incoming (defined on this class) checks the granule
    # contents against self.rdt and sets self.event.
    subscriber = StandaloneStreamSubscriber('sub', self.verify_incoming)
    subscriber.start()
    self.addCleanup(subscriber.stop)

    subscription_id = self.pubsub_management.create_subscription('sub', stream_ids=[stream_id])
    self.pubsub_management.activate_subscription(subscription_id)

    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.arange(10)
    rdt['temp'] = np.random.randn(10) * 10 + 30
    rdt['pressure'] = [20] * 10

    # The RDT mirrors its parameter dictionary and stream configuration.
    self.assertEquals(set(pdict.keys()), set(rdt.fields))
    self.assertEquals(pdict.temporal_parameter_name, rdt.temporal_parameter)
    self.assertEquals(rdt._stream_config['reference_designator'],"GA03FLMA-RI001-13-CTDMOG999")

    # Stash expectations for verify_incoming to compare against.
    self.rdt = rdt
    self.data_producer_id = 'data_producer'
    self.provider_metadata_update = {1:1}

    publisher.publish(rdt.to_granule(data_producer_id='data_producer', provider_metadata_update={1:1}))

    self.assertTrue(self.event.wait(10))

    self.pubsub_management.deactivate_subscription(subscription_id)
    self.pubsub_management.delete_subscription(subscription_id)

    # An all-None assignment leaves the parameter unset (reads as None)...
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.array([None,None,None])
    self.assertTrue(rdt['time'] is None)

    # ...while a partially-None array substitutes the fill value for None.
    rdt['time'] = np.array([None, 1, 2])
    self.assertEquals(rdt['time'][0], rdt.fill_value('time'))

    # Round trip through a granule built from a stream-definition object.
    stream_def_obj = self.pubsub_management.read_stream_definition(stream_def_id)
    rdt = RecordDictionaryTool(stream_definition=stream_def_obj)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)

    granule = rdt.to_granule()
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    np.testing.assert_array_equal(rdt['temp'], np.arange(20))
示例15: test_add_parameter_function
# 需要导入模块: from ion.services.dm.utility.granule import RecordDictionaryTool [as 别名]
# 或者: from ion.services.dm.utility.granule.RecordDictionaryTool import load_from_granule [as 别名]
def test_add_parameter_function(self):
    # req-tag: NEW SA - 31
    """Attach a parameter function ('add_arrays' from an egg) to an
    existing CTD data product as a data process and verify the derived
    'array_sum' parameter equals temp + pressure on retrieval; also
    verify the definition's source and URLs can be inspected.
    """
    # Make a CTDBP Data Product
    data_product_id = self.make_ctd_data_product()
    self.data_product_id = data_product_id
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)

    # Throw some data in it
    rdt = self.ph.rdt_for_data_product(data_product_id)
    rdt['time'] = np.arange(30)
    rdt['temp'] = np.arange(30)
    rdt['pressure'] = np.arange(30)

    self.ph.publish_rdt_to_data_product(data_product_id, rdt)

    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.event.clear()

    #--------------------------------------------------------------------------------
    # This is what the user defines either via preload or through the UI
    #--------------------------------------------------------------------------------
    # Where the egg is
    egg_url = self.egg_url

    # Make a parameter function
    owner = 'ion_example.add_arrays'
    func = 'add_arrays'
    arglist = ['a', 'b']
    pf = ParameterFunction(name='add_arrays', function_type=PFT.PYTHON, owner=owner, function=func, args=arglist, egg_uri=egg_url)
    pfunc_id = self.dataset_management.create_parameter_function(pf)
    #--------------------------------------------------------------------------------
    self.addCleanup(self.dataset_management.delete_parameter_function, pfunc_id)

    # Make a data process definition
    dpd = DataProcessDefinition(name='add_arrays', description='Sums two arrays')
    dpd_id = self.data_process_management.create_data_process_definition(dpd, pfunc_id)

    # TODO: assert assoc exists
    # Map function args to the product's existing parameters.
    argmap = {'a':'temp', 'b':'pressure'}
    dp_id = self.data_process_management.create_data_process(dpd_id, [data_product_id], argument_map=argmap, out_param_name='array_sum')

    # Verify that the function worked!
    # temp + pressure = arange(30) + arange(30) = 0, 2, 4, ..., 58.
    granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['array_sum'], np.arange(0,60,2))

    # Verify that we can inspect it as well
    # NOTE(review): the expected source shows a single space before
    # 'return' — possibly whitespace-mangled in extraction; confirm
    # against the actual egg contents.
    source_code = self.data_process_management.inspect_data_process_definition(dpd_id)
    self.assertEquals(source_code, 'def add_arrays(a, b):\n return a+b\n')

    url = self.data_process_management.get_data_process_definition_url(dpd_id)
    self.assertEquals(url, 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg')

    # Preloaded QC definition resolves to its GitHub source URL.
    dpd_ids, _ = self.resource_registry.find_resources(name='dataqc_spiketest', restype=RT.DataProcessDefinition, id_only=True)
    dpd_id = dpd_ids[0]
    url = self.data_process_management.get_data_process_definition_url(dpd_id)
    self.assertEquals(url, 'https://github.com/ooici/ion-functions/blob/master/ion_functions/qc/qc_functions.py')