本文整理汇总了Python中ion.services.dm.utility.test.parameter_helper.ParameterHelper类的典型用法代码示例。如果您正苦于以下问题:Python ParameterHelper类的具体用法?Python ParameterHelper怎么用?Python ParameterHelper使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了ParameterHelper类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_example_preload
def test_example_preload(self):
print 'preloading...'
self.preload_example1()
data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD102', alt_id_ns='PRE')
data_product_id = data_product_ids[0]
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
with DirectCoverageAccess() as dca:
dca.upload_calibration_coefficients(dataset_id, 'test_data/sbe16coeffs.csv', 'test_data/sbe16coeffs.yml')
ph = ParameterHelper(self.dataset_management, self.addCleanup)
rdt = ph.rdt_for_data_product(data_product_id)
rdt['time'] = [time.time() + 2208988800]
rdt['temperature'] = [248471]
rdt['pressure'] = [528418]
rdt['conductivity'] = [1673175]
rdt['thermistor_temperature']=[24303]
dataset_monitor = DatasetMonitor(dataset_id)
self.addCleanup(dataset_monitor.stop)
ph.publish_rdt_to_data_product(data_product_id, rdt)
dataset_monitor.event.wait(10)
g = self.data_retriever.retrieve(dataset_id)
rdt = RecordDictionaryTool.load_from_granule(g)
breakpoint(locals())
示例2: test_tmpsf_arrays
def test_tmpsf_arrays(self):
    """Feed a TMPSF driver-style sample dict through populate_rdt and
    publish the resulting record to a freshly created data product."""
    self.preload_tmpsf()

    param_dict_id = self.dataset_management.read_parameter_dictionary_by_name('tmpsf_sample', id_only=True)
    stream_def_id = self.create_stream_definition('tmpsf', parameter_dictionary_id=param_dict_id)
    data_product_id = self.create_data_product('tmpsf', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)

    # Raw sample in the shape emitted by the instrument driver.
    sample = {
        'quality_flag': 'ok',
        'preferred_timestamp': 'port_timestamp',
        'internal_timestamp': 3223662780.0,
        'stream_name': 'tmpsf_sample',
        'values': [
            {'value_id': 'timestamp', 'value': 3223662780.0},
            {'value_id': 'temperature', 'value': [
                21.4548, 21.0132, 20.9255, 21.1266, 21.1341, 21.5606,
                21.2156, 21.4749, 21.3044, 21.132, 21.1798, 21.2352,
                21.3488, 21.1214, 21.6426, 21.1479, 21.0069, 21.5426,
                21.3204, 21.2402, 21.3968, 21.4371, 21.0411, 21.4361]},
            {'value_id': 'battery_voltage', 'value': 11.5916},
            {'value_id': 'serial_number', 'value': '021964'}],
        'port_timestamp': 1378230448.439269,
        'driver_timestamp': 3587219248.444593,
        'pkt_format_id': 'JSON_Data',
        'pkt_version': 1}

    from ion.agents.populate_rdt import populate_rdt
    rdt = populate_rdt(rdt, [sample])

    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
    breakpoint(locals())
示例3: TestDynamicParameters
class TestDynamicParameters(DMTestCase):
    """Tests around calibration-coefficient ("dynamic") parameters."""

    def setUp(self):
        DMTestCase.setUp(self)
        self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
        cc_pdict_id = self.ph.create_simple_cc_pdict()
        self.stream_def_id = self.pubsub_management.create_stream_definition(
            'Calibration Coefficients', parameter_dictionary_id=cc_pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, self.stream_def_id)

    def test_coefficient_compatibility(self):
        """Publish data containing a calibration coefficient and verify the
        derived 'offset' parameter is available on retrieval."""
        data_product_id = self.create_data_product(
            name='Calibration Coefficient Test Data product',
            stream_def_id=self.stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = [10] * 10
        rdt['cc_coefficient'] = [2] * 10

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        granule = self.data_retriever.retrieve(dataset_id)
        retrieved = RecordDictionaryTool.load_from_granule(granule)
        # presumably offset = temp + cc_coefficient (10 + 2), derived by the
        # parameter dictionary — confirm against create_simple_cc_pdict
        np.testing.assert_array_equal(retrieved['offset'], [12] * 10)
示例4: test_example2_preload
def test_example2_preload(self):
print 'preloading...'
self.preload_example2()
data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD104', alt_id_ns='PRE')
data_product_id = data_product_ids[0]
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
with DirectCoverageAccess() as dca:
dca.upload_calibration_coefficients(dataset_id, 'test_data/vel3d_coeff.csv', 'test_data/vel3d_coeff.yml')
from ion_functions.data.test.test_vel_functions import TS, VE, VN, VU
rdt = ParameterHelper.rdt_for_data_product(data_product_id)
rdt['time'] = [time.time() + 2208988800]
rdt['velocity_east'] = [VE[0]]
rdt['velocity_north'] = [VN[0]]
rdt['velocity_up'] = [VU[0]]
dataset_monitor = DatasetMonitor(dataset_id)
self.addCleanup(dataset_monitor.stop)
ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
dataset_monitor.event.wait(10)
g = self.data_retriever.retrieve(dataset_id)
rdt = RecordDictionaryTool.load_from_granule(g)
breakpoint(locals())
示例5: test_qc_events
def test_qc_events(self):
    """Publish data through an ingested QC stream and verify that a
    ParameterQCEvent is raised identifying the offending parameter and
    timestamp."""
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    qc_pdict_id = helper.create_qc_pdict()

    stream_def_id = self.pubsub_management.create_stream_definition(
        'qc stream def', parameter_dictionary_id=qc_pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    stream_id, route = self.pubsub_management.create_stream(
        'qc stream',
        exchange_point=self.exchange_point_name,
        stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)

    ingestion_config_id = self.get_ingestion_config()
    dataset_id = self.create_dataset(qc_pdict_id)
    self.ingestion_management.persist_data_stream(
        stream_id=stream_id,
        ingestion_configuration_id=ingestion_config_id,
        dataset_id=dataset_id,
        config=DotDict())
    self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)

    publisher = StandaloneStreamPublisher(stream_id, route)
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = np.arange(10)
    rdt['temp'] = np.arange(10) * 3

    verified = Event()

    def on_qc_event(event, *args, **kwargs):
        # presumably the QC bound is tripped at index 7 (temp == 21) —
        # encoded in the QC parameter dictionary, confirm there
        self.assertEquals(event.qc_parameter, 'temp_qc')
        self.assertEquals(event.temporal_value, 7)
        verified.set()

    es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=dataset_id,
                         callback=on_qc_event, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    publisher.publish(rdt.to_granule())
    self.assertTrue(verified.wait(10))
示例6: check_tempsf_instrument_data_product
def check_tempsf_instrument_data_product(self, reference_designator):
    """Publish one TMPSF record to the data product registered under
    reference_designator and verify it round-trips through retrieval.

    Returns True when every check passes, False otherwise.
    """
    passing = True
    info_list = []
    passing &= self.check_data_product_reference(reference_designator, info_list)
    if not passing:
        return passing
    data_product_id, stream_def_id, dataset_id = info_list.pop()

    ntp_now = time.time() + 2208988800  # unix epoch -> NTP epoch
    # Single source of truth for the published values — also used below to
    # verify the retrieved granule.
    temperatures = [25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434,
                    26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
                    15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444,
                    9.9876, 10.9834, 11.0098, 5.3456, 4.2994, 4.3009]

    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [ntp_now]
    rdt['temperature'] = [temperatures]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    passing &= self.assertTrue(dataset_monitor.event.wait(20))
    if not passing:
        return passing

    granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
    passing &= self.assert_array_almost_equal(rdt['temperature'], [temperatures])
    return passing
示例7: test_serialize_compatability
def test_serialize_compatability(self):
    """Publish an extended-parsed RDT through pub/sub and verify that every
    populated value in the received message deserializes as a numpy ndarray.

    ('compatability' is a typo in the original test name; it is kept because
    renaming would change the test's public identifier.)
    """
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    # Stream definition and a stream on exchange point 'xp1'.
    stream_def_id = self.pubsub_management.create_stream_definition('ctd extended', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    stream_id, route = self.pubsub_management.create_stream('ctd1', 'xp1', stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)

    sub_id = self.pubsub_management.create_subscription('sub1', stream_ids=[stream_id])
    self.addCleanup(self.pubsub_management.delete_subscription, sub_id)

    self.pubsub_management.activate_subscription(sub_id)
    self.addCleanup(self.pubsub_management.deactivate_subscription, sub_id)

    verified = Event()

    def verifier(msg, route, stream_id):
        # Raw record-dictionary values must arrive as ndarrays (None means
        # the parameter was never filled)...
        for k, v in msg.record_dictionary.iteritems():
            if v is not None:
                self.assertIsInstance(v, np.ndarray)

        # ...and so must everything RecordDictionaryTool reconstructs from
        # the granule.
        rdt = RecordDictionaryTool.load_from_granule(msg)
        for k, v in rdt.iteritems():
            self.assertIsInstance(rdt[k], np.ndarray)
            self.assertIsInstance(v, np.ndarray)

        verified.set()

    subscriber = StandaloneStreamSubscriber('sub1', callback=verifier)
    subscriber.start()
    self.addCleanup(subscriber.stop)

    publisher = StandaloneStreamPublisher(stream_id, route)
    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    ph.fill_rdt(rdt, 10)
    publisher.publish(rdt.to_granule())

    # Delivery is asynchronous; allow a generous window before failing.
    self.assertTrue(verified.wait(60))
示例8: test_qc_interval_integration
def test_qc_interval_integration(self):
    """End-to-end check of the interval-driven QC process: publish data with
    exactly one global-range violation, launch the QC process on a timer,
    and verify it emits a ParameterQCEvent each time it fires.
    """

    # 1 need to make a dataset that only has one discrete qc violation
    # 2 Launch the process
    # 3 Setup the scheduler to run it say three times
    # 4 Get the Events and verify the data

    #--------------------------------------------------------------------------------
    # Make a dataset that has only one discrete qc violation
    #--------------------------------------------------------------------------------

    dp_id, dataset_id, stream_def_id = self.make_data_product()
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    # First value 41 is the single out-of-range sample; the rest (39) pass.
    for rdt in self.populate_vectors(stream_def_id, 1, lambda x : [41] + [39] * (x-1)):
        ph.publish_rdt_to_data_product(dp_id, rdt)
    self.assertTrue(monitor.event.wait(10)) # Give it 10 seconds to populate

    #--------------------------------------------------------------------------------
    # Launch the process
    #--------------------------------------------------------------------------------

    interval_key = uuid4().hex
    config = DotDict()
    config.process.interval_key = interval_key
    config.process.qc_params = ['glblrng_qc'] # The others are tested in other tests for completeness
    self.sync_launch(config)

    async_queue = Queue()

    def callback(event, *args, **kwargs):
        # Each firing should report exactly the one violating timestamp.
        times = event.temporal_values
        self.assertEquals(len(times), 1)
        async_queue.put(1)

    es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=dp_id, callback=callback, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    #--------------------------------------------------------------------------------
    # Setup the scheduler
    #--------------------------------------------------------------------------------

    # NOTE(review): timer_id is never cancelled or addCleanup'd — confirm
    # whether the scheduler auto-expires interval timers once end_time
    # (now + 13s) has passed, otherwise this leaks a timer resource.
    timer_id = self.scheduler_service.create_interval_timer(start_time=time.time(),
            end_time=time.time()+13,
            interval=5,
            event_origin=interval_key)

    #--------------------------------------------------------------------------------
    # Get the events and verify them
    #--------------------------------------------------------------------------------

    try:
        # The 13s window at a 5s interval should yield at least two firings.
        for i in xrange(2):
            async_queue.get(timeout=10)
    except Empty:
        raise AssertionError('QC Events not raised')
示例9: create_lookup_rdt
def create_lookup_rdt(self):
    """Return an empty RDT for a lookup-parameter stream definition bound to
    reference designator GA03FLMA-RI001-13-CTDMOG999."""
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    lookup_pdict_id = helper.create_lookups()
    stream_def_id = self.pubsub_management.create_stream_definition(
        'lookup',
        parameter_dictionary_id=lookup_pdict_id,
        stream_configuration={'reference_designator': "GA03FLMA-RI001-13-CTDMOG999"})
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    return RecordDictionaryTool(stream_definition_id=stream_def_id)
示例10: create_lookup_rdt
def create_lookup_rdt(self):
    """Return an empty RDT for a lookup-parameter stream definition with no
    stream configuration (no reference designator)."""
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    lookup_pdict_id = helper.create_lookups()
    stream_def_id = self.pubsub_management.create_stream_definition(
        'lookup', parameter_dictionary_id=lookup_pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    return RecordDictionaryTool(stream_definition_id=stream_def_id)
示例11: test_hydrophone_simulator
def test_hydrophone_simulator(self):
    """Run a HydrophoneSimulator against a simple-array data product and
    drop into an interactive breakpoint for manual inspection.
    """
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    # NOTE(review): 'crete_simple_array_pdict' looks like a typo for
    # 'create_...' — confirm the ParameterHelper method is actually spelled
    # this way before renaming anything.
    pdict_id = ph.crete_simple_array_pdict()
    stream_def_id = self.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
    data_product_id = self.create_data_product('ctd hydrophone', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)

    # Publishes a record every 4 seconds until stopped.
    s = HydrophoneSimulator(data_product_id, interval=4)
    breakpoint(locals())

    s.stop()
示例12: make_data_product
def make_data_product(self):
    """Create and activate a QC-enabled data product.

    Returns a (data_product_id, dataset_id, stream_def_id) tuple.
    """
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    qc_pdict_id = helper.create_simple_qc_pdict()
    stream_def_id = self.create_stream_definition(
        'global range',
        parameter_dictionary_id=qc_pdict_id,
        stream_configuration={'reference_designator': 'QCTEST'})
    self.populate_qc_tables()
    data_product_id = self.create_data_product('qc data product', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
    return data_product_id, dataset_id, stream_def_id
示例13: setUp
def setUp(self):
    """Create the shared ParameterHelper and the calibration-coefficient
    stream definition used by the tests in this case."""
    DMTestCase.setUp(self)
    self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
    cc_pdict_id = self.ph.create_simple_cc_pdict()
    self.stream_def_id = self.pubsub_management.create_stream_definition(
        'Calibration Coefficients', parameter_dictionary_id=cc_pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, self.stream_def_id)
示例14: run
def run(self):
    """Publish one simulated CTD record every self.interval seconds until
    self.finished is set."""
    while not self.finished.wait(self.interval):
        rdt = ParameterHelper.rdt_for_data_product(self.data_product_id)
        now = time.time()
        # Either a simple monotonically increasing counter or a real
        # NTP-epoch timestamp, depending on configuration.
        if self.simple_time:
            rdt['time'] = [self.i]
        else:
            rdt['time'] = np.array([now + 2208988800])  # unix -> NTP epoch
        rdt['temp'] = self.float_range(10, 14, np.array([now]))
        rdt['pressure'] = self.float_range(11, 12, np.array([now]))
        rdt['lat'] = [41.205]
        rdt['lon'] = [-71.74]
        rdt['conductivity'] = self.float_range(3.3, 3.5, np.array([now]))
        rdt['driver_timestamp'] = np.array([now + 2208988800])
        rdt['preferred_timestamp'] = ['driver_timestamp']
        ParameterHelper.publish_rdt_to_data_product(self.data_product_id, rdt)
        self.i += 1
示例15: test_create_dataset
def test_create_dataset(self):
    """Create a data product using an extended-parsed parameter dictionary,
    persist 100 records, then drop into a breakpoint with the coverage
    path logged for manual inspection."""
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = helper.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition(
        'example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    product = DataProduct(name='example')
    data_product_id = self.data_product_management.create_data_product(product, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = helper.get_rdt(stream_def_id)
    helper.fill_rdt(rdt, 100)
    helper.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))

    # Yield to other greenlets, had an issue with connectivity
    gevent.sleep(1)

    log.debug("--------------------------------")
    log.debug(dataset_id)
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    log.debug(coverage_path)
    log.debug("--------------------------------")
    breakpoint(locals(), globals())