本文整理汇总了Python中ion.services.dm.utility.test.parameter_helper.ParameterHelper.get_rdt方法的典型用法代码示例。如果您正苦于以下问题:Python ParameterHelper.get_rdt方法的具体用法?Python ParameterHelper.get_rdt怎么用?Python ParameterHelper.get_rdt使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类ion.services.dm.utility.test.parameter_helper.ParameterHelper
的用法示例。
在下文中一共展示了ParameterHelper.get_rdt方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_create_dataset
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_create_dataset(self):
    """Create a persisted data product, publish one granule of 100 records
    into it, then drop into an interactive breakpoint to inspect the coverage.
    """
    param_helper = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = param_helper.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    product = DataProduct(name='example')
    data_product_id = self.data_product_management.create_data_product(product, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    rdt = param_helper.get_rdt(stream_def_id)
    param_helper.fill_rdt(rdt, 100)
    param_helper.publish_rdt_to_data_product(data_product_id, rdt)
    # Wait (up to 10s) for ingestion to signal the granule was persisted
    self.assertTrue(dataset_monitor.event.wait(10))
    # Yield to other greenlets, had an issue with connectivity
    gevent.sleep(1)
    log.debug("--------------------------------")
    log.debug(dataset_id)
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    log.debug(coverage_path)
    log.debug("--------------------------------")
    breakpoint(locals(), globals())
示例2: setup_resource
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def setup_resource(self):
    """Build a persisted example data product with time-series domains,
    publish 100 records into it, and remember its dataset id as the offering.
    """
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = helper.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    temporal_dom, spatial_dom = time_series_domain()
    product = DataProduct(name='example')
    product.spatial_domain = spatial_dom.dump()
    product.temporal_domain = temporal_dom.dump()
    data_product_id = self.data_product_management.create_data_product(product, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    rdt = helper.get_rdt(stream_def_id)
    helper.fill_rdt(rdt, 100)
    helper.publish_rdt_to_data_product(data_product_id, rdt)
    # Yield to other greenlets, had an issue with connectivity
    gevent.sleep(1)
    self.offering_id = dataset_id
示例3: test_fdt_created_during
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_fdt_created_during(self):
# generate a data product and check that the FDT exists
ph = ParameterHelper(self.dataset_management, self.addCleanup)
pdict_id = ph.create_extended_parsed()
stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
dp = DataProduct(name='example')
data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
self.data_product_management.activate_data_product_persistence(data_product_id)
self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
monitor = DatasetMonitor(dataset_id)
self.addCleanup(monitor.stop)
rdt = ph.get_rdt(stream_def_id)
ph.fill_rdt(rdt, 100)
ph.publish_rdt_to_data_product(data_product_id, rdt)
self.assertTrue(monitor.event.wait(10))
gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity
print "--------------------------------"
print dataset_id
coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
print coverage_path
print "--------------------------------"
示例4: test_pydap
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_pydap(self):
    """Publish ten records to a persisted data product, then fetch the
    dataset over the PyDAP endpoint and compare every parameter against
    the published record dictionary.

    Skipped unless CFG bootstrap.use_pydap is enabled.
    """
    if not CFG.get_safe('bootstrap.use_pydap',False):
        raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    tdom, sdom = time_series_domain()
    dp = DataProduct(name='example')
    dp.spatial_domain = sdom.dump()
    dp.temporal_domain = tdom.dump()
    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt,10)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    # Wait (up to 10s) for ingestion to confirm the granule was persisted
    self.assertTrue(monitor.event.wait(10))
    gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity
    pydap_host = CFG.get_safe('server.pydap.host','localhost')
    pydap_port = CFG.get_safe('server.pydap.port',8001)
    url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)
    ds = open_url(url)
    np.testing.assert_array_equal(ds['time'][:], np.arange(10))
    untested = []
    for k,v in rdt.iteritems():
        # time was already checked above
        if k==rdt.temporal_parameter:
            continue
        context = rdt.context(k)
        if isinstance(context.param_type, QuantityType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        elif isinstance(context.param_type, ArrayType):
            # array-typed parameters come back as per-record string representations
            values = np.empty(rdt[k].shape, dtype='O')
            for i,obj in enumerate(rdt[k]):
                values[i] = str(obj)
            np.testing.assert_array_equal(ds[k][k][:][0], values)
        elif isinstance(context.param_type, ConstantType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        elif isinstance(context.param_type, CategoryType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        else:
            # collect anything uncompared so the test fails loudly below
            untested.append('%s (%s)' % (k,context.param_type))
    if untested:
        raise AssertionError('Untested parameters: %s' % untested)
示例5: test_get_data_from_FDW
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_get_data_from_FDW(self):
# generate a data product and check that the FDW can get data
ph = ParameterHelper(self.dataset_management, self.addCleanup)
pdict_id = ph.create_extended_parsed()
stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
tdom, sdom = time_series_domain()
dp = DataProduct(name='example')
dp.spatial_domain = sdom.dump()
dp.temporal_domain = tdom.dump()
data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
self.data_product_management.activate_data_product_persistence(data_product_id)
self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
monitor = DatasetMonitor(dataset_id)
self.addCleanup(monitor.stop)
rdt = ph.get_rdt(stream_def_id)
ph.fill_rdt(rdt, 100)
ph.publish_rdt_to_data_product(data_product_id, rdt)
self.assertTrue(monitor.event.wait(10))
gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity
print "--------------------------------"
print dataset_id
coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
print coverage_path
print "--------------------------------"
#verify table exists in the DB (similar to above)
# ....code...
# check that the geoserver layer exists as above
# ... code ....
# make a WMS/WFS request...somet like this (or both)
url = self.gs_host+'/geoserver/geonode/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=geonode:ooi_' + dataset_id + '_ooi&maxFeatures=1&outputFormat=csv'
r = requests.get(url)
assertTrue(r.status_code == 200)
示例6: test_create_dataset_verify_geoserver_layer
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_create_dataset_verify_geoserver_layer(self):
    """Create a persisted data product and confirm the matching layer was
    published in geoserver via its REST API.
    """
    #generate layer and check that the service created it in geoserver
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = helper.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    temporal_dom, spatial_dom = time_series_domain()
    product = DataProduct(name='example')
    product.spatial_domain = spatial_dom.dump()
    product.temporal_domain = temporal_dom.dump()
    data_product_id = self.data_product_management.create_data_product(product, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    rdt = helper.get_rdt(stream_def_id)
    helper.fill_rdt(rdt, 100)
    helper.publish_rdt_to_data_product(data_product_id, rdt)
    # Wait (up to 10s) for ingestion to confirm the granule was persisted
    self.assertTrue(dataset_monitor.event.wait(10))
    gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity
    log.debug("--------------------------------")
    log.debug(dataset_id)
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    log.debug(coverage_path)
    log.debug("--------------------------------")
    # verify that the layer exists in geoserver
    try:
        resp = requests.get(self.gs_rest_url + '/layers/ooi_' + dataset_id + '_ooi.xml', auth=(self.username, self.PASSWORD))
        self.assertTrue(resp.status_code == 200)
    except Exception as err:
        log.error("check service and layer exist...%s", err)
        self.assertTrue(False)
示例7: test_coverage_transform
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_coverage_transform(self):
    """Ingest one parsed CTD granule and verify both the raw fields and the
    derived (L1/L2) parameters round-trip through the coverage on replay.
    """
    helper = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = helper.create_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)
    ingestion_config_id = self.get_ingestion_config()
    dataset_id = self.create_dataset(pdict_id)
    self.ingestion_management.persist_data_stream(stream_id=stream_id, ingestion_configuration_id=ingestion_config_id, dataset_id=dataset_id)
    self.addCleanup(self.ingestion_management.unpersist_data_stream, stream_id, ingestion_config_id)
    publisher = StandaloneStreamPublisher(stream_id, route)
    rdt = helper.get_rdt(stream_def_id)
    helper.fill_parsed_rdt(rdt)
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    replay_granule = self.data_retriever.retrieve(dataset_id)
    replay_rdt = RecordDictionaryTool.load_from_granule(replay_granule)
    # raw fields should round-trip unchanged
    np.testing.assert_array_almost_equal(replay_rdt['time'], rdt['time'])
    np.testing.assert_array_almost_equal(replay_rdt['temp'], rdt['temp'])
    # derived parameters computed by the coverage transform
    np.testing.assert_allclose(replay_rdt['conductivity_L1'], np.array([42.914]))
    np.testing.assert_allclose(replay_rdt['temp_L1'], np.array([20.]))
    np.testing.assert_allclose(replay_rdt['pressure_L1'], np.array([3.068]))
    np.testing.assert_allclose(replay_rdt['density'], np.array([1021.7144739593881], dtype='float32'))
    np.testing.assert_allclose(replay_rdt['salinity'], np.array([30.935132729668283], dtype='float32'))
示例8: test_sparse_values
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_sparse_values(self):
    """Exercise sparse (slowly-changing) parameters: lat/lon are set in the
    first granule, changed in a second, then omitted from nine more granules;
    on replay the sparse values must back-fill every record in between.
    """
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_sparse()
    stream_def_id = self.pubsub_management.create_stream_definition('sparse', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)
    dataset_id = self.create_dataset(pdict_id)
    self.start_ingestion(stream_id,dataset_id)
    self.addCleanup(self.stop_ingestion, stream_id)
    # Publish initial granule
    # the first one has the sparse value set inside it, sets lat to 45 and lon to -71
    ntp_now = time.time() + 2208988800  # NTP epoch offset (1900 vs 1970) in seconds
    rdt = ph.get_rdt(stream_def_id)
    rdt['time'] = [ntp_now]
    rdt['internal_timestamp'] = [ntp_now]
    rdt['temp'] = [300000]
    rdt['preferred_timestamp'] = ['driver_timestamp']
    rdt['port_timestamp'] = [ntp_now]
    rdt['quality_flag'] = ['']
    rdt['lat'] = [45]
    rdt['conductivity'] = [4341400]
    rdt['driver_timestamp'] = [ntp_now]
    rdt['lon'] = [-71]
    rdt['pressure'] = [256.8]
    publisher = StandaloneStreamPublisher(stream_id, route)
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.reset()
    replay_granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
    # Check the values and make sure they're correct
    np.testing.assert_allclose(rdt_out['time'], rdt['time'])
    np.testing.assert_allclose(rdt_out['temp'], rdt['temp'])
    np.testing.assert_allclose(rdt_out['lat'], np.array([45]))
    np.testing.assert_allclose(rdt_out['lon'], np.array([-71]))
    # derived (L1/L2) parameters computed from the raw values
    np.testing.assert_allclose(rdt_out['conductivity_L1'], np.array([42.914]))
    np.testing.assert_allclose(rdt_out['temp_L1'], np.array([20.]))
    np.testing.assert_allclose(rdt_out['pressure_L1'], np.array([3.068]))
    np.testing.assert_allclose(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
    np.testing.assert_allclose(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))
    # We're going to change the lat/lon
    rdt = ph.get_rdt(stream_def_id)
    # NOTE(review): scalar assignment here (no list wrapper), unlike the other
    # time fills — presumably the RDT accepts scalars; confirm.
    rdt['time'] = time.time() + 2208988800
    rdt['lat'] = [46]
    rdt['lon'] = [-73]
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.reset()
    replay_granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
    np.testing.assert_allclose(rdt_out['time'], rdt['time'])
    # Nine more granules that omit lat/lon entirely — the sparse values
    # published above must carry forward to these records.
    for i in xrange(9):
        ntp_now = time.time() + 2208988800
        rdt['time'] = [ntp_now]
        rdt['internal_timestamp'] = [ntp_now]
        rdt['temp'] = [300000]
        rdt['preferred_timestamp'] = ['driver_timestamp']
        rdt['port_timestamp'] = [ntp_now]
        rdt['quality_flag'] = [None]
        rdt['conductivity'] = [4341400]
        rdt['driver_timestamp'] = [ntp_now]
        rdt['pressure'] = [256.8]
        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_monitor.wait())
        dataset_monitor.reset()
    replay_granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
    np.testing.assert_allclose(rdt_out['pressure'], np.array([256.8] * 10))
    # first record keeps the original position; the rest take the updated one
    np.testing.assert_allclose(rdt_out['lat'], np.array([45] + [46] * 9))
    np.testing.assert_allclose(rdt_out['lon'], np.array([-71] + [-73] * 9))
示例9: test_sparse_values
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_sparse_values(self):
    """Variant sparse-parameter test: publish a full granule, a lat/lon-only
    update, an (unwaited) second lat/lon update, then a granule without
    lat/lon, and verify only the first two positions appear on replay.
    """
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_sparse()
    stream_def_id = self.pubsub_management.create_stream_definition('sparse', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    stream_id, route = self.pubsub_management.create_stream('example', exchange_point=self.exchange_point_name, stream_definition_id=stream_def_id)
    self.addCleanup(self.pubsub_management.delete_stream, stream_id)
    dataset_id = self.create_dataset(pdict_id)
    self.start_ingestion(stream_id,dataset_id)
    self.addCleanup(self.stop_ingestion, stream_id)
    ntp_now = time.time() + 2208988800  # NTP epoch offset (1900 vs 1970) in seconds
    rdt = ph.get_rdt(stream_def_id)
    rdt['time'] = [ntp_now]
    rdt['internal_timestamp'] = [ntp_now]
    rdt['temp'] = [300000]
    rdt['preferred_timestamp'] = ['driver_timestamp']
    rdt['port_timestamp'] = [ntp_now]
    rdt['quality_flag'] = [None]
    rdt['lat'] = [45]
    rdt['conductivity'] = [4341400]
    rdt['driver_timestamp'] = [ntp_now]
    rdt['lon'] = [-71]
    rdt['pressure'] = [256.8]
    publisher = StandaloneStreamPublisher(stream_id, route)
    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.event.clear()
    replay_granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
    # raw values round-trip
    np.testing.assert_array_almost_equal(rdt_out['time'], rdt['time'])
    np.testing.assert_array_almost_equal(rdt_out['temp'], rdt['temp'])
    np.testing.assert_array_almost_equal(rdt_out['lat'], np.array([45]))
    np.testing.assert_array_almost_equal(rdt_out['lon'], np.array([-71]))
    # derived (L1/L2) parameters computed from the raw values
    np.testing.assert_array_almost_equal(rdt_out['conductivity_L1'], np.array([42.914]))
    np.testing.assert_array_almost_equal(rdt_out['temp_L1'], np.array([20.]))
    np.testing.assert_array_almost_equal(rdt_out['pressure_L1'], np.array([3.068]))
    np.testing.assert_array_almost_equal(rdt_out['density'], np.array([1021.7144739593881], dtype='float32'))
    np.testing.assert_array_almost_equal(rdt_out['salinity'], np.array([30.935132729668283], dtype='float32'))
    # sparse update: change only lat/lon
    rdt = ph.get_rdt(stream_def_id)
    rdt['lat'] = [46]
    rdt['lon'] = [-73]
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.event.clear()
    # second lat/lon-only update, deliberately NOT waited on before the next
    # publish — NOTE(review): presumably testing that an intermediate sparse
    # value is superseded; confirm intent.
    rdt = ph.get_rdt(stream_def_id)
    rdt['lat'] = [1000]
    rdt['lon'] = [3]
    publisher.publish(rdt.to_granule())
    # full record without lat/lon — sparse values must come from earlier granules
    rdt = ph.get_rdt(stream_def_id)
    # NOTE(review): reuses ntp_now from the first granule (same timestamp)
    rdt['time'] = [ntp_now]
    rdt['internal_timestamp'] = [ntp_now]
    rdt['temp'] = [300000]
    rdt['preferred_timestamp'] = ['driver_timestamp']
    rdt['port_timestamp'] = [ntp_now]
    rdt['quality_flag'] = [None]
    rdt['conductivity'] = [4341400]
    rdt['driver_timestamp'] = [ntp_now]
    rdt['pressure'] = [256.8]
    dataset_monitor.event.clear()
    publisher.publish(rdt.to_granule())
    self.assertTrue(dataset_monitor.wait())
    dataset_monitor.event.clear()
    replay_granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(replay_granule)
    # only the first two positions are expected in the replay
    np.testing.assert_array_almost_equal(rdt_out['lat'], np.array([45, 46]))
    np.testing.assert_array_almost_equal(rdt_out['lon'], np.array([-71,-73]))
示例10: test_pydap
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
def test_pydap(self):
    """Publish ten records to a persisted data product, then fetch it over
    PyDAP three times (exercising the response cache) and compare every
    parameter against the published record dictionary.
    """
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()
    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
    dp = DataProduct(name='example')
    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt,10)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.wait())
    gevent.sleep(1) # Yield to other greenlets, had an issue with connectivity
    pydap_host = CFG.get_safe('server.pydap.host','localhost')
    pydap_port = CFG.get_safe('server.pydap.port',8001)
    url = 'http://%s:%s/%s' %(pydap_host, pydap_port, data_product_id)
    for i in xrange(3): # Do it three times to test that the cache doesn't corrupt the requests/responses
        ds = open_url(url)
        np.testing.assert_array_equal(list(ds['data']['time']), np.arange(10))
        untested = []
        for k,v in rdt.iteritems():
            # time was already checked above
            if k==rdt.temporal_parameter:
                continue
            context = rdt.context(k)
            if isinstance(context.param_type, QuantityType):
                np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
            elif isinstance(context.param_type, ArrayType):
                if context.param_type.inner_encoding is None:
                    # opaque arrays are served as per-record string representations
                    # (inner loop variable renamed so it no longer clobbers the
                    # outer cache-loop index `i`)
                    values = np.empty(rdt[k].shape, dtype='O')
                    for idx,obj in enumerate(rdt[k]):
                        values[idx] = str(obj)
                    np.testing.assert_array_equal(list(ds['data'][k]), values)
                elif len(rdt[k].shape)>1:
                    # multidimensional arrays are flattened to comma-separated strings
                    values = np.empty(rdt[k].shape[0], dtype='O')
                    for idx in xrange(rdt[k].shape[0]):
                        values[idx] = ','.join(map(lambda x : str(x), rdt[k][idx].tolist()))
                    # BUG FIX: the expected values were built here but never
                    # compared, so this branch silently verified nothing.
                    np.testing.assert_array_equal(list(ds['data'][k]), values)
            elif isinstance(context.param_type, ConstantType):
                np.testing.assert_array_equal(list(ds['data'][k]), rdt[k])
            elif isinstance(context.param_type, CategoryType):
                np.testing.assert_array_equal(list(ds['data'][k]), rdt[k].astype('|S'))
            else:
                # collect anything uncompared so the test fails loudly below
                untested.append('%s (%s)' % (k,context.param_type))
        if untested:
            raise AssertionError('Untested parameters: %s' % untested)
示例11: TestDMExtended
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
class TestDMExtended(DMTestCase):
'''
ion/services/dm/test/test_dm_extended.py:TestDMExtended
'''
def setUp(self):
DMTestCase.setUp(self)
self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
@attr('UTIL')
def test_pydap_handlers(self):
pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
stream_def_id = self.create_stream_definition('ctd', parameter_dictionary_id=pdict_id)
data_product_id = self.create_data_product('ctd', stream_def_id=stream_def_id)
self.activate_data_product(data_product_id)
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
rdt = self.ph.get_rdt(stream_def_id)
rdt['time'] = np.arange(20)
rdt['temp'] = np.arange(20)
dataset_monitor = DatasetMonitor(dataset_id)
self.addCleanup(dataset_monitor.stop)
self.ph.publish_rdt_to_data_product(data_product_id,rdt)
dataset_monitor.event.wait(10)
from pydap.client import open_url
pydap_host = CFG.get_safe('server.pydap.host','localhost')
pydap_port = CFG.get_safe('server.pydap.port',8001)
url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)
ds = open_url(url)
ds['temp']['temp'][:]
def make_array_data_product(self):
pdict_id = self.ph.crete_simple_array_pdict()
stream_def_id = self.create_stream_definition('test_array_flow_paths', parameter_dictionary_id=pdict_id)
data_product_id = self.create_data_product('test_array_flow_paths', stream_def_id)
self.activate_data_product(data_product_id)
return data_product_id, stream_def_id
def preload_beta(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA'
config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
def preload_tmpsf(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA,TMPSF'
#config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
def preload_example1(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA,EXAMPLE1'
config.path = 'master'
#config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition,DataProduct'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
def preload_example2(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA,EXAMPLE2'
config.path = 'master'
#config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition,DataProduct'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
def preload_ctdpf(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "http://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA,CTDPF'
config.path = 'master'
#config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition,DataProduct'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
self.container.spawn_process('import_dataset', 'ion.processes.data.import_dataset', 'ImportDataset', {'op':'load', 'instrument':'CTDPF'})
def preload_lctest(self):
config = DotDict()
config.op = 'load'
config.loadui=True
#.........这里部分代码省略.........
示例12: TestDMExtended
# 需要导入模块: from ion.services.dm.utility.test.parameter_helper import ParameterHelper [as 别名]
# 或者: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import get_rdt [as 别名]
class TestDMExtended(DMTestCase):
'''
ion/services/dm/test/test_dm_extended.py:TestDMExtended
'''
def setUp(self):
DMTestCase.setUp(self)
self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
@attr('UTIL')
def test_pydap_handlers(self):
pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
stream_def_id = self.create_stream_definition('ctd', parameter_dictionary_id=pdict_id)
data_product_id = self.create_data_product('ctd', stream_def_id=stream_def_id)
self.activate_data_product(data_product_id)
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
rdt = self.ph.get_rdt(stream_def_id)
rdt['time'] = np.arange(20)
rdt['temp'] = np.arange(20)
dataset_monitor = DatasetMonitor(dataset_id)
self.addCleanup(dataset_monitor.stop)
self.ph.publish_rdt_to_data_product(data_product_id,rdt)
dataset_monitor.event.wait(10)
from pydap.client import open_url
pydap_host = CFG.get_safe('server.pydap.host','localhost')
pydap_port = CFG.get_safe('server.pydap.port',8001)
url = 'http://%s:%s/%s' %(pydap_host, pydap_port, dataset_id)
ds = open_url(url)
ds['temp']['temp'][:]
def make_array_data_product(self):
pdict_id = self.ph.crete_simple_array_pdict()
stream_def_id = self.create_stream_definition('test_array_flow_paths', parameter_dictionary_id=pdict_id)
data_product_id = self.create_data_product('test_array_flow_paths', stream_def_id)
self.activate_data_product(data_product_id)
return data_product_id, stream_def_id
def preload_beta(self):
config = DotDict()
config.op = 'load'
config.loadui=True
config.ui_path = "https://userexperience.oceanobservatories.org/database-exports/Candidates"
config.attachments = "res/preload/r2_ioc/attachments"
config.scenario = 'BETA'
config.categories='ParameterFunctions,ParameterDefs,ParameterDictionary'
self.container.spawn_process('preloader', 'ion.processes.bootstrap.ion_loader', 'IONLoader', config)
def create_google_dt_workflow_def(self):
# Check to see if the workflow defnition already exist
workflow_def_ids,_ = self.resource_registry.find_resources(restype=RT.WorkflowDefinition, name='Realtime_Google_DT', id_only=True)
if len(workflow_def_ids) > 0:
workflow_def_id = workflow_def_ids[0]
else:
# Build the workflow definition
workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Realtime_Google_DT',description='Convert stream data to Google Datatable')
#Add a transformation process definition
google_dt_procdef_id = self.create_google_dt_data_process_definition()
workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id)
workflow_def_obj.workflow_steps.append(workflow_step_obj)
#Create it in the resource registry
workflow_def_id = self.workflow_management.create_workflow_definition(workflow_def_obj)
return workflow_def_id
def create_google_dt_data_process_definition(self):
#First look to see if it exists and if not, then create it
dpd,_ = self.resource_registry.find_resources(restype=RT.DataProcessDefinition, name='google_dt_transform')
if len(dpd) > 0:
return dpd[0]
# Data Process Definition
dpd_obj = IonObject(RT.DataProcessDefinition,
name='google_dt_transform',
description='Convert data streams to Google DataTables',
module='ion.processes.data.transforms.viz.google_dt',
class_name='VizTransformGoogleDT')
try:
procdef_id = self.data_process_management.create_data_process_definition(dpd_obj)
except Exception as ex:
self.fail("failed to create new VizTransformGoogleDT data process definition: %s" %ex)
pdict_id = self.dataset_management.read_parameter_dictionary_by_name('google_dt', id_only=True)
# create a stream definition for the data from the
stream_def_id = self.pubsub_management.create_stream_definition(name='VizTransformGoogleDT', parameter_dictionary_id=pdict_id)
self.data_process_management.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id, binding='google_dt' )
return procdef_id
@attr('UTIL')
def test_dm_realtime_visualization(self):
self.preload_beta()
#.........这里部分代码省略.........