This page collects typical usage examples of the Python method ion.services.dm.utility.test.parameter_helper.ParameterHelper.publish_rdt_to_data_product. If you are unsure what ParameterHelper.publish_rdt_to_data_product does or how to call it, the curated examples below may help; you can also look further into the containing class, ion.services.dm.utility.test.parameter_helper.ParameterHelper.
The 15 code examples below are sorted by popularity by default. You can vote for the examples you like or find useful; your votes help the system recommend better Python examples.
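Nearly every example below follows the same pattern: build a RecordDictionaryTool (RDT) matching the data product's stream definition, fill it with values, hand it to publish_rdt_to_data_product, and wait on a DatasetMonitor before retrieving the persisted granule. The following is a minimal sketch of that pattern in isolation, assuming a DMTestCase-style test with the usual fixtures (self.dataset_management, self.data_retriever, self.addCleanup) and an already-created, persisted data product. The helper name publish_and_verify, the parameter names and values, and the import paths for DatasetMonitor and RecordDictionaryTool are illustrative assumptions, not taken from the examples themselves.

import time
import numpy as np

from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Assumed import paths; adjust to wherever DatasetMonitor and
# RecordDictionaryTool live in your checkout.
from ion.services.dm.test.dm_test_case import DatasetMonitor
from ion.services.dm.utility.granule import RecordDictionaryTool

def publish_and_verify(self, data_product_id, dataset_id):
    # Assumes data_product_id/dataset_id exist and persistence is active.
    ph = ParameterHelper(self.dataset_management, self.addCleanup)

    rdt = ph.rdt_for_data_product(data_product_id)    # RDT matching the product's stream definition
    rdt['time'] = [time.time() + 2208988800]          # seconds since the NTP epoch, as in the examples below
    rdt['temperature'] = [10.0]                       # illustrative parameter; depends on the parameter dictionary

    dataset_monitor = DatasetMonitor(dataset_id)      # signals when the granule has been ingested
    self.addCleanup(dataset_monitor.stop)

    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(dataset_monitor.event.wait(10))   # give ingestion up to ten seconds

    granule = self.data_retriever.retrieve(dataset_id)
    rdt_out = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_almost_equal(rdt_out['temperature'], [10.0])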
Example 1: test_example_preload
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_example_preload(self):
    print 'preloading...'
    self.preload_example1()

    data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD102', alt_id_ns='PRE')
    data_product_id = data_product_ids[0]
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    with DirectCoverageAccess() as dca:
        dca.upload_calibration_coefficients(dataset_id, 'test_data/sbe16coeffs.csv', 'test_data/sbe16coeffs.yml')

    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    rdt = ph.rdt_for_data_product(data_product_id)
    rdt['time'] = [time.time() + 2208988800]
    rdt['temperature'] = [248471]
    rdt['pressure'] = [528418]
    rdt['conductivity'] = [1673175]
    rdt['thermistor_temperature'] = [24303]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)
    breakpoint(locals())
Example 2: test_fdt_created_during
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_fdt_created_during(self):
    # generate a data product and check that the FDT exists
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    dp = DataProduct(name='example')
    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 100)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))
    gevent.sleep(1)  # Yield to other greenlets, had an issue with connectivity

    print "--------------------------------"
    print dataset_id
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    print coverage_path
    print "--------------------------------"
Example 3: test_create_dataset
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_create_dataset(self):
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    dp = DataProduct(name='example')
    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 100)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))
    # Yield to other greenlets, had an issue with connectivity
    gevent.sleep(1)

    log.debug("--------------------------------")
    log.debug(dataset_id)
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    log.debug(coverage_path)
    log.debug("--------------------------------")

    breakpoint(locals(), globals())
Example 4: TestDynamicParameters
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
class TestDynamicParameters(DMTestCase):

    def setUp(self):
        DMTestCase.setUp(self)

        self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = self.ph.create_simple_cc_pdict()
        self.stream_def_id = self.pubsub_management.create_stream_definition('Calibration Coefficients', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, self.stream_def_id)

    def test_coefficient_compatibility(self):
        data_product_id = self.create_data_product(name='Calibration Coefficient Test Data product', stream_def_id=self.stream_def_id)
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = np.arange(10)
        rdt['temp'] = [10] * 10
        rdt['cc_coefficient'] = [2] * 10

        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(dataset_monitor.wait())

        rdt2 = RecordDictionaryTool.load_from_granule(self.data_retriever.retrieve(dataset_id))
        np.testing.assert_array_equal(rdt2['offset'], [12] * 10)
Example 5: test_lctest_preload
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_lctest_preload(self):
    self.preload_lctest()

    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('sparse_dict', id_only=True)
    stream_def_id = self.create_stream_definition('sparse_example', parameter_dictionary_id=pdict_id)
    data_product_id = self.create_data_product('sparse_example', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)
    rdt['time'] = [time.time() + 2208988800]
    rdt['sparse_float'] = [3.14159265358979323]
    rdt['sparse_double'] = [2.7182818284590452353602874713526624977572470936999595]
    rdt['sparse_int'] = [131071]  # 6th Mersenne prime

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    for i in xrange(10):
        dataset_monitor.event.clear()
        rdt = ParameterHelper.rdt_for_data_product(data_product_id)
        rdt['time'] = [time.time() + 2208988800]
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)
    breakpoint(locals())
Example 6: check_tempsf_instrument_data_product
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def check_tempsf_instrument_data_product(self, reference_designator):
    passing = True
    info_list = []
    passing &= self.check_data_product_reference(reference_designator, info_list)
    if not passing: return passing
    data_product_id, stream_def_id, dataset_id = info_list.pop()

    now = time.time()
    ntp_now = now + 2208988800

    rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
    rdt['time'] = [ntp_now]
    rdt['temperature'] = [[25.3884, 26.9384, 24.3394, 23.3401, 22.9832,
        29.4434, 26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
        15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444, 9.9876,
        10.9834, 11.0098, 5.3456, 4.2994, 4.3009]]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    passing &= self.assertTrue(dataset_monitor.event.wait(20))
    if not passing: return passing

    granule = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
    passing &= self.assert_array_almost_equal(rdt['temperature'], [[
        25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
        15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
        10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
        5.3456, 4.2994, 4.3009]])
    return passing
Example 7: setup_resource
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def setup_resource(self):
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    tdom, sdom = time_series_domain()

    dp = DataProduct(name='example')
    dp.spatial_domain = sdom.dump()
    dp.temporal_domain = tdom.dump()

    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 100)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    # Yield to other greenlets, had an issue with connectivity
    gevent.sleep(1)

    self.offering_id = dataset_id
Example 8: test_tmpsf_arrays
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_tmpsf_arrays(self):
    self.preload_tmpsf()

    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('tmpsf_sample', id_only=True)
    stream_def_id = self.create_stream_definition('tmpsf', parameter_dictionary_id=pdict_id)
    data_product_id = self.create_data_product('tmpsf', stream_def_id=stream_def_id)
    self.activate_data_product(data_product_id)

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)
    tomato = {'quality_flag': 'ok',
              'preferred_timestamp': 'port_timestamp',
              'internal_timestamp': 3223662780.0,
              'stream_name': 'tmpsf_sample',
              'values': [{'value_id': 'timestamp', 'value': 3223662780.0},
                         {'value_id': 'temperature', 'value': [21.4548, 21.0132, 20.9255, 21.1266,
                                                               21.1341, 21.5606, 21.2156, 21.4749,
                                                               21.3044, 21.132, 21.1798, 21.2352,
                                                               21.3488, 21.1214, 21.6426, 21.1479,
                                                               21.0069, 21.5426, 21.3204, 21.2402,
                                                               21.3968, 21.4371, 21.0411, 21.4361]},
                         {'value_id': 'battery_voltage', 'value': 11.5916},
                         {'value_id': 'serial_number', 'value': '021964'}],
              'port_timestamp': 1378230448.439269,
              'driver_timestamp': 3587219248.444593,
              'pkt_format_id': 'JSON_Data',
              'pkt_version': 1}

    from ion.agents.populate_rdt import populate_rdt
    rdt = populate_rdt(rdt, [tomato])

    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
    breakpoint(locals())
Example 9: test_example2_preload
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_example2_preload(self):
    print 'preloading...'
    self.preload_example2()

    data_product_ids, _ = self.container.resource_registry.find_resources_ext(alt_id='DPROD104', alt_id_ns='PRE')
    data_product_id = data_product_ids[0]
    dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)

    with DirectCoverageAccess() as dca:
        dca.upload_calibration_coefficients(dataset_id, 'test_data/vel3d_coeff.csv', 'test_data/vel3d_coeff.yml')

    from ion_functions.data.test.test_vel_functions import TS, VE, VN, VU

    rdt = ParameterHelper.rdt_for_data_product(data_product_id)
    rdt['time'] = [time.time() + 2208988800]
    rdt['velocity_east'] = [VE[0]]
    rdt['velocity_north'] = [VN[0]]
    rdt['velocity_up'] = [VU[0]]

    dataset_monitor = DatasetMonitor(dataset_id)
    self.addCleanup(dataset_monitor.stop)
    ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
    dataset_monitor.event.wait(10)

    g = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(g)
    breakpoint(locals())
Example 10: test_pydap
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_pydap(self):
    if not CFG.get_safe('bootstrap.use_pydap', False):
        raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')

    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    tdom, sdom = time_series_domain()

    dp = DataProduct(name='example')
    dp.spatial_domain = sdom.dump()
    dp.temporal_domain = tdom.dump()

    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 10)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))
    gevent.sleep(1)  # Yield to other greenlets, had an issue with connectivity

    pydap_host = CFG.get_safe('server.pydap.host', 'localhost')
    pydap_port = CFG.get_safe('server.pydap.port', 8001)
    url = 'http://%s:%s/%s' % (pydap_host, pydap_port, dataset_id)

    ds = open_url(url)
    np.testing.assert_array_equal(ds['time'][:], np.arange(10))

    untested = []
    for k, v in rdt.iteritems():
        if k == rdt.temporal_parameter:
            continue
        context = rdt.context(k)
        if isinstance(context.param_type, QuantityType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        elif isinstance(context.param_type, ArrayType):
            values = np.empty(rdt[k].shape, dtype='O')
            for i, obj in enumerate(rdt[k]):
                values[i] = str(obj)
            np.testing.assert_array_equal(ds[k][k][:][0], values)
        elif isinstance(context.param_type, ConstantType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        elif isinstance(context.param_type, CategoryType):
            np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
        else:
            untested.append('%s (%s)' % (k, context.param_type))

    if untested:
        raise AssertionError('Untested parameters: %s' % untested)
Example 11: test_qc_interval_integration
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_qc_interval_integration(self):
    # 1 need to make a dataset that only has one discrete qc violation
    # 2 Launch the process
    # 3 Setup the scheduler to run it say three times
    # 4 Get the Events and verify the data

    #--------------------------------------------------------------------------------
    # Make a dataset that has only one discrete qc violation
    #--------------------------------------------------------------------------------

    dp_id, dataset_id, stream_def_id = self.make_data_product()
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)
    for rdt in self.populate_vectors(stream_def_id, 1, lambda x: [41] + [39] * (x - 1)):
        ph.publish_rdt_to_data_product(dp_id, rdt)
    self.assertTrue(monitor.event.wait(10))  # Give it 10 seconds to populate

    #--------------------------------------------------------------------------------
    # Launch the process
    #--------------------------------------------------------------------------------

    interval_key = uuid4().hex
    config = DotDict()
    config.process.interval_key = interval_key
    config.process.qc_params = ['glblrng_qc']  # The others are tested in other tests for completeness
    self.sync_launch(config)

    async_queue = Queue()

    def callback(event, *args, **kwargs):
        times = event.temporal_values
        self.assertEquals(len(times), 1)
        async_queue.put(1)

    es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=dp_id, callback=callback, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    #--------------------------------------------------------------------------------
    # Setup the scheduler
    #--------------------------------------------------------------------------------

    timer_id = self.scheduler_service.create_interval_timer(start_time=time.time(),
                                                            end_time=time.time() + 13,
                                                            interval=5,
                                                            event_origin=interval_key)

    #--------------------------------------------------------------------------------
    # Get the events and verify them
    #--------------------------------------------------------------------------------

    try:
        for i in xrange(2):
            async_queue.get(timeout=10)
    except Empty:
        raise AssertionError('QC Events not raised')
Example 12: test_get_data_from_FDW
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_get_data_from_FDW(self):
    # generate a data product and check that the FDW can get data
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    tdom, sdom = time_series_domain()

    dp = DataProduct(name='example')
    dp.spatial_domain = sdom.dump()
    dp.temporal_domain = tdom.dump()

    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 100)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))
    gevent.sleep(1)  # Yield to other greenlets, had an issue with connectivity

    print "--------------------------------"
    print dataset_id
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    print coverage_path
    print "--------------------------------"

    # verify the table exists in the DB (similar to above)
    # ....code...

    # check that the geoserver layer exists as above
    # ... code ....

    # make a WMS/WFS request... something like this (or both)
    url = self.gs_host + '/geoserver/geonode/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=geonode:ooi_' + dataset_id + '_ooi&maxFeatures=1&outputFormat=csv'
    r = requests.get(url)
    self.assertTrue(r.status_code == 200)
Example 13: run
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def run(self):
    while not self.finished.wait(self.interval):
        rdt = ParameterHelper.rdt_for_data_product(self.data_product_id)
        now = time.time()
        if self.simple_time:
            rdt['time'] = [self.i]
        else:
            rdt['time'] = np.array([now + 2208988800])
        rdt['temp'] = self.float_range(10, 14, np.array([now]))
        rdt['pressure'] = self.float_range(11, 12, np.array([now]))
        rdt['lat'] = [41.205]
        rdt['lon'] = [-71.74]
        rdt['conductivity'] = self.float_range(3.3, 3.5, np.array([now]))
        rdt['driver_timestamp'] = np.array([now + 2208988800])
        rdt['preferred_timestamp'] = ['driver_timestamp']
        ParameterHelper.publish_rdt_to_data_product(self.data_product_id, rdt)
        self.i += 1
Example 14: test_create_dataset_verify_geoserver_layer
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def test_create_dataset_verify_geoserver_layer(self):
    # generate layer and check that the service created it in geoserver
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    pdict_id = ph.create_extended_parsed()

    stream_def_id = self.pubsub_management.create_stream_definition('example', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

    tdom, sdom = time_series_domain()

    dp = DataProduct(name='example')
    dp.spatial_domain = sdom.dump()
    dp.temporal_domain = tdom.dump()

    data_product_id = self.data_product_management.create_data_product(dp, stream_def_id)
    self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

    self.data_product_management.activate_data_product_persistence(data_product_id)
    self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    dataset_id = self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
    monitor = DatasetMonitor(dataset_id)
    self.addCleanup(monitor.stop)

    rdt = ph.get_rdt(stream_def_id)
    ph.fill_rdt(rdt, 100)
    ph.publish_rdt_to_data_product(data_product_id, rdt)
    self.assertTrue(monitor.event.wait(10))
    gevent.sleep(1)  # Yield to other greenlets, had an issue with connectivity

    log.debug("--------------------------------")
    log.debug(dataset_id)
    coverage_path = DatasetManagementService()._get_coverage_path(dataset_id)
    log.debug(coverage_path)
    log.debug("--------------------------------")

    # verify that the layer exists in geoserver
    try:
        r = requests.get(self.gs_rest_url + '/layers/ooi_' + dataset_id + '_ooi.xml', auth=(self.username, self.PASSWORD))
        self.assertTrue(r.status_code == 200)
    except Exception as e:
        log.error("check service and layer exist...%s", e)
        self.assertTrue(False)
Example 15: make_large_dataset
# Required import: from ion.services.dm.utility.test.parameter_helper import ParameterHelper
# Or: from ion.services.dm.utility.test.parameter_helper.ParameterHelper import publish_rdt_to_data_product
def make_large_dataset(self, temp_vector):
    monitor_queue = Queue()
    # Make 27 hours of data
    ph = ParameterHelper(self.dataset_management, self.addCleanup)
    data_product_id, dataset_id, stream_def_id = self.make_data_product()

    es = EventSubscriber(event_type=OT.DatasetModified, origin=dataset_id, auto_delete=True,
                         callback=lambda *args, **kwargs: monitor_queue.put(1))
    es.start()
    self.addCleanup(es.stop)

    for rdt in self.populate_vectors(stream_def_id, 3, temp_vector):
        ph.publish_rdt_to_data_product(data_product_id, rdt)

    try:
        for i in xrange(3):
            monitor_queue.get(timeout=10)
    except Empty:
        raise AssertionError('Failed to populate dataset in time')

    return data_product_id