本文整理汇总了Python中interface.services.dm.iingestion_management_service.IngestionManagementServiceClient.is_persisted方法的典型用法代码示例。如果您正苦于以下问题:Python IngestionManagementServiceClient.is_persisted方法的具体用法?Python IngestionManagementServiceClient.is_persisted怎么用?Python IngestionManagementServiceClient.is_persisted使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类interface.services.dm.iingestion_management_service.IngestionManagementServiceClient的用法示例。
在下文中一共展示了IngestionManagementServiceClient.is_persisted方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: TestDataProductManagementServiceIntegration
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
dp_obj = IonObject(RT.DataProduct,
name='DP1',
description='some new dp',
temporal_domain = tdom,
spatial_domain = sdom)
dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj,
stream_definition_id=ctd_stream_def_id)
stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
self.assertEquals(ctd_stream_def_id, stream_def_id)
def test_activate_suspend_data_product(self):
    """Verify activate/suspend of data product persistence.

    Creates a data product backed by a CTD stream definition, activates
    persistence, checks that the product's streams are persisted and that
    the dataset can be replayed, then suspends persistence and confirms
    the product can be force-deleted.

    NOTE(review): indentation was lost in the published copy of this code;
    the block structure below is reconstructed from the linear test flow.
    """
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    # Construct temporal and spatial Coordinate Reference System objects
    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp',
                       temporal_domain=tdom,
                       spatial_domain=sdom)
    log.debug("Created an IonObject for a data product: %s" % dp_obj)

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test activate and suspend data product persistence
    #------------------------------------------------------------------------------------------------
    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)

    dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
    if not dataset_ids:
        raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
    self.get_datastore(dataset_ids[0])

    # Check that the streams associated with the data product are persisted
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    for stream_id in stream_ids:
        self.assertTrue(self.ingestclient.is_persisted(stream_id))

    #--------------------------------------------------------------------------------
    # Now get the data in one chunk using an RPC Call to start_retreive
    #--------------------------------------------------------------------------------
    replay_data = self.data_retriever.retrieve(dataset_ids[0])
    self.assertIsInstance(replay_data, Granule)
    log.debug("The data retriever was able to replay the dataset that was attached to the data product "
              "we wanted to be persisted. Therefore the data product was indeed persisted with "
              "otherwise we could not have retrieved its dataset using the data retriever. Therefore "
              "this demonstration shows that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'")

    data_product_object = self.rrclient.read(dp_id)
    self.assertEquals(data_product_object.name, 'DP1')
    self.assertEquals(data_product_object.description, 'some new dp')

    log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
              " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the "
              "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description, data_product_object.name,
                                                           data_product_object.description))

    #------------------------------------------------------------------------------------------------
    # test suspend data product persistence
    #------------------------------------------------------------------------------------------------
    self.dpsc_cli.suspend_data_product_persistence(dp_id)

    self.dpsc_cli.force_delete_data_product(dp_id)
    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.rrclient.read(dp_id)
示例2: TestDataProductManagementServiceIntegration
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
self.assertEquals(ctd_stream_def_id, stream_def_id)
def test_derived_data_product(self):
    """Verify creation and replay of a derived (child) data product.

    Creates a persisted parent product from a full CTD stream definition,
    then a derived 'TEMPWAT' product restricted to ['time', 'temp'],
    publishes one granule into the parent stream and checks the derived
    dataset replays only the restricted fields.

    NOTE(review): indentation was lost in the published copy of this code;
    the block structure below is reconstructed from the linear test flow.
    """
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)

    tdom, sdom = time_series_domain()
    dp = DataProduct(name='Instrument DP', temporal_domain=tdom.dump(), spatial_domain=sdom.dump())
    dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
    self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

    self.dpsc_cli.activate_data_product_persistence(dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

    dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
    if not dataset_ids:
        raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
    dataset_id = dataset_ids[0]

    # Make the derived data product
    simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
    tempwat_dp = DataProduct(name='TEMPWAT')
    tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
    self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)

    # Check that the streams associated with the data product are persisted
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    for stream_id in stream_ids:
        self.assertTrue(self.ingestclient.is_persisted(stream_id))
    stream_id = stream_ids[0]
    route = self.pubsubcli.read_stream_route(stream_id=stream_id)

    rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)
    rdt['pressure'] = np.arange(20)

    publisher = StandaloneStreamPublisher(stream_id, route)

    # Wait for the dataset-modified event so we know ingestion finished
    # before attempting to retrieve the derived dataset.
    dataset_modified = Event()
    def cb(*args, **kwargs):
        dataset_modified.set()
    es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    publisher.publish(rdt.to_granule())

    self.assertTrue(dataset_modified.wait(30))

    tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
    tempwat_dataset_id = tempwat_dataset_ids[0]
    granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    # The derived product must expose only its restricted field set.
    self.assertEquals(set(rdt.fields), set(['time','temp']))
def test_activate_suspend_data_product(self):
示例3: TestDataProductManagementServiceIntegration
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
stream_definition_id=ctd_stream_def_id)
stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
self.assertEquals(ctd_stream_def_id, stream_def_id)
def test_derived_data_product(self):
    """Verify creation and replay of a derived (DERIVED-category) data product.

    Same flow as the domain-based variant, but the parent product carries no
    temporal/spatial domains and the child is explicitly marked
    DataProductTypeEnum.DERIVED.

    NOTE(review): indentation was lost in the published copy of this code;
    the block structure below is reconstructed from the linear test flow.
    """
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
    self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)

    dp = DataProduct(name='Instrument DP')
    dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
    self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

    self.dpsc_cli.activate_data_product_persistence(dp_id)
    self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

    dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
    if not dataset_ids:
        raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
    dataset_id = dataset_ids[0]

    # Make the derived data product
    simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
    tempwat_dp = DataProduct(name='TEMPWAT', category=DataProductTypeEnum.DERIVED)
    tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
    self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)

    # Check that the streams associated with the data product are persisted
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    for stream_id in stream_ids:
        self.assertTrue(self.ingestclient.is_persisted(stream_id))
    stream_id = stream_ids[0]
    route = self.pubsubcli.read_stream_route(stream_id=stream_id)

    rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
    rdt['time'] = np.arange(20)
    rdt['temp'] = np.arange(20)
    rdt['pressure'] = np.arange(20)

    publisher = StandaloneStreamPublisher(stream_id, route)

    # Wait for the dataset-modified event so we know ingestion finished
    # before attempting to retrieve the derived dataset.
    dataset_modified = Event()
    def cb(*args, **kwargs):
        dataset_modified.set()
    es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
    es.start()
    self.addCleanup(es.stop)

    publisher.publish(rdt.to_granule())

    self.assertTrue(dataset_modified.wait(30))

    tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
    tempwat_dataset_id = tempwat_dataset_ids[0]
    granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
    rdt = RecordDictionaryTool.load_from_granule(granule)
    np.testing.assert_array_equal(rdt['time'], np.arange(20))
    # The derived product must expose only its restricted field set.
    self.assertEquals(set(rdt.fields), set(['time','temp']))
def test_activate_suspend_data_product(self):
示例4: TestLoader
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
self.assertEquals(len(filtered_objs), 1)
return filtered_objs[0]
@attr('INT', group='loader')
@attr('SMOKE', group='loader')
def test_row_values(self):
    """ use only rows from NOSE scenario for specific names and details included in this test.
        rows in NOSE may rely on entries in BETA scenarios,
        but should not specifically test values from those scenarios.
    """
    # NOTE(review): indentation was lost in the published copy of this code;
    # the block structure below is reconstructed from the linear test flow.

    # first make sure this scenario loads successfully
    self.assert_can_load("BETA,NOSE")

    # check for ExternalDataset
    eds = self.find_object_by_name('Test External Dataset', RT.ExternalDataset)
    edm1 = self.find_object_by_name('Test External Dataset Model', RT.ExternalDatasetModel)
    edm2, _ = self.container.resource_registry.find_objects(eds._id, PRED.hasModel, RT.ExternalDatasetModel, True)
    self.assertEquals(edm1._id, edm2[0])
    inst = self.find_object_by_name('Test Instrument Agent Instance', RT.ExternalDatasetAgentInstance)
    self.assertEquals('value1', inst.dataset_agent_config['key1'], msg='dataset_agent_config[key1] is not value1:\n%r' % inst.agent_config)

    # check for an Org
    org = self.find_object_by_name('CASPER', RT.Org)
    self.assertFalse(org.contacts is None)
    self.assertEquals('Userbrough', org.contacts[0].individual_name_family)
    self.assertEquals('primary', org.contacts[0].roles[0])

    # check data product
    dp = self.find_object_by_name('Test DP L0 CTD', RT.DataProduct)
    # should be persisted
    streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
    self.assertTrue(streams)
    self.assertEquals(1, len(streams))
    self.assertTrue(self.ingestion_management.is_persisted(streams[0]))
    self.assertAlmostEqual(32.88237, dp.geospatial_bounds.geospatial_latitude_limit_north, places=3)

    # but L1 data product should not be persisted
    dp = self.find_object_by_name('Test DP L1 conductivity', RT.DataProduct)
    streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(1, len(streams))
    self.assertTrue(streams)
    self.assertFalse(self.ingestion_management.is_persisted(streams[0]))

    site = self.find_object_by_name('Test Instrument Site', RT.InstrumentSite)
    self.assertFalse(site.constraint_list is None)
    self.assertEquals(2, len(site.constraint_list))
    con = site.constraint_list[0]
    self.assertAlmostEqual(32.88237, con.geospatial_latitude_limit_north, places=3)
    self.assertAlmostEqual(-117.23214, con.geospatial_longitude_limit_east, places=3)
    con = site.constraint_list[1]
    self.assertEquals('TemporalBounds', con.type_)
    # check that coordinate system was loaded
    self.assertFalse(site.coordinate_reference_system is None)

    # check that InstrumentDevice contacts are loaded
    dev = self.find_object_by_name('Unit Test SMB37', RT.InstrumentDevice)
    self.assertTrue(len(dev.contacts) == 2)
    self.assertEquals('Userbrough', dev.contacts[0].individual_name_family)

    # check has attachments
    attachments = self.container.resource_registry.find_attachments(dev._id)
    self.assertTrue(len(attachments) > 0)

    # check for platform agents
    agent = self.find_object_by_name('Unit Test Platform Agent', RT.PlatformAgent)
    self.assertEquals(2, len(agent.stream_configurations))
    parsed = agent.stream_configurations[1]
    # self.assertEquals('platform_eng_parsed', parsed.parameter_dictionary_name)
    self.assertEquals('ctd_parsed_param_dict', parsed.parameter_dictionary_name)
    # OBSOLETE: check that alarm was added to StreamConfig
    # self.assertEquals(1, len(parsed.alarms), msg='alarms: %r'%parsed.alarms)
    # self.assertEquals('temp', parsed.alarms[0]['kwargs']['value_id'])

    # check for platform agents
    self.find_object_by_name('Unit Test Platform Agent Instance', RT.PlatformAgentInstance)

    # check for platform model boolean values
    model = self.find_object_by_name('Nose Testing Platform Model', RT.PlatformModel)
    self.assertEquals(True, model.shore_networked)
    self.assertNotEqual('str', model.shore_networked.__class__.__name__)

    # check for data process definition
    self.find_object_by_name("Logical Transform Definition", RT.DataProcessDefinition)

    iai = self.find_object_by_name("Test InstrumentAgentInstance", RT.InstrumentAgentInstance)
    self.assertEqual({'SCHEDULER': {'VERSION': {'number': 3.0}, 'CLOCK_SYNC': 48.2, 'ACQUIRE_STATUS': {}},
                      'PARAMETERS': {"TXWAVESTATS": False, 'TXWAVEBURST': 'false', 'TXREALTIME': True}},
                     iai.startup_config)
    self.assertEqual(2, len(iai.alerts))
    # self.assertEqual({'entry': 'foo'}, iai.alerts['complex'])

    pai = self.find_object_by_name("Unit Test Platform Agent Instance", RT.PlatformAgentInstance)
    self.assertEqual({'entry': 'foo'}, pai.alerts['complex'])

    orgs, _ = self.container.resource_registry.find_subjects(RT.Org, PRED.hasResource, iai._id, True)
    self.assertEqual(1, len(orgs))
    self.assertEqual(org._id, orgs[0])
示例5: TestLoader
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
class TestLoader(IonIntegrationTestCase):
def setUp(self):
    """Start a capability container, deploy r2deploy.yml, and create the ingestion client."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    self.ingestion_management = IngestionManagementServiceClient()
def assert_can_load(self, scenarios, loadui=False, loadooi=False,
                    path=TESTED_DOC, ui_path='default'):
    """ perform preload for given scenarios and raise exception if there is a problem with the data """
    config = dict(op="load",
                  scenario=scenarios,
                  attachments="res/preload/r2_ioc/attachments",
                  loadui=loadui,
                  loadooi=loadooi,
                  path=path, ui_path=ui_path,
                  assets='res/preload/r2_ioc/ooi_assets',
                  # bulk load only makes sense when OOI assets are loaded
                  bulk=loadooi,
                  ooiexclude='DataProduct,DataProductLink')
    # IONLoader raises on any preload problem, which fails the calling test
    self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)
@attr('PRELOAD')
def test_ui_valid(self):
    """ make sure UI assets are valid using DEFAULT_UI_ASSETS = 'https://userexperience.oceanobservatories.org/database-exports/' """
    self.assert_can_load("BASE,BETA", loadui=True, ui_path='default')
@attr('PRELOAD')
def test_ui_candidates_valid(self):
    """ make sure UI assets are valid using DEFAULT_UI_ASSETS = 'https://userexperience.oceanobservatories.org/database-exports/Candidates' """
    self.assert_can_load("BASE,BETA", loadui=True, ui_path='candidate')
@attr('PRELOAD')
def test_assets_valid(self):
    """ make sure can load asset DB """
    self.assert_can_load("BASE,BETA,DEVS", path='master', loadooi=True)
@attr('PRELOAD')
def test_alpha_valid(self):
    """ make sure R2_DEMO scenario in master google doc
        is valid and self-contained (doesn't rely on rows from other scenarios except BASE and BETA)
        NOTE: test will pass/fail based on current google doc, not just code changes.
    """
    self.assert_can_load("BASE,BETA,ALPHA_SYS", path='master')
@attr('PRELOAD')
def test_beta_valid(self):
    """ make sure R2_DEMO scenario in master google doc
        is valid and self-contained (doesn't rely on rows from other scenarios except BASE and BETA)
        NOTE: test will pass/fail based on current google doc, not just code changes.
    """
    self.assert_can_load("BASE,BETA,BETA_SYS", path='master')
@attr('PRELOAD')
def test_devs_valid(self):
    """ make sure DEVS scenario in master google doc
        is valid and self-contained (doesn't rely on rows from other scenarios except BASE and BETA)
        NOTE: test will pass/fail based on current google doc, not just code changes.
    """
    self.assert_can_load("BASE,BETA,DEVS", path='master')
def find_object_by_name(self, name, type):
    """Return the single registry resource of the given type with the given name.

    Fails the test if no resource or more than one resource matches.
    """
    objects, _ = self.container.resource_registry.find_resources(type, id_only=False)
    self.assertTrue(len(objects) >= 1)
    found = None
    for obj in objects:
        # print-function form is equivalent for a single argument and also
        # valid Python 3 syntax (original used the py2 print statement)
        print(obj.name)
        if obj.name == name:
            self.assertFalse(found, msg='Found more than one %s "%s" (was expecting just one)' % (type, name))
            found = obj
    self.assertTrue(found, msg='Did not find %s "%s"' % (type, name))
    return found
@attr('INT', group='loader')
@attr('SMOKE', group='loader')
def test_row_values(self):
""" use only rows from NOSE scenario for specific names and details included in this test.
rows in NOSE may rely on entries in BASE and BETA scenarios,
but should not specifically test values from those scenarios.
"""
# first make sure this scenario loads successfully
self.assert_can_load("BASE,BETA,NOSE")
# check for an Org
org = self.find_object_by_name('CASPER', RT.Org)
self.assertFalse(org.contacts is None)
self.assertEquals('Userbrough', org.contacts[0].individual_name_family)
self.assertEquals('primary', org.contacts[0].roles[0])
# check data product
dp = self.find_object_by_name('Test DP L0 CTD', RT.DataProduct)
formats = dp.available_formats
self.assertEquals(2, len(formats))
self.assertEquals('csv', formats[0])
# should be persisted
streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
self.assertTrue(streams)
self.assertEquals(1, len(streams))
self.assertTrue(self.ingestion_management.is_persisted(streams[0]))
#.........这里部分代码省略.........
示例6: TestLoader
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
@attr('INT', group='loader')
@attr('SMOKE', group='loader')
def test_row_values(self):
    """ use only rows from NOSE scenario for specific names and details included in this test.
        rows in NOSE may rely on entries in BETA scenarios,
        but should not specifically test values from those scenarios.
    """
    # NOTE(review): indentation was lost in the published copy of this code;
    # the block structure below is reconstructed from the linear test flow.

    # first make sure this scenario loads successfully
    self._preload_scenario("BETA,NOSE")

    # check for ExternalDataset
    eds = self.find_object_by_name('Test External CTD Dataset', RT.ExternalDataset)
    edm1 = self.find_object_by_name('Test External CTD Dataset Model', RT.ExternalDatasetModel)
    edm2, _ = self.container.resource_registry.find_objects(eds._id, PRED.hasModel, RT.ExternalDatasetModel, True)
    self.assertEquals(edm1._id, edm2[0])
    inst = self.find_object_by_name('Test External CTD Agent Instance', RT.ExternalDatasetAgentInstance)
    self.assertEquals('value1', inst.driver_config['key1'], msg='driver_config[key1] is not value1:\n%r' % inst.driver_config)

    # check for an Org
    org = self.find_object_by_name('CASPER', RT.Org)
    self.assertFalse(org.contacts is None)
    self.assertEquals('Userbrough', org.contacts[0].individual_name_family)
    self.assertEquals('primary', org.contacts[0].roles[0])

    # check data product
    dp = self.find_object_by_name('Test DP L0 CTD', RT.DataProduct)
    # should be persisted
    streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
    self.assertTrue(streams)
    self.assertEquals(1, len(streams))
    self.assertTrue(self.ingestion_management.is_persisted(streams[0]))
    self.assertAlmostEqual(32.88237, dp.geospatial_bounds.geospatial_latitude_limit_north, places=3)

    # but L1 data product should not be persisted
    dp = self.find_object_by_name('Test DP L1 conductivity', RT.DataProduct)
    streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(1, len(streams))
    self.assertTrue(streams)
    self.assertFalse(self.ingestion_management.is_persisted(streams[0]))

    site = self.find_object_by_name('Test Instrument Site', RT.InstrumentSite)
    self.assertFalse(site.constraint_list is None)
    self.assertEquals(2, len(site.constraint_list))
    con = site.constraint_list[0]
    self.assertAlmostEqual(32.88237, con.geospatial_latitude_limit_north, places=3)
    self.assertAlmostEqual(-117.23214, con.geospatial_longitude_limit_east, places=3)
    con = site.constraint_list[1]
    self.assertEquals('TemporalBounds', con.type_)
    # check that coordinate system was loaded
    self.assertFalse(site.coordinate_reference_system is None)

    # check that InstrumentDevice contacts are loaded
    dev = self.find_object_by_name('Unit Test SMB37', RT.InstrumentDevice)
    self.assertTrue(len(dev.contacts) == 2)
    self.assertEquals('Userbrough', dev.contacts[0].individual_name_family)

    # check has attachments
    attachments = self.container.resource_registry.find_attachments(dev._id)
    self.assertTrue(len(attachments) > 0)

    # check for platform agents
    agent = self.find_object_by_name('Unit Test Platform Agent', RT.PlatformAgent)
    self.assertEquals(2, len(agent.stream_configurations))
示例7: TestLoader
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
class TestLoader(IonIntegrationTestCase):
    """Integration test: run the IONLoader preload for R2_DEMO and spot-check loaded resources.

    NOTE(review): indentation was lost in the published copy of this code;
    the block structure below is reconstructed from the linear test flow.
    """

    def setUp(self):
        """Start a capability container, deploy r2deploy.yml, and create the ingestion client."""
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.ingestion_management = IngestionManagementServiceClient()

    def test_lca_load(self):
        """Preload the R2_DEMO scenario and verify orgs, data products, sites, devices, and agents."""
        config = dict(op="load", scenario="R2_DEMO", attachments="res/preload/r2_ioc/attachments")
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)

        # make sure contact entries were created correctly
        res, _ = self.container.resource_registry.find_resources(RT.Org, id_only=False)
        self.assertTrue(len(res) > 1)
        found = False
        for org in res:
            if org.name == 'RSN':
                self.assertFalse(found, msg='Found more than one Org "RSN" -- should have preloaded one')
                found = True
                self.assertFalse(org.contacts is None)
                self.assertEquals('Delaney', org.contacts[0].individual_name_family)
                self.assertEquals('primary', org.contacts[0].roles[0])
        self.assertTrue(found, msg='Did not find Org "RSN" -- should have been preloaded')

        # check data product
        res, _ = self.container.resource_registry.find_resources(RT.DataProduct, name='CTDBP-1012-REC1 Raw', id_only=False)
        self.assertEquals(1, len(res))
        dp = res[0]
        formats = dp.available_formats
        self.assertEquals(2, len(formats))
        self.assertEquals('csv', formats[0])
        # should be persisted
        streams, _ = self.container.resource_registry.find_objects(dp._id, PRED.hasStream, RT.Stream, True)
        self.assertTrue(streams)
        self.assertEquals(1, len(streams))
        self.assertTrue(self.ingestion_management.is_persisted(streams[0]))
        self.assertTrue(math.fabs(dp.geospatial_bounds.geospatial_latitude_limit_north - 44.7) < .01)

        # but L1 data product should not be persisted
        res, _ = self.container.resource_registry.find_resources(RT.DataProduct, name='Conductivity L1', id_only=True)
        self.assertEquals(1, len(res))
        dpid = res[0]
        streams, _ = self.container.resource_registry.find_objects(dpid, PRED.hasStream, RT.Stream, True)
        self.assertEquals(1, len(streams))
        self.assertTrue(streams)
        self.assertFalse(self.ingestion_management.is_persisted(streams[0]))

        res, _ = self.container.resource_registry.find_resources(RT.InstrumentSite, id_only=False)
        self.assertTrue(len(res) > 1)
        found = False
        for site in res:
            if site.name == 'Logical instrument 1 Demo':
                self.assertFalse(found, msg='Found more than one InstrumentSite "Logical instrument 1 Demo" -- should have preloaded one')
                found = True
                self.assertFalse(site.constraint_list is None)
                self.assertEquals(2, len(site.constraint_list))
                con = site.constraint_list[0]
                self.assertTrue(math.fabs(con.geospatial_latitude_limit_north - 32.88) < .01)
                self.assertTrue(math.fabs(con.geospatial_longitude_limit_east + 117.23) < .01)
                con = site.constraint_list[1]
                self.assertEquals('TemporalBounds', con.type_)
                # check that coordinate system was loaded
                self.assertFalse(site.coordinate_reference_system is None)
        self.assertTrue(found, msg='Did not find InstrumentSite "Logical instrument 1 Demo" -- should have been preloaded')

        # check that InstrumentDevice contacts are loaded
        res, _ = self.container.resource_registry.find_resources(RT.InstrumentDevice, name='CTD Simulator 1 Demo', id_only=False)
        self.assertTrue(len(res) == 1)
        self.assertTrue(len(res[0].contacts) == 2)
        self.assertEquals('Ampe', res[0].contacts[0].individual_name_family)

        # check has attachments
        attachments, _ = self.container.resource_registry.find_attachments(res[0]._id)
        self.assertTrue(len(attachments) > 0)

        # check for platform agents
        res, _ = self.container.resource_registry.find_resources(RT.PlatformAgent, id_only=False)
        self.assertTrue(len(res) > 0)

        # check for platform agents
        res, _ = self.container.resource_registry.find_resources(RT.PlatformAgentInstance, id_only=False)
        self.assertTrue(len(res) > 0)
示例8: TestInstrumentDataIngestion
# 需要导入模块: from interface.services.dm.iingestion_management_service import IngestionManagementServiceClient [as 别名]
# 或者: from interface.services.dm.iingestion_management_service.IngestionManagementServiceClient import is_persisted [as 别名]
#.........这里部分代码省略.........
#
#--------------------------------------------------------------------------------
# Grab the ingestion configuration from the resource registry
#--------------------------------------------------------------------------------
# The ingestion configuration should have been created by the bootstrap service
# which is configured through r2deploy.yml
ingest_configs, _ = self.resource_registry.find_resources(
restype=RT.IngestionConfiguration,id_only=True)
return ingest_configs[0]
def prepare_ingestion(self):
    """Start persistence for every configured stream and record (stream_id, dataset_id) pairs.

    Populates self.dataset_ids, keyed by stream name, for later verification.
    """
    #
    # Takes pieces from test_dm_end_2_end.py as of 7/23/12
    #

    # Get the ingestion configuration from the resource registry
    self.ingest_config_id = ingest_config_id = self.get_ingestion_config()

    # to keep the (stream_id, dataset_id) associated with each stream_name
    self.dataset_ids = {}

    for stream_name, stream_config in self._stream_config.iteritems():
        stream_id = stream_config['id']
        dataset_id = self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=ingest_config_id)
        log.info("persisting stream_name=%s (stream_id=%s): dataset_id=%s" % (
            stream_name, stream_id, dataset_id))
        self.assertTrue(self.ingestion_management.is_persisted(stream_id))
        self.dataset_ids[stream_name] = (stream_id, dataset_id)
def verify_granules_persisted(self):
    """Check that each stream prepared in prepare_ingestion has an active subscription and a DataSet."""
    #
    # takes elements from ingestion_management_test.py as of 7/23/12
    #
    ingest_config_id = self.ingest_config_id
    for stream_name, (stream_id, dataset_id) in self.dataset_ids.iteritems():
        assoc = self.resource_registry.find_associations(
            subject=ingest_config_id, predicate=PRED.hasSubscription)
        sub = self.resource_registry.read(assoc[0].o)
        self.assertTrue(sub.is_active)
        dataset = self.resource_registry.read(dataset_id)
        self.assertIsInstance(dataset, DataSet)
        log.info("Data persisted for stream_name=%s (stream_id=%s, "
                 "dataset_id=%s) dataset=%s" % (stream_name, stream_id, dataset_id, dataset))
def test_poll_and_verify_granules_persisted(self):
    """Run the agent poll test and then verify the polled granules were persisted.

    Mirrors test_instrument_agent.py:TestInstrumentAgent.test_poll, adding
    verification that the resulting data are persisted.
    """
    self._test_poll()
    self.verify_granules_persisted()