This page collects typical usage examples of the Python method ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient.create. If you are wondering what EnhancedResourceRegistryClient.create does or how to call it, the selected code examples below should help. You can also explore further usage of the containing class, ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient.
The 15 code examples of EnhancedResourceRegistryClient.create shown below are ordered by popularity by default.
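Before the examples, here is a minimal sketch of the calling pattern they all share. It is not taken from any single example below; the variable name rr_client and the pyon.public imports are assumptions about the usual ION container environment rather than code from this page.
# Minimal usage sketch (assumption: a plain ResourceRegistryServiceClient is available as `rr_client`)
from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient
from pyon.public import IonObject, RT
rr2 = EnhancedResourceRegistryClient(rr_client)            # wrap the plain registry client
device_obj = IonObject(RT.InstrumentDevice, name='dev01')  # any named resource object
device_id = rr2.create(device_obj, RT.InstrumentDevice)    # type-checked create, returns the new resource id
org_id = rr2.create(IonObject(RT.Org, name='org01'))       # the resource-type argument may be omitted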
Example 1: TestDeployment
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestDeployment(IonIntegrationTestCase):
def setUp(self):
# Start container
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.c = DotDict()
self.c.resource_registry = self.rrclient
self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
# deactivate all data processes when tests are complete
def killAllDataProcesses():
for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
self.dsmsclient.deactivate_data_process(proc_id)
self.dsmsclient.delete_data_process(proc_id)
self.addCleanup(killAllDataProcesses)
#@unittest.skip("targeting")
def test_create_deployment(self):
#create a deployment with metadata and an initial site and device
platform_site__obj = IonObject(RT.PlatformSite,
name='PlatformSite1',
description='test platform site')
site_id = self.omsclient.create_platform_site(platform_site__obj)
platform_device__obj = IonObject(RT.PlatformDevice,
name='PlatformDevice1',
description='test platform device')
device_id = self.imsclient.create_platform_device(platform_device__obj)
start = IonTime(datetime.datetime(2013,1,1))
end = IonTime(datetime.datetime(2014,1,1))
temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
deployment_obj = IonObject(RT.Deployment,
name='TestDeployment',
description='some new deployment',
constraint_list=[temporal_bounds])
deployment_id = self.omsclient.create_deployment(deployment_obj)
self.omsclient.deploy_platform_site(site_id, deployment_id)
self.imsclient.deploy_platform_device(device_id, deployment_id)
log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )
#retrieve the deployment objects and check that the assoc site and device are attached
read_deployment_obj = self.omsclient.read_deployment(deployment_id)
log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )
site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
self.assertEqual(len(site_ids), 1)
device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
self.assertEqual(len(device_ids), 1)
#delete the deployment
self.RR2.pluck(deployment_id)
self.omsclient.force_delete_deployment(deployment_id)
# now try to get the deleted dp object
try:
self.omsclient.read_deployment(deployment_id)
except NotFound:
pass
else:
self.fail("deleted deployment was found during read")
#@unittest.skip("targeting")
def base_activate_deployment(self):
#-------------------------------------------------------------------------------------
# Create platform site, platform device, platform model
#-------------------------------------------------------------------------------------
platform_site__obj = IonObject(RT.PlatformSite,
name='PlatformSite1',
description='test platform site')
platform_site_id = self.omsclient.create_platform_site(platform_site__obj)
platform_device_obj = IonObject(RT.PlatformDevice,
name='PlatformDevice1',
description='test platform device')
platform_device_id = self.imsclient.create_platform_device(platform_device_obj)
platform_model__obj = IonObject(RT.PlatformModel,
name='PlatformModel1',
#......... remainder of this example omitted .........
Example 2: TestObservatoryManagementServiceIntegration
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
#container = Container()
#print 'starting container'
#container.start()
#print 'started container'
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
self.org_management_service = OrgManagementServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
#print 'TestObservatoryManagementServiceIntegration: started services'
self.event_publisher = EventPublisher()
# @unittest.skip('this exists only for debugging the launch process')
# def test_just_the_setup(self):
# return
def destroy(self, resource_ids):
self.OMS.force_delete_observatory(resource_ids.observatory_id)
self.OMS.force_delete_subsite(resource_ids.subsite_id)
self.OMS.force_delete_subsite(resource_ids.subsite2_id)
self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
self.OMS.force_delete_subsite(resource_ids.subsitez_id)
self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)
#@unittest.skip('targeting')
def test_observatory_management(self):
resources = self._make_associations()
self._do_test_find_related_sites(resources)
self._do_test_get_sites_devices_status(resources)
self._do_test_find_site_data_products(resources)
self._do_test_find_related_frames_of_reference(resources)
self._do_test_create_geospatial_point_center(resources)
self._do_test_find_observatory_org(resources)
self.destroy(resources)
def _do_test_find_related_sites(self, resources):
site_resources, site_children = self.OMS.find_related_sites(resources.org_id)
#import sys, pprint
#print >> sys.stderr, pprint.pformat(site_resources)
#print >> sys.stderr, pprint.pformat(site_children)
#self.assertIn(resources.org_id, site_resources)
self.assertIn(resources.observatory_id, site_resources)
self.assertIn(resources.subsite_id, site_resources)
self.assertIn(resources.subsite_id, site_resources)
self.assertIn(resources.subsite2_id, site_resources)
self.assertIn(resources.platform_site_id, site_resources)
self.assertIn(resources.instrument_site_id, site_resources)
self.assertEquals(len(site_resources), 13)
self.assertEquals(site_resources[resources.observatory_id].type_, RT.Observatory)
self.assertIn(resources.org_id, site_children)
self.assertIn(resources.observatory_id, site_children)
self.assertIn(resources.subsite_id, site_children)
self.assertIn(resources.subsite_id, site_children)
self.assertIn(resources.subsite2_id, site_children)
self.assertIn(resources.platform_site_id, site_children)
self.assertNotIn(resources.instrument_site_id, site_children)
self.assertEquals(len(site_children), 9)
self.assertIsInstance(site_children[resources.subsite_id], list)
self.assertEquals(len(site_children[resources.subsite_id]), 2)
def _do_test_get_sites_devices_status(self, resources):
result_dict = self.OMS.get_sites_devices_status(resources.org_id)
site_resources = result_dict.get("site_resources", None)
site_children = result_dict.get("site_children", None)
#......... remainder of this example omitted .........
Example 3: TestPlatformInstrument
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestPlatformInstrument(BaseIntTestPlatform):
def setUp(self):
self._start_container()
self._pp = pprint.PrettyPrinter()
log.debug("oms_uri = %s", OMS_URI)
self.oms = CIOMSClientFactory.create_instance(OMS_URI)
#url = OmsTestMixin.start_http_server()
#log.debug("TestPlatformInstrument:setup http url %s", url)
#
#result = self.oms.event.register_event_listener(url)
#log.debug("TestPlatformInstrument:setup register_event_listener result %s", result)
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
# Now create client to DataProductManagementService
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
self.org_id = self.RR2.create(any_old(RT.Org))
log.debug("Org created: %s", self.org_id)
# see _set_receive_timeout
self._receive_timeout = 300
self.instrument_device_id = ''
self.platform_device_id = ''
self.platform_site_id = ''
self.platform_agent_instance_id = ''
self._pa_client = ''
def done():
CIOMSClientFactory.destroy_instance(self.oms)
event_notifications = OmsTestMixin.stop_http_server()
log.info("event_notifications = %s" % str(event_notifications))
self.addCleanup(done)
@unittest.skip('Must be run locally...')
def test_platform_with_instrument_streaming(self):
#
# The following is with just a single platform and the single
# instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
#
#load the parameters and the param dicts necessary for the VEL3D
log.debug( "load params------------------------------------------------------------------------------")
self._load_params()
log.debug( " _register_oms_listener------------------------------------------------------------------------------")
self._register_oms_listener()
#create the instrument device/agent/model
log.debug( "---------- create_instrument_resources ----------" )
self._create_instrument_resources()
#create the platform device, agent and instance
log.debug( "---------- create_platform_configuration ----------" )
self._create_platform_configuration('LPJBox_CI_Ben_Hall')
self.rrclient.create_association(subject=self.platform_device_id, predicate=PRED.hasDevice, object=self.instrument_device_id)
log.debug( "---------- start_platform ----------" )
self._start_platform()
self.addCleanup(self._stop_platform)
# get everything in command mode:
self._ping_agent()
log.debug( " ---------- initialize ----------" )
self._initialize()
_ia_client = ResourceAgentClient(self.instrument_device_id, process=FakeProcess())
state = _ia_client.get_agent_state()
log.info("TestPlatformInstrument get_agent_state %s", state)
log.debug( " ---------- go_active ----------" )
self._go_active()
state = _ia_client.get_agent_state()
log.info("TestPlatformInstrument get_agent_state %s", state)
log.debug( "---------- run ----------" )
self._run()
gevent.sleep(2)
#......... remainder of this example omitted .........
Example 4: TestInstrumentManagementServiceIntegration
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
#container = Container()
#print 'starting container'
#container.start()
#print 'started container'
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.IDS = IdentityManagementServiceClient(node=self.container.node)
self.PSC = PubsubManagementServiceClient(node=self.container.node)
self.DP = DataProductManagementServiceClient(node=self.container.node)
self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
self.DSC = DatasetManagementServiceClient(node=self.container.node)
self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
# @unittest.skip('this test just for debugging setup')
# def test_just_the_setup(self):
# return
@attr('EXT')
def test_resources_associations_extensions(self):
"""
create one of each resource and association used by IMS
to guard against problems in ion-definitions
"""
#stuff we control
instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance))
instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance))
platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))
#stuff we associate to
data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))
org_id, _ = self.RR.create(any_old(RT.Org))
#instrument_agent_instance_id #is only a target
#instrument_agent
self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)
#instrument_device
self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)
instrument_model_id #is only a target
platform_agent_instance_id #is only a target
#platform_agent
self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)
#platform_device
self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)
platform_model_id #is only a target
#sensor_device
self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)
sensor_model_id #is only a target
#create a parsed product for this instrument output
tdom, sdom = time_series_domain()
tdom = tdom.dump()
sdom = sdom.dump()
dp_obj = IonObject(RT.DataProduct,
name='the parsed data',
description='ctd stream test',
processing_level_code='Parsed_Canonical',
temporal_domain = tdom,
spatial_domain = sdom)
pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
log.debug( 'new dp_id = %s', data_product_id1)
#......... remainder of this example omitted .........
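A point worth noting in this example: it calls create on the plain ResourceRegistryServiceClient (self.RR), which returns an (id, revision) tuple, whereas EnhancedResourceRegistryClient.create, used elsewhere on this page, returns just the id and validates the resource type and name (see Example 15). A condensed, illustrative comparison using the same names as the example:
# Plain registry client: returns (resource_id, revision)
org_id, _ = self.RR.create(any_old(RT.Org))
# Enhanced wrapper: returns the resource_id only, with type/name checks
org_id = self.RR2.create(any_old(RT.Org))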
Example 5: BaseIntTestPlatform
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class BaseIntTestPlatform(IonIntegrationTestCase, HelperTestMixin):
"""
A base class with several conveniences supporting specific platform agent
integration tests, see:
- ion/agents/platform/test/test_platform_agent_with_rsn.py
- ion/services/sa/observatory/test/test_platform_launch.py
The platform IDs used here are organized as follows:
Node1D -> MJ01C -> LJ01D
where -> goes from parent platform to child platform.
This is a subset of the whole topology defined in the simulated platform
network (network.yml), which in turn is used by the RSN OMS simulator.
- 'LJ01D' is the root platform used in test_single_platform
- 'Node1D' is the root platform used in test_hierarchy
Methods are provided to construct specific platform topologies, but
subclasses decide which to use.
"""
@classmethod
def setUpClass(cls):
HelperTestMixin.setUpClass()
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
self.DP = DataProductManagementServiceClient(node=self.container.node)
self.PSC = PubsubManagementServiceClient(node=self.container.node)
self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
self.DSC = DatasetManagementServiceClient()
self.IDS = IdentityManagementServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self.org_id = self.RR2.create(any_old(RT.Org))
log.debug("Org created: %s", self.org_id)
# Create InstrumentModel
# TODO create multiple models as needed; for the moment assuming all
# used instruments are the same model here.
instModel_obj = IonObject(RT.InstrumentModel,
name='SBE37IMModel',
description="SBE37IMModel")
self.instModel_id = self.IMS.create_instrument_model(instModel_obj)
log.debug('new InstrumentModel id = %s ', self.instModel_id)
# Use the network definition provided by RSN OMS directly.
rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
CIOMSClientFactory.destroy_instance(rsn_oms)
if log.isEnabledFor(logging.TRACE):
# show serialized version for the network definition:
network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
log.trace("NetworkDefinition serialization:\n%s", network_definition_ser)
# set attributes for the platforms:
self._platform_attributes = {}
for platform_id in self._network_definition.pnodes:
pnode = self._network_definition.pnodes[platform_id]
dic = dict((attr.attr_id, attr.defn) for attr in pnode.attrs.itervalues())
self._platform_attributes[platform_id] = dic
log.trace("_platform_attributes: %s", self._platform_attributes)
# set ports for the platforms:
self._platform_ports = {}
for platform_id in self._network_definition.pnodes:
pnode = self._network_definition.pnodes[platform_id]
dic = {}
for port_id, port in pnode.ports.iteritems():
dic[port_id] = dict(port_id=port_id,
network=port.network)
self._platform_ports[platform_id] = dic
log.trace("_platform_ports: %s", self._platform_attributes)
self._async_data_result = AsyncResult()
self._data_subscribers = []
self._samples_received = []
self.addCleanup(self._stop_data_subscribers)
self._async_event_result = AsyncResult()
self._event_subscribers = []
self._events_received = []
self.addCleanup(self._stop_event_subscribers)
self._start_event_subscriber(sub_type="platform_event")
# instruments that have been set up: instr_key: i_obj
self._setup_instruments = {}
#################################################################
# data subscribers handling
#################################################################
#......... remainder of this example omitted .........
Example 6: TestRollups
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestRollups(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
#container = Container()
#print 'starting container'
#container.start()
#print 'started container'
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self._setup_statuses()
def _make_status(self, bad_items_dict=None):
if bad_items_dict is None:
bad_items_dict = {}
ret = {}
for k in reverse_mapping.values():
if k in bad_items_dict:
ret[k] = bad_items_dict[k]
else:
ret[k] = DeviceStatusType.STATUS_OK
return ret
def _setup_statuses(self):
# set up according to https://docs.google.com/drawings/d/1kZ_L4xr4Be0OdqMDX6tiI50hROgvLHU4HcnD7e_NIKE/pub?w=1200z
# https://confluence.oceanobservatories.org/display/syseng/CIAD+SA+OV+Observatory+Status+and+Events
device_agents = {}
ms = self._make_status
# override the default "get agent" function and resource registry
IMS_SVC = self._get_svc(InstrumentManagementService)
OMS_SVC = self._get_svc(ObservatoryManagementService)
self.IMS_ASB = self._get_specific_attr(IMS_SVC, AgentStatusBuilder)
self.OMS_ASB = self._get_specific_attr(OMS_SVC, AgentStatusBuilder)
assert self.IMS_ASB
assert self.OMS_ASB
self.IMS_ASB.RR2 = IMS_SVC.RR2
self.OMS_ASB.RR2 = OMS_SVC.RR2
# create org
org_id = self.OMS.create_marine_facility(any_old(RT.Org))
obs_id = self.OMS.create_observatory(any_old(RT.Observatory), org_id)
# create instrument and platform devices and sites
pst = dict([(i + 1, self.RR2.create(any_old(RT.PlatformSite))) for i in range(8)])
pdv = dict([(i + 1, self.RR2.create(any_old(RT.PlatformDevice))) for i in range(11)])
ist = dict([(i + 1, self.RR2.create(any_old(RT.InstrumentSite))) for i in range(6)])
idv = dict([(i + 1, self.RR2.create(any_old(RT.InstrumentDevice))) for i in range(6)])
# create associations
has_site = [
(obs_id, pst[2]),
(pst[2], pst[1]),
(pst[1], ist[1]),
(pst[2], pst[3]),
(pst[3], ist[2]),
(pst[3], ist[3]),
(obs_id, pst[4]),
(pst[4], pst[5]),
(pst[4], pst[6]),
(pst[6], pst[7]),
(pst[7], ist[4]),
(pst[6], pst[8]),
(pst[8], ist[5]),
(pst[8], ist[6]),
]
has_device = [
(pst[2], pdv[2]),
(pst[1], pdv[1]),
(ist[1], idv[1]),
(pst[3], pdv[3]),
(pdv[3], idv[2]),
(pdv[3], idv[3]),
(ist[2], idv[2]),
(ist[3], idv[3]),
(pst[4], pdv[4]),
(pdv[4], pdv[5]),
(pdv[5], pdv[6]),
(pdv[5], pdv[7]),
(pdv[7], idv[4]),
(pst[6], pdv[5]),
(pst[7], pdv[6]),
(pst[8], pdv[7]),
(ist[5], idv[4]),
(pdv[8], pdv[9]),
#......... remainder of this example omitted .........
Example 7: TestDeployment
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestDeployment(IonIntegrationTestCase):
def setUp(self):
# Start container
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.c = DotDict()
self.c.resource_registry = self.rrclient
self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
# deactivate all data processes when tests are complete
def killAllDataProcesses():
for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
self.dsmsclient.deactivate_data_process(proc_id)
self.dsmsclient.delete_data_process(proc_id)
self.addCleanup(killAllDataProcesses)
#@unittest.skip("targeting")
def test_create_deployment(self):
#create a deployment with metadata and an initial site and device
platform_site__obj = IonObject(RT.PlatformSite,
name='PlatformSite1',
description='test platform site')
site_id = self.omsclient.create_platform_site(platform_site__obj)
platform_device__obj = IonObject(RT.PlatformDevice,
name='PlatformDevice1',
description='test platform device')
device_id = self.imsclient.create_platform_device(platform_device__obj)
start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end)
deployment_obj = IonObject(RT.Deployment,
name='TestDeployment',
description='some new deployment',
constraint_list=[temporal_bounds])
deployment_id = self.omsclient.create_deployment(deployment_obj)
self.omsclient.assign_site_to_deployment(site_id, deployment_id)
self.omsclient.assign_device_to_deployment(device_id, deployment_id)
log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )
#retrieve the deployment objects and check that the assoc site and device are attached
read_deployment_obj = self.omsclient.read_deployment(deployment_id)
log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )
site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
self.assertEqual(len(site_ids), 1)
device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
self.assertEqual(len(device_ids), 1)
#delete the deployment
self.omsclient.force_delete_deployment(deployment_id)
# now try to get the deleted dp object
try:
self.omsclient.read_deployment(deployment_id)
except NotFound:
pass
else:
self.fail("deleted deployment was found during read")
#@unittest.skip("targeting")
def test_prepare_deployment_support(self):
deploy_sup = self.omsclient.prepare_deployment_support()
self.assertTrue(deploy_sup)
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_, "AssocDeploymentInstDevice")
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].type_, "AssocDeploymentPlatDevice")
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].type_, "AssocDeploymentInstSite")
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].type_, "AssocDeploymentPlatSite")
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])
#create a deployment with metadata and an initial site and device
platform_site__obj = IonObject(RT.PlatformSite,
name='PlatformSite1',
description='test platform site')
site_id = self.omsclient.create_platform_site(platform_site__obj)
#......... remainder of this example omitted .........
Example 8: TestAgentStatusBuilderIntegration
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestAgentStatusBuilderIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
#container = Container()
#print 'starting container'
#container.start()
#print 'started container'
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self._setup_statuses()
def _make_status(self, bad_items_dict):
ret = {}
for k in reverse_mapping.values():
if k in bad_items_dict:
ret[k] = bad_items_dict[k]
else:
ret[k] = DeviceStatusType.STATUS_OK
return ret
def _setup_statuses(self):
device_agents = {}
IMS_SVC = self._get_svc(InstrumentManagementService)
OMS_SVC = self._get_svc(ObservatoryManagementService)
self.IMS_ASB = self._get_specific_attr(IMS_SVC, AgentStatusBuilder)
self.OMS_ASB = self._get_specific_attr(OMS_SVC, AgentStatusBuilder)
assert self.IMS_ASB
assert self.OMS_ASB
self.IMS_ASB.RR2 = IMS_SVC.RR2
self.OMS_ASB.RR2 = OMS_SVC.RR2
# create one tree of devices
self.grandparent1_device_id = self.RR2.create(any_old(RT.PlatformDevice))
self.parent1_device_id = self.RR2.create(any_old(RT.PlatformDevice))
self.child1_device_id = self.RR2.create(any_old(RT.InstrumentDevice))
self.RR2.create_association(self.grandparent1_device_id, PRED.hasDevice, self.parent1_device_id)
self.RR2.create_association(self.parent1_device_id, PRED.hasDevice, self.child1_device_id)
g1_agent = FakeAgent()
g1_stat = self._make_status({AggregateStatusType.AGGREGATE_COMMS: DeviceStatusType.STATUS_UNKNOWN})
p1_stat = self._make_status({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_CRITICAL})
c1_stat = self._make_status({AggregateStatusType.AGGREGATE_LOCATION: DeviceStatusType.STATUS_WARNING})
g1_agent.set_agent("aggstatus", g1_stat)
g1_agent.set_agent("child_agg_status", {self.parent1_device_id: p1_stat,
self.child1_device_id: c1_stat})
device_agents[self.grandparent1_device_id] = g1_agent
c1_agent = FakeAgent()
c1_agent.set_agent("aggstatus", c1_stat)
device_agents[self.child1_device_id] = c1_agent
# create second tree of devices
self.grandparent2_device_id = self.RR2.create(any_old(RT.PlatformDevice))
self.parent2_device_id = self.RR2.create(any_old(RT.PlatformDevice))
self.child2_device_id = self.RR2.create(any_old(RT.InstrumentDevice))
self.RR2.create_association(self.grandparent2_device_id, PRED.hasDevice, self.parent2_device_id)
self.RR2.create_association(self.parent2_device_id, PRED.hasDevice, self.child2_device_id)
g2_agent = FakeAgent()
g2_stat = self._make_status({AggregateStatusType.AGGREGATE_COMMS: DeviceStatusType.STATUS_UNKNOWN})
p2_stat = self._make_status({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_CRITICAL})
c2_stat = self._make_status({AggregateStatusType.AGGREGATE_LOCATION: DeviceStatusType.STATUS_WARNING})
g2_agent.set_agent("aggstatus", g2_stat)
g2_agent.set_agent("child_agg_status", {self.parent2_device_id: p2_stat,
self.child2_device_id: c2_stat})
device_agents[self.grandparent2_device_id] = g2_agent
def my_get_agent_client(device_id, **kwargs):
try:
return device_agents[device_id]
except KeyError:
raise NotFound
self.IMS_ASB._get_agent_client = my_get_agent_client
@unittest.skip("hasDevice rollup is no longer supported")
def test_get_device_rollup(self):
iext = self.IMS.get_instrument_device_extension(self.child1_device_id)
istatus = self._make_status({AggregateStatusType.AGGREGATE_LOCATION: DeviceStatusType.STATUS_WARNING})
#......... remainder of this example omitted .........
Example 9: TestPlatformLaunch
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestPlatformLaunch(IonIntegrationTestCase):
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
self.DP = DataProductManagementServiceClient(node=self.container.node)
self.PSC = PubsubManagementServiceClient(node=self.container.node)
self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
self.DSC = DatasetManagementServiceClient()
self.IDS = IdentityManagementServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
# Use the network definition provided by RSN OMS directly.
rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
# get serialized version for the configuration:
self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
if log.isEnabledFor(logging.TRACE):
log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)
self._async_data_result = AsyncResult()
self._data_subscribers = []
self._samples_received = []
self.addCleanup(self._stop_data_subscribers)
self._async_event_result = AsyncResult()
self._event_subscribers = []
self._events_received = []
self.addCleanup(self._stop_event_subscribers)
self._start_event_subscriber()
def _start_data_subscriber(self, stream_name, stream_id):
"""
Starts data subscriber for the given stream_name and stream_config
"""
def consume_data(message, stream_route, stream_id):
# A callback for processing subscribed-to data.
log.info('Subscriber received data message: %s.', str(message))
self._samples_received.append(message)
self._async_data_result.set()
log.info('_start_data_subscriber stream_name=%r stream_id=%r',
stream_name, stream_id)
# Create subscription for the stream
exchange_name = '%s_queue' % stream_name
self.container.ex_manager.create_xn_queue(exchange_name).purge()
sub = StandaloneStreamSubscriber(exchange_name, consume_data)
sub.start()
self._data_subscribers.append(sub)
sub_id = self.PSC.create_subscription(name=exchange_name, stream_ids=[stream_id])
self.PSC.activate_subscription(sub_id)
sub.subscription_id = sub_id
def _stop_data_subscribers(self):
"""
Stop the data subscribers on cleanup.
"""
try:
for sub in self._data_subscribers:
if hasattr(sub, 'subscription_id'):
try:
self.PSC.deactivate_subscription(sub.subscription_id)
except:
pass
self.PSC.delete_subscription(sub.subscription_id)
sub.stop()
finally:
self._data_subscribers = []
def _start_event_subscriber(self, event_type="DeviceEvent", sub_type="platform_event"):
"""
Starts event subscriber for events of given event_type ("DeviceEvent"
by default) and given sub_type ("platform_event" by default).
"""
def consume_event(evt, *args, **kwargs):
# A callback for consuming events.
log.info('Event subscriber received evt: %s.', str(evt))
self._events_received.append(evt)
self._async_event_result.set(evt)
sub = EventSubscriber(event_type=event_type,
sub_type=sub_type,
callback=consume_event)
sub.start()
log.info("registered event subscriber for event_type=%r, sub_type=%r",
event_type, sub_type)
self._event_subscribers.append(sub)
sub._ready_event.wait(timeout=EVENT_TIMEOUT)
#......... remainder of this example omitted .........
Example 10: TestDataProductManagementServiceCoverage
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestDataProductManagementServiceCoverage(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
log.debug("Start rel from url")
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.DPMS = DataProductManagementServiceClient()
self.RR = ResourceRegistryServiceClient()
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self.DAMS = DataAcquisitionManagementServiceClient()
self.PSMS = PubsubManagementServiceClient()
self.ingestclient = IngestionManagementServiceClient()
self.PD = ProcessDispatcherServiceClient()
self.DSMS = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
self.data_retriever = DataRetrieverServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
log.debug("get datastore")
datastore_name = CACHE_DATASTORE_NAME
self.db = self.container.datastore_manager.get_datastore(datastore_name)
self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.PD.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
self.pids = []
self.exchange_points = []
self.exchange_names = []
self.addCleanup(self.cleaning_up)
@staticmethod
def clean_subscriptions():
ingestion_management = IngestionManagementServiceClient()
pubsub = PubsubManagementServiceClient()
rr = ResourceRegistryServiceClient()
ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
for ic in ingestion_config_ids:
subscription_ids, assocs = rr.find_objects(subject=ic, predicate=PRED.hasSubscription, id_only=True)
for subscription_id, assoc in zip(subscription_ids, assocs):
rr.delete_association(assoc)
try:
pubsub.deactivate_subscription(subscription_id)
except:
log.exception("Unable to decativate subscription: %s", subscription_id)
pubsub.delete_subscription(subscription_id)
def cleaning_up(self):
for pid in self.pids:
log.debug("number of pids to be terminated: %s", len(self.pids))
try:
self.PD.cancel_process(pid)
log.debug("Terminated the process: %s", pid)
except:
log.debug("could not terminate the process id: %s" % pid)
TestDataProductManagementServiceCoverage.clean_subscriptions()
for xn in self.exchange_names:
xni = self.container.ex_manager.create_xn_queue(xn)
xni.delete()
for xp in self.exchange_points:
xpi = self.container.ex_manager.create_xp(xp)
xpi.delete()
def test_CRUD_data_product(self):
#------------------------------------------------------------------------------------------------
# create a stream definition for the data from the ctd simulator
#------------------------------------------------------------------------------------------------
parameter_dictionary = self.DSMS.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
ctd_stream_def_id = self.PSMS.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
log.debug("Created stream def id %s" % ctd_stream_def_id)
#------------------------------------------------------------------------------------------------
# test creating a new data product w/o a stream definition
#------------------------------------------------------------------------------------------------
# Generic time-series data domain creation
tdom, sdom = time_series_domain()
dp_obj = IonObject(RT.DataProduct,
name='DP1',
description='some new dp',
#......... remainder of this example omitted .........
Example 11: DataAcquisitionManagementService
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class DataAcquisitionManagementService(BaseDataAcquisitionManagementService):
def on_init(self):
self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)
# -----------------
# The following operations register different types of data producers
# -----------------
def register_external_data_set(self, external_dataset_id=''):
"""Register an existing external data set as data producer
@param external_dataset_id str
@retval data_producer_id str
"""
# retrieve the data_source object
data_set_obj = self.clients.resource_registry.read(external_dataset_id)
if data_set_obj is None:
raise NotFound("External Data Set %s does not exist" % external_dataset_id)
#create a ExtDatasetProducerContext to hold the state of the this producer
producer_context_obj = IonObject(OT.ExtDatasetProducerContext)
#create data producer resource and associate to this external_dataset_id
data_producer_obj = IonObject(RT.DataProducer,name=data_set_obj.name,
description="Primary DataProducer for ExternalDataset %s" % data_set_obj.name,
producer_context=producer_context_obj, is_primary=True)
data_producer_id, rev = self.clients.resource_registry.create(data_producer_obj)
# Create association
self.clients.resource_registry.create_association(external_dataset_id, PRED.hasDataProducer, data_producer_id)
return data_producer_id
def unregister_external_data_set(self, external_dataset_id=''):
"""
@param external_dataset_id str
@throws NotFound object with specified id does not exist
"""
# Verify that id is valid
external_data_set_obj = self.clients.resource_registry.read(external_dataset_id)
# List all resource ids that are objects for this data_source and has the hasDataProducer link
producers, producer_assns = self.clients.resource_registry.find_objects(
subject=external_dataset_id, predicate=PRED.hasDataProducer, id_only=True)
for producer, producer_assn in zip(producers, producer_assns):
log.debug("DataAcquisitionManagementService:unregister_external_data_set delete association %s", str(producer_assn))
self.clients.resource_registry.delete_association(producer_assn)
log.debug("DataAcquisitionManagementService:unregister_external_data_set delete producer %s", str(producer))
self.clients.resource_registry.delete(producer)
return
def register_process(self, data_process_id=''):
"""
Register an existing data process as data producer
"""
# retrieve the data_process object
data_process_obj = self.clients.resource_registry.read(data_process_id)
if data_process_obj is None:
raise NotFound("Data Process %s does not exist" % data_process_id)
#find the data process definition
parameters = []
data_process_def_objs, _ = self.clients.resource_registry.find_objects(
subject=data_process_id, predicate=PRED.hasProcessDefinition, object_type=RT.DataProcessDefinition, id_only=False)
if not data_process_def_objs:
parameters = set()
out_data_product_ids, _ = self.clients.resource_registry.find_objects(
subject=data_process_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct,id_only=True)
for dp_id in out_data_product_ids:
stream_ids, _ = self.clients.resource_registry.find_objects(subject=dp_id, predicate=PRED.hasStream, id_only=True)
for stream_id in stream_ids:
stream_def = self.clients.pubsub_management.read_stream_definition(stream_id=stream_id)
parameters = parameters.union(stream_def.available_fields)
parameters = list(parameters)
else:
parameters = data_process_def_objs[0].parameters
#create a DataProcessProducerContext to hold the state of the this producer
producer_context_obj = IonObject(OT.DataProcessProducerContext, configuration=data_process_obj.configuration, parameters=parameters)
#create data producer resource and associate to this data_process_id
data_producer_obj = IonObject(RT.DataProducer,name=data_process_obj.name,
description="Primary DataProducer for DataProcess %s" % data_process_obj.name,
producer_context=producer_context_obj, is_primary=True)
data_producer_id, rev = self.clients.resource_registry.create(data_producer_obj)
# Create association
self.clients.resource_registry.create_association(data_process_id, PRED.hasDataProducer, data_producer_id)
return data_producer_id
#......... remainder of this example omitted .........
Example 12: TestPlatformInstrument
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestPlatformInstrument(BaseIntTestPlatform):
def setUp(self):
self._start_container()
self._pp = pprint.PrettyPrinter()
log.debug("oms_uri = %s", OMS_URI)
self.oms = CIOMSClientFactory.create_instance(OMS_URI)
self._get_platform_attributes()
url = OmsTestMixin.start_http_server()
log.info("TestPlatformInstrument:setup http url %s", url)
result = self.oms.event.register_event_listener(url)
log.info("TestPlatformInstrument:setup register_event_listener result %s", result)
# response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
# log.info("TestPlatformInstrument:setup get_platform_ports %s", response)
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
# Now create client to DataProductManagementService
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
self.org_id = self.RR2.create(any_old(RT.Org))
log.debug("Org created: %s", self.org_id)
# see _set_receive_timeout
self._receive_timeout = 177
self.instrument_device = ''
self.platform_device = ''
self.platform_agent_instance_id = ''
self._pa_client = ''
def done():
CIOMSClientFactory.destroy_instance(self.oms)
event_notifications = OmsTestMixin.stop_http_server()
log.info("event_notifications = %s" % str(event_notifications))
self.addCleanup(done)
def _get_platform_attributes(self):
attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall')
log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))
# ret_infos = attr_infos['LPJBox_CI_Ben_Hall']
# for attrName, attr_defn in ret_infos.iteritems():
# attr = AttrNode(attrName, attr_defn)
# pnode.add_attribute(attr)
return attr_infos
@unittest.skip('Still in construction...')
def test_platform_with_instrument_streaming(self):
#
# The following is with just a single platform and the single
# instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
#
#load the parameters and the param dicts necessary for the VEL3D
self._load_params()
#create the instrument device/agent/model
self._create_instrument_resources()
#create the platform device, agent and instance
self._create_platform_configuration('LPJBox_CI_Ben_Hall')
self.rrclient.create_association(subject=self.platform_device, predicate=PRED.hasDevice, object=self.instrument_device)
self._start_platform()
# self.addCleanup(self._stop_platform, p_root)
# get everything in command mode:
self._ping_agent()
self._initialize()
_ia_client = ResourceAgentClient(self.instrument_device, process=FakeProcess())
state = _ia_client.get_agent_state()
log.info("TestPlatformInstrument get_agent_state %s", state)
self._go_active()
# self._run()
gevent.sleep(3)
# note that this includes the instrument also getting to the command state
#......... remainder of this example omitted .........
Example 13: BaseIntTestPlatform
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class BaseIntTestPlatform(IonIntegrationTestCase, HelperTestMixin):
"""
A base class with several conveniences supporting specific platform agent
integration tests, see:
- ion/agents/platform/test/test_platform_agent_with_rsn.py
- ion/services/sa/observatory/test/test_platform_launch.py
The platform IDs used here are organized as follows:
Node1D -> MJ01C -> LJ01D
where -> goes from parent platform to child platform.
This is a subset of the whole topology defined in the simulated platform
network (network.yml), which in turn is used by the RSN OMS simulator.
- 'LJ01D' is the root platform used in test_single_platform
- 'Node1D' is the root platform used in test_hierarchy
Methods are provided to construct specific platform topologies, but
subclasses decide which to use.
"""
@classmethod
def setUpClass(cls):
HelperTestMixin.setUpClass()
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
self.DP = DataProductManagementServiceClient(node=self.container.node)
self.PSC = PubsubManagementServiceClient(node=self.container.node)
self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
self.DSC = DatasetManagementServiceClient()
self.IDS = IdentityManagementServiceClient(node=self.container.node)
self.RR2 = EnhancedResourceRegistryClient(self.RR)
self.org_id = self.RR2.create(any_old(RT.Org))
log.debug("Org created: %s", self.org_id)
# Use the network definition provided by RSN OMS directly.
rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
CIOMSClientFactory.destroy_instance(rsn_oms)
# get serialized version for the configuration:
self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)
# set attributes for the platforms:
self._platform_attributes = {}
for platform_id in self._network_definition.pnodes:
pnode = self._network_definition.pnodes[platform_id]
dic = dict((attr.attr_id, attr.defn) for attr in pnode.attrs.itervalues())
self._platform_attributes[platform_id] = dic
log.trace("_platform_attributes: %s", self._platform_attributes)
# set ports for the platforms:
self._platform_ports = {}
for platform_id in self._network_definition.pnodes:
pnode = self._network_definition.pnodes[platform_id]
dic = {}
for port_id, port in pnode.ports.iteritems():
dic[port_id] = dict(port_id=port_id,
network=port.network)
self._platform_ports[platform_id] = dic
log.trace("_platform_ports: %s", self._platform_attributes)
self._async_data_result = AsyncResult()
self._data_subscribers = []
self._samples_received = []
self.addCleanup(self._stop_data_subscribers)
self._async_event_result = AsyncResult()
self._event_subscribers = []
self._events_received = []
self.addCleanup(self._stop_event_subscribers)
self._start_event_subscriber()
#################################################################
# data subscribers handling
#################################################################
def _start_data_subscriber(self, stream_name, stream_id):
"""
Starts data subscriber for the given stream_name and stream_config
"""
def consume_data(message, stream_route, stream_id):
# A callback for processing subscribed-to data.
log.info('Subscriber received data message: %s. stream_name=%r stream_id=%r',
str(message), stream_name, stream_id)
self._samples_received.append(message)
self._async_data_result.set()
log.info('_start_data_subscriber stream_name=%r stream_id=%r',
#......... remainder of this example omitted .........
Example 14: DataProcessManagementService
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class DataProcessManagementService(BaseDataProcessManagementService):
def on_init(self):
IonObject("Resource") # suppress pyflakes error
self.override_clients(self.clients)
self.init_module_uploader()
self.get_unique_id = (lambda : uuid4().hex)
self.data_product_management = DataProductManagementServiceClient()
def init_module_uploader(self):
if self.CFG:
#looking for forms like host=amoeba.ucsd.edu, remotepath=/var/www/release, user=steve
cfg_host = self.CFG.get_safe("service.data_process_management.process_release_host", None)
cfg_remotepath = self.CFG.get_safe("service.data_process_management.process_release_directory", None)
cfg_user = self.CFG.get_safe("service.data_process_management.process_release_user",
pwd.getpwuid(os.getuid())[0])
cfg_wwwprefix = self.CFG.get_safe("service.data_process_management.process_release_wwwprefix", None)
if cfg_host is None or cfg_remotepath is None or cfg_wwwprefix is None:
raise BadRequest("Missing configuration items; host='%s', directory='%s', wwwprefix='%s'" %
(cfg_host, cfg_remotepath, cfg_wwwprefix))
self.module_uploader = RegisterModulePreparerPy(dest_user=cfg_user,
dest_host=cfg_host,
dest_path=cfg_remotepath,
dest_wwwprefix=cfg_wwwprefix)
def override_clients(self, new_clients):
"""
Replaces the service clients with a new set of them... and makes sure they go to the right places
"""
self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)
#shortcut names for the import sub-services
if hasattr(self.clients, "resource_registry"):
self.RR = self.clients.resource_registry
#todo: need to know what object will be worked with here
def register_data_process_definition(self, process_code=''):
"""
register a process module by putting it in a web-accessible location
@process_code a base64-encoded python file
"""
# # retrieve the resource
# data_process_definition_obj = self.clients.resource_registry.read(data_process_definition_id)
dest_filename = "process_code_%s.py" % self.get_unique_id() #data_process_definition_obj._id
#process the input file (base64-encoded .py)
uploader_obj, err = self.module_uploader.prepare(process_code, dest_filename)
if None is uploader_obj:
raise BadRequest("Process code failed validation: %s" % err)
# actually upload
up_success, err = uploader_obj.upload()
if not up_success:
raise BadRequest("Upload failed: %s" % err)
# #todo: save module / class?
# data_process_definition_obj.uri = uploader_obj.get_destination_url()
# self.clients.resource_registry.update(data_process_definition_obj)
return uploader_obj.get_destination_url()
@classmethod
def _cmp_transform_function(cls, tf1, tf2):
return tf1.module == tf2.module and \
tf1.cls == tf2.cls and \
tf1.uri == tf2.uri and \
tf1.function_type == tf2.function_type
def create_transform_function(self, transform_function=''):
'''
Creates a new transform function
'''
return self.RR2.create(transform_function, RT.TransformFunction)
def read_transform_function(self, transform_function_id=''):
tf = self.RR2.read(transform_function_id, RT.TransformFunction)
return tf
def update_transform_function(self, transform_function=None):
self.RR2.update(transform_function, RT.TransformFunction)
def delete_transform_function(self, transform_function_id=''):
self.RR2.retire(transform_function_id, RT.TransformFunction)
def force_delete_transform_function(self, transform_function_id=''):
self.RR2.pluck_delete(transform_function_id, RT.TransformFunction)
#......... remainder of this example omitted .........
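Example 14 shows the typical service-side pattern: the service wraps its resource registry in an EnhancedResourceRegistryClient once (in on_init) and then routes all CRUD for a resource type through type-checked RR2 calls. A condensed, illustrative sketch of that cycle follows, reusing the rr2 instance and imports assumed in the sketch at the top of this page; the comments paraphrase the method names rather than verified behavior.
tf_obj = IonObject(RT.TransformFunction, name='example_tf')  # illustrative object, minimal fields
tf_id = rr2.create(tf_obj, RT.TransformFunction)   # type-checked create
tf = rr2.read(tf_id, RT.TransformFunction)         # read back, verifying the type
rr2.update(tf, RT.TransformFunction)               # update in place
rr2.retire(tf_id, RT.TransformFunction)            # soft delete
rr2.pluck_delete(tf_id, RT.TransformFunction)      # presumably plucks associations, then force-deletes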
Example 15: TestEnhancedResourceRegistryClient
# Required import: from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient [as alias]
# Or: from ion.util.enhanced_resource_registry_client.EnhancedResourceRegistryClient import create [as alias]
class TestEnhancedResourceRegistryClient(PyonTestCase):
def setUp(self):
self.rr = Mock()
self.RR2 = EnhancedResourceRegistryClient(self.rr)
def sample_resource(self):
return any_old(RT.InstrumentDevice)
def test_init(self):
pass
def test_create(self):
"""
test resource creation in normal case
"""
# get objects
good_sample_resource = self.sample_resource()
#configure Mock
self.rr.create.return_value = ('111', 'bla')
self.rr.find_resources.return_value = ([], [])
sample_resource_id = self.RR2.create(good_sample_resource, RT.InstrumentDevice)
self.rr.create.assert_called_once_with(good_sample_resource)
self.assertEqual(sample_resource_id, '111')
def test_create_bad_wrongtype(self):
"""
test resource creation failure for wrong type
"""
# get objects
bad_sample_resource = any_old(RT.PlatformDevice)
delattr(bad_sample_resource, "name")
#configure Mock
self.rr.create.return_value = ('111', 'bla')
self.rr.find_resources.return_value = ([], [])
self.assertRaises(BadRequest, self.RR2.create, bad_sample_resource, RT.InstrumentDevice)
def test_create_bad_noname(self):
"""
test resource creation failure for no name
"""
# get objects
bad_sample_resource = self.sample_resource()
delattr(bad_sample_resource, "name")
#configure Mock
self.rr.create.return_value = ('111', 'bla')
self.rr.find_resources.return_value = ([], [])
self.assertRaises(BadRequest, self.RR2.create, bad_sample_resource, RT.InstrumentDevice)
# def test_create_bad_dupname(self):
# """
# test resource creation failure for duplicate name
# """
# # get objects
#
# bad_sample_resource = self.sample_resource()
# #really, the resource doesn't matter; it's the retval from find that matters
#
# #configure Mock
# self.rr.create.return_value = ('111', 'bla')
# self.rr.find_resources.return_value = ([0], [0])
#
# self.assertRaises(BadRequest, self.RR2.create, bad_sample_resource, RT.InstrumentDevice)
#
def test_read(self):
"""
test resource read (passthru)
"""
# get objects
myret = self.sample_resource()
#configure Mock
self.rr.read.return_value = myret
response = self.RR2.read("111", RT.InstrumentDevice)
self.rr.read.assert_called_once_with("111")
self.assertEqual(response, myret)
#self.assertDictEqual(response.__dict__,
# self.sample_resource().__dict__)
def test_read_bad_wrongtype(self):
"""
test resource read (passthru)
#......... remainder of this example omitted .........
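The unit tests in this last example exercise the validation side of create: an object whose actual type does not match the declared resource type, or an object missing a name, raises BadRequest instead of being created. A small hedged sketch of what that looks like from a caller's side, reusing the rr2 and IonObject/RT assumptions from the sketch at the top of this page:
from pyon.core.exception import BadRequest
try:
    # declared type (InstrumentDevice) does not match the object (PlatformDevice)
    rr2.create(IonObject(RT.PlatformDevice, name='pd01'), RT.InstrumentDevice)
except BadRequest:
    pass  # create() refuses mismatched types, as Example 15 asserts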