本文整理汇总了Python中interface.services.dm.idata_retriever_service.DataRetrieverServiceClient类的典型用法代码示例。如果您正苦于以下问题:Python DataRetrieverServiceClient类的具体用法?Python DataRetrieverServiceClient怎么用?Python DataRetrieverServiceClient使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DataRetrieverServiceClient类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: assert_raw_granules_ingested
def assert_raw_granules_ingested(self, count, payload_size):
    """Verify each ingested raw granule carries a payload of the expected size.

    Retrieves granules one at a time via a ``tdoa`` slice query and asserts
    that the 'raw' payload length of every granule equals *payload_size*.

    :param count:        number of granules expected in the raw dataset
    :param payload_size: expected length of the 'raw' field in each granule
    """
    #--------------------------------------------------------------------------------
    # Test the slicing capabilities
    #--------------------------------------------------------------------------------
    data_retriever = DataRetrieverServiceClient()
    # FIX: range(0, count-1) iterated only count-1 times and never checked the
    # final granule; iterate over all `count` granule indices.
    for i in range(count):
        # slice(i, i+1) selects exactly the i-th record from the dataset.
        granule = data_retriever.retrieve(dataset_id=self._raw_dataset_id, query={'tdoa':slice(i,i+1)})
        rdt = RecordDictionaryTool.load_from_granule(granule)
        log.info("Granule index: %d, time: %s, size: %s", i, rdt['time'][0], len(rdt['raw'][0]))
        self.assertEqual(payload_size, len(rdt['raw'][0]))
示例2: setUp
def setUp(self):
    """Start the container, deploy r2deploy.yml, and build all service clients
    plus the listener bookkeeping used by the instrument-activation tests."""
    # Start container
    super(TestActivateInstrumentIntegration, self).setUp()
    config = DotDict()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
    # Now create clients to the deployed services (all bound to this container's node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    self.usernotificationclient = UserNotificationServiceClient()
    # Set up listener state used by the data-subscription helpers
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []
    self.event_publisher = EventPublisher()
示例3: setUp
def setUp(self):
    """Start the container with the 'alpha' ingestion/replay worker modules
    and prepare the science-data datastore and service clients."""
    super(DataRetrieverIntTestAlpha,self).setUp()
    self._start_container()
    config = DotDict()
    # Override the bootstrap processes so the 'alpha' implementations are launched.
    config.bootstrap.processes.ingestion.module = 'ion.processes.data.ingestion.ingestion_worker_a'
    config.bootstrap.processes.replay.module = 'ion.processes.data.replay.replay_process_a'
    self.container.start_rel_from_url('res/deploy/r2dm.yml', config)
    self.datastore_name = 'test_datasets'
    self.datastore = self.container.datastore_manager.get_datastore(self.datastore_name, profile=DataStore.DS_PROFILE.SCIDATA)
    self.data_retriever = DataRetrieverServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    # The configured value has the form "<exchange_space>.<exchange_point>".
    xs_dot_xp = CFG.core_xps.science_data
    try:
        self.XS, xp_base = xs_dot_xp.split('.')
        # Exchange point names are prefixed with the running system name.
        self.XP = '.'.join([get_sys_name(), xp_base])
    except ValueError:
        raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)
示例4: setUp
def setUp(self):
    """Start the container, create service clients, register the science-granule
    ingestion worker process definition, and launch one ingestion worker."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
    # Register the process definition for the science-granule ingestion worker.
    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class' :'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id
    # Bookkeeping for launched processes and messaging resources (used by cleanup).
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []
    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space
    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)
    # Schedule one ingestion worker bound to the queue configured above.
    pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)
    self.addCleanup(self.cleaning_up)
示例5: setUp
def setUp(self):
    """Start the container, open the example-profile test datastore, and
    create the data-retriever, dataset, registry and pubsub clients."""
    super(DataRetrieverServiceIntTest,self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2dm.yml')
    self.couch = self.container.datastore_manager.get_datastore('test_data_retriever', profile=DataStore.DS_PROFILE.EXAMPLES)
    self.datastore_name = 'test_data_retriever'
    self.dr_cli = DataRetrieverServiceClient(node=self.container.node)
    self.dsm_cli = DatasetManagementServiceClient(node=self.container.node)
    self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
    self.ps_cli = PubsubManagementServiceClient(node=self.container.node)
示例6: setUp
def setUp(self):
    """Start the container with the 'alpha' ingestion/replay modules and
    create the data-management service clients."""
    self._start_container()
    config = DotDict()
    # Override the bootstrap processes so the 'alpha' implementations are launched.
    config.bootstrap.processes.ingestion.module = 'ion.processes.data.ingestion.ingestion_worker_a'
    config.bootstrap.processes.replay.module = 'ion.processes.data.replay.replay_process_a'
    self.container.start_rel_from_url('res/deploy/r2dm.yml', config)
    self.datastore_name = 'test_datasets'
    self.pubsub_management = PubsubManagementServiceClient()
    self.ingestion_management = IngestionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
示例7: setUp
def setUp(self): # Love the non pep-8 convention
    """Start the container, create service clients, and initialize the
    exchange names and counters used by the granule tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.ingestion_management = IngestionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    # Synchronization event and exchange identifiers shared by the tests.
    self.event = Event()
    self.exchange_space_name = 'test_granules'
    self.exchange_point_name = 'science_data'
    # Per-test counters.
    self.i = 0
    self.cci = 0
示例8: setUp
def setUp(self):
    """Start the container and, for every registered ParameterDictionary,
    create a stream definition and (when possible) a data product."""
    self.i=0
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2params.yml')
    self.dataset_management = DatasetManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.resource_registry = self.container.resource_registry
    self.data_retriever = DataRetrieverServiceClient()
    # One data product per ParameterDictionary resource found in the registry.
    pdicts, _ = self.resource_registry.find_resources(restype='ParameterDictionary', id_only=False)
    self.dp_ids = []
    for pdict in pdicts:
        stream_def_id = self.pubsub_management.create_stream_definition(pdict.name, parameter_dictionary_id=pdict._id)
        dp_id = self.make_dp(stream_def_id)
        # make_dp may return a falsy value for dictionaries it cannot use; skip those.
        if dp_id: self.dp_ids.append(dp_id)
示例9: validate_data_ingest_retrieve
def validate_data_ingest_retrieve(self, dataset_id):
    """Validate that data was ingested into *dataset_id* and is retrievable.

    Checks that the last granule carries a 'salinity' field and that every
    salinity value across all granules is positive.

    :param dataset_id: id of the dataset to validate
    """
    assertions = self.assertTrue
    self.data_retriever = DataRetrieverServiceClient(node=self.container.node)
    # Validate that data was ingested: the most recent granule must have salinity.
    replay_granule = self.data_retriever.retrieve_last_granule(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(replay_granule)
    salinity = get_safe(rdt, 'salinity')
    # FIX: `salinity != None` is an equality comparison; for numpy arrays it
    # yields an elementwise array whose truth value is ambiguous. Use the
    # identity check, which is the correct None test in all cases.
    assertions(salinity is not None)
    # Retrieve all the granules from the database and check the values.
    replay_granule_all = self.data_retriever.retrieve(dataset_id)
    rdt = RecordDictionaryTool.load_from_granule(replay_granule_all)
    for k, v in rdt.iteritems():
        if k == 'salinity':
            for val in numpy.nditer(v):
                assertions(val > 0)
示例10: setUp
def setUp(self):
    """Start the container and create the observatory/instrument/data-product
    service clients plus the load-stage bookkeeping used by the tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    self.RR = ResourceRegistryServiceClient()
    # Convenience wrapper around the plain resource-registry client.
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient()
    self.org_management_service = OrgManagementServiceClient()
    self.IMS = InstrumentManagementServiceClient()
    self.dpclient = DataProductManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    # Incremental-load bookkeeping: current stage and created resources by name.
    self._load_stage = 0
    self._resources = {}
示例11: DataRetrieverIntTestAlpha
class DataRetrieverIntTestAlpha(IonIntegrationTestCase):
    """Integration test for the data retriever using the 'alpha' replay process."""

    def setUp(self):
        """Start the container with the alpha ingestion/replay workers and
        prepare the science-data datastore and service clients."""
        super(DataRetrieverIntTestAlpha,self).setUp()
        self._start_container()
        config = DotDict()
        # Override the bootstrap processes so the 'alpha' implementations are launched.
        config.bootstrap.processes.ingestion.module = 'ion.processes.data.ingestion.ingestion_worker_a'
        config.bootstrap.processes.replay.module = 'ion.processes.data.replay.replay_process_a'
        self.container.start_rel_from_url('res/deploy/r2dm.yml', config)
        self.datastore_name = 'test_datasets'
        self.datastore = self.container.datastore_manager.get_datastore(self.datastore_name, profile=DataStore.DS_PROFILE.SCIDATA)
        self.data_retriever = DataRetrieverServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        # The configured value has the form "<exchange_space>.<exchange_point>".
        xs_dot_xp = CFG.core_xps.science_data
        try:
            self.XS, xp_base = xs_dot_xp.split('.')
            # Exchange point names are prefixed with the running system name.
            self.XP = '.'.join([get_sys_name(), xp_base])
        except ValueError:
            raise StandardError('Invalid CFG for core_xps.science_data: "%s"; must have "xs.xp" structure' % xs_dot_xp)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_define_replay(self):
        """define_replay must launch a ReplayProcess in this container."""
        # Create a dataset to work with
        dataset_id = self.dataset_management.create_dataset('fakestream', self.datastore_name)
        replay_id, stream_id = self.data_retriever.define_replay(dataset_id=dataset_id)
        # Verify that the replay instance was created
        replay = self.resource_registry.read(replay_id)
        pid = replay.process_id
        # The launched process must be a ReplayProcess running in this container.
        process = self.container.proc_manager.procs[pid]
        self.assertIsInstance(process,ReplayProcess, 'Incorrect process launched')
示例12: setUp
def setUp(self):
    """Start the container, create service clients, and build the SBE37 CDM
    stream definition used by the workflow tests."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    # Now create clients to the deployed services (all bound to this container's node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)
    self.data_retriever = DataRetrieverServiceClient(node=self.container.node)
    # Canned SBE37 CDM stream definition shared by the tests.
    self.ctd_stream_def = SBE37_CDM_stream_definition()
示例13: setUp
def setUp(self):
    """Start the container and create the service clients needed by the
    RSN Vel3D instrument activation tests."""
    # Start container
    super(TestActivateRSNVel3DInstrument, self).setUp()
    config = DotDict()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
    # Now create clients to the deployed services (all bound to this container's node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
示例14: setUp
def setUp(self): # Love the non pep-8 convention
    """Start the container, create service clients, purge the test queues,
    and register cleanup for all ingestion processes started by the tests."""
    self._start_container()
    self.container.start_rel_from_url("res/deploy/r2deploy.yml")
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.resource_registry = ResourceRegistryServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.ingestion_management = IngestionManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    # Bookkeeping for processes, queues, and streams created during the tests.
    self.pids = []
    self.event = Event()
    self.exchange_space_name = "test_granules"
    self.exchange_point_name = "science_data"
    self.i = 0
    # Drain any messages left over from previous test runs.
    self.purge_queues()
    self.queue_buffer = []
    self.streams = []
    self.addCleanup(self.stop_all_ingestion)
示例15: setUp
def setUp(self):
    """Start the container, deploy r2deploy.yml, create service clients, and
    initialize the listener bookkeeping shared by the tests."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    # Now create clients to the deployed services (all bound to this container's node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    # Set up listener state used by the data-subscription helpers
    self._data_greenlets = []
    self._no_samples = None
    self._samples_received = []