This page collects typical usage examples of the Python method interface.objects.ProcessDefinition.executable['module']. If you have been wondering how to use ProcessDefinition.executable['module'] in Python, or what it looks like in practice, the curated examples below may help. You can also explore further usage examples of its containing class, interface.objects.ProcessDefinition.
Fifteen code examples of ProcessDefinition.executable['module'] are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
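Before diving into the collected examples, here is a minimal sketch of the pattern they all share: build a ProcessDefinition, point its executable dict at a module path and a class name, then register the definition with a process dispatcher client. The client handle and the module/class values in this sketch are illustrative assumptions, not taken from any one example below.

from interface.objects import ProcessDefinition

# Build the definition; name and description are free-form metadata.
process_definition = ProcessDefinition(
    name='example_worker_process',
    description='Illustrative worker process definition')

# Point the executable at the module path and the class implementing the process.
# Both values below are placeholders, not real modules.
process_definition.executable['module'] = 'ion.processes.data.example_worker'
process_definition.executable['class'] = 'ExampleWorker'

# Register it with a process dispatcher client (assumed to be in scope),
# as the examples below do with their own client handles:
# procdef_id = pds_client.create_process_definition(process_definition=process_definition)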
Example 1: on_initial_bootstrap
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def on_initial_bootstrap(self, process, config, **kwargs):
    pds_client = ProcessDispatcherServiceProcessClient(process=process)

    ingestion_module = config.get_safe('bootstrap.processes.ingestion.module', 'ion.processes.data.ingestion.science_granule_ingestion_worker')
    ingestion_class = config.get_safe('bootstrap.processes.ingestion.class', 'ScienceGranuleIngestionWorker')
    ingestion_datastore = config.get_safe('bootstrap.processes.ingestion.datastore_name', 'datasets')
    ingestion_queue = config.get_safe('bootstrap.processes.ingestion.queue', 'science_granule_ingestion')
    ingestion_workers = config.get_safe('bootstrap.processes.ingestion.workers', 2)

    replay_module = config.get_safe('bootstrap.processes.replay.module', 'ion.processes.data.replay.replay_process')
    replay_class = config.get_safe('bootstrap.processes.replay.class', 'ReplayProcess')

    process_definition = ProcessDefinition(
        name='ingestion_worker_process',
        description='Worker transform process for ingestion of datasets')
    process_definition.executable['module'] = ingestion_module
    process_definition.executable['class'] = ingestion_class
    ingestion_procdef_id = pds_client.create_process_definition(process_definition=process_definition)

    #--------------------------------------------------------------------------------
    # Simulate an HA ingestion worker by creating two of them
    #--------------------------------------------------------------------------------
    config = DotDict()
    config.process.datastore_name = ingestion_datastore
    config.process.queue_name = ingestion_queue

    for i in xrange(ingestion_workers):
        pds_client.schedule_process(process_definition_id=ingestion_procdef_id, configuration=config)

    process_definition = ProcessDefinition(name='data_replay_process', description='Process for the replay of datasets')
    process_definition.executable['module'] = replay_module
    process_definition.executable['class'] = replay_class
    pds_client.create_process_definition(process_definition=process_definition)
Example 2: ingestion_worker
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def ingestion_worker(self, process, config):
    # ingestion
    ingestion_module = config.get_safe('bootstrap.processes.ingestion.module', 'ion.processes.data.ingestion.science_granule_ingestion_worker')
    ingestion_class = config.get_safe('bootstrap.processes.ingestion.class', 'ScienceGranuleIngestionWorker')
    ingestion_datastore = config.get_safe('bootstrap.processes.ingestion.datastore_name', 'datasets')
    ingestion_queue = config.get_safe('bootstrap.processes.ingestion.queue', 'science_granule_ingestion')
    ingestion_workers = config.get_safe('bootstrap.processes.ingestion.workers', 1)

    #--------------------------------------------------------------------------------
    # Create ingestion workers
    #--------------------------------------------------------------------------------
    process_definition = ProcessDefinition(
        name='ingestion_worker_process',
        description='Worker transform process for ingestion of datasets')
    process_definition.executable['module'] = ingestion_module
    process_definition.executable['class'] = ingestion_class
    ingestion_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition)

    #--------------------------------------------------------------------------------
    # Schedule the configured number of ingestion workers (several simulate an HA deployment)
    #--------------------------------------------------------------------------------
    config = DotDict()
    config.process.datastore_name = ingestion_datastore
    config.process.queue_name = ingestion_queue

    for i in xrange(ingestion_workers):
        self.pds_client.schedule_process(process_definition_id=ingestion_procdef_id, configuration=config)
Example 3: notification_worker
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def notification_worker(self, process, config):
    # user notifications
    notification_module = config.get_safe('bootstrap.processes.user_notification.module', 'ion.processes.data.transforms.notification_worker')
    notification_class = config.get_safe('bootstrap.processes.user_notification.class', 'NotificationWorker')
    notification_workers = config.get_safe('bootstrap.processes.user_notification.workers', 1)

    #--------------------------------------------------------------------------------
    # Create notification workers
    #--------------------------------------------------------------------------------

    # set up the process definition
    process_definition_uns = ProcessDefinition(
        name='notification_worker_process',
        description='Worker transform process for user notifications')
    process_definition_uns.executable['module'] = notification_module
    process_definition_uns.executable['class'] = notification_class
    uns_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition_uns)

    config = DotDict()
    config.process.type = 'simple'

    for i in xrange(notification_workers):
        config.process.name = 'notification_worker_%s' % i
        config.process.queue_name = 'notification_worker_queue'
        self.pds_client.schedule_process(process_definition_id=uns_procdef_id, configuration=config)
Example 4: start_worker
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def start_worker(self):
    proc_def = ProcessDefinition()
    proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
    proc_def.executable['class'] = 'LastUpdateCache'
    proc_def_id = self.pd_cli.create_process_definition(process_definition=proc_def)

    subscription_id = self.pubsub_cli.create_subscription(query=ExchangeQuery(), exchange_name='ingestion_aggregate')

    config = {
        'couch_storage': {
            'datastore_name': self.datastore_name,
            'datastore_profile': 'SCIDATA'
        }
    }

    transform_id = self.tms_cli.create_transform(
        name='last_update_cache',
        description='LastUpdate that compiles an aggregate of metadata',
        in_subscription_id=subscription_id,
        process_definition_id=proc_def_id,
        configuration=config
    )
    self.tms_cli.activate_transform(transform_id=transform_id)
    transform = self.rr_cli.read(transform_id)
    pid = transform.process_id
    handle = self.container.proc_manager.procs[pid]
    return handle
Example 5: test_presf_L1
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def test_presf_L1(self):
    '''
    Test that packets are processed by the ctd_L1_pressure transform
    '''
    #---------------------------------------------------------------------------------------------
    # Launch a ctd transform
    #---------------------------------------------------------------------------------------------
    # Create the process definition
    process_definition = ProcessDefinition(
        name='PresfL1Transform',
        description='For testing PresfL1Transform')
    process_definition.executable['module'] = 'ion.processes.data.transforms.ctd.presf_L1'
    process_definition.executable['class'] = 'PresfL1Transform'
    ctd_transform_proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

    # Build the config
    config = DotDict()
    config.process.queue_name = self.exchange_name
    config.process.exchange_point = self.exchange_point

    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = self.pubsub.create_stream_definition('pres_stream_def', parameter_dictionary_id=pdict_id)
    pres_stream_id, _ = self.pubsub.create_stream('test_pressure',
                                                  stream_definition_id=stream_def_id,
                                                  exchange_point='science_data')
    config.process.publish_streams.seafloor_pressure = pres_stream_id

    # Schedule the process
    self.process_dispatcher.schedule_process(process_definition_id=ctd_transform_proc_def_id, configuration=config)
Example 6: test_event_in_stream_out_transform
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def test_event_in_stream_out_transform(self):
    """
    Test the event-in/stream-out transform
    """
    stream_id, _ = self.pubsub.create_stream('test_stream', exchange_point='science_data')
    self.exchange_cleanup.append('science_data')

    #---------------------------------------------------------------------------------------------
    # Launch the event-in/stream-out transform
    #---------------------------------------------------------------------------------------------
    # Create the process definition
    process_definition = ProcessDefinition(
        name='EventToStreamTransform',
        description='For testing an event-in/stream-out transform')
    process_definition.executable['module'] = 'ion.processes.data.transforms.event_in_stream_out_transform'
    process_definition.executable['class'] = 'EventToStreamTransform'
    proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

    # Build the config
    config = DotDict()
    config.process.queue_name = 'test_queue'
    config.process.exchange_point = 'science_data'
    config.process.publish_streams.output = stream_id
    config.process.event_type = 'ExampleDetectableEvent'
    config.process.variables = ['voltage', 'temperature']

    # Schedule the process
    pid = self.process_dispatcher.schedule_process(process_definition_id=proc_def_id, configuration=config)
    self.addCleanup(self.process_dispatcher.cancel_process, pid)

    #---------------------------------------------------------------------------------------------
    # Create a subscriber for testing
    #---------------------------------------------------------------------------------------------
    ar_cond = gevent.event.AsyncResult()

    def subscriber_callback(m, r, s):
        ar_cond.set(m)

    sub = StandaloneStreamSubscriber('sub', subscriber_callback)
    self.addCleanup(sub.stop)
    sub_id = self.pubsub.create_subscription('subscription_cond',
                                             stream_ids=[stream_id],
                                             exchange_name='sub')
    self.pubsub.activate_subscription(sub_id)
    self.queue_cleanup.append(sub.xn.queue)
    sub.start()
    gevent.sleep(4)

    #---------------------------------------------------------------------------------------------
    # Publish an event. The transform has been configured to receive this event
    #---------------------------------------------------------------------------------------------
    event_publisher = EventPublisher("ExampleDetectableEvent")
    event_publisher.publish_event(origin='fake_origin', voltage='5', temperature='273')

    # Assert that the transform processed the event and published data on the output stream
    result_cond = ar_cond.get(timeout=10)
    self.assertTrue(result_cond)
Example 7: __init__
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def __init__(self, *args, **kwargs):
    super(TransformExampleLauncher, self).__init__(*args, **kwargs)

    #-------------------------------
    # Process Definitions
    #-------------------------------
    transform_example_definition = ProcessDefinition(name='transform_example_definition')
    transform_example_definition.executable['module'] = 'ion.services.dm.transformation.transform_example'
    transform_example_definition.executable['class'] = 'TransformExample'
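Note that, unlike most of the other examples, this constructor only builds the definition; registration presumably happens elsewhere in the launcher. A hedged sketch of the likely follow-up step, assuming a process dispatcher client handle named pds_client is in scope (the handle name is an assumption, not part of the example above):

# Presumed follow-up, not shown in the example: register the definition
# with a process dispatcher client (pds_client is a hypothetical handle).
procdef_id = pds_client.create_process_definition(process_definition=transform_example_definition)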
Example 8: replay_defs
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def replay_defs(self, process, config):
    replay_module = config.get_safe('bootstrap.processes.replay.module', 'ion.processes.data.replay.replay_process')
    replay_class = config.get_safe('bootstrap.processes.replay.class', 'ReplayProcess')

    #--------------------------------------------------------------------------------
    # Create replay process definition
    #--------------------------------------------------------------------------------
    process_definition = ProcessDefinition(name=DataRetrieverService.REPLAY_PROCESS, description='Process for the replay of datasets')
    process_definition.executable['module'] = replay_module
    process_definition.executable['class'] = replay_class
    self.pds_client.create_process_definition(process_definition=process_definition)
Example 9: eoi_services
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def eoi_services(self, process, config):
    eoi_module = config.get_safe('bootstrap.processes.registration.module', 'ion.processes.data.registration.eoi_registration_process')
    eoi_class = config.get_safe('bootstrap.processes.registration.class', 'EOIRegistrationProcess')

    process_definition = ProcessDefinition(
        name='eoi_server',
        description='Process for eoi data sources')
    process_definition.executable['module'] = eoi_module
    process_definition.executable['class'] = eoi_class
    self._create_and_launch(process_definition)
Example 10: test_execute_transform
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def test_execute_transform(self):
    # set up
    process_definition = ProcessDefinition(name='procdef_execute')
    process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
    process_definition.executable['class'] = 'ReverseTransform'
    data = [1, 2, 3]

    process_definition_id, _ = self.rr_cli.create(process_definition)

    retval = self.tms_cli.execute_transform(process_definition_id, data)
    self.assertEquals(retval, [3, 2, 1])
Example 11: post_process_dispatcher
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def post_process_dispatcher(self, config):
    ingestion_module = config.get_safe('bootstrap.processes.ingestion.module', 'ion.processes.data.ingestion.ingestion_worker')
    ingestion_class = config.get_safe('bootstrap.processes.ingestion.class', 'IngestionWorker')
    replay_module = config.get_safe('bootstrap.processes.replay.module', 'ion.processes.data.replay.replay_process')
    replay_class = config.get_safe('bootstrap.processes.replay.class', 'ReplayProcess')

    process_definition = ProcessDefinition(
        name='ingestion_worker_process',
        description='Worker transform process for ingestion of datasets')
    process_definition.executable['module'] = ingestion_module
    process_definition.executable['class'] = ingestion_class
    self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

    process_definition = ProcessDefinition(
        name='data_replay_process',
        description='Process for the replay of datasets')
    process_definition.executable['module'] = replay_module
    process_definition.executable['class'] = replay_class
    self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)
Example 12: create_ingestion_configuration
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def create_ingestion_configuration(self, exchange_point_id='', couch_storage=None, hdf_storage=None, number_of_workers=0):
    """
    @brief Set up ingestion workers to ingest all the data from a single exchange point.
    @param exchange_point_id is the resource id for the exchange point to ingest from
    @param couch_storage is the specification of the couch database to use
    @param hdf_storage is the specification of the filesystem to use for hdf data files
    @param number_of_workers is the number of ingestion workers to create
    """
    if self.process_definition_id is None:
        process_definition = ProcessDefinition(name='ingestion_worker_process', description='Worker transform process for ingestion of datasets')
        process_definition.executable['module'] = 'ion.processes.data.ingestion.ingestion_worker'
        process_definition.executable['class'] = 'IngestionWorker'
        self.process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

    # Give each ingestion configuration its own queue name to receive data on
    exchange_name = 'ingestion_queue'

    ##------------------------------------------------------------------------------------
    ## declare our intent to subscribe to all messages on the exchange point
    query = ExchangeQuery()

    subscription_id = self.clients.pubsub_management.create_subscription(
        query=query,
        exchange_name=exchange_name,
        name='Ingestion subscription',
        description='Subscription for ingestion workers')

    ##------------------------------------------------------------------------------------------
    # create an ingestion_configuration instance and update the registry
    # @todo: right now sending in the exchange_point_id as the name...
    ingestion_configuration = IngestionConfiguration(name=self.XP)
    ingestion_configuration.description = '%s exchange point ingestion configuration' % self.XP
    ingestion_configuration.number_of_workers = number_of_workers

    if hdf_storage is not None:
        ingestion_configuration.hdf_storage.update(hdf_storage)

    if couch_storage is not None:
        ingestion_configuration.couch_storage.update(couch_storage)

    ingestion_configuration_id, _ = self.clients.resource_registry.create(ingestion_configuration)

    self._launch_transforms(
        ingestion_configuration.number_of_workers,
        subscription_id,
        ingestion_configuration_id,
        ingestion_configuration,
        self.process_definition_id
    )
    return ingestion_configuration_id
Example 13: pydap_server
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def pydap_server(self, process, config):
    pydap_module = config.get_safe('bootstrap.processes.pydap.module', 'ion.processes.data.externalization.lightweight_pydap')
    pydap_class = config.get_safe('bootstrap.processes.pydap.class', 'LightweightPyDAP')
    use_pydap = config.get_safe('bootstrap.launch_pydap', False)

    process_definition = ProcessDefinition(
        name='pydap_server',
        description='Lightweight WSGI Server for PyDAP')
    process_definition.executable['module'] = pydap_module
    process_definition.executable['class'] = pydap_class
    self._create_and_launch(process_definition, use_pydap)
Example 14: on_start
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def on_start(self):
    super(IngestionManagementService, self).on_start()
    self.event_publisher = EventPublisher(event_type="DatasetIngestionConfigurationEvent")

    #########################################################################################################
    # The code for process_definition may not really belong here, but we do not have a different way so
    # far to preload the process definitions. This will later probably be part of a set of predefinitions
    # for processes.
    #########################################################################################################
    process_definition = ProcessDefinition(name='ingestion_worker_process', description='Worker transform process for ingestion of datasets')
    process_definition.executable['module'] = 'ion.processes.data.ingestion.ingestion_worker'
    process_definition.executable['class'] = 'IngestionWorker'
    self.process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)
Example 15: run_external_transform
# Required import: from interface.objects import ProcessDefinition [as alias]
# Or: from interface.objects.ProcessDefinition import executable['module'] [as alias]
def run_external_transform(self):
    '''
    This example script illustrates how a transform can interact with an outside process (very basic).
    It launches an external_transform example which uses the operating system command 'bc' to add 1 to the input.

    Producer -> A -> 'FS.TEMP/transform_output'

    A is an external transform that spawns an OS process to increment the input by 1
    '''
    pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
    tms_cli = TransformManagementServiceClient(node=self.container.node)
    procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    #-------------------------------
    # Process Definition
    #-------------------------------
    process_definition = ProcessDefinition(name='external_transform_definition')
    process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
    process_definition.executable['class'] = 'ExternalTransform'
    process_definition_id = procd_cli.create_process_definition(process_definition=process_definition)

    #-------------------------------
    # Streams
    #-------------------------------
    input_stream_id = pubsub_cli.create_stream(name='input_stream', original=True)

    #-------------------------------
    # Subscription
    #-------------------------------
    query = StreamQuery(stream_ids=[input_stream_id])
    input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='input_queue')

    #-------------------------------
    # Launch Transform
    #-------------------------------
    transform_id = tms_cli.create_transform(name='external_transform',
                                            in_subscription_id=input_subscription_id,
                                            process_definition_id=process_definition_id,
                                            configuration={})
    tms_cli.activate_transform(transform_id)

    #-------------------------------
    # Launch Producer
    #-------------------------------
    id_p = self.container.spawn_process(
        'myproducer',
        'ion.processes.data.transforms.transform_example',
        'TransformExampleProducer',
        {'process': {'type': 'stream_process', 'publish_streams': {'out_stream': input_stream_id}},
         'stream_producer': {'interval': 4000}})
    self.container.proc_manager.procs[id_p].start()