本文整理汇总了Python中interface.objects.ProcessDefinition.executable方法的典型用法代码示例。如果您正苦于以下问题:Python ProcessDefinition.executable方法的具体用法?Python ProcessDefinition.executable怎么用?Python ProcessDefinition.executable使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类interface.objects.ProcessDefinition的用法示例。
在下文中一共展示了ProcessDefinition.executable方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: create_data_process_definition
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_data_process_definition(self, data_process_definition=None):
    """Register a DataProcessDefinition and its backing ProcessDefinition.

    Persists the data process definition resource, creates the underlying
    ProcessDefinition via the process dispatcher, links the two with a
    hasProcessDefinition association, and returns the new resource id.
    """
    dpd_id = self.RR2.create(data_process_definition, RT.DataProcessDefinition)

    # The underlying process definition mirrors the resource's name and
    # description and points at its executable module/class.
    pdef = ProcessDefinition()
    pdef.name = data_process_definition.name
    pdef.description = data_process_definition.description
    pdef.executable = {
        "module": data_process_definition.module,
        "class": data_process_definition.class_name,
    }
    pdef_id = self.clients.process_dispatcher.create_process_definition(
        process_definition=pdef
    )

    # Associate: DataProcessDefinition --hasProcessDefinition--> ProcessDefinition
    self.RR2.assign_process_definition_to_data_process_definition_with_has_process_definition(
        pdef_id, dpd_id
    )
    return dpd_id
示例2: launch_instrument
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def launch_instrument(self, agt_id, agent_config, timeout_spawn=None):
    """
    Launches an instrument agent.

    @param agt_id         Some ID mainly used for logging
    @param agent_config   Agent configuration
    @param timeout_spawn  Timeout in secs for the RUNNING event (by default,
                          the value given in constructor). If None or zero,
                          no wait is performed.
    @return process ID
    """
    wait_secs = timeout_spawn or self._timeout_spawn
    log.debug("launch_instrument: agt_id=%r, timeout_spawn=%s", agt_id, wait_secs)

    pdef = ProcessDefinition(name='InstrumentAgent_%s' % agt_id)
    pdef.executable = {
        'module': 'ion.agents.instrument.instrument_agent',
        'class': 'InstrumentAgent'
    }
    pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

    pid = self._agent_launcher.launch(agent_config, pdef_id)
    if wait_secs:
        log.debug("launch_instrument: agt_id=%r: waiting for RUNNING", agt_id)
        self._agent_launcher.await_launch(wait_secs)
        log.debug("launch_instrument: agt_id=%r: RUNNING", agt_id)
    return pid
示例3: launch_platform
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def launch_platform(self, agt_id, agent_config, timeout_spawn=30):
    """
    Launches a platform agent.

    @param agt_id         Some ID mainly used for logging
    @param agent_config   Agent configuration
    @param timeout_spawn  Timeout in secs for the SPAWN event (by default 30).
                          If None or zero, no wait is performed.
    @return process ID
    """
    log.debug("launch platform: agt_id=%r, timeout_spawn=%s", agt_id, timeout_spawn)

    pdef = ProcessDefinition(name='PlatformAgent_%s' % agt_id)
    pdef.executable = {
        'module': 'ion.agents.platform.platform_agent',
        'class': 'PlatformAgent'
    }
    pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

    pid = self._agent_launcher.launch(agent_config, pdef_id)
    if timeout_spawn:
        self._agent_launcher.await_launch(timeout_spawn)
    return pid
示例4: create_event_process_definition
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_event_process_definition(self, version='', module='', class_name='', uri='', arguments=None, event_types=None, sub_types=None, origin_types=None):
    """
    Create a resource which defines the processing of events.

    @param version str
    @param module str
    @param class_name str
    @param uri str
    @param arguments list
    @param event_types list
    @param sub_types list
    @param origin_types list
    @return procdef_id str
    """
    # Detail object capturing which event/sub/origin types this process handles.
    detail = EventProcessDefinitionDetail()
    detail.event_types = event_types
    detail.sub_types = sub_types
    detail.origin_types = origin_types

    # Process definition with a uniquely generated name.
    pdef = ProcessDefinition(name=create_unique_identifier('event_process'))
    pdef.executable = {
        'module':module,
        'class': class_name,
        'url': uri
    }
    pdef.version = version
    pdef.arguments = arguments
    pdef.definition = detail

    return self.clients.process_dispatcher.create_process_definition(process_definition=pdef)
示例5: dispatch_process
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def dispatch_process(self, upid, spec, subscribers, constraints=None,
                     immediate=False):
    """Create, schedule, and report a process described by *spec*.

    Publishes a SPAWN lifecycle event, creates a process definition from
    the spec's module/class, schedules it, and returns a dict with the
    scheduled process's upid and mapped state.
    """
    proc_name = spec.get('name')

    # Announce the spawn before actually scheduling the process.
    self.event_pub.publish_event(event_type="ProcessLifecycleEvent",
        origin=proc_name, origin_type="DispatchedHAProcess",
        state=ProcessStateEnum.SPAWN)

    definition = ProcessDefinition(name=proc_name)
    definition.executable = {'module': spec.get('module'),
                             'class': spec.get('class')}
    definition_id = self.real_client.create_process_definition(definition)

    new_pid = self.real_client.create_process(definition_id)
    schedule = ProcessSchedule()
    sched_pid = self.real_client.schedule_process(
        definition_id, schedule, configuration={}, process_id=new_pid)

    proc = self.real_client.read_process(sched_pid)
    return {
        'upid': proc.process_id,
        'state': self.state_map.get(proc.process_state, self.unknown_state),
    }
示例6: setUp
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def setUp(self):
    """Start the container and service clients, then launch one
    science-granule ingestion worker for the tests to use."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service client handles used throughout the tests.
    self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

    # Register the ingestion worker's process definition once; the id is
    # reused below (and by tests) for scheduling workers.
    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class' :'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    # Bookkeeping for teardown (see self.cleaning_up).
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    #------------------------------------------------------------------------------------------------
    # First launch the ingestors
    #------------------------------------------------------------------------------------------------
    self.exchange_space = 'science_granule_ingestion'
    self.exchange_point = 'science_data'
    config = DotDict()
    config.process.datastore_name = 'datasets'
    config.process.queue_name = self.exchange_space

    self.exchange_names.append(self.exchange_space)
    self.exchange_points.append(self.exchange_point)

    # Schedule one ingestion worker; its pid is recorded for cleanup.
    pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
    log.debug("the ingestion worker process id: %s", pid)
    self.pids.append(pid)
    self.addCleanup(self.cleaning_up)
示例7: create_definition
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_definition(self, definition_id, definition_type, executable,
                      name=None, description=None):
    """Create a process definition through the wrapped PD client.

    @param definition_id    id to register the definition under
    @param definition_type  definition type string
    @param executable       dict with 'module' and 'class' keys
    @param name             required human-readable name
    @param description      optional description of the definition
    @return result of the underlying create_process_definition call
    @raise BadRequest if no name is supplied
    """
    if name is None:
        raise BadRequest("create_definition must have a name supplied")

    definition = ProcessDefinition(name=name)
    # Fix: the description used to be silently discarded here
    # (the old code even noted "we lose the description").
    if description is not None:
        definition.description = description
    definition.executable = {'module': executable.get('module'),
                             'class': executable.get('class')}
    definition.definition_type = definition_type
    created_definition = self.real_client.create_process_definition(
        definition, definition_id)
    # Fix: surface the result instead of dropping it on the floor.
    return created_definition
示例8: test_code_download
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def test_code_download(self):
    """Verifies L4-CI-CEI-RQ114: a process definition whose executable
    carries a 'url' has its code downloaded and runs, while one without a
    URL (and an unimportable module) fails to start."""
    # Definition with no URL; only module and class (module is bogus).
    pdef_no_url = ProcessDefinition(name='test_process_nodownload')
    pdef_no_url.executable = {'module': 'ion.my.test.process',
                              'class': 'TestProcess'}
    pdef_id_no_url = self.pd_cli.create_process_definition(pdef_no_url)

    # Definition whose URL points at the python file (this very file).
    url = "file://%s" % os.path.join(os.path.dirname(__file__), 'test_process_dispatcher.py')
    pdef_with_url = ProcessDefinition(name='test_process_download')
    pdef_with_url.executable = {'module': 'ion.my.test.process',
                                'class': 'TestProcess', 'url': url}
    pdef_id_with_url = self.pd_cli.create_process_definition(pdef_with_url)

    schedule = ProcessSchedule()
    schedule.queueing_mode = ProcessQueueingMode.ALWAYS
    schedule.target = ProcessTarget()

    self.waiter.start()

    # Without a download source the process must fail.
    pid_no_url = self.pd_cli.create_process(pdef_id_no_url)
    self.pd_cli.schedule_process(pdef_id_no_url,
                                 schedule, process_id=pid_no_url)
    self.waiter.await_state_event(pid_no_url, ProcessStateEnum.FAILED)

    # With a URL the code is fetched and the process runs.
    pid = self.pd_cli.create_process(pdef_id_with_url)
    self.pd_cli.schedule_process(pdef_id_with_url,
                                 schedule, process_id=pid)
    self.waiter.await_state_event(pid, ProcessStateEnum.RUNNING)
示例9: _do_launch_gate
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def _do_launch_gate(self, platform_id, agent_config, timeout_spawn):
    """
    The method for when using the ProcessStateGate pattern, which is the
    one used by test_oms_launch2 to launch the root platform.

    @param platform_id    Platform identifier, used for naming and logging.
    @param agent_config   Agent configuration passed to schedule_process.
    @param timeout_spawn  Seconds to wait for RUNNING; falsy skips the wait.
    @return process id of the scheduled platform agent
    @raise PlatformException if the gate times out waiting for RUNNING
    """
    log.debug("_do_launch_gate: platform_id=%r, timeout_spawn=%s",
              platform_id, timeout_spawn)

    pa_name = 'PlatformAgent_%s' % platform_id
    pdef = ProcessDefinition(name=pa_name)
    pdef.executable = {
        'module': PA_MOD,
        'class': PA_CLS
    }
    pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

    log.debug("using schedule_process directly %r", platform_id)
    pid = self._pd_client.schedule_process(process_definition_id=pdef_id,
                                           schedule=None,
                                           configuration=agent_config)

    if timeout_spawn:
        # ProcessStateGate used as indicated in its pydoc (9/21/12)
        # NOTE(review): 'await' is a method name here, valid only on
        # Python 2 — it became a reserved keyword in Python 3.7.
        gate = ProcessStateGate(self._pd_client.read_process, pid, ProcessStateEnum.RUNNING)
        err_msg = None
        try:
            if not gate.await(timeout_spawn):
                err_msg = "The platform agent instance did not spawn in " \
                          "%s seconds. gate.wait returned false. " % \
                          timeout_spawn
                log.error(err_msg)
        except Exception as e:
            # An exception from the gate is logged but not re-raised;
            # only an explicit timeout (err_msg set) raises below.
            log.error("Exception while waiting for platform agent instance "
                      "(platform_id=%r) "
                      "to spawn in %s seconds: %s",
                      platform_id, timeout_spawn, str(e)) #,exc_Info=True)

        if err_msg:
            raise PlatformException(err_msg)

    log.debug("_do_launch_gate: platform_id=%r: agent spawned, pid=%r "
              "(ProcessStateGate pattern used)",
              platform_id, pid)

    return pid
示例10: create_logger
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_logger(self, name, stream_id=""):
    """Spawn a StreamGranuleLogger process subscribed to *stream_id* and
    return its process id."""
    # Definition for the logger process, named after the caller's name.
    pdef = ProcessDefinition(name=name + "_logger")
    pdef.executable = {
        "module": "ion.processes.data.stream_granule_logger",
        "class": "StreamGranuleLogger",
    }
    procdef_id = self.processdispatchclient.create_process_definition(process_definition=pdef)
    return self.processdispatchclient.schedule_process(
        process_definition_id=procdef_id,
        configuration={"process": {"stream_id": stream_id}},
    )
示例11: create_process
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_process(name='', module='', class_name='', configuration=None):
    '''
    A helper method to create a process: registers a process definition
    for the given module/class and schedules it, returning the pid.
    '''
    pdef = ProcessDefinition(name=name)
    pdef.executable = {'module': module, 'class': class_name}

    dispatcher = ProcessDispatcherServiceClient()
    pdef_id = dispatcher.create_process_definition(process_definition=pdef)
    return dispatcher.schedule_process(process_definition_id=pdef_id,
                                       configuration=configuration)
示例12: create_logger
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_logger(self, name, stream_id=''):
    """Spawn a StreamGranuleLogger process listening on *stream_id* and
    return its process id."""
    # Logger process definition, named after the caller's name.
    pdef = ProcessDefinition(name=name+'_logger')
    pdef.executable = {
        'module':'ion.processes.data.stream_granule_logger',
        'class':'StreamGranuleLogger'
    }
    procdef_id = self.processdispatchclient.create_process_definition(process_definition=pdef)
    return self.processdispatchclient.schedule_process(
        process_definition_id=procdef_id,
        configuration={'process': {'stream_id': stream_id}})
示例13: create_worker
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def create_worker(self, number_of_workers=1):
    """
    Creates notification workers.

    @param number_of_workers int
    @retval pids list
    """
    pids = []
    for idx in xrange(number_of_workers):
        worker_name = 'notification_worker_%s' % idx

        pdef = ProcessDefinition(name=worker_name)
        pdef.executable = {
            'module': 'ion.processes.data.transforms.notification_worker',
            'class':'NotificationWorker'
        }
        pdef_id = self.process_dispatcher.create_process_definition(process_definition=pdef)

        # ------------------------------------------------------------------
        # Process spawning: pre-create the pid, then schedule with config.
        # ------------------------------------------------------------------
        new_pid = self.process_dispatcher.create_process(pdef_id)

        #@todo put in a configuration
        config = {'process': {
            'name': worker_name,
            'type':'simple',
            'queue_name': 'notification_worker_queue'
        }}
        pids.append(self.process_dispatcher.schedule_process(
            pdef_id,
            configuration=config,
            process_id=new_pid
        ))
    return pids
示例14: setUp
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def setUp(self):
    """Start the container and service clients and register the
    ingestion-worker process definition (no workers are launched here)."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    log.debug("Start rel from url")
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service client handles used throughout the tests.
    self.DPMS = DataProductManagementServiceClient()
    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.DAMS = DataAcquisitionManagementServiceClient()
    self.PSMS = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.PD = ProcessDispatcherServiceClient()
    self.DSMS = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    log.debug("get datastore")
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)

    self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM')

    # Register the ingestion worker's process definition once; the id is
    # kept for tests that need to schedule workers.
    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class' :'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.PD.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    # Bookkeeping for teardown (see self.cleaning_up).
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    self.addCleanup(self.cleaning_up)
示例15: start_input_stream_process
# 需要导入模块: from interface.objects import ProcessDefinition [as 别名]
# 或者: from interface.objects.ProcessDefinition import executable [as 别名]
def start_input_stream_process(self, ctd_stream_id, module='ion.processes.data.ctd_stream_publisher', class_name='SimpleCtdPublisher'):
    """Start a process producing CTD data on *ctd_stream_id*.

    @param ctd_stream_id  stream id the simulator publishes to
    @param module         module of the publisher process
    @param class_name     class of the publisher process
    @return process id of the started simulator
    """
    # Process definition for the ctd simulator.
    pdef = ProcessDefinition()
    pdef.executable = {'module': module, 'class': class_name}
    procdef_id = self.process_dispatcher.create_process_definition(process_definition=pdef)

    # Start the ctd simulator so it produces some data.
    config = {'process': {'stream_id': ctd_stream_id}}
    return self.process_dispatcher.schedule_process(
        process_definition_id=procdef_id, configuration=config)