

Python objects.ProcessDefinition Class Code Examples

This article collects typical usage examples of the Python class interface.objects.ProcessDefinition. If you are wondering what the ProcessDefinition class does, how to use it, or what real-world usage looks like, the hand-picked examples below should help.


Fifteen code examples of the ProcessDefinition class are shown below, sorted by popularity by default. A minimal sketch of the pattern most of them share follows, before the examples themselves.
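
Nearly every example below follows the same pattern: construct a ProcessDefinition, point its executable at a module and class, register it through a process dispatcher client, and schedule one or more processes from the resulting definition ID. The sketch below illustrates that pattern only; the module path, class name, and queue name are placeholders, pds_client stands in for an already-initialized process dispatcher client, and the DotDict import path is assumed from pyon conventions rather than shown in the excerpts.

    from interface.objects import ProcessDefinition
    from pyon.util.containers import DotDict  # import path assumed from pyon conventions

    # Describe what to run: a named definition whose executable points at a module and class.
    process_definition = ProcessDefinition(
        name='example_worker_process',
        description='Minimal sketch of ProcessDefinition usage')
    process_definition.executable['module'] = 'ion.processes.data.example_worker'  # hypothetical module
    process_definition.executable['class']  = 'ExampleWorker'                      # hypothetical class

    # Register the definition with a process dispatcher client (pds_client is assumed
    # to be an already-initialized client, as in the examples below).
    procdef_id = pds_client.create_process_definition(process_definition=process_definition)

    # Optional per-process configuration, then schedule a process from the definition.
    config = DotDict()
    config.process.queue_name = 'example_queue'
    pid = pds_client.schedule_process(process_definition_id=procdef_id, configuration=config)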

Example 1: ingestion_worker

    def ingestion_worker(self, process, config):
        # ingestion
        ingestion_module    = config.get_safe('bootstrap.processes.ingestion.module','ion.processes.data.ingestion.science_granule_ingestion_worker')
        ingestion_class     = config.get_safe('bootstrap.processes.ingestion.class' ,'ScienceGranuleIngestionWorker')
        ingestion_datastore = config.get_safe('bootstrap.processes.ingestion.datastore_name', 'datasets')
        ingestion_queue     = config.get_safe('bootstrap.processes.ingestion.queue' , 'science_granule_ingestion')
        ingestion_workers   = config.get_safe('bootstrap.processes.ingestion.workers', 1)
        #--------------------------------------------------------------------------------
        # Create ingestion workers
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(
            name='ingestion_worker_process',
            description='Worker transform process for ingestion of datasets')
        process_definition.executable['module']= ingestion_module
        process_definition.executable['class'] = ingestion_class
        ingestion_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition)

        #--------------------------------------------------------------------------------
        # Simulate a HA ingestion worker by creating two of them
        #--------------------------------------------------------------------------------
        config = DotDict()
        config.process.datastore_name = ingestion_datastore
        config.process.queue_name     = ingestion_queue

        for i in xrange(ingestion_workers):
            self.pds_client.schedule_process(process_definition_id=ingestion_procdef_id, configuration=config)
Author: kerfoot | Project: coi-services | Lines: 27 | Source: bootstrap_process_dispatcher.py

Example 2: create_event_process_definition

    def create_event_process_definition(self, version='', module='', class_name='', uri='', arguments=None, event_types = None, sub_types = None, origin_types = None):
        """
        Create a resource which defines the processing of events.

        @param version str
        @param module str
        @param class_name str
        @param uri str
        @param arguments list

        @return procdef_id str
        """

        # Create the event process detail object
        event_process_definition_detail = EventProcessDefinitionDetail()
        event_process_definition_detail.event_types = event_types
        event_process_definition_detail.sub_types = sub_types
        event_process_definition_detail.origin_types = origin_types

        # Create the process definition
        process_definition = ProcessDefinition(name=create_unique_identifier('event_process'))
        process_definition.executable = {
            'module':module,
            'class': class_name,
            'url': uri
        }
        process_definition.version = version
        process_definition.arguments = arguments
        process_definition.definition = event_process_definition_detail

        procdef_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        return procdef_id
Author: Bobfrat | Project: coi-services | Lines: 33 | Source: event_management_service.py

Example 3: test_create_event_process

    def test_create_event_process(self):
        """
        Test creating an event process
        """
        process_definition = ProcessDefinition(name='test')
        process_definition.definition = ''

        rrc = ResourceRegistryServiceClient(node = self.container.node)
        process_definition_id = rrc.create(process_definition)

        self.mock_rr_client.find_objects = Mock()
        self.mock_rr_client.find_objects.return_value = ['stream_id_1'], 'obj_assoc_1'

#        self.mock_pd_client.schedule_process = Mock()
#        self.mock_pd_client.schedule_process.return_value = 'process_id'

        self.mock_rr_client.create_association = mocksignature(self.mock_rr_client.create_association)

        pid = self.event_management.create_event_process(process_definition_id=process_definition_id,
            event_types=['type_1', 'type_2'],
            sub_types=['subtype_1', 'subtype_2'],
            origins=['or_1', 'or_2'],
            origin_types=['t1', 't2'],
            out_data_products={'conductivity': 'id1'}
        )
Author: kerfoot | Project: coi-services | Lines: 25 | Source: event_management_test.py

Example 4: notification_worker

    def notification_worker(self, process, config):
        # user notifications
        notification_module    = config.get_safe('bootstrap.processes.user_notification.module','ion.processes.data.transforms.notification_worker')
        notification_class     = config.get_safe('bootstrap.processes.user_notification.class' ,'NotificationWorker')
        notification_workers = config.get_safe('bootstrap.processes.user_notification.workers', 1)

        #--------------------------------------------------------------------------------
        # Create notification workers
        #--------------------------------------------------------------------------------

        # set up the process definition
        process_definition_uns = ProcessDefinition(
            name='notification_worker_process',
            description='Worker transform process for user notifications')
        process_definition_uns.executable['module']= notification_module
        process_definition_uns.executable['class'] = notification_class
        uns_procdef_id = self.pds_client.create_process_definition(process_definition=process_definition_uns)

        config = DotDict()
        config.process.type = 'simple'

        for i in xrange(notification_workers):
            config.process.name = 'notification_worker_%s' % i
            config.process.queue_name = 'notification_worker_queue'
            self.pds_client.schedule_process(process_definition_id=uns_procdef_id, configuration=config)
Author: Bobfrat | Project: coi-services | Lines: 25 | Source: bootstrap_process_dispatcher.py

Example 5: dispatch_process

    def dispatch_process(self, upid, spec, subscribers, constraints=None,
                         immediate=False):

        name = spec.get('name')
        self.event_pub.publish_event(event_type="ProcessLifecycleEvent",
            origin=name, origin_type="DispatchedHAProcess",
            state=ProcessStateEnum.SPAWN)
        process_def = ProcessDefinition(name=name)
        process_def.executable = {'module': spec.get('module'),
                'class': spec.get('class')}

        process_def_id = self.real_client.create_process_definition(process_def)

        pid = self.real_client.create_process(process_def_id)

        process_schedule = ProcessSchedule()

        sched_pid = self.real_client.schedule_process(process_def_id,
                process_schedule, configuration={}, process_id=pid)

        proc = self.real_client.read_process(sched_pid)
        dict_proc = {'upid': proc.process_id,
                'state': self.state_map.get(proc.process_state, self.unknown_state),
                }
        return dict_proc
Author: pombredanne | Project: coi-services | Lines: 25 | Source: high_availability_agent.py

Example 6: launch_instrument

    def launch_instrument(self, agt_id, agent_config, timeout_spawn=None):
        """
        Launches an instrument agent.

        @param agt_id           Some ID mainly used for logging
        @param agent_config     Agent configuration
        @param timeout_spawn    Timeout in secs for the RUNNING event (by
                                default, the value given in constructor).
                                If None or zero, no wait is performed.

        @return process ID
        """
        timeout_spawn = timeout_spawn or self._timeout_spawn
        log.debug("launch_instrument: agt_id=%r, timeout_spawn=%s", agt_id, timeout_spawn)

        name = 'InstrumentAgent_%s' % agt_id
        pdef = ProcessDefinition(name=name)
        pdef.executable = {
            'module': 'ion.agents.instrument.instrument_agent',
            'class':  'InstrumentAgent'
        }

        pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

        pid = self._agent_launcher.launch(agent_config, pdef_id)

        if timeout_spawn:
            log.debug("launch_instrument: agt_id=%r: waiting for RUNNING", agt_id)
            self._agent_launcher.await_launch(timeout_spawn)
            log.debug("launch_instrument: agt_id=%r: RUNNING", agt_id)

        return pid
Author: Bobfrat | Project: coi-services | Lines: 32 | Source: launcher.py

Example 7: create_data_process_definition

    def create_data_process_definition(self, data_process_definition=None):

        data_process_definition_id = self.RR2.create(data_process_definition, RT.DataProcessDefinition)

        # -------------------------------
        # Process Definition
        # -------------------------------
        # Create the underlying process definition
        process_definition = ProcessDefinition()
        process_definition.name = data_process_definition.name
        process_definition.description = data_process_definition.description

        process_definition.executable = {
            "module": data_process_definition.module,
            "class": data_process_definition.class_name,
        }
        process_definition_id = self.clients.process_dispatcher.create_process_definition(
            process_definition=process_definition
        )

        self.RR2.assign_process_definition_to_data_process_definition_with_has_process_definition(
            process_definition_id, data_process_definition_id
        )

        return data_process_definition_id
Author: oldpatricka | Project: coi-services | Lines: 25 | Source: data_process_management_service.py

Example 8: launch_platform

    def launch_platform(self, agt_id, agent_config, timeout_spawn=30):
        """
        Launches a platform agent.

        @param agt_id           Some ID mainly used for logging
        @param agent_config     Agent configuration
        @param timeout_spawn    Timeout in secs for the SPAWN event (by
                                default 30). If None or zero, no wait is performed.

        @return process ID
        """
        log.debug("launch platform: agt_id=%r, timeout_spawn=%s", agt_id, timeout_spawn)

        name = 'PlatformAgent_%s' % agt_id
        pdef = ProcessDefinition(name=name)
        pdef.executable = {
            'module': 'ion.agents.platform.platform_agent',
            'class':  'PlatformAgent'
        }

        pdef_id = self._pd_client.create_process_definition(process_definition=pdef)

        pid = self._agent_launcher.launch(agent_config, pdef_id)

        if timeout_spawn:
            self._agent_launcher.await_launch(timeout_spawn)

        return pid
Author: mbarry02 | Project: coi-services | Lines: 28 | Source: launcher.py

Example 9: test_presf_L1

    def test_presf_L1(self):
        '''
        Test that packets are processed by the ctd_L1_pressure transform
        '''

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='PresfL1Transform',
            description='For testing PresfL1Transform')
        process_definition.executable['module']= 'ion.processes.data.transforms.ctd.presf_L1'
        process_definition.executable['class'] = 'PresfL1Transform'
        ctd_transform_proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = self.exchange_name
        config.process.exchange_point = self.exchange_point

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        stream_def_id =  self.pubsub.create_stream_definition('pres_stream_def', parameter_dictionary_id=pdict_id)
        pres_stream_id, _ = self.pubsub.create_stream('test_pressure',
            stream_definition_id=stream_def_id,
            exchange_point='science_data')

        config.process.publish_streams.seafloor_pressure = pres_stream_id

        # Schedule the process
        self.process_dispatcher.schedule_process(process_definition_id=ctd_transform_proc_def_id, configuration=config)
Author: swarbhanu | Project: coi-services | Lines: 32 | Source: test_ctd_transforms.py

Example 10: start_worker

    def start_worker(self):


        proc_def = ProcessDefinition()
        proc_def.executable['module'] = 'ion.processes.data.last_update_cache'
        proc_def.executable['class'] = 'LastUpdateCache'
        proc_def_id = self.pd_cli.create_process_definition(process_definition=proc_def)


        subscription_id = self.pubsub_cli.create_subscription(query=ExchangeQuery(), exchange_name='ingestion_aggregate')

        config = {
            'couch_storage' : {
                'datastore_name' :self.datastore_name,
                'datastore_profile' : 'SCIDATA'
            }
        }

        transform_id = self.tms_cli.create_transform(
            name='last_update_cache',
            description='LastUpdate that compiles an aggregate of metadata',
            in_subscription_id=subscription_id,
            process_definition_id=proc_def_id,
            configuration=config
        )


        self.tms_cli.activate_transform(transform_id=transform_id)
        transform = self.rr_cli.read(transform_id)
        pid = transform.process_id
        handle = self.container.proc_manager.procs[pid]
        return handle
Author: seman | Project: coi-services | Lines: 32 | Source: test_last_update_cache.py

Example 11: test_event_in_stream_out_transform

    def test_event_in_stream_out_transform(self):
        """
        Test the event-in/stream-out transform
        """

        stream_id, _ = self.pubsub.create_stream('test_stream', exchange_point='science_data')
        self.exchange_cleanup.append('science_data')

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='EventToStreamTransform',
            description='For testing an event-in/stream-out transform')
        process_definition.executable['module']= 'ion.processes.data.transforms.event_in_stream_out_transform'
        process_definition.executable['class'] = 'EventToStreamTransform'
        proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = 'test_queue'
        config.process.exchange_point = 'science_data'
        config.process.publish_streams.output = stream_id
        config.process.event_type = 'ExampleDetectableEvent'
        config.process.variables = ['voltage', 'temperature' ]

        # Schedule the process
        pid = self.process_dispatcher.schedule_process(process_definition_id=proc_def_id, configuration=config)
        self.addCleanup(self.process_dispatcher.cancel_process,pid)

        #---------------------------------------------------------------------------------------------
        # Create a subscriber for testing
        #---------------------------------------------------------------------------------------------

        ar_cond = gevent.event.AsyncResult()
        def subscriber_callback(m, r, s):
            ar_cond.set(m)
        sub = StandaloneStreamSubscriber('sub', subscriber_callback)
        self.addCleanup(sub.stop)
        sub_id = self.pubsub.create_subscription('subscription_cond',
            stream_ids=[stream_id],
            exchange_name='sub')
        self.pubsub.activate_subscription(sub_id)
        self.queue_cleanup.append(sub.xn.queue)
        sub.start()

        gevent.sleep(4)

        #---------------------------------------------------------------------------------------------
        # Publish an event. The transform has been configured to receive this event
        #---------------------------------------------------------------------------------------------

        event_publisher = EventPublisher("ExampleDetectableEvent")
        event_publisher.publish_event(origin = 'fake_origin', voltage = '5', temperature = '273')

        # Assert that the transform processed the event and published data on the output stream
        result_cond = ar_cond.get(timeout=10)
        self.assertTrue(result_cond)
Author: Bobfrat | Project: coi-services | Lines: 59 | Source: event_management_test.py

Example 12: __init__

    def __init__(self, *args, **kwargs):
        super(TransformExampleLauncher,self).__init__(*args,**kwargs)

        #-------------------------------
        # Process Definitions
        #-------------------------------

        transform_example_definition = ProcessDefinition(name='transform_example_definition')
        transform_example_definition.executable['module'] = 'ion.services.dm.transformation.transform_example'
        transform_example_definition.executable['class'] = 'TransformExample'
Author: daf | Project: coi-services | Lines: 10 | Source: transform_example.py

Example 13: setUp

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.process_dispatcher   = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions  = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class' :'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space       = 'science_granule_ingestion'
        self.exchange_point       = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)
Author: mbarry02 | Project: coi-services | Lines: 55 | Source: test_data_product_management_service_integration.py

Example 14: eoi_services

    def eoi_services(self,process,config):
        eoi_module = config.get_safe('bootstrap.processes.registration.module', 'ion.processes.data.registration.eoi_registration_process')
        eoi_class  = config.get_safe('bootstrap.processes.registration.class', 'EOIRegistrationProcess')

        process_definition = ProcessDefinition(
                name = 'eoi_server',
                description = 'Process for eoi data sources')
        process_definition.executable['module'] = eoi_module
        process_definition.executable['class'] = eoi_class

        self._create_and_launch(process_definition)
Author: ednad | Project: coi-services | Lines: 11 | Source: bootstrap_process_dispatcher.py

Example 15: replay_defs

    def replay_defs(self, process, config):
        replay_module       = config.get_safe('bootstrap.processes.replay.module', 'ion.processes.data.replay.replay_process')
        replay_class        = config.get_safe('bootstrap.processes.replay.class' , 'ReplayProcess')
        #--------------------------------------------------------------------------------
        # Create replay process definition
        #--------------------------------------------------------------------------------

        process_definition = ProcessDefinition(name=DataRetrieverService.REPLAY_PROCESS, description='Process for the replay of datasets')
        process_definition.executable['module']= replay_module
        process_definition.executable['class'] = replay_class
        self.pds_client.create_process_definition(process_definition=process_definition)
Author: Bobfrat | Project: coi-services | Lines: 11 | Source: bootstrap_process_dispatcher.py


Note: The interface.objects.ProcessDefinition class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. Please consult the corresponding project's license before using or redistributing them; do not reproduce without permission.