当前位置: 首页>>代码示例>>Python>>正文


Python DataBuilder.get_instance方法代码示例

本文整理汇总了Python中pinball.ui.data_builder.DataBuilder.get_instance方法的典型用法代码示例。如果您正苦于以下问题:Python DataBuilder.get_instance方法的具体用法?Python DataBuilder.get_instance怎么用?Python DataBuilder.get_instance使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在pinball.ui.data_builder.DataBuilder的用法示例。


在下文中一共展示了DataBuilder.get_instance方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: graph

# 需要导入模块: from pinball.ui.data_builder import DataBuilder [as 别名]
# 或者: from pinball.ui.data_builder.DataBuilder import get_instance [as 别名]
def graph(request):
    """Render a workflow graph as an SVG HTTP response.

    Expects a 'workflow' GET parameter and, optionally, an 'instance'
    parameter ('latest' resolves to the most recent instance via the data
    builder).  Without an 'instance' parameter the graph is built from the
    workflow parser configuration instead of runtime data.

    Returns:
        HttpResponse with image/svg+xml content on success, or
        HttpResponseServerError carrying the traceback on any failure.
    """
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        if 'instance' in request.GET:
            instance = request.GET['instance']
            if instance == 'latest':
                instance = data_builder.get_latest_instance(workflow).instance
            jobs_data = data_builder.get_jobs(workflow=workflow,
                                              instance=instance)
            instance_data = data_builder.get_instance(workflow=workflow,
                                                      instance=instance)
            workflow_graph = WorkflowGraph(jobs_data, instance_data)
        else:
            workflow_graph = WorkflowGraph.from_parser(workflow)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; everything else is logged and surfaced as 500.
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # NOTE(review): 'mimetype' is the pre-Django-1.7 keyword; confirm the
        # deployed Django version before renaming it to 'content_type'.
        return HttpResponse(workflow_graph.get_svg(), mimetype='image/svg+xml')
开发者ID:DotModus,项目名称:pinball,代码行数:22,代码来源:views.py

示例2: DataBuilderTestCase

# 需要导入模块: from pinball.ui.data_builder import DataBuilder [as 别名]
# 或者: from pinball.ui.data_builder.DataBuilder import get_instance [as 别名]
class DataBuilderTestCase(unittest.TestCase):
    def setUp(self):
        """Create a fresh ephemeral store and data builder for every test."""
        store = EphemeralStore()
        self._store = store
        self._data_builder = DataBuilder(store)

    @mock.patch('os.makedirs')
    @mock.patch('__builtin__.open')
    def _add_tokens(self, _unused_open, _unused_makedirs):
        """Fill the store with generated workflow tokens.

        Filesystem access (directory creation and file opening) is mocked so
        token generation stays in-memory.
        """
        generate_workflows(2, 2, 2, 2, 2, self._store)

    def test_get_workflows_empty(self):
        """With no tokens in the store, no workflows are reported."""
        workflows = self._data_builder.get_workflows()
        self.assertEqual([], workflows)

    def _get_workflows(self):
        """Populate the store and verify every workflow summary exactly once."""
        self._add_tokens()
        workflows = self._data_builder.get_workflows()
        self.assertEqual(4, len(workflows))
        expected = {'workflow_0': Status.RUNNING,
                    'workflow_1': Status.RUNNING,
                    'workflow_2': Status.SUCCESS,
                    'workflow_3': Status.FAILURE}
        for summary in workflows:
            self.assertEqual(expected[summary.workflow], summary.status)
            self.assertEqual('instance_1', summary.last_instance)
            # Deleting as we go ensures each workflow appears exactly once.
            del expected[summary.workflow]
        self.assertEqual({}, expected)

    def test_get_workflows(self):
        """Workflow summaries are correct without the cache."""
        self._get_workflows()

    def test_get_workflows_using_cache(self):
        """Only finished (archived) workflow instances get cached."""
        self._data_builder.use_cache = True
        self._get_workflows()
        cached = sorted(self._store.read_cached_data_names())
        self.assertEqual(['/workflow/workflow_2/instance_0/',
                          '/workflow/workflow_2/instance_1/',
                          '/workflow/workflow_3/instance_0/',
                          '/workflow/workflow_3/instance_1/'],
                         cached)

    def test_get_workflow_empty(self):
        """An unknown workflow name yields None."""
        result = self._data_builder.get_workflow('does_not_exist')
        self.assertIsNone(result)

    def _get_workflow(self):
        """Populate the store and check one running workflow's summary."""
        self._add_tokens()
        summary = self._data_builder.get_workflow('workflow_0')
        self.assertEqual('workflow_0', summary.workflow)
        self.assertEqual(Status.RUNNING, summary.status)
        self.assertEqual('instance_1', summary.last_instance)

    def test_get_workflow(self):
        """Single-workflow lookup is correct without the cache."""
        self._get_workflow()

    def test_get_workflow_using_cache(self):
        """Instances of a running workflow must never be cached."""
        self._data_builder.use_cache = True
        self._get_workflow()
        self.assertEqual([], self._store.read_cached_data_names())

    def test_get_instances_empty(self):
        """An unknown workflow has no instances."""
        instances = self._data_builder.get_instances('does_not_exist')
        self.assertEqual([], instances)

    def _get_instances(self):
        """Populate the store and verify both instances of workflow_2."""
        self._add_tokens()
        instances = self._data_builder.get_instances('workflow_2')
        self.assertEqual(2, len(instances))
        # Removing each seen status guarantees one SUCCESS and one FAILURE.
        remaining = [Status.SUCCESS, Status.FAILURE]
        for inst in instances:
            self.assertEqual('workflow_2', inst.workflow)
            remaining.remove(inst.status)
        self.assertEqual([], remaining)

    def test_get_instances(self):
        """Instance listing is correct without the cache."""
        self._get_instances()

    def test_get_instances_using_cache(self):
        """Archived instances are written to the cache after retrieval."""
        self._data_builder.use_cache = True
        self._get_instances()
        cached = sorted(self._store.read_cached_data_names())
        self.assertEqual(['/workflow/workflow_2/instance_0/',
                          '/workflow/workflow_2/instance_1/'],
                         cached)

    def test_get_instance_empty(self):
        """Looking up an instance of an unknown workflow yields None.

        Bug fix: the original called ``assertIsNone(None, <expr>)``, which
        passed the lookup result as the failure *message* and asserted that
        the literal None is None -- a vacuous, always-passing check.  The
        lookup result itself is now the asserted value.
        """
        self.assertIsNone(
            self._data_builder.get_instance('does_not_exist', 'instance_0'))

    def _get_instance(self):
        """Populate the store and fetch one specific workflow instance."""
        self._add_tokens()
        data = self._data_builder.get_instance('workflow_0', 'instance_0')
        self.assertEqual('workflow_0', data.workflow)
        self.assertEqual('instance_0', data.instance)

    def test_get_instance(self):
        """Single-instance lookup is correct without the cache."""
        self._get_instance()

#.........这里部分代码省略.........
开发者ID:DotModus,项目名称:pinball,代码行数:103,代码来源:data_builder_test.py

示例3: Worker

# 需要导入模块: from pinball.ui.data_builder import DataBuilder [as 别名]
# 或者: from pinball.ui.data_builder.DataBuilder import get_instance [as 别名]

#.........这里部分代码省略.........
        return True

    def _execute_job(self):
        """Execute the owned job.

        Runs the executor for the job token this worker currently owns,
        keeping ownership renewed while the job executes, then routes the
        token to its next state: waiting (success or terminal failure),
        runnable (failure with retries left).  Finally clears executor/token
        state and processes workflow signals, which may archive the workflow.
        """
        assert self._owned_job_token
        # NOTE(review): job payloads are pickled; presumably tokens are
        # produced internally and never from untrusted input -- confirm.
        job = pickle.loads(self._owned_job_token.data)
        name = Name.from_job_token_name(self._owned_job_token.name)
        self._executor = JobExecutor.from_job(name.workflow,
                                              name.instance,
                                              name.job,
                                              job,
                                              self._data_builder,
                                              self._emailer)
        success = self._executor.prepare()
        if success:
            # Persist any changes prepare() made to the job before running.
            self._owned_job_token.data = pickle.dumps(self._executor.job)
            success = self._update_owned_job_token()
            if success:
                # Keep renewing ownership for the duration of the
                # (potentially long) execution, then stop.
                self._start_renew_ownership()
                success = self._executor.execute()
                self._stop_renew_ownership()
        if success:
            self._move_job_token_to_waiting(self._executor.job, True)
        elif self._executor.job.retry():
            # Failed but retries remain: leave the token runnable.
            self._keep_job_token_in_runnable(self._executor.job)
        else:
            signaller = Signaller(self._client, name.workflow, name.instance)
            # If ARCHIVE is not set, this is the first failed job in the
            # workflow.
            first_failure = not signaller.is_action_set(Signal.ARCHIVE)
            self._move_job_token_to_waiting(self._executor.job, False)
            self._send_job_failure_emails(first_failure)
        self._executor = None
        self._owned_job_token = None
        # If needed, archive the workflow.
        self._process_signals(name.workflow, name.instance)

    def _send_instance_end_email(self, workflow, instance):
        """Best-effort email notification that a workflow instance finished.

        Looks up the workflow's schedule for recipient addresses; if a
        schedule with emails exists, sends the instance-end message with the
        instance and job data attached.

        Args:
            workflow: Name of the workflow whose instance ended.
            instance: The instance that ended.
        """
        try:
            schedule_data = self._data_builder.get_schedule(workflow)
            if not schedule_data:
                LOG.warning('no schedule found for workflow %s', workflow)
            elif schedule_data.emails:
                instance_data = self._data_builder.get_instance(workflow,
                                                                instance)
                jobs_data = self._data_builder.get_jobs(workflow, instance)
                self._emailer.send_instance_end_message(schedule_data.emails,
                                                        instance_data,
                                                        jobs_data)
        except Exception:
            # Deliberately best-effort: a notification failure must not take
            # down the worker.  Narrowed from a bare `except:` so
            # SystemExit/KeyboardInterrupt still propagate.
            LOG.exception('error sending instance end email for workflow %s '
                          'instance %s', workflow, instance)

    def _send_job_failure_emails(self, first_failure):
        """Notify interested parties that the owned job's execution failed.

        Recipients are the job's own email list, extended with the schedule's
        emails when this is the first failure in the workflow instance.

        Args:
            first_failure: True if this is the first failed job in the
                workflow instance.
        """
        assert self._owned_job_token
        name = Name.from_job_token_name(self._owned_job_token.name)
        job = self._executor.job
        emails = set(job.emails)
        if first_failure:
            schedule_data = self._data_builder.get_schedule(name.workflow)
            if schedule_data:
                emails.update(schedule_data.emails)
            else:
                LOG.warning('no schedule found for workflow %s', name.workflow)
        if emails:
            # The most recent history entry describes the failed execution.
            execution = len(job.history) - 1
            job_execution_data = self._data_builder.get_execution(
                name.workflow, name.instance, name.job, execution)
            try:
                self._emailer.send_job_execution_end_message(
                    list(emails), job_execution_data)
            except Exception:
                # Best-effort notification; narrowed from a bare `except:` so
                # SystemExit/KeyboardInterrupt still propagate.
                LOG.exception('error sending job failure email for '
                              'workflow %s instance %s job %s execution %d',
                              name.workflow,
                              name.instance,
                              name.job,
                              execution)

    @staticmethod
    def _randomized_worker_polling_time():
        """Return a jittered polling delay.

        Uniform in [WORKER_POLL_TIME_SEC, 2 * WORKER_POLL_TIME_SEC) so that
        workers do not poll in lockstep.
        """
        jitter = 1.0 + random.random()
        return jitter * PinballConfig.WORKER_POLL_TIME_SEC

    def run(self):
        """Run the worker loop until an EXIT signal (or test hook) stops it.

        Each iteration checks signals: EXIT ends the loop; DRAIN suppresses
        claiming new job tokens.  When a token is owned it is executed;
        otherwise the worker sleeps a randomized interval before polling
        again.

        Bug fix: the original ended each exit path with `return`, so the
        final 'Exiting worker' log line after the `while True` loop was
        unreachable dead code.  Exit paths now `break` so the log runs.
        """
        LOG.info('Running worker ' + self._name)
        while True:
            signaller = Signaller(self._client)
            if signaller.is_action_set(Signal.EXIT):
                break
            if not signaller.is_action_set(Signal.DRAIN):
                self._own_runnable_job_token()
            if self._owned_job_token:
                self._execute_job()
            elif self._test_only_end_if_no_runnable:
                break
            else:
                time.sleep(Worker._randomized_worker_polling_time())
        LOG.info('Exiting worker ' + self._name)
开发者ID:Betterment,项目名称:pinball,代码行数:104,代码来源:worker.py


注:本文中的pinball.ui.data_builder.DataBuilder.get_instance方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。