This article collects typical usage examples of the Python method pinball.ui.data_builder.DataBuilder.get_workflows. If you have been wondering how DataBuilder.get_workflows is used in practice, or what calls to it look like in real code, the curated examples here may help. You can also explore further usage examples of the containing class, pinball.ui.data_builder.DataBuilder.
Below are 2 code examples of the DataBuilder.get_workflows method, sorted by popularity by default.
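
As a quick orientation before the full examples, here is a minimal sketch of the call pattern. It is not taken from the page's examples; it assumes the in-memory EphemeralStore from pinball.persistence.store, the same store the test case below uses:

from pinball.persistence.store import EphemeralStore
from pinball.ui.data_builder import DataBuilder

store = EphemeralStore()
builder = DataBuilder(store)
# get_workflows() returns one summary object per workflow; as the tests
# below rely on, each exposes .workflow, .status and .last_instance.
for workflow in builder.get_workflows():
    print('%s %s %s' % (workflow.workflow, workflow.status,
                        workflow.last_instance))
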
Example 1: _compute_workflow
# Required module import: from pinball.ui.data_builder import DataBuilder [as alias]
# Or: from pinball.ui.data_builder.DataBuilder import get_workflows [as alias]
def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflows data.

    This callable is invoked by the thread's run() method when the
    thread starts. It computes the workflows data, serializes it, and
    stores it in _WORKFLOWS_JSON. The computation repeats indefinitely,
    refreshing _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store from which to retrieve run statuses.
    """
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
            # TODO(mao): Tune this parameter depending on future
            # pinball user experience.
            # TODO(mao): Make this computation run at scheduled time
            # intervals and cancel the next execution if the previous
            # job hasn't finished.
            time.sleep(60 * 20)
        except Exception as e:
            LOG.exception(e)
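
The loop above is meant to run on a background thread for the lifetime of the pinball_ui server. Here is a hedged sketch of how such a refresh thread might be launched; the helper name and the daemon-thread choice are illustrative, not from the original source:

import threading

def start_workflow_cache_thread(dbstore):
    # Hypothetical helper: run _compute_workflow forever on a daemon
    # thread so it does not block pinball_ui server shutdown.
    thread = threading.Thread(target=_compute_workflow, args=(dbstore,))
    thread.daemon = True
    thread.start()
    return thread
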
Example 2: DataBuilderTestCase
# Required module import: from pinball.ui.data_builder import DataBuilder [as alias]
# Or: from pinball.ui.data_builder.DataBuilder import get_workflows [as alias]
class DataBuilderTestCase(unittest.TestCase):
    def setUp(self):
        self._store = EphemeralStore()
        self._data_builder = DataBuilder(self._store)

    @mock.patch('os.makedirs')
    @mock.patch('__builtin__.open')
    def _add_tokens(self, _, __):
        generate_workflows(2, 2, 2, 2, 2, self._store)

    def test_get_workflows_empty(self):
        self.assertEqual([], self._data_builder.get_workflows())

    def _get_workflows(self):
        self._add_tokens()
        workflows = self._data_builder.get_workflows()
        self.assertEqual(4, len(workflows))
        workflow_status = {'workflow_0': Status.RUNNING,
                           'workflow_1': Status.RUNNING,
                           'workflow_2': Status.SUCCESS,
                           'workflow_3': Status.FAILURE}
        for workflow in workflows:
            self.assertEqual(workflow_status[workflow.workflow],
                             workflow.status)
            self.assertEqual('instance_1', workflow.last_instance)
            del workflow_status[workflow.workflow]
        self.assertEqual({}, workflow_status)

    def test_get_workflows(self):
        self._get_workflows()

    def test_get_workflows_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflows()
        # Only finished (archived) workflow instances should have been cached.
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/',
                                 '/workflow/workflow_3/instance_0/',
                                 '/workflow/workflow_3/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)

    def test_get_workflow_empty(self):
        self.assertIsNone(self._data_builder.get_workflow('does_not_exist'))

    def _get_workflow(self):
        self._add_tokens()
        workflow = self._data_builder.get_workflow('workflow_0')
        self.assertEqual('workflow_0', workflow.workflow)
        self.assertEqual(Status.RUNNING, workflow.status)
        self.assertEqual('instance_1', workflow.last_instance)

    def test_get_workflow(self):
        self._get_workflow()

    def test_get_workflow_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflow()
        # Instances of a running workflow should not have been cached.
        self.assertEqual([], self._store.read_cached_data_names())

    def test_get_instances_empty(self):
        self.assertEqual([],
                         self._data_builder.get_instances('does_not_exist'))

    def _get_instances(self):
        self._add_tokens()
        instances = self._data_builder.get_instances('workflow_2')
        self.assertEqual(2, len(instances))
        instance_status = [Status.SUCCESS, Status.FAILURE]
        for instance in instances:
            self.assertEqual('workflow_2', instance.workflow)
            instance_status.remove(instance.status)
        self.assertEqual([], instance_status)

    def test_get_instances(self):
        self._get_instances()

    def test_get_instances_using_cache(self):
        self._data_builder.use_cache = True
        self._get_instances()
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)

    def test_get_instance_empty(self):
        self.assertIsNone(
            self._data_builder.get_instance('does_not_exist', 'instance_0'))

    def _get_instance(self):
        self._add_tokens()
        instance = self._data_builder.get_instance('workflow_0', 'instance_0')
        self.assertEqual('workflow_0', instance.workflow)
        self.assertEqual('instance_0', instance.instance)

    def test_get_instance(self):
        self._get_instance()
#......... the remainder of the code is omitted here .........
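
To run the test case above directly, a standard unittest entry point (not part of the excerpted code) would suffice:

if __name__ == '__main__':
    unittest.main()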