This article collects typical usage examples of the Python method pinball.ui.data_builder.DataBuilder.get_workflows. If you are unsure what DataBuilder.get_workflows does or how to use it, the curated code examples below should help. You can also read further about the containing class, pinball.ui.data_builder.DataBuilder.
Below, 2 code examples of DataBuilder.get_workflows are shown, ordered by popularity.
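Before the examples, here is a minimal sketch of the typical call pattern. The EphemeralStore import path is an assumption inferred from the test case in Example 2, not part of the original examples:

from pinball.ui.data_builder import DataBuilder

# Assumed import path for the in-memory store used in Example 2.
from pinball.persistence.store import EphemeralStore

store = EphemeralStore()
builder = DataBuilder(store)
# get_workflows() returns one entry per workflow; each entry exposes the
# workflow name, its status, and its most recent instance (the same
# attributes asserted on in Example 2).
for workflow in builder.get_workflows():
    print('%s %s %s' % (workflow.workflow, workflow.status,
                        workflow.last_instance))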
Example 1: _compute_workflow
# Required import: from pinball.ui.data_builder import DataBuilder [as alias]
# Or: from pinball.ui.data_builder.DataBuilder import get_workflows [as alias]
def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflow data.

    This callable is invoked by the thread's run() method when the
    thread starts. It computes the workflows data, serializes it, and
    stores it in _WORKFLOWS_JSON. The computation repeats indefinitely,
    refreshing _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store from which to retrieve run statuses.
    """
    # LOG, _serialize, and _WORKFLOWS_JSON are module-level names
    # defined elsewhere in the pinball_ui module.
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
            # TODO(mao): Tune this parameter depending on future
            # pinball user experience.
            # TODO(mao): Make this computation run at scheduled time
            # intervals and cancel the next execution if the previous
            # job hasn't finished.
            time.sleep(60 * 20)
        except Exception as e:
            LOG.exception(e)
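In pinball_ui this function serves as the target of a background cache thread. A minimal sketch of how such a thread could be started, assuming _compute_workflow and a dbstore are in scope; the helper name and the daemon flag are illustrative, not taken from the original source:

import threading

def _start_cache_thread(dbstore):
    # Hypothetical helper: run the refresh loop in a daemon thread so
    # it terminates together with the pinball_ui server process.
    thread = threading.Thread(target=_compute_workflow, args=(dbstore,))
    thread.daemon = True
    thread.start()
    return thread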
Example 2: DataBuilderTestCase
# Required import: from pinball.ui.data_builder import DataBuilder [as alias]
# Or: from pinball.ui.data_builder.DataBuilder import get_workflows [as alias]
class DataBuilderTestCase(unittest.TestCase):
    def setUp(self):
        self._store = EphemeralStore()
        self._data_builder = DataBuilder(self._store)

    @mock.patch('os.makedirs')
    @mock.patch('__builtin__.open')
    def _add_tokens(self, _, __):
        generate_workflows(2, 2, 2, 2, 2, self._store)

    def test_get_workflows_empty(self):
        self.assertEqual([], self._data_builder.get_workflows())

    def _get_workflows(self):
        self._add_tokens()
        workflows = self._data_builder.get_workflows()
        self.assertEqual(4, len(workflows))
        workflow_status = {'workflow_0': Status.RUNNING,
                           'workflow_1': Status.RUNNING,
                           'workflow_2': Status.SUCCESS,
                           'workflow_3': Status.FAILURE}
        for workflow in workflows:
            self.assertEqual(workflow_status[workflow.workflow],
                             workflow.status)
            self.assertEqual('instance_1', workflow.last_instance)
            del workflow_status[workflow.workflow]
        self.assertEqual({}, workflow_status)

    def test_get_workflows(self):
        self._get_workflows()

    def test_get_workflows_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflows()
        # Only finished (archived) workflow instances should have been cached.
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/',
                                 '/workflow/workflow_3/instance_0/',
                                 '/workflow/workflow_3/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)

    def test_get_workflow_empty(self):
        self.assertIsNone(self._data_builder.get_workflow('does_not_exist'))

    def _get_workflow(self):
        self._add_tokens()
        workflow = self._data_builder.get_workflow('workflow_0')
        self.assertEqual('workflow_0', workflow.workflow)
        self.assertEqual(Status.RUNNING, workflow.status)
        self.assertEqual('instance_1', workflow.last_instance)

    def test_get_workflow(self):
        self._get_workflow()

    def test_get_workflow_using_cache(self):
        self._data_builder.use_cache = True
        self._get_workflow()
        # Instances of a running workflow should not have been cached.
        self.assertEqual([], self._store.read_cached_data_names())

    def test_get_instances_empty(self):
        self.assertEqual([],
                         self._data_builder.get_instances('does_not_exist'))

    def _get_instances(self):
        self._add_tokens()
        instances = self._data_builder.get_instances('workflow_2')
        self.assertEqual(2, len(instances))
        instance_status = [Status.SUCCESS, Status.FAILURE]
        for instance in instances:
            self.assertEqual('workflow_2', instance.workflow)
            instance_status.remove(instance.status)
        self.assertEqual([], instance_status)

    def test_get_instances(self):
        self._get_instances()

    def test_get_instances_using_cache(self):
        self._data_builder.use_cache = True
        self._get_instances()
        expected_cached_names = ['/workflow/workflow_2/instance_0/',
                                 '/workflow/workflow_2/instance_1/']
        cached_names = sorted(self._store.read_cached_data_names())
        self.assertEqual(expected_cached_names, cached_names)
    def test_get_instance_empty(self):
        # assertIsNone takes a single object (plus an optional message);
        # passing None as the first argument would make the test vacuous.
        self.assertIsNone(
            self._data_builder.get_instance('does_not_exist', 'instance_0'))
    def _get_instance(self):
        self._add_tokens()
        instance = self._data_builder.get_instance('workflow_0', 'instance_0')
        self.assertEqual('workflow_0', instance.workflow)
        self.assertEqual('instance_0', instance.instance)

    def test_get_instance(self):
        self._get_instance()
# ......... part of the code omitted here .........
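To run Example 2 standalone, the usual unittest entry point suffices. Note that the '__builtin__.open' patch above implies Python 2, where mock is the standalone package; on Python 3 the equivalents would be unittest.mock and 'builtins.open':

if __name__ == '__main__':
    unittest.main()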