This article collects typical usage examples of the Python SpiffWorkflow.Workflow class. If you are wondering what the Workflow class is for, or how to use it in practice, the selected class code examples below may help.
The following shows 15 code examples of the Workflow class, sorted by popularity by default.
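Most of the examples below follow the same basic lifecycle: build a WorkflowSpec, wrap it in a Workflow, then either complete READY tasks one at a time or run everything with complete_all(). The sketch below is a minimal, hypothetical two-task illustration of that lifecycle (it is not taken from any of the examples); it targets the older 0.x-style API used throughout this page, so the exact import paths may differ in your installed version.

from SpiffWorkflow import Workflow, Task
from SpiffWorkflow.specs import WorkflowSpec, Simple

# Build a tiny hypothetical specification: Start -> task_a -> task_b.
spec = WorkflowSpec()
task_a = Simple(spec, 'task_a')
task_b = Simple(spec, 'task_b')
spec.start.connect(task_a)
task_a.connect(task_b)

# Instantiate the workflow from the specification.
workflow = Workflow(spec)

# Either step through READY tasks one at a time ...
start = workflow.get_tasks(Task.READY)[0]
workflow.complete_task_from_id(start.id)

# ... or let the engine run everything that needs no manual input.
workflow.complete_all()
assert workflow.is_completed()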
Example 1: testRunWorkflow
def testRunWorkflow(self):
    filename = os.path.join(os.path.dirname(__file__), 'xml/openwfe/workflow1.xml')
    wf_specs = self.reader.parse_file(filename)
    wf_spec = wf_specs[0]

    for name in wf_spec.task_specs:
        wf_spec.task_specs[name].reached_event.connect(self.on_reached_cb)
        wf_spec.task_specs[name].completed_event.connect(on_complete_cb, self.taken_path)

    workflow = Workflow(wf_spec)
    try:
        workflow.complete_all()
    except:
        workflow.dump()
        raise

    path = [( 1, 'Start'),
            ( 2, 'concurrence_1'),
            ( 3, 'task_a1'),
            ( 4, 'task_a2'),
            ( 5, 'if_condition_1'),
            ( 6, 'task_a3'),
            ( 7, 'if_condition_1_end'),
            ( 8, 'if_condition_2'),
            ( 9, 'task_a5'),
            (10, 'if_condition_2_end'),
            ( 3, 'task_b1'),
            ( 4, 'task_b2'),
            ( 5, 'concurrence_1_end'),
            ( 6, 'task_c1'),
            ( 7, 'task_c2'),
            ( 8, 'End')]
    assert_same_path(self, path, self.taken_path)
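The on_reached_cb and on_complete_cb helpers wired up above (and in Examples 11 and 12) are test utilities that are not shown on this page. A plausible sketch, assuming the events pass the workflow and the task and forward the extra arguments given to connect():

def on_reached_cb(workflow, task, taken_path=None):
    # Fired when a task becomes reachable; nothing needs to be recorded here.
    return True

def on_complete_cb(workflow, task, taken_path):
    # Record the name of every completed task so tests can compare the
    # taken path against an expected one.
    taken_path.append(task.task_spec.name)
    return True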
Example 2: testSerializeWorkflow
def testSerializeWorkflow(self, path_file=None, data=None):
    if self.serializer is None:
        return

    if path_file is None:
        path_file = os.path.join(data_dir, 'spiff', 'workflow1.path')
        path = open(path_file).read()
    elif os.path.exists(path_file):
        path = open(path_file).read()
    else:
        path = None

    # Run a workflow fresh from the spec to completion; see if it
    # serialises and deserialises correctly.
    workflow_without_save = run_workflow(self, self.wf_spec, path, data)
    try:
        serialized1 = workflow_without_save.serialize(self.serializer)
        restored_wf = Workflow.deserialize(self.serializer, serialized1)
        serialized2 = restored_wf.serialize(self.serializer)
    except TaskNotSupportedError as e:
        return
    else:
        self.assert_(isinstance(serialized1, self.serial_type))
        self.assert_(isinstance(serialized2, self.serial_type))
        self.compareSerialization(serialized1, serialized2)

    # Try a freshly started workflow; see if it serialises and
    # deserialises correctly. (We no longer catch exceptions here: if they
    # were going to happen, they should have happened already.)
    workflow = Workflow(self.wf_spec)
    serialized1 = workflow.serialize(self.serializer)
    restored_wf = Workflow.deserialize(self.serializer, serialized1)
    serialized2 = restored_wf.serialize(self.serializer)
    self.assert_(isinstance(serialized1, self.serial_type))
    self.assert_(isinstance(serialized2, self.serial_type))
    self.compareSerialization(serialized1, serialized2)
    self.assertFalse(restored_wf.is_completed())

    # Run it to completion; see if it serialises and deserialises correctly.
    # Also check whether the restored and unrestored workflows are the same
    # after being run through.
    workflow_unrestored = run_workflow(self, self.wf_spec, path, data, workflow=workflow)
    workflow_restored = run_workflow(self, self.wf_spec, path, data, workflow=restored_wf)
    serialized1 = workflow_restored.serialize(self.serializer)
    restored_wf = Workflow.deserialize(self.serializer, serialized1)
    serialized2 = restored_wf.serialize(self.serializer)
    self.assert_(isinstance(serialized1, self.serial_type))
    self.assert_(isinstance(serialized2, self.serial_type))
    self.compareSerialization(serialized1, serialized2)

    serialized_crosscheck = workflow_unrestored.serialize(self.serializer)
    self.assert_(isinstance(serialized_crosscheck, self.serial_type))
    # Compare the restored and unrestored completed workflows. Because they
    # ran separately, exclude the last_state_change time. Because tasks can
    # be created dynamically, do not compare (uu)ids.
    self.compareSerialization(serialized_crosscheck, serialized2,
                              exclude_dynamic=True)
Example 3: deserialize_workflow
def deserialize_workflow(self, s_state, **kwargs):
    wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs)
    workflow = Workflow(wf_spec)
    # data
    workflow.data = self._deserialize_dict(s_state['data'])
    # outer_workflow
    # workflow.outer_workflow = find_workflow_by_id(remap_workflow_id(s_state['outer_workflow']))
    # success
    workflow.success = s_state['success']
    # workflow
    workflow.spec = wf_spec
    # task_tree
    workflow.task_tree = self._deserialize_task(workflow, s_state['task_tree'])
    # Re-connect parents
    for task in workflow.get_tasks():
        task.parent = workflow.get_task(task.parent)
    # last_task
    workflow.last_task = workflow.get_task(s_state['last_task'])
    return workflow
Example 4: _advance_to_a1
def _advance_to_a1(self, wf_spec):
    workflow = Workflow(wf_spec)

    tasks = workflow.get_tasks(Task.READY)
    task_start = tasks[0]
    workflow.complete_task_from_id(task_start.id)

    tasks = workflow.get_tasks(Task.READY)
    multichoice = tasks[0]
    workflow.complete_task_from_id(multichoice.id)

    tasks = workflow.get_tasks(Task.READY)
    task_a1 = tasks[0]
    workflow.complete_task_from_id(task_a1.id)
    return workflow
Example 5: start_workflow
def start_workflow(workflow_name):
    """ start workflow for graduation project """
    # create workflow instance
    workflow = Workflow(workflow_specs[workflow_name])
    # complete start task
    start_task = workflow.get_tasks(state=Task.READY)[0]
    workflow.complete_task_from_id(start_task.id)
    # save username in workflow
    workflow.data["student"] = g.user.username
    # save workflow instance to database
    save_workflow_instance(workflow, g.user.id)
    return redirect(url_for('.user_page', username=g.user.username))
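The save_workflow_instance helper above is application code, not part of SpiffWorkflow. A minimal sketch of what such a helper pair could look like, using the JSONSerializer shown in Example 15 and an in-memory dict standing in for the real database (both the helper names and the storage are assumptions):

from SpiffWorkflow import Workflow
from SpiffWorkflow.serializer.json import JSONSerializer

_instances = {}  # stand-in for the application's own database table

def save_workflow_instance(workflow, user_id):
    # Serialize the Workflow to a JSON string and store it under the user id.
    serializer = JSONSerializer()
    _instances[user_id] = workflow.serialize(serializer)

def load_workflow_instance(user_id):
    # Rebuild the Workflow from the stored JSON string.
    serializer = JSONSerializer()
    return Workflow.deserialize(serializer, _instances[user_id])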
Example 6: deserialize_workflow
def deserialize_workflow(self, s_state, **kwargs):
    wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs)
    original_root = wf_spec.task_specs['Root']
    workflow = Workflow(wf_spec, deserializing=True)
    new_root = wf_spec.task_specs['Root']
    assert original_root is new_root
    # attributes
    workflow.attributes = s_state['attributes']
    # last_task
    workflow.last_task = s_state['last_task']
    # outer_workflow
    # workflow.outer_workflow = find_workflow_by_id(remap_workflow_id(s_state['outer_workflow']))
    # success
    workflow.success = s_state['success']
    # workflow
    workflow.spec = wf_spec
    # task_tree
    old_root_task = workflow.task_tree
    workflow.task_tree = self._deserialize_task(workflow, s_state['task_tree'])
    assert old_root_task is workflow.task_tree
    return workflow
Example 7: testDictionarySerializer
def testDictionarySerializer(self):
    """
    Tests the SelectivePickler serializer for persisting Workflows and Tasks.
    """
    old_workflow = self.workflow
    serializer = DictionarySerializer()
    serialized_workflow = old_workflow.serialize(serializer)

    serializer = DictionarySerializer()
    new_workflow = Workflow.deserialize(serializer, serialized_workflow)

    before = old_workflow.get_dump()
    after = new_workflow.get_dump()
    self.assert_(before == after, 'Before:\n' + before + '\n'
                 + 'After:\n' + after + '\n')
Example 8: testDeserialization
def testDeserialization(self):
    """
    Tests that the deserialized workflow matches the original workflow.
    """
    old_workflow = self.workflow
    old_workflow.spec.start.set_data(marker=True)
    serializer = DictionarySerializer()
    serialized_workflow = old_workflow.serialize(serializer)

    serializer = DictionarySerializer()
    new_workflow = Workflow.deserialize(serializer, serialized_workflow)

    self.assertEqual(len(new_workflow.get_tasks()), len(old_workflow.get_tasks()))
    self.assertEqual(new_workflow.spec.start.get_data('marker'),
                     old_workflow.spec.start.get_data('marker'))
    self.assertEqual(1, len([t for t in new_workflow.get_tasks() if t.task_spec.name == 'Start']))
    self.assertEqual(1, len([t for t in new_workflow.get_tasks() if t.task_spec.name == 'Root']))
Example 9: run_workflow
def run_workflow(test, wf_spec, expected_path, expected_data, workflow=None):
    # Execute all tasks within the Workflow.
    if workflow is None:
        taken_path = track_workflow(wf_spec)
        workflow = Workflow(wf_spec)
    else:
        taken_path = track_workflow(workflow.spec)

    test.assert_(not workflow.is_completed(), 'Workflow is complete before start')
    try:
        # We allow the workflow to require a maximum of 5 seconds to
        # complete, to allow for testing long-running tasks.
        for i in range(10):
            workflow.complete_all(False)
            if workflow.is_completed():
                break
            time.sleep(0.5)
    except:
        workflow.task_tree.dump()
        raise

    # workflow.task_tree.dump()
    test.assert_(workflow.is_completed(),
                 'complete_all() returned, but workflow is not complete\n'
                 + workflow.task_tree.get_dump())

    # Make sure that there are no waiting tasks left in the tree.
    for thetask in Task.Iterator(workflow.task_tree, Task.READY):
        workflow.task_tree.dump()
        raise Exception('Task with state READY: %s' % thetask.name)

    # Check whether the correct route was taken.
    if expected_path is not None:
        taken_path = '\n'.join(taken_path) + '\n'
        error = 'Expected:\n'
        error += '%s\n' % expected_path
        error += 'but got:\n'
        error += '%s\n' % taken_path
        test.assert_(taken_path == expected_path, error)

    # Check data availability.
    if expected_data is not None:
        result = workflow.get_data('data', '')
        error = 'Expected:\n'
        error += '%s\n' % expected_data
        error += 'but got:\n'
        error += '%s\n' % result
        test.assert_(result == expected_data, error)

    return workflow
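The track_workflow helper used above is not shown on this page. Judging from the event wiring in Examples 1, 11 and 12, a minimal sketch could connect every task spec's completed_event to the on_complete_cb sketch given after Example 1 and return the list being appended to:

def track_workflow(wf_spec, taken_path=None):
    # Hypothetical reconstruction: record the name of each completed task.
    if taken_path is None:
        taken_path = []
    for name in wf_spec.task_specs:
        wf_spec.task_specs[name].completed_event.connect(on_complete_cb, taken_path)
    return taken_path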
Example 10: deserialize_workflow
def deserialize_workflow(self, s_state):
    wf_spec_class = get_class(s_state['workflow'])
    wf_spec = wf_spec_class()
    workflow = Workflow(wf_spec)
    workflow.attributes = s_state['attributes']
    workflow.last_task = s_state['last_task']
    workflow.success = s_state['success']
    tasks = [self.deserialize_task(workflow, serialized_task)
             for serialized_task in s_state['task_tree']]
    workflow.task_tree = [task for task in tasks if task.task_spec.name == 'Root'][0]
    workflow.spec = wf_spec
    return workflow
Example 11: runWorkflow
def runWorkflow(self, wf_spec, xml_filename):
    taken_path = []
    for name in wf_spec.task_specs:
        wf_spec.task_specs[name].reached_event.connect(on_reached_cb, taken_path)
        wf_spec.task_specs[name].completed_event.connect(on_complete_cb, taken_path)

    # Execute all tasks within the Workflow.
    workflow = Workflow(wf_spec)
    self.assert_(not workflow.is_completed(), "Workflow is complete before start")
    try:
        workflow.complete_all(False)
    except:
        workflow.task_tree.dump()
        raise

    # workflow.task_tree.dump()
    self.assert_(
        workflow.is_completed(),
        "complete_all() returned, but workflow is not complete\n" + workflow.task_tree.get_dump(),
    )

    # Make sure that there are no waiting tasks left in the tree.
    for thetask in Task.Iterator(workflow.task_tree, Task.READY):
        workflow.task_tree.dump()
        raise Exception("Task with state READY: %s" % thetask.name)

    # Check whether the correct route was taken.
    filename = xml_filename + ".path"
    if os.path.exists(filename):
        file = open(filename, "r")
        expected = file.read()
        file.close()
        taken_path = "\n".join(taken_path) + "\n"
        error = "%s:\n" % name
        error += "Expected:\n"
        error += "%s\n" % expected
        error += "but got:\n"
        error += "%s\n" % taken_path
        self.assert_(taken_path == expected, error)

    # Check attribute availability.
    filename = xml_filename + ".data"
    if os.path.exists(filename):
        file = open(filename, "r")
        expected = file.read()
        file.close()
        result = workflow.get_attribute("data", "")
        error = "%s:\n" % name
        error += "Expected:\n"
        error += "%s\n" % expected
        error += "but got:\n"
        error += "%s\n" % result
        self.assert_(result == expected, error)
Example 12: _runWorkflow
def _runWorkflow(self, wf_spec):
    taken_path = {'reached':   [],
                  'completed': []}
    for name, task in wf_spec.task_specs.iteritems():
        task.reached_event.connect(on_reached_cb, taken_path['reached'])
        task.completed_event.connect(on_complete_cb, taken_path['completed'])

    # Execute all tasks within the Workflow.
    workflow = Workflow(wf_spec)
    self.assert_(not workflow.is_completed(), 'Workflow complete before start')
    try:
        workflow.complete_all()
    except:
        workflow.dump()
        raise

    self.assert_(workflow.is_completed(),
                 'complete_all() returned, but workflow is not complete\n'
                 + workflow.task_tree.get_dump())
    # workflow.task_tree.dump()
    assert_same_path(self, self.expected_path, taken_path['completed'])
Example 13: deserialize_workflow
def deserialize_workflow(self, s_state, **kwargs):
    wf_spec = self.deserialize_workflow_spec(s_state['wf_spec'], **kwargs)
    workflow = Workflow(wf_spec)
    # attributes
    workflow.attributes = s_state['attributes']
    # last_task
    workflow.last_task = s_state['last_task']
    # outer_workflow
    # workflow.outer_workflow = find_workflow_by_id(remap_workflow_id(s_state['outer_workflow']))
    # success
    workflow.success = s_state['success']
    # workflow
    workflow.spec = wf_spec
    # task_tree
    workflow.task_tree = self._deserialize_task(workflow, s_state['task_tree'])
    return workflow
Example 14: testBeginWorkflowStepByStep
def testBeginWorkflowStepByStep(self):
    """
    Simulates interactive calls, as would be issued by a user.
    """
    wf_spec = self._createWorkflowSpec()
    workflow = Workflow(wf_spec)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 1)
    self.assertEqual(tasks[0].task_spec.name, 'Start')
    workflow.complete_task_from_id(tasks[0].id)
    self.assertEqual(tasks[0].state, Task.COMPLETED)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 2)
    task_a1 = tasks[0]
    task_b1 = tasks[1]
    self.assertEqual(task_a1.task_spec.__class__, Simple)
    self.assertEqual(task_a1.task_spec.name, 'task_a1')
    self.assertEqual(task_b1.task_spec.__class__, Simple)
    self.assertEqual(task_b1.task_spec.name, 'task_b1')
    workflow.complete_task_from_id(task_a1.id)
    self.assertEqual(task_a1.state, Task.COMPLETED)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 2)
    self.assertTrue(task_b1 in tasks)
    task_a2 = tasks[0]
    self.assertEqual(task_a2.task_spec.__class__, Simple)
    self.assertEqual(task_a2.task_spec.name, 'task_a2')
    workflow.complete_task_from_id(task_a2.id)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 1)
    self.assertTrue(task_b1 in tasks)
    workflow.complete_task_from_id(task_b1.id)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 1)
    workflow.complete_task_from_id(tasks[0].id)

    tasks = workflow.get_tasks(Task.READY)
    self.assertEqual(len(tasks), 1)
    self.assertEqual(tasks[0].task_spec.name, 'synch_1')
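The _createWorkflowSpec helper is not shown on this page. Given the task names asserted above (two branches task_a1/task_a2 and task_b1/task_b2 meeting at 'synch_1'), a plausible reconstruction is two parallel branches joined by a Join spec; treat this as an assumption rather than the original test fixture:

from SpiffWorkflow.specs import WorkflowSpec, Simple, Join

def _createWorkflowSpec(self):
    # Start fans out into two branches of Simple tasks that synchronize
    # again at a Join task spec named 'synch_1'.
    wf_spec = WorkflowSpec()
    task_a1 = Simple(wf_spec, 'task_a1')
    task_a2 = Simple(wf_spec, 'task_a2')
    task_b1 = Simple(wf_spec, 'task_b1')
    task_b2 = Simple(wf_spec, 'task_b2')
    synch_1 = Join(wf_spec, 'synch_1')

    wf_spec.start.connect(task_a1)
    wf_spec.start.connect(task_b1)
    task_a1.connect(task_a2)
    task_b1.connect(task_b2)
    task_a2.connect(synch_1)
    task_b2.connect(synch_1)
    return wf_spec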
Example 15: open
import json
from SpiffWorkflow import Workflow
from SpiffWorkflow.specs import WorkflowSpec
from SpiffWorkflow.serializer.json import JSONSerializer

# Load from JSON
with open('nuclear.json') as fp:
    workflow_json = fp.read()
serializer = JSONSerializer()
spec = WorkflowSpec.deserialize(serializer, workflow_json)

# Alternatively, create an instance of the Python based specification.
#from nuclear import NuclearStrikeWorkflowSpec
#spec = NuclearStrikeWorkflowSpec()

# Create the workflow.
workflow = Workflow(spec)

# Execute until all tasks are done or require manual intervention.
# For the sake of this tutorial, we ignore the "manual" flag on the
# tasks. In practice, you probably don't want to do that.
workflow.complete_all(halt_on_manual=False)

# Alternatively, this is what a UI would do for a manual task.
#workflow.complete_task_from_id(...)
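To pick up where the snippet leaves off, the workflow state can be written back out with the same JSONSerializer and restored later, mirroring the serialize/deserialize round trips in Examples 2 and 7. A minimal sketch (the state file name is arbitrary):

# Persist the current workflow state so it can be resumed later.
state = workflow.serialize(serializer)
with open('nuclear_state.json', 'w') as fp:
    fp.write(state)

# Later: restore the workflow and continue where it left off.
with open('nuclear_state.json') as fp:
    workflow = Workflow.deserialize(serializer, fp.read())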