本文整理汇总了Python中spreads.workflow.Workflow.find_by_id方法的典型用法代码示例。如果您正苦于以下问题：Python Workflow.find_by_id方法的具体用法？Python Workflow.find_by_id怎么用？Python Workflow.find_by_id使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类spreads.workflow.Workflow的用法示例。
在下文中一共展示了Workflow.find_by_id方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: to_python
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def to_python(self, value):
    """Convert a URL fragment to a Workflow instance.

    ``value`` may be either a workflow UUID or a human-readable slug;
    UUID lookup is attempted first.  Aborts the request with a 404 when
    no matching workflow exists.
    """
    from spreadsplug.web import app
    try:
        uuid.UUID(value)
    except ValueError:
        # Not a well-formed UUID, so treat the value as a slug instead.
        workflow = Workflow.find_by_slug(app.config['base_path'], value)
    else:
        # Keep the lookup out of the `try` block so a ValueError raised
        # inside find_by_id cannot be silently misread as "not a UUID"
        # and wrongly fall through to the slug lookup.
        workflow = Workflow.find_by_id(app.config['base_path'], value)
    if workflow is None:
        abort(404)
    return workflow
示例2: get
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def get(self, workflow_id, filename):
    """Serve the workflow's bag as an uncompressed ZIP download.

    Raises ValueError for a malformed ``workflow_id`` before any
    lookup takes place.
    """
    uuid.UUID(workflow_id)
    workflow = Workflow.find_by_id(self.base_path, workflow_id)
    stream = workflow.bag.package_as_zipstream(compression=None)
    total_size = self.calculate_zipsize(stream.paths_to_write)
    self.set_status(200)
    self.set_header('Content-type', 'application/zip')
    self.set_header('Content-length', str(total_size))
    # Hand the stream to the chunked-write machinery of the handler.
    self.zstream_iter = iter(stream)
    self.send_next_chunk()
示例3: transfer_to_stick
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def transfer_to_stick(wf_id, base_path):
    """Copy a workflow's files onto a removable USB stick.

    Looks up the workflow by id, mounts the stick (via UDisks over dbus
    on non-Windows platforms), mirrors the workflow directory onto it
    and finally unmounts/syncs the device.  Progress is reported through
    the module-level ``signals`` registry and mirrored into
    ``workflow.status``.

    NOTE(review): uses the Python 2 ``unicode`` builtin; assumes true
    division (``from __future__ import division``) for the progress
    fractions — TODO confirm at the top of the module.
    """
    workflow = Workflow.find_by_id(base_path, wf_id)
    stick = find_stick()
    files = list(workflow.path.rglob('*'))
    num_files = len(files)
    # Filter out problematic characters
    clean_name = (workflow.path.name.replace(':', '_')
                  .replace('/', '_'))
    workflow.status['step'] = 'transfer'
    try:
        if IS_WIN:
            # On Windows the stick is already a drive path; no mounting.
            target_path = Path(stick)/clean_name
        else:
            mount = stick.get_dbus_method(
                "FilesystemMount",
                dbus_interface="org.freedesktop.UDisks.Device")
            mount_point = mount('', [])
            target_path = Path(mount_point)/clean_name
        if target_path.exists():
            # Remove any stale copy from a previous transfer.
            shutil.rmtree(unicode(target_path))
        target_path.mkdir()
        signals['transfer:started'].send(workflow)
        for num, path in enumerate(files, 1):
            # Progress is scaled to 0.79 so the final sync/unmount phase
            # below can occupy the remainder of the progress bar.
            signals['transfer:progressed'].send(
                workflow, progress=(num/num_files)*0.79, status=path.name)
            workflow.status['step_done'] = (num/num_files)*0.79
            target = target_path/path.relative_to(workflow.path)
            if path.is_dir():
                target.mkdir()
            else:
                shutil.copyfile(unicode(path), unicode(target))
    finally:
        # Only unmount if we actually mounted (i.e. the dbus branch ran);
        # `'mount_point' in locals()` is how that is detected.
        if 'mount_point' in locals():
            signals['transfer:progressed'].send(workflow, progress=0.8,
                                                status="Syncing...")
            workflow.status['step_done'] = 0.8
            unmount = stick.get_dbus_method(
                "FilesystemUnmount",
                dbus_interface="org.freedesktop.UDisks.Device")
            unmount([], timeout=1e6)  # dbus-python doesn't know an infinite
                                      # timeout... unmounting sometimes takes a
                                      # long time, since the device has to be
                                      # synced.
    signals['transfer:completed'].send(workflow)
    workflow.status['step'] = None
示例4: upload_workflow
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def upload_workflow(wf_id, base_path, endpoint, user_config,
                    start_process=False, start_output=False):
    """Upload a workflow's bag to a remote postprocessing server.

    The user-supplied configuration is temporarily merged into the bag,
    the bag is zipped and streamed to ``endpoint`` via HTTP POST, and the
    original configuration is restored afterwards.  Optionally kicks off
    processing and/or output generation on the remote side.  Progress and
    outcome are reported through the module-level ``signals`` registry.

    NOTE(review): uses the Python 2 ``unicode`` builtin and assumes true
    division for the progress ratio — TODO confirm module imports.
    """
    logger.debug("Uploading workflow to postprocessing server")
    workflow = Workflow.find_by_id(base_path, wf_id)
    # NOTE: This is kind of nasty.... We temporarily write the user-supplied
    # configuration to the bag, update the tag-payload, create the zip, and
    # once everything is done, we restore the old version
    tmp_cfg = copy.deepcopy(workflow.config)
    tmp_cfg.set(user_config)
    tmp_cfg_path = workflow.path/'config.yml'
    tmp_cfg.dump(filename=unicode(tmp_cfg_path),
                 sections=(user_config['plugins'] + ["plugins", "device"]))
    workflow.bag.add_tagfiles(unicode(tmp_cfg_path))
    # Create a zipstream from the workflow-bag
    zstream = workflow.bag.package_as_zipstream(compression=None)
    # The stream can only be consumed once, so the total size is computed
    # from a deep copy while the original remains unconsumed for upload.
    zstream_copy = copy.deepcopy(zstream)
    zsize = sum(len(x) for x in zstream_copy)

    def zstream_wrapper():
        """ Wrapper around our zstream so we can emit a signal when all data
        has been streamed to the client.
        """
        transferred = 0
        progress = "0.00"
        for data in zstream:
            yield data
            transferred += len(data)
            # Only update progress if we've progress at least by 0.01
            new_progress = "{0:.2f}".format(transferred/zsize)
            if new_progress != progress:
                progress = new_progress
                signals['submit:progressed'].send(
                    workflow, progress=float(progress),
                    status="Uploading workflow...")

    # NOTE: This is neccessary since requests makes a chunked upload when
    #       passed a plain generator, which is not supported by the WSGI
    #       protocol that receives it. Hence we wrap it inside of a
    #       GeneratorIO to make it appear as a file-like object with a
    #       known size.
    zstream_fp = GeneratorIO(zstream_wrapper(), zsize)
    signals['submit:started'].send(workflow)
    resp = requests.post(endpoint, data=zstream_fp,
                         headers={'Content-Type': 'application/zip'})
    # requests.Response is falsy for 4xx/5xx status codes.
    if not resp:
        error_msg = "Upload failed: {0}".format(resp.content)
        signals['submit:error'].send(workflow, message=error_msg,
                                     data=resp.content)
        logger.error(error_msg)
    else:
        wfid = resp.json()['id']
        if start_process:
            requests.post(endpoint + "/{0}/process".format(wfid))
        if start_output:
            requests.post(endpoint + "/{0}/output".format(wfid))
        signals['submit:completed'].send(workflow, remote_id=wfid)
    # Restore our old configuration
    workflow._save_config()
示例5: output_workflow
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def output_workflow(wf_id, base_path):
    """Run output generation for the workflow identified by *wf_id*."""
    wf = Workflow.find_by_id(base_path, wf_id)
    message = ("Initiating output generation for workflow {0}"
               .format(wf.slug))
    logger.debug(message)
    wf.output()
示例6: process_workflow
# 需要导入模块: from spreads.workflow import Workflow [as 别名]
# 或者: from spreads.workflow.Workflow import find_by_id [as 别名]
def process_workflow(wf_id, base_path):
    """Run postprocessing for the workflow identified by *wf_id*."""
    wf = Workflow.find_by_id(base_path, wf_id)
    message = ("Initiating processing for workflow {0}"
               .format(wf.slug))
    logger.debug(message)
    wf.process()