本文整理汇总了Python中celery.task.sets.TaskSet.successful方法的典型用法代码示例。如果您正苦于以下问题:Python TaskSet.successful方法的具体用法?Python TaskSet.successful怎么用?Python TaskSet.successful使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类celery.task.sets.TaskSet的用法示例。
在下文中一共展示了TaskSet.successful方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: run_analysis
# 需要导入模块: from celery.task.sets import TaskSet [as 别名]
# 或者: from celery.task.sets.TaskSet import successful [as 别名]
def run_analysis(analysis_uuid):
"""Manage analysis execution"""
RETRY_INTERVAL = 5 # seconds
try:
analysis = Analysis.objects.get(uuid=analysis_uuid)
except (Analysis.DoesNotExist, Analysis.MultipleObjectsReturned) as exc:
logger.error("Can not retrieve analysis with UUID '%s': '%s'",
analysis_uuid, exc)
run_analysis.update_state(state=celery.states.FAILURE)
return
# if cancelled by user
if analysis.failed():
return
try:
analysis_status = AnalysisStatus.objects.get(analysis=analysis)
except (AnalysisStatus.DoesNotExist,
AnalysisStatus.MultipleObjectsReturned) as exc:
logger.error("Can not retrieve status for analysis '%s': '%s'",
analysis, exc)
run_analysis.update_state(state=celery.states.FAILURE)
return
if not analysis_status.refinery_import_task_group_id:
logger.info("Starting analysis '%s'", analysis)
analysis.set_status(Analysis.RUNNING_STATUS)
logger.info("Starting input file import tasks for analysis '%s'",
analysis)
refinery_import_tasks = []
for input_file_uuid in analysis.get_input_file_uuid_list():
refinery_import_task = import_file.subtask(
(input_file_uuid, False, ))
refinery_import_tasks.append(refinery_import_task)
refinery_import = TaskSet(tasks=refinery_import_tasks).apply_async()
refinery_import.save()
analysis_status.refinery_import_task_group_id = \
refinery_import.taskset_id
analysis_status.save()
run_analysis.retry(countdown=RETRY_INTERVAL)
# check if all files were successfully imported into Refinery
refinery_import = TaskSetResult.restore(
analysis_status.refinery_import_task_group_id)
if not refinery_import.ready():
logger.debug("Input file import pending for analysis '%s'", analysis)
run_analysis.retry(countdown=RETRY_INTERVAL)
elif not refinery_import.successful():
logger.error("Analysis '%s' failed during file import", analysis)
analysis.set_status(Analysis.FAILURE_STATUS)
analysis.send_email()
refinery_import.delete()
return
# import files into Galaxy and start analysis
if not analysis_status.galaxy_import_task_group_id:
logger.debug("Starting analysis execution in Galaxy")
try:
analysis.prepare_galaxy()
except (requests.exceptions.ConnectionError,
galaxy.client.ConnectionError):
logger.error("Analysis '%s' failed during preparation in Galaxy",
analysis)
analysis.set_status(Analysis.FAILURE_STATUS)
analysis.send_email()
refinery_import.delete()
return
galaxy_import_tasks = [
start_galaxy_analysis.subtask((analysis_uuid, )),
]
galaxy_import = TaskSet(tasks=galaxy_import_tasks).apply_async()
galaxy_import.save()
analysis_status.galaxy_import_task_group_id = \
galaxy_import.taskset_id
analysis_status.set_galaxy_history_state(AnalysisStatus.PROGRESS)
run_analysis.retry(countdown=RETRY_INTERVAL)
# check if data files were successfully imported into Galaxy
galaxy_import = TaskSetResult.restore(
analysis_status.galaxy_import_task_group_id)
if not galaxy_import.ready():
logger.debug("Analysis '%s' pending in Galaxy", analysis)
run_analysis.retry(countdown=RETRY_INTERVAL)
elif not galaxy_import.successful():
logger.error("Analysis '%s' failed in Galaxy", analysis)
analysis.set_status(Analysis.FAILURE_STATUS)
analysis_status.set_galaxy_history_state(AnalysisStatus.ERROR)
analysis.send_email()
refinery_import.delete()
galaxy_import.delete()
analysis.galaxy_cleanup()
return
# check if analysis has finished running in Galaxy
try:
percent_complete = analysis.galaxy_progress()
except RuntimeError:
analysis_status.set_galaxy_history_state(AnalysisStatus.ERROR)
analysis.send_email()
#.........这里部分代码省略.........