This article collects typical usage examples of the set_downstream method of airflow.operators.python_operator.PythonOperator in Python. If you have been wondering what exactly PythonOperator.set_downstream does, how to use it, or where to find examples of it, the curated code samples below may help. You can also explore further usage examples of the class it belongs to, airflow.operators.python_operator.PythonOperator.
The following shows 5 code examples of PythonOperator.set_downstream, sorted by popularity by default. You can upvote any example you like or find useful; your votes help the system recommend better Python code examples.
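Before the collected examples, here is a minimal self-contained sketch of the method for orientation (not taken from the examples below; the DAG and task names are invented, and the import path assumes Airflow 1.x):
from datetime import datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator


def print_hello():
    print("hello")


dag = DAG(dag_id="set_downstream_demo",
          start_date=datetime(2016, 1, 1),
          schedule_interval=None)

first = PythonOperator(task_id="first", python_callable=print_hello, dag=dag)
second = PythonOperator(task_id="second", python_callable=print_hello, dag=dag)

# Declare that `second` runs after `first`
first.set_downstream(second)
Calling first.set_downstream(second) is equivalent to the bitshift form first >> second; both register second as a direct downstream dependency of first.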
Example 1: PythonOperator
# Required import: from airflow.operators.python_operator import PythonOperator [as alias]
# Or: from airflow.operators.python_operator.PythonOperator import set_downstream [as alias]
task_id="start_task", python_callable=print_stuff, dag=dag,
executor_config={
"KubernetesExecutor": {
"annotations": {"test": "annotation"}
}
}
)
# You can mount a volume or a secret into the worker pod
second_task = PythonOperator(
    task_id="four_task", python_callable=test_volume_mount, dag=dag,
    executor_config={
        "KubernetesExecutor": {
            "volumes": [
                {
                    "name": "test-volume",
                    "hostPath": {"path": "/tmp/"},
                },
            ],
            "volume_mounts": [
                {
                    "mountPath": "/foo/",
                    "name": "test-volume",
                },
            ]
        }
    }
)
start_task.set_downstream(second_task)
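Note that executor_config only takes effect when the DAG actually runs under the executor named by its key; other executors simply ignore the "KubernetesExecutor" entry, so the DAG stays portable across executors.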
Example 2: DAG
# Required import: from airflow.operators.python_operator import PythonOperator [as alias]
# Or: from airflow.operators.python_operator.PythonOperator import set_downstream [as alias]
# dag7 is the parent DAG of the subdag below; its dag_id is inferred from
# the subdag's 'test_subdag_deadlock.subdag' naming convention
dag7 = DAG(dag_id='test_subdag_deadlock', default_args=default_args)
subdag7 = DAG(dag_id='test_subdag_deadlock.subdag', default_args=default_args)
subdag7_task1 = PythonOperator(
    task_id='test_subdag_fail',
    dag=subdag7,
    python_callable=fail)
subdag7_task2 = DummyOperator(
    task_id='test_subdag_dummy_1',
    dag=subdag7,)
subdag7_task3 = DummyOperator(
    task_id='test_subdag_dummy_2',
    dag=subdag7)
dag7_subdag1 = SubDagOperator(
    task_id='subdag',
    dag=dag7,
    subdag=subdag7)
subdag7_task1.set_downstream(subdag7_task2)
subdag7_task2.set_downstream(subdag7_task3)
# DAG tests that a Dag run that doesn't complete but has a root failure is marked running
dag8 = DAG(dag_id='test_dagrun_states_root_fail_unfinished', default_args=default_args)
dag8_task1 = DummyOperator(
    task_id='test_dagrun_unfinished',  # The test will unset the task instance state after
                                       # running this test
    dag=dag8,
)
dag8_task2 = PythonOperator(
    task_id='test_dagrun_fail',
    dag=dag8,
    python_callable=fail,
)
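The two set_downstream calls above order the tasks inside subdag7 only; the SubDagOperator registered on the parent dag7 then runs the whole sub-DAG as a single task of the parent.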
Example 3: datetime
# Required import: from airflow.operators.python_operator import PythonOperator [as alias]
# Or: from airflow.operators.python_operator.PythonOperator import set_downstream [as alias]
from datetime import datetime, timedelta  # timedelta is needed by success() below

DEFAULT_DATE = datetime(2016, 1, 1)
default_args = dict(
    start_date=DEFAULT_DATE,
    owner='airflow')


def fail():
    raise ValueError('Expected failure.')


def success(ti=None, *args, **kwargs):
    if ti.execution_date != DEFAULT_DATE + timedelta(days=1):
        fail()
    return
# DAG tests that tasks ignore all dependencies
dag1 = DAG(dag_id='test_run_ignores_all_dependencies',
           default_args=dict(depends_on_past=True, **default_args))
dag1_task1 = PythonOperator(
    task_id='test_run_dependency_task',
    python_callable=fail,
    dag=dag1,)
dag1_task2 = PythonOperator(
    task_id='test_run_dependent_task',
    python_callable=success,
    provide_context=True,
    dag=dag1,)
dag1_task1.set_downstream(dag1_task2)
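In Airflow 1.x, provide_context=True is what passes the task-instance context (including the ti argument that success() inspects) as keyword arguments to the callable; without it, ti would remain None and success() would raise. In Airflow 2.x the context is passed automatically and this flag was removed.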
Example 4: PythonOperator
# Required import: from airflow.operators.python_operator import PythonOperator [as alias]
# Or: from airflow.operators.python_operator.PythonOperator import set_downstream [as alias]
# You don't have to use any special KubernetesExecutor configuration if you don't want to
start_task = PythonOperator(
    task_id="start_task", python_callable=print_stuff, dag=dag
)

# But you can if you want to
one_task = PythonOperator(
    task_id="one_task", python_callable=print_stuff, dag=dag,
    executor_config={"KubernetesExecutor": {"image": "airflow/ci:latest"}}
)

# Use the zip binary, which is only found in this special docker image
two_task = PythonOperator(
    task_id="two_task", python_callable=use_zip_binary, dag=dag,
    executor_config={"KubernetesExecutor": {"image": "airflow/ci_zip:latest"}}
)
# Limit resources on this operator/task with node affinity & tolerations
# (the `tolerations` and `affinity` dicts are defined elsewhere in the source DAG file)
three_task = PythonOperator(
    task_id="three_task", python_callable=print_stuff, dag=dag,
    executor_config={
        "KubernetesExecutor": {"request_memory": "128Mi",
                               "limit_memory": "128Mi",
                               "tolerations": tolerations,
                               "affinity": affinity}}
)
start_task.set_downstream([one_task, two_task, three_task])
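As shown here, set_downstream also accepts a list: passing [one_task, two_task, three_task] fans out from start_task to all three tasks at once, equivalent to start_task >> [one_task, two_task, three_task].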
Example 5: enumerate
# Required import: from airflow.operators.python_operator import PythonOperator [as alias]
# Or: from airflow.operators.python_operator.PythonOperator import set_downstream [as alias]
# Note: this snippet begins mid-way through the operator call that builds get_task;
# the opening arguments are omitted in the source.
        'service_name': service_name,
        'machine_service_name': machine_service_name
    },
    on_failure_callback=notify,
    on_retry_callback=notify,
    on_success_callback=notify,
    dag=dag)
service_tasks.append(get_task)
#: create_prod_files must run before get_task
get_task.set_upstream(create_prod_files)
if i == 'pothole':
    #: get_task must run before sonar potholes
    get_task.set_downstream(create_potholes_sonar)
filename = conf['prod_data_dir'] + "/get_it_done_*.csv"
files = [os.path.basename(x) for x in glob.glob(filename)]

for index, file_ in enumerate(files):
    file_name = file_.split('.')[0]
    name_parts = file_name.split('_')
    task_name = '_'.join(name_parts[3:-2])
    md_name = '-'.join(name_parts[3:-2])

    #: Upload prod gid file to S3
    upload_task = S3FileTransferOperator(
        task_id='upload_' + task_name,
        source_base_path=conf['prod_data_dir'],
        source_key='get_it_done_{}_requests_datasd.csv'.format(