This page collects typical usage examples of the Python method airflow.operators.dummy_operator.DummyOperator.set_downstream. If you are unsure what DummyOperator.set_downstream does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also browse further usage examples of the containing class, airflow.operators.dummy_operator.DummyOperator.
The sections below show 15 code examples of DummyOperator.set_downstream, sorted by popularity by default.
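Before the examples, here is a minimal, self-contained sketch of the basic pattern: calling set_downstream(child) on a task makes child run after that task. The DAG id, dates, and task ids below are illustrative only and do not come from the examples that follow.

import datetime

from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator

# A tiny two-task pipeline: 'extract' must finish before 'load' starts.
dag = DAG('minimal_set_downstream_example',
          start_date=datetime.datetime(2017, 1, 1),
          schedule_interval='@daily')

extract = DummyOperator(task_id='extract', dag=dag)
load = DummyOperator(task_id='load', dag=dag)

# Make 'load' a downstream (child) task of 'extract'; equivalent to: extract >> load
extract.set_downstream(load)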
Example 1: test_dagrun_success_when_all_skipped
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_dagrun_success_when_all_skipped(self):
    """
    Tests that a DAG run succeeds when all tasks are skipped
    """
    dag = DAG(
        dag_id='test_dagrun_success_when_all_skipped',
        start_date=datetime.datetime(2017, 1, 1)
    )
    dag_task1 = ShortCircuitOperator(
        task_id='test_short_circuit_false',
        dag=dag,
        python_callable=lambda: False)
    dag_task2 = DummyOperator(
        task_id='test_state_skipped1',
        dag=dag)
    dag_task3 = DummyOperator(
        task_id='test_state_skipped2',
        dag=dag)
    dag_task1.set_downstream(dag_task2)
    dag_task2.set_downstream(dag_task3)

    initial_task_states = {
        'test_short_circuit_false': State.SUCCESS,
        'test_state_skipped1': State.SKIPPED,
        'test_state_skipped2': State.SKIPPED,
    }

    dag_run = self.create_dag_run(dag=dag,
                                  state=State.RUNNING,
                                  task_states=initial_task_states)
    updated_dag_state = dag_run.update_state()
    self.assertEqual(State.SUCCESS, updated_dag_state)
Example 2: test_infer_dag
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_infer_dag(self):
    dag = DAG('dag', start_date=DEFAULT_DATE)
    dag2 = DAG('dag2', start_date=DEFAULT_DATE)

    op1 = DummyOperator(task_id='test_op_1', owner='test')
    op2 = DummyOperator(task_id='test_op_2', owner='test')
    op3 = DummyOperator(task_id='test_op_3', owner='test', dag=dag)
    op4 = DummyOperator(task_id='test_op_4', owner='test', dag=dag2)

    # double check dags
    self.assertEqual(
        [i.has_dag() for i in [op1, op2, op3, op4]],
        [False, False, True, True])

    # can't combine operators with no dags
    self.assertRaises(AirflowException, op1.set_downstream, op2)

    # op2 should infer dag from op1
    op1.dag = dag
    op1.set_downstream(op2)
    self.assertIs(op2.dag, dag)

    # can't assign across multiple DAGs
    self.assertRaises(AirflowException, op1.set_downstream, op4)
    self.assertRaises(AirflowException, op1.set_downstream, [op3, op4])
Example 3: test_check_task_dependencies
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_check_task_dependencies(self, trigger_rule, successes, skipped,
                                 failed, upstream_failed, done,
                                 flag_upstream_failed,
                                 expect_state, expect_completed):
    start_date = datetime.datetime(2016, 2, 1, 0, 0, 0)
    dag = models.DAG('test-dag', start_date=start_date)
    downstream = DummyOperator(task_id='downstream',
                               dag=dag, owner='airflow',
                               trigger_rule=trigger_rule)
    for i in range(5):
        task = DummyOperator(task_id='runme_{}'.format(i),
                             dag=dag, owner='airflow')
        task.set_downstream(downstream)
    run_date = task.start_date + datetime.timedelta(days=5)

    ti = TI(downstream, run_date)
    dep_results = TriggerRuleDep()._evaluate_trigger_rule(
        ti=ti,
        successes=successes,
        skipped=skipped,
        failed=failed,
        upstream_failed=upstream_failed,
        done=done,
        flag_upstream_failed=flag_upstream_failed)
    completed = all([dep.passed for dep in dep_results])

    self.assertEqual(completed, expect_completed)
    self.assertEqual(ti.state, expect_state)
Example 4: test_check_task_dependencies
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_check_task_dependencies(
        self,
        trigger_rule,
        successes,
        skipped,
        failed,
        upstream_failed,
        done,
        flag_upstream_failed,
        expect_state,
        expect_completed,
):
    start_date = datetime.datetime(2016, 2, 1, 0, 0, 0)
    dag = models.DAG("test-dag", start_date=start_date)
    downstream = DummyOperator(task_id="downstream", dag=dag, owner="airflow", trigger_rule=trigger_rule)
    for i in range(5):
        task = DummyOperator(task_id="runme_{}".format(i), dag=dag, owner="airflow")
        task.set_downstream(downstream)
    run_date = task.start_date + datetime.timedelta(days=5)

    ti = TI(downstream, run_date)
    completed = ti.evaluate_trigger_rule(
        successes=successes,
        skipped=skipped,
        failed=failed,
        upstream_failed=upstream_failed,
        done=done,
        flag_upstream_failed=flag_upstream_failed,
    )

    self.assertEqual(completed, expect_completed)
    self.assertEqual(ti.state, expect_state)
Example 5: test_with_dag_run
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_with_dag_run(self):
    value = False
    dag = DAG('shortcircuit_operator_test_with_dag_run',
              default_args={
                  'owner': 'airflow',
                  'start_date': DEFAULT_DATE
              },
              schedule_interval=INTERVAL)
    short_op = ShortCircuitOperator(task_id='make_choice',
                                    dag=dag,
                                    python_callable=lambda: value)
    branch_1 = DummyOperator(task_id='branch_1', dag=dag)
    branch_1.set_upstream(short_op)
    branch_2 = DummyOperator(task_id='branch_2', dag=dag)
    branch_2.set_upstream(branch_1)
    upstream = DummyOperator(task_id='upstream', dag=dag)
    upstream.set_downstream(short_op)
    dag.clear()

    logging.error("Tasks {}".format(dag.tasks))
    dr = dag.create_dagrun(
        run_id="manual__",
        start_date=datetime.datetime.now(),
        execution_date=DEFAULT_DATE,
        state=State.RUNNING
    )

    upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
    short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dr.get_task_instances()
    self.assertEqual(len(tis), 4)
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'upstream':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
            self.assertEquals(ti.state, State.SKIPPED)
        else:
            raise

    value = True
    dag.clear()
    dr.verify_integrity()
    upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
    short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dr.get_task_instances()
    self.assertEqual(len(tis), 4)
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'upstream':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
            self.assertEquals(ti.state, State.NONE)
        else:
            raise
Example 6: subdag_C
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def subdag_C():
    subdag_C = DAG(
        'nested_cycle.opSubdag_1.opSubdag_C', default_args=DEFAULT_ARGS)
    opSubdag_C_task = DummyOperator(
        task_id='subdag_C.task', dag=subdag_C)
    # introduce a loop in opSubdag_C
    opSubdag_C_task.set_downstream(opSubdag_C_task)
    return subdag_C
Example 7: test_without_dag_run
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_without_dag_run(self):
    """This checks the defensive against non existent tasks in a dag run"""
    value = False
    dag = DAG('shortcircuit_operator_test_without_dag_run',
              default_args={
                  'owner': 'airflow',
                  'start_date': DEFAULT_DATE
              },
              schedule_interval=INTERVAL)
    short_op = ShortCircuitOperator(task_id='make_choice',
                                    dag=dag,
                                    python_callable=lambda: value)
    branch_1 = DummyOperator(task_id='branch_1', dag=dag)
    branch_1.set_upstream(short_op)
    branch_2 = DummyOperator(task_id='branch_2', dag=dag)
    branch_2.set_upstream(branch_1)
    upstream = DummyOperator(task_id='upstream', dag=dag)
    upstream.set_downstream(short_op)
    dag.clear()

    short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    session = Session()
    tis = session.query(TI).filter(
        TI.dag_id == dag.dag_id,
        TI.execution_date == DEFAULT_DATE
    )

    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'upstream':
            # should not exist
            raise
        elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
            self.assertEquals(ti.state, State.SKIPPED)
        else:
            raise

    value = True
    dag.clear()

    short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEquals(ti.state, State.SUCCESS)
        elif ti.task_id == 'upstream':
            # should not exist
            raise
        elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
            self.assertEquals(ti.state, State.NONE)
        else:
            raise

    session.close()
Example 8: basic_cycle
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def basic_cycle():
    from airflow.models import DAG
    from airflow.operators.dummy_operator import DummyOperator
    import datetime

    DAG_NAME = 'cycle_dag'
    DEFAULT_ARGS = {
        'owner': 'owner1',
        'start_date': datetime.datetime(2016, 1, 1)
    }
    dag = DAG(
        DAG_NAME,
        default_args=DEFAULT_ARGS)

    # A -> A
    with dag:
        opA = DummyOperator(task_id='A')
        opA.set_downstream(opA)

    return dag
Example 9: standard_subdag
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def standard_subdag():
    from airflow.models import DAG
    from airflow.operators.dummy_operator import DummyOperator
    from airflow.operators.subdag_operator import SubDagOperator
    import datetime

    DAG_NAME = 'master'
    DEFAULT_ARGS = {
        'owner': 'owner1',
        'start_date': datetime.datetime(2016, 1, 1)
    }
    dag = DAG(
        DAG_NAME,
        default_args=DEFAULT_ARGS)

    # master:
    #     A -> opSubDag_0
    #          master.opsubdag_0:
    #              -> subdag_0.task
    #     A -> opSubDag_1
    #          master.opsubdag_1:
    #              -> subdag_1.task
    with dag:
        def subdag_0():
            subdag_0 = DAG('master.opSubdag_0', default_args=DEFAULT_ARGS)
            DummyOperator(task_id='subdag_0.task', dag=subdag_0)
            return subdag_0

        def subdag_1():
            subdag_1 = DAG('master.opSubdag_1', default_args=DEFAULT_ARGS)
            DummyOperator(task_id='subdag_1.task', dag=subdag_1)
            return subdag_1

        opSubdag_0 = SubDagOperator(
            task_id='opSubdag_0', dag=dag, subdag=subdag_0())
        opSubdag_1 = SubDagOperator(
            task_id='opSubdag_1', dag=dag, subdag=subdag_1())

        opA = DummyOperator(task_id='A')
        opA.set_downstream(opSubdag_0)
        opA.set_downstream(opSubdag_1)

    return dag
Example 10: SubDagOperator
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
section_1 = SubDagOperator(
    task_id='section-1',
    subdag=subdag(DAG_NAME, 'section-1', args),
    default_args=args,
    dag=dag,
)

some_other_task = DummyOperator(
    task_id='some-other-task',
    default_args=args,
    dag=dag,
)

section_2 = SubDagOperator(
    task_id='section-2',
    subdag=subdag(DAG_NAME, 'section-2', args),
    default_args=args,
    dag=dag,
)

end = DummyOperator(
    task_id='end',
    default_args=args,
    dag=dag,
)

start.set_downstream(section_1)
section_1.set_downstream(some_other_task)
some_other_task.set_downstream(section_2)
section_2.set_downstream(end)
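Example 10 builds a linear pipeline with repeated set_downstream calls. As a side note, Airflow (1.8 and later) also supports the bitshift composition operators, which are implemented on top of set_downstream/set_upstream, so the chain above could equivalently be written on one line (same task objects, purely alternative notation):

# equivalent to the four set_downstream calls above
start >> section_1 >> some_other_task >> section_2 >> end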
Example 11: ShortCircuitOperatorTest
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
class ShortCircuitOperatorTest(unittest.TestCase):
    def setUp(self):
        self.dag = DAG('shortcircuit_operator_test',
                       default_args={
                           'owner': 'airflow',
                           'start_date': DEFAULT_DATE},
                       schedule_interval=INTERVAL)
        self.short_op = ShortCircuitOperator(task_id='make_choice',
                                             dag=self.dag,
                                             python_callable=lambda: self.value)

        self.branch_1 = DummyOperator(task_id='branch_1', dag=self.dag)
        self.branch_1.set_upstream(self.short_op)
        self.branch_2 = DummyOperator(task_id='branch_2', dag=self.dag)
        self.branch_2.set_upstream(self.branch_1)
        self.upstream = DummyOperator(task_id='upstream', dag=self.dag)
        self.upstream.set_downstream(self.short_op)
        self.dag.clear()

        self.value = True

    def test_without_dag_run(self):
        """This checks the defensive against non existent tasks in a dag run"""
        self.value = False
        self.short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        session = Session()
        tis = session.query(TI).filter(
            TI.dag_id == self.dag.dag_id,
            TI.execution_date == DEFAULT_DATE
        )

        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'upstream':
                # should not exist
                raise
            elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
                self.assertEquals(ti.state, State.SKIPPED)
            else:
                raise

        self.value = True
        self.dag.clear()

        self.short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'upstream':
                # should not exist
                raise
            elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
                self.assertEquals(ti.state, State.NONE)
            else:
                raise

        session.close()

    def test_with_dag_run(self):
        self.value = False
        logging.error("Tasks {}".format(self.dag.tasks))
        dr = self.dag.create_dagrun(
            run_id="manual__",
            start_date=datetime.datetime.now(),
            execution_date=DEFAULT_DATE,
            state=State.RUNNING
        )
        self.upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
        self.short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        tis = dr.get_task_instances()
        self.assertEqual(len(tis), 4)
        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'upstream':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
                self.assertEquals(ti.state, State.SKIPPED)
            else:
                raise

        self.value = True
        self.dag.clear()
        dr.verify_integrity()
        self.upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
        self.short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        tis = dr.get_task_instances()
        self.assertEqual(len(tis), 4)
        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'upstream':
                self.assertEquals(ti.state, State.SUCCESS)
            elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
                self.assertEquals(ti.state, State.NONE)
# ......... (the remainder of this example's code is omitted) .........
Example 12: DAG
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
}
dag = DAG(
    dag_id='example_branch_operator',
    default_args=args,
    schedule_interval="@daily")

cmd = 'ls -l'
run_this_first = DummyOperator(task_id='run_this_first', dag=dag)

options = ['branch_a', 'branch_b', 'branch_c', 'branch_d']

branching = BranchPythonOperator(
    task_id='branching',
    python_callable=lambda: random.choice(options),
    dag=dag)
branching.set_upstream(run_this_first)

join = DummyOperator(
    task_id='join',
    trigger_rule='one_success',
    dag=dag
)

for option in options:
    t = DummyOperator(task_id=option, dag=dag)
    t.set_upstream(branching)
    dummy_follow = DummyOperator(task_id='follow_' + option, dag=dag)
    t.set_downstream(dummy_follow)
    dummy_follow.set_downstream(join)
Example 13: test_dag_topological_sort
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_dag_topological_sort(self):
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B
    # A -> C -> D
    # ordered: B, D, C, A or D, B, C, A or D, C, B, A
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='D')
        op1.set_upstream([op2, op3])
        op3.set_upstream(op4)

    topological_list = dag.topological_sort()
    logging.info(topological_list)

    tasks = [op2, op3, op4]
    self.assertTrue(topological_list[0] in tasks)
    tasks.remove(topological_list[0])
    self.assertTrue(topological_list[1] in tasks)
    tasks.remove(topological_list[1])
    self.assertTrue(topological_list[2] in tasks)
    tasks.remove(topological_list[2])
    self.assertTrue(topological_list[3] == op1)

    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # C -> (A u B) -> D
    # C -> E
    # ordered: E | D, A | B, C
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='D')
        op5 = DummyOperator(task_id='E')
        op1.set_downstream(op3)
        op2.set_downstream(op3)
        op1.set_upstream(op4)
        op2.set_upstream(op4)
        op5.set_downstream(op3)

    topological_list = dag.topological_sort()
    logging.info(topological_list)

    set1 = [op4, op5]
    self.assertTrue(topological_list[0] in set1)
    set1.remove(topological_list[0])

    set2 = [op1, op2]
    set2.extend(set1)
    self.assertTrue(topological_list[1] in set2)
    set2.remove(topological_list[1])
    self.assertTrue(topological_list[2] in set2)
    set2.remove(topological_list[2])
    self.assertTrue(topological_list[3] in set2)

    self.assertTrue(topological_list[4] == op3)

    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})
    self.assertEquals(tuple(), dag.topological_sort())
Example 14: test_cycle
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
def test_cycle(self):
    # test empty
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    self.assertFalse(dag.test_cycle())

    # test single task
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    with dag:
        opA = DummyOperator(task_id='A')

    self.assertFalse(dag.test_cycle())

    # test no cycle
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B -> C
    # B -> D
    # E -> F
    with dag:
        opA = DummyOperator(task_id='A')
        opB = DummyOperator(task_id='B')
        opC = DummyOperator(task_id='C')
        opD = DummyOperator(task_id='D')
        opE = DummyOperator(task_id='E')
        opF = DummyOperator(task_id='F')
        opA.set_downstream(opB)
        opB.set_downstream(opC)
        opB.set_downstream(opD)
        opE.set_downstream(opF)

    self.assertFalse(dag.test_cycle())

    # test self loop
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> A
    with dag:
        opA = DummyOperator(task_id='A')
        opA.set_downstream(opA)

    with self.assertRaises(AirflowDagCycleException):
        dag.test_cycle()

    # test downstream self loop
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B -> C -> D -> E -> E
    with dag:
        opA = DummyOperator(task_id='A')
        opB = DummyOperator(task_id='B')
        opC = DummyOperator(task_id='C')
        opD = DummyOperator(task_id='D')
        opE = DummyOperator(task_id='E')
        opA.set_downstream(opB)
        opB.set_downstream(opC)
        opC.set_downstream(opD)
        opD.set_downstream(opE)
        opE.set_downstream(opE)

    with self.assertRaises(AirflowDagCycleException):
        dag.test_cycle()

    # large loop
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B -> C -> D -> E -> A
    with dag:
        opA = DummyOperator(task_id='A')
        opB = DummyOperator(task_id='B')
        opC = DummyOperator(task_id='C')
        opD = DummyOperator(task_id='D')
        opE = DummyOperator(task_id='E')
        opA.set_downstream(opB)
        opB.set_downstream(opC)
        opC.set_downstream(opD)
        opD.set_downstream(opE)
        opE.set_downstream(opA)

    with self.assertRaises(AirflowDagCycleException):
        dag.test_cycle()
# ......... (the remainder of this example's code is omitted) .........
Example 15: DAG
# Required import: from airflow.operators.dummy_operator import DummyOperator [as alias]
# Or: from airflow.operators.dummy_operator.DummyOperator import set_downstream [as alias]
subdag7 = DAG(dag_id='test_subdag_deadlock.subdag', default_args=default_args)
subdag7_task1 = PythonOperator(
    task_id='test_subdag_fail',
    dag=subdag7,
    python_callable=fail)
subdag7_task2 = DummyOperator(
    task_id='test_subdag_dummy_1',
    dag=subdag7,)
subdag7_task3 = DummyOperator(
    task_id='test_subdag_dummy_2',
    dag=subdag7)
dag7_subdag1 = SubDagOperator(
    task_id='subdag',
    dag=dag7,
    subdag=subdag7)
subdag7_task1.set_downstream(subdag7_task2)
subdag7_task2.set_downstream(subdag7_task3)

# DAG tests that a Dag run that doesn't complete but has a root failure is marked running
dag8 = DAG(dag_id='test_dagrun_states_root_fail_unfinished', default_args=default_args)
dag8_task1 = DummyOperator(
    task_id='test_dagrun_unfinished',  # The test will unset the task instance state after
                                       # running this test
    dag=dag8,
)
dag8_task2 = PythonOperator(
    task_id='test_dagrun_fail',
    dag=dag8,
    python_callable=fail,
)