本文整理汇总了Python中airflow.operators.dummy_operator.DummyOperator方法的典型用法代码示例。如果您正苦于以下问题:Python dummy_operator.DummyOperator方法的具体用法?Python dummy_operator.DummyOperator怎么用?Python dummy_operator.DummyOperator使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.operators.dummy_operator
的用法示例。
在下文中一共展示了dummy_operator.DummyOperator方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_lineage_render
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_lineage_render(self):
    """Inlets/outlets attached after operator initialization are still template-rendered."""
    dag = DAG(
        dag_id='test_lineage_render',
        start_date=DEFAULT_DATE
    )
    with dag:
        task = DummyOperator(task_id='task1')

    url_template = "/tmp/does_not_exist_1-{}"
    templated_file = File(url_template.format("{{ execution_date }}"))
    task.inlets.append(templated_file)
    task.outlets.append(templated_file)

    # execution_date is supplied directly in the context so that no task
    # instance has to be created/persisted for rendering to happen.
    context = {
        "ti": TI(task=task, execution_date=DEFAULT_DATE),
        "execution_date": DEFAULT_DATE,
    }
    task.pre_execute(context)

    expected_url = url_template.format(DEFAULT_DATE)
    self.assertEqual(task.inlets[0].url, expected_url)
    self.assertEqual(task.outlets[0].url, expected_url)
示例2: test_cycle_no_cycle
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_cycle_no_cycle(self):
    """An acyclic graph must not be reported as containing a cycle."""
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})
    # Topology under test:
    #   A -> B -> C
    #        B -> D
    #   E -> F
    with dag:
        tasks = {task_id: DummyOperator(task_id=task_id) for task_id in 'ABCDEF'}
        tasks['A'] >> tasks['B'] >> tasks['C']
        tasks['B'] >> tasks['D']
        tasks['E'] >> tasks['F']

    self.assertFalse(test_cycle(dag))
示例3: test_cycle_large_loop
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_cycle_large_loop(self):
    """A single loop through every node raises AirflowDagCycleException."""
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})
    # Closed ring: A -> B -> C -> D -> E -> A
    with dag:
        ring = [DummyOperator(task_id=task_id) for task_id in 'ABCDE']
        for upstream, downstream in zip(ring, ring[1:]):
            upstream >> downstream
        ring[-1] >> ring[0]

    with self.assertRaises(AirflowDagCycleException):
        # test_cycle raises as soon as it walks the ring.
        test_cycle(dag)
示例4: test_cycle_arbitrary_loop
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_cycle_arbitrary_loop(self):
    """A cycle embedded in an otherwise branching graph still raises."""
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})
    # E -> A -> B -> F -> A
    #      A -> C -> F
    with dag:
        ops = {task_id: DummyOperator(task_id=task_id)
               for task_id in ('A', 'B', 'C', 'E', 'F')}
        # Edges in the same order the original wiring used.
        for parent, child in (('A', 'B'), ('A', 'C'), ('E', 'A'),
                              ('C', 'F'), ('B', 'F'), ('F', 'A')):
            ops[parent] >> ops[child]

    with self.assertRaises(AirflowDagCycleException):
        test_cycle(dag)
示例5: test_render_log_filename
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_render_log_filename(self):
    """helpers.render_log_filename fills the Jinja template from the TI."""
    try_number = 1
    dag_id = 'test_render_log_filename_dag'
    task_id = 'test_render_log_filename_task'
    execution_date = datetime(2016, 1, 1)

    dag = DAG(dag_id, start_date=execution_date)
    ti = TaskInstance(task=DummyOperator(task_id=task_id, dag=dag),
                      execution_date=execution_date)
    filename_template = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log"

    # Pull 'ts' out of the template context rather than recomputing it, so
    # the expected string matches whatever formatting the context applies.
    ts = ti.get_template_context()['ts']
    expected_filename = "{}/{}/{}/{}.log".format(dag_id, task_id, ts, try_number)

    rendered_filename = helpers.render_log_filename(ti, try_number, filename_template)
    self.assertEqual(rendered_filename, expected_filename)
示例6: test_infinite_slots
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_infinite_slots(self):
    """A pool with slots=-1 reports infinite open slots; usage counters stay real."""
    pool = Pool(pool='test_pool', slots=-1)
    dag = DAG(
        dag_id='test_infinite_slots',
        start_date=DEFAULT_DATE, )
    running_ti = TI(task=DummyOperator(task_id='dummy1', dag=dag, pool='test_pool'),
                    execution_date=DEFAULT_DATE)
    queued_ti = TI(task=DummyOperator(task_id='dummy2', dag=dag, pool='test_pool'),
                   execution_date=DEFAULT_DATE)
    running_ti.state = State.RUNNING
    queued_ti.state = State.QUEUED

    session = settings.Session
    session.add_all([pool, running_ti, queued_ti])
    session.commit()
    session.close()

    # pylint: disable=no-value-for-parameter
    self.assertEqual(float('inf'), pool.open_slots())
    self.assertEqual(1, pool.running_slots())
    self.assertEqual(1, pool.queued_slots())
    self.assertEqual(2, pool.occupied_slots())
示例7: test_default_pool_open_slots
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_default_pool_open_slots(self):
    """Default-pool open slots shrink by the pool_slots of active task instances."""
    set_default_pool_slots(5)
    self.assertEqual(5, Pool.get_default_pool().open_slots())

    dag = DAG(
        dag_id='test_default_pool_open_slots',
        start_date=DEFAULT_DATE, )
    # One running task occupying 1 slot plus one queued task occupying 2.
    running_ti = TI(task=DummyOperator(task_id='dummy1', dag=dag),
                    execution_date=DEFAULT_DATE)
    queued_ti = TI(task=DummyOperator(task_id='dummy2', dag=dag, pool_slots=2),
                   execution_date=DEFAULT_DATE)
    running_ti.state = State.RUNNING
    queued_ti.state = State.QUEUED

    session = settings.Session
    session.add_all([running_ti, queued_ti])
    session.commit()
    session.close()

    # 5 total - 1 running - 2 queued = 2 open
    self.assertEqual(2, Pool.get_default_pool().open_slots())
示例8: test_clear_task_instances_without_dag
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_clear_task_instances_without_dag(self):
    """Clearing TIs with no DAG sets max_tries to max(original max_tries, try_number)."""
    dag = DAG('test_clear_task_instances_without_dag', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    ti0 = TI(task=DummyOperator(task_id='task_0', owner='test', dag=dag),
             execution_date=DEFAULT_DATE)
    ti1 = TI(task=DummyOperator(task_id='task_1', owner='test', dag=dag, retries=2),
             execution_date=DEFAULT_DATE)
    for ti in (ti0, ti1):
        ti.run()

    with create_session() as session:
        instances = session.query(TI).filter(TI.dag_id == dag.dag_id).all()
        # No dag passed: max_tries becomes the max of the original max_tries
        # and the current try_number.
        clear_task_instances(instances, session)

    ti0.refresh_from_db()
    ti1.refresh_from_db()
    # Each task ran once, so the next attempt will be try 2.
    self.assertEqual(ti0.try_number, 2)
    self.assertEqual(ti0.max_tries, 1)
    self.assertEqual(ti1.try_number, 2)
    self.assertEqual(ti1.max_tries, 2)
示例9: test_operator_clear
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_operator_clear(self):
    """clear(upstream=True) resets the upstream task so the pair can rerun."""
    dag = DAG('test_operator_clear', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    upstream_op = DummyOperator(task_id='bash_op', owner='test', dag=dag)
    downstream_op = DummyOperator(task_id='dummy_op', owner='test', dag=dag, retries=1)
    downstream_op.set_upstream(upstream_op)

    ti1 = TI(task=upstream_op, execution_date=DEFAULT_DATE)
    ti2 = TI(task=downstream_op, execution_date=DEFAULT_DATE)

    ti2.run()
    # Upstream never ran, so the dependency is unmet and ti2 did nothing.
    self.assertEqual(ti2.try_number, 1)
    self.assertEqual(ti2.max_tries, 1)

    downstream_op.clear(upstream=True)
    ti1.run()
    ti2.run()

    self.assertEqual(ti1.try_number, 2)
    # ti1 had no row in the DB when clear ran, so clear could not bump
    # its max_tries.
    self.assertEqual(ti1.max_tries, 0)
    self.assertEqual(ti2.try_number, 2)
    # try_number (0) + retries (1)
    self.assertEqual(ti2.max_tries, 1)
示例10: test_dag_task_priority_weight_total_using_absolute
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_dag_task_priority_weight_total_using_absolute(self):
    """With WeightRule.ABSOLUTE every task keeps exactly its own priority_weight."""
    weight = 10
    width = 5
    depth = 5
    with DAG('dag', start_date=DEFAULT_DATE,
             default_args={'owner': 'owner1'}) as dag:
        # depth stages of width tasks, fully connected stage-to-stage.
        pipeline = [
            [DummyOperator(task_id='stage{}.{}'.format(i, j),
                           priority_weight=weight,
                           weight_rule=WeightRule.ABSOLUTE)
             for j in range(width)]
            for i in range(depth)
        ]
        for upstream_stage, downstream_stage in zip(pipeline, pipeline[1:]):
            for task in downstream_stage:
                for parent in upstream_stage:
                    task.set_upstream(parent)

    for task in dag.task_dict.values():
        # ABSOLUTE disables downstream aggregation, so the total equals the
        # configured weight regardless of position in the pipeline.
        self.assertEqual(task.priority_weight_total, weight)
示例11: test_resolve_template_files_value
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_resolve_template_files_value(self):
    """A scalar template field naming a .template file is replaced by its contents."""
    with NamedTemporaryFile(suffix='.template') as f:
        f.write(b'{{ ds }}')
        f.flush()
        search_path = os.path.dirname(f.name)
        template_name = os.path.basename(f.name)

        with DAG('test-dag', start_date=DEFAULT_DATE, template_searchpath=search_path):
            task = DummyOperator(task_id='op1')
            task.test_field = template_name
            task.template_fields = ('test_field',)
            task.template_ext = ('.template',)
            # Must run while the temp file still exists on disk.
            task.resolve_template_files()

    # The field now holds the file's contents rather than its name.
    self.assertEqual(task.test_field, '{{ ds }}')
示例12: test_resolve_template_files_list
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_resolve_template_files_list(self):
    """In a list-valued template field only entries with a matching extension resolve."""
    with NamedTemporaryFile(suffix='.template') as f:
        f.write(b'{{ ds }}')
        f.flush()
        search_path = os.path.dirname(f.name)
        template_name = os.path.basename(f.name)

        with DAG('test-dag', start_date=DEFAULT_DATE, template_searchpath=search_path):
            task = DummyOperator(task_id='op1')
            task.test_field = [template_name, 'some_string']
            task.template_fields = ('test_field',)
            task.template_ext = ('.template',)
            # Must run while the temp file still exists on disk.
            task.resolve_template_files()

    # The .template entry is replaced; plain strings pass through untouched.
    self.assertEqual(task.test_field, ['{{ ds }}', 'some_string'])
示例13: test_sync_to_db_default_view
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_sync_to_db_default_view(self, mock_now):
    """sync_to_db persists the DAG's configured default_view to the ORM row."""
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_view="graph",
    )
    with dag:
        DummyOperator(task_id='task', owner='owner1')
        SubDagOperator(
            task_id='subtask',
            owner='owner2',
            subdag=DAG(
                'dag.subtask',
                start_date=DEFAULT_DATE,
            )
        )
    # Freeze "now" for the sync via the patched clock.
    mock_now.return_value = datetime.datetime.utcnow().replace(
        tzinfo=pendulum.timezone('UTC'))

    session = settings.Session()
    dag.sync_to_db(session=session)
    orm_dag = session.query(DagModel).filter(DagModel.dag_id == 'dag').one()
    self.assertIsNotNone(orm_dag.default_view)
    self.assertEqual(orm_dag.default_view, "graph")
    session.close()
示例14: test_tree_view
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_tree_view(self):
    """Verify dag.tree_view() prints the chained tasks one per line, in order."""
    with DAG("test_dag", start_date=DEFAULT_DATE) as dag:
        first = DummyOperator(task_id="t1")
        second = DummyOperator(task_id="t2")
        third = DummyOperator(task_id="t3")
        first >> second >> third

    with redirect_stdout(io.StringIO()) as captured:
        dag.tree_view()
    output_lines = captured.getvalue().split("\n")

    for line_no, task_id in enumerate(('t1', 't2', 't3')):
        self.assertIn(task_id, output_lines[line_no])
示例15: test_skip_none_dagrun
# 需要导入模块: from airflow.operators import dummy_operator [as 别名]
# 或者: from airflow.operators.dummy_operator import DummyOperator [as 别名]
def test_skip_none_dagrun(self, mock_now):
    """skip() with dag_run=None still marks the tasks SKIPPED at the mocked time."""
    session = settings.Session()
    now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC'))
    mock_now.return_value = now

    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
    )
    with dag:
        tasks = [DummyOperator(task_id='task')]

    SkipMixin().skip(
        dag_run=None,
        execution_date=now,
        tasks=tasks,
        session=session)

    # .one() raises unless exactly one matching skipped TI row exists,
    # so the query doubles as the assertion.
    session.query(TI).filter(
        TI.dag_id == 'dag',
        TI.task_id == 'task',
        TI.state == State.SKIPPED,
        TI.start_date == now,
        TI.end_date == now,
    ).one()