本文整理汇总了Python中airflow.models.DAG.add_task方法的典型用法代码示例。如果您正苦于以下问题:Python DAG.add_task方法的具体用法?Python DAG.add_task怎么用?Python DAG.add_task使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.models.DAG
的用法示例。
在下文中一共展示了DAG.add_task方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_timezone_awareness
# Required module: from airflow.models import DAG
# Method under test: airflow.models.DAG.add_task
def test_timezone_awareness(self):
    """TaskInstance should coerce naive execution dates to UTC."""
    naive_date = DEFAULT_DATE.replace(tzinfo=None)

    # A TaskInstance built from a DAG-less task (kept for backward
    # compatibility) still normalizes the naive date.
    lone_task = DummyOperator(task_id='op_no_dag')
    lone_ti = TI(task=lone_task, execution_date=naive_date)
    self.assertEqual(lone_ti.execution_date, DEFAULT_DATE)

    # Same normalization when the task belongs to a DAG and the
    # execution_date passed in is not localized.
    dag = DAG('dag', start_date=DEFAULT_DATE)
    op1 = DummyOperator(task_id='op_1')
    dag.add_task(op1)
    dag_ti = TI(task=op1, execution_date=naive_date)
    self.assertEqual(dag_ti.execution_date, DEFAULT_DATE)

    # An already-localized execution_date gets converted to UTC.
    amsterdam = pendulum.timezone("Europe/Amsterdam")
    local_date = timezone.datetime(2016, 1, 1, 1, 0, 0, tzinfo=amsterdam)
    local_ti = TI(task=op1, execution_date=local_date)
    self.assertEqual(local_ti.execution_date,
                     timezone.convert_to_utc(local_date))
示例2: test_set_task_dates
# Required module: from airflow.models import DAG
# Method under test: airflow.models.DAG.add_task
def test_set_task_dates(self):
    """
    Tasks added to a DAG inherit the DAG's start/end dates whenever
    their own dates are missing or less restrictive.
    """
    one_day = datetime.timedelta(days=1)
    dag = DAG('dag', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))

    # op1 carries no dates of its own, so it takes both from the DAG.
    op1 = DummyOperator(task_id='op_1', owner='test')
    self.assertTrue(op1.start_date is None and op1.end_date is None)
    dag.add_task(op1)
    self.assertTrue(
        op1.start_date == dag.start_date and op1.end_date == dag.end_date)

    # op2's window is wider than the DAG's, so the DAG's more
    # restrictive dates win.
    op2 = DummyOperator(
        task_id='op_2',
        owner='test',
        start_date=DEFAULT_DATE - one_day,
        end_date=DEFAULT_DATE + datetime.timedelta(days=11))
    dag.add_task(op2)
    self.assertTrue(
        op2.start_date == dag.start_date and op2.end_date == dag.end_date)

    # op3's window is narrower than the DAG's, so its own dates stick.
    op3 = DummyOperator(
        task_id='op_3',
        owner='test',
        start_date=DEFAULT_DATE + one_day,
        end_date=DEFAULT_DATE + datetime.timedelta(days=9))
    dag.add_task(op3)
    self.assertTrue(op3.start_date == DEFAULT_DATE + one_day)
    self.assertTrue(
        op3.end_date == DEFAULT_DATE + datetime.timedelta(days=9))
示例3: datetime
# Required module: from airflow.models import DAG
# Method under test: airflow.models.DAG.add_task
from airflow.models import DAG
from datetime import datetime

default_args = {
    'owner': 'max',
    'start_date': datetime(2014, 11, 1),
}

dag = DAG(dag_id='example_1')

# Final task: only runs after 'run_after_loop' (and hence after all
# of the looped 'runme_*' tasks) has completed.
run_this_last = DummyOperator(
    task_id='run_this_last',
    default_args=default_args)
dag.add_task(run_this_last)

run_this = BashOperator(
    task_id='run_after_loop', bash_command='echo 1',
    default_args=default_args)
dag.add_task(run_this)
run_this.set_downstream(run_this_last)

# Fan-in: nine parallel 'runme_*' tasks all feed into 'run_after_loop'.
for i in range(9):
    task = BashOperator(
        task_id='runme_' + str(i),
        bash_command='sleep 5',
        default_args=default_args)
    task.set_downstream(run_this)
    dag.add_task(task)
示例4: datetime
# Required module: from airflow.models import DAG
# Method under test: airflow.models.DAG.add_task
from airflow.operators import BashOperator, MySqlOperator
from airflow.models import DAG
from datetime import datetime

default_args = {
    'owner': 'max',
    'start_date': datetime(2014, 9, 1),
    'mysql_dbid': 'local_mysql',
}

dag = DAG(dag_id='example_3')

# Root task that every looped task is chained after.
run_this = BashOperator(
    task_id='also_run_this', bash_command='ls -l', **default_args)
dag.add_task(run_this)

# Five downstream tasks, each sleeping a templated random duration.
for idx in range(5):
    suffix = str(idx)
    task = BashOperator(
        task_id='runme_' + suffix,
        bash_command='sleep {{ 10 + macros.random() * 10 }}',
        **default_args)
    task.set_upstream(run_this)
    dag.add_task(task)