

Python airflow.DAG Attribute Code Examples

This article collects typical usage examples of the airflow.DAG attribute in Python. If you are wondering what airflow.DAG is, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the airflow package to which this attribute belongs.


The following presents 15 code examples of the airflow.DAG attribute, sorted by popularity by default.
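Before the individual examples, here is a minimal sketch of the pattern most of them follow: instantiate a DAG, create operators inside its context, and chain them with >>. The dag_id, dates, and task names below are illustrative only and are not taken from any of the projects cited here.

from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator  # airflow.operators.dummy in Airflow 2.x

# Illustrative values; adjust dag_id, start_date and schedule_interval to your environment.
with DAG(
    dag_id='minimal_dag_example',
    start_date=datetime(2020, 1, 1),
    schedule_interval='@daily',
    catchup=False,
) as dag:
    start = DummyOperator(task_id='start')
    end = DummyOperator(task_id='end')
    start >> end  # run 'end' after 'start'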

Example 1: test_cycle_no_cycle

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_cycle_no_cycle(self):
        # test no cycle
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})

        # A -> B -> C
        #      B -> D
        # E -> F
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='D')
            op5 = DummyOperator(task_id='E')
            op6 = DummyOperator(task_id='F')
            op1.set_downstream(op2)
            op2.set_downstream(op3)
            op2.set_downstream(op4)
            op5.set_downstream(op6)

        self.assertFalse(test_cycle(dag)) 
Developer: apache, Project: airflow, Lines: 25, Source: dag_cycle_tester.py
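Examples 1 through 4 are excerpts from Airflow's own DAG cycle-detection tests and omit the surrounding test setup. A rough sketch of the names they rely on is shown below; the exact import paths and the value of DEFAULT_DATE are assumptions and differ between Airflow versions (test_cycle was renamed check_cycle in later releases).

from datetime import datetime

from airflow import DAG
from airflow.exceptions import AirflowDagCycleException
from airflow.operators.dummy_operator import DummyOperator  # airflow.operators.dummy in Airflow 2.x
from airflow.utils.dag_cycle_tester import test_cycle  # check_cycle in newer releases

# The tests reference a module-level constant; this concrete date is an assumption.
DEFAULT_DATE = datetime(2016, 1, 1)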

Example 2: test_cycle_downstream_loop

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_cycle_downstream_loop(self):
        # test downstream self loop
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})

        # A -> B -> C -> D -> E -> E
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='D')
            op5 = DummyOperator(task_id='E')
            op1.set_downstream(op2)
            op2.set_downstream(op3)
            op3.set_downstream(op4)
            op4.set_downstream(op5)
            op5.set_downstream(op5)

        with self.assertRaises(AirflowDagCycleException):
            self.assertFalse(test_cycle(dag)) 
Developer: apache, Project: airflow, Lines: 24, Source: dag_cycle_tester.py

Example 3: test_cycle_large_loop

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_cycle_large_loop(self):
        # large loop
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})

        # A -> B -> C -> D -> E -> A
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='D')
            op5 = DummyOperator(task_id='E')
            op1.set_downstream(op2)
            op2.set_downstream(op3)
            op3.set_downstream(op4)
            op4.set_downstream(op5)
            op5.set_downstream(op1)

        with self.assertRaises(AirflowDagCycleException):
            self.assertFalse(test_cycle(dag)) 
Developer: apache, Project: airflow, Lines: 24, Source: dag_cycle_tester.py

Example 4: test_cycle_arbitrary_loop

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_cycle_arbitrary_loop(self):
        # test arbitrary loop
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})

        # E-> A -> B -> F -> A
        #       -> C -> F
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='E')
            op5 = DummyOperator(task_id='F')
            op1.set_downstream(op2)
            op1.set_downstream(op3)
            op4.set_downstream(op1)
            op3.set_downstream(op5)
            op2.set_downstream(op5)
            op5.set_downstream(op1)

        with self.assertRaises(AirflowDagCycleException):
            self.assertFalse(test_cycle(dag)) 
Developer: apache, Project: airflow, Lines: 26, Source: dag_cycle_tester.py

Example 5: test_execute

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
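# Note: mock_hook is presumably supplied by a @mock.patch decorator on this test method, which is not shown in this excerpt.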
def test_execute(self, mock_hook):
        mock_hook.return_value.get_instance.return_value = {"apiEndpoint": INSTANCE_URL}

        op = CloudDataFusionStartPipelineOperator(
            task_id="test_task",
            pipeline_name=PIPELINE_NAME,
            instance_name=INSTANCE_NAME,
            namespace=NAMESPACE,
            location=LOCATION,
            project_id=PROJECT_ID,
            runtime_args=RUNTIME_ARGS
        )
        op.dag = mock.MagicMock(spec=DAG, task_dict={}, dag_id="test")

        op.execute({})
        mock_hook.return_value.get_instance.assert_called_once_with(
            instance_name=INSTANCE_NAME, location=LOCATION, project_id=PROJECT_ID
        )

        mock_hook.return_value.start_pipeline.assert_called_once_with(
            instance_url=INSTANCE_URL,
            pipeline_name=PIPELINE_NAME,
            namespace=NAMESPACE,
            runtime_args=RUNTIME_ARGS,
        ) 
Developer: apache, Project: airflow, Lines: 27, Source: test_datafusion.py

Example 6: test_remove_stale_dags

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_remove_stale_dags(self):
        example_dags_list = list(self._write_example_dags().values())
        # Remove SubDags from the list as they are not stored in DB in a separate row
        # and are directly added in Json blob of the main DAG
        filtered_example_dags_list = [dag for dag in example_dags_list if not dag.is_subdag]
        # Tests removing a stale DAG
        stale_dag = SDM(filtered_example_dags_list[0])
        fresh_dag = SDM(filtered_example_dags_list[1])
        # Overwrite stale_dag's last_updated to be 10 minutes ago
        stale_dag.last_updated = timezone.utcnow() - timezone.dt.timedelta(seconds=600)
        with create_session() as session:
            session.merge(stale_dag)
            session.commit()
        # Remove any stale DAGs older than 5 minutes
        SDM.remove_stale_dags(timezone.utcnow() - timezone.dt.timedelta(seconds=300))
        self.assertFalse(SDM.has_dag(stale_dag.dag_id))
        self.assertTrue(SDM.has_dag(fresh_dag.dag_id)) 
Developer: apache, Project: airflow, Lines: 19, Source: test_serialized_dag.py

Example 7: failure_callback

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def failure_callback(context):
    """
    The function that will be executed on failure.

    :param context: The context of the executed task.
    :type context: dict
    """
    message = 'AIRFLOW TASK FAILURE TIPS:\n' \
              'DAG:    {}\n' \
              'TASKS:  {}\n' \
              'Reason: {}\n' \
        .format(context['task_instance'].dag_id,
                context['task_instance'].task_id,
                context['exception'])
    return DingdingOperator(
        task_id='dingding_success_callback',
        dingding_conn_id='dingding_default',
        message_type='text',
        message=message,
        at_all=True,
    ).execute(context) 
Developer: apache, Project: airflow, Lines: 23, Source: example_dingding.py
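The callback in Example 7 only runs once it is attached to tasks. A minimal sketch of how such a callback is typically wired up via default_args, assuming the failure_callback defined above and an illustrative dag_id:

from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

args = {
    'owner': 'airflow',
    # Invoke failure_callback (defined above) whenever a task in this DAG fails.
    'on_failure_callback': failure_callback,
}

with DAG('example_dingding_callbacks', default_args=args,
         start_date=datetime(2020, 1, 1), schedule_interval=None) as dag:
    DummyOperator(task_id='some_task')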

Example 8: create_test_pipeline

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_test_pipeline(suffix, trigger_rule, dag_):
    """
    Instantiate a number of operators for the given DAG.

    :param str suffix: Suffix to append to the operator task_ids
    :param str trigger_rule: TriggerRule for the join task
    :param DAG dag_: The DAG to run the operators on
    """
    skip_operator = DummySkipOperator(task_id='skip_operator_{}'.format(suffix), dag=dag_)
    always_true = DummyOperator(task_id='always_true_{}'.format(suffix), dag=dag_)
    join = DummyOperator(task_id=trigger_rule, dag=dag_, trigger_rule=trigger_rule)
    final = DummyOperator(task_id='final_{}'.format(suffix), dag=dag_)

    skip_operator >> join
    always_true >> join
    join >> final 
Developer: apache, Project: airflow, Lines: 18, Source: example_skip_dag.py
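In the original example_skip_dag.py this factory is invoked for more than one trigger rule on the same DAG. A hedged sketch of such usage (DummySkipOperator is a small custom operator defined in that example file, not a stock Airflow operator; the dag_id and date are illustrative):

from datetime import datetime

from airflow import DAG

dag = DAG(dag_id='example_skip_dag', start_date=datetime(2020, 1, 1), schedule_interval=None)

# Two parallel branches whose join tasks differ only in their trigger rule.
create_test_pipeline('1', 'all_success', dag)
create_test_pipeline('2', 'one_success', dag)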

Example 9: subdag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def subdag(parent_dag_name, child_dag_name, args):
    """
    Generate a DAG to be used as a subdag.

    :param str parent_dag_name: Id of the parent DAG
    :param str child_dag_name: Id of the child DAG
    :param dict args: Default arguments to provide to the subdag
    :return: DAG to use as a subdag
    :rtype: airflow.models.DAG
    """
    dag_subdag = DAG(
        dag_id='%s.%s' % (parent_dag_name, child_dag_name),
        default_args=args,
        schedule_interval="@daily",
    )

    for i in range(5):
        DummyOperator(
            task_id='%s-task-%s' % (child_dag_name, i + 1),
            default_args=args,
            dag=dag_subdag,
        )

    return dag_subdag
# [END subdag] 
Developer: apache, Project: airflow, Lines: 27, Source: subdag.py
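The factory in Example 9 is intended to be handed to a SubDagOperator in a parent DAG. A minimal sketch, assuming a parent DAG named 'example_subdag_operator'; the SubDagOperator import path differs between Airflow 1.10 and 2.x:

from datetime import datetime

from airflow import DAG
from airflow.operators.subdag_operator import SubDagOperator  # airflow.operators.subdag in Airflow 2.x

DAG_NAME = 'example_subdag_operator'
args = {'owner': 'airflow', 'start_date': datetime(2020, 1, 1)}

with DAG(dag_id=DAG_NAME, default_args=args, schedule_interval='@once') as parent_dag:
    section_1 = SubDagOperator(
        task_id='section-1',
        # subdag() is the factory defined above; the child dag_id must be '<parent>.<task_id>'.
        subdag=subdag(DAG_NAME, 'section-1', args),
    )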

Example 10: create_dag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_dag():
    dag = DAG(
        dag_id=DAG_ID,
        default_args=DAG_DEFAULT_ARGS,
        concurrency=3,
        max_active_runs=3,
        start_date=datetime(2003, 7, 1),
        schedule_interval='@daily',
        catchup=False,
    )

    with dag:
        start_task = get_log_operator(dag, DAG_ID, 'Starting')
        run_task = get_runner_operator(dag)
        end_task = get_log_operator(dag, DAG_ID, 'Finished')

        start_task >> run_task >> end_task

    return dag 
Developer: creativecommons, Project: cccatalog, Lines: 21, Source: wikimedia_workflow.py
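Examples 10 through 14 all return the DAG from a create_dag() factory. For Airflow's scheduler to discover such a DAG, the workflow file presumably binds the return value to a module-level name, for example:

# At module level of the workflow file, so the DAG parser can discover the object.
globals()[DAG_ID] = create_dag()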

Example 11: create_dag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_dag():
    dag = DAG(
        dag_id=DAG_ID,
        default_args=DAG_DEFAULT_ARGS,
        start_date=datetime(2020, 1, 15),
        schedule_interval="@monthly",
        catchup=False
    )

    with dag:
        start_task = get_log_operator(dag, DAG_ID, "Starting")
        run_task = get_runner_operator(dag)
        end_task = get_log_operator(dag, DAG_ID, "Finished")

        start_task >> run_task >> end_task

    return dag 
Developer: creativecommons, Project: cccatalog, Lines: 19, Source: rawpixel_workflow.py

Example 12: create_dag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_dag():
    dag = DAG(
        dag_id=DAG_ID,
        default_args=DAG_DEFAULT_ARGS,
        concurrency=1,
        max_active_runs=1,
        start_date=datetime(2020, 1, 1),
        schedule_interval='@daily',
        catchup=False,
    )

    with dag:
        start_task = get_log_operator(dag, DAG_ID, 'Starting')
        run_task = get_runner_operator(dag)
        end_task = get_log_operator(dag, DAG_ID, 'Finished')

        start_task >> run_task >> end_task

    return dag 
Developer: creativecommons, Project: cccatalog, Lines: 21, Source: metropolitan_museum_workflow.py

Example 13: create_dag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_dag():
    dag = DAG(
        dag_id=DAG_ID,
        default_args=DAG_DEFAULT_ARGS,
        start_date=datetime(2020, 1, 15),
        schedule_interval="0 16 15 * *",
        catchup=False
    )

    with dag:
        start_task = get_log_operator(dag, DAG_ID, "Starting")
        run_task = get_runner_operator(dag)
        end_task = get_log_operator(dag, DAG_ID, "Finished")

        start_task >> run_task >> end_task

    return dag 
Developer: creativecommons, Project: cccatalog, Lines: 19, Source: sync_commoncrawl_workflow.py

Example 14: create_dag

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def create_dag(
        source,
        script_location,
        dag_id,
        crontab_str=None,
        default_args=DAG_DEFAULT_ARGS):

    dag = DAG(
        dag_id=dag_id,
        default_args=default_args,
        schedule_interval=crontab_str,
        catchup=False
    )

    with dag:
        start_task = get_log_operator(dag, source, 'starting')
        run_task = get_runner_operator(dag, source, script_location)
        end_task = get_log_operator(dag, source, 'finished')

        start_task >> run_task >> end_task

    return dag 
Developer: creativecommons, Project: cccatalog, Lines: 24, Source: common_api_workflows.py

Example 15: test_get_dated_main_runner_handles_zero_shift

# Required module: import airflow [as alias]
# Or: from airflow import DAG [as alias]
def test_get_dated_main_runner_handles_zero_shift():
    dag = DAG(
        dag_id='test_dag',
        start_date=datetime.strptime('2019-01-01', '%Y-%m-%d')
    )
    execution_date = datetime.strptime(
        '2019-01-01',
        '%Y-%m-%d'
    ).replace(tzinfo=timezone.utc)
    main_func = PickleMock()
    runner = op_util.get_dated_main_runner_operator(
        dag,
        main_func,
        timedelta(minutes=1)
    )
    ti = TaskInstance(runner, execution_date)
    ti.run(ignore_task_deps=True, ignore_ti_state=True, test_mode=True)
    main_func.assert_called_with('2019-01-01') 
Developer: creativecommons, Project: cccatalog, Lines: 20, Source: test_operator_util.py


Note: The airflow.DAG attribute examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright remains with the original authors; please consult each project's license before distributing or using the code. Do not reproduce this article without permission.