当前位置: 首页>>代码示例>>Python>>正文


Python airflow.DAG类代码示例

本文整理汇总了Python中airflow.DAG的典型用法代码示例。如果您正苦于以下问题:Python DAG类的具体用法?Python DAG怎么用?Python DAG使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了DAG类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: test_schedule_dag_fake_scheduled_previous

 def test_schedule_dag_fake_scheduled_previous(self):
     """
     Test scheduling a dag where there is a prior DagRun
     which has the same run_id as the next run should have
     """
     interval = timedelta(hours=1)
     dag = DAG(
         TEST_DAG_ID + 'test_schedule_dag_fake_scheduled_previous',
         schedule_interval=interval,
         start_date=DEFAULT_DATE)
     dag.tasks = [
         models.BaseOperator(
             task_id="faketastic",
             owner='Also fake',
             start_date=DEFAULT_DATE),
     ]
     # Pre-create an externally-triggered, already-successful run whose
     # run_id is the one the scheduler would generate for DEFAULT_DATE.
     existing_run = models.DagRun(
         dag_id=dag.dag_id,
         run_id=models.DagRun.id_for_date(DEFAULT_DATE),
         execution_date=DEFAULT_DATE,
         state=utils.State.SUCCESS,
         external_trigger=True)
     session = settings.Session()
     session.add(existing_run)
     session.commit()

     dag_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)

     # The scheduler must still produce a run, scheduled one interval later.
     assert dag_run is not None
     assert dag_run.dag_id == dag.dag_id
     assert dag_run.run_id is not None
     assert dag_run.run_id != ''
     assert dag_run.execution_date == DEFAULT_DATE + interval, (
             'dag_run.execution_date did not match expectation: {0}'
             .format(dag_run.execution_date))
     assert dag_run.state == models.State.RUNNING
     assert dag_run.external_trigger == False
开发者ID:moritzpein,项目名称:airflow,代码行数:31,代码来源:core.py

示例2: setUp

 def setUp(self):
     """Initialize a clean test database and a fresh test DAG."""
     configuration.test_mode()
     utils.initdb()
     default_args = {'owner': 'airflow', 'start_date': datetime(2015, 1, 1)}
     self.dag = DAG(TEST_DAG_ID, default_args=default_args)
     # Wipe any task-instance state left behind by earlier test runs.
     self.dag.clear(start_date=DEFAULT_DATE, end_date=datetime.now())
开发者ID:kundeng,项目名称:airflow,代码行数:7,代码来源:core.py

示例3: BranchOperatorTest

class BranchOperatorTest(unittest.TestCase):
    """Verify that BranchPythonOperator follows the chosen branch and
    skips the other one, with and without an explicit DagRun."""

    def setUp(self):
        self.dag = DAG(
            'branch_operator_test',
            default_args={
                'owner': 'airflow',
                'start_date': DEFAULT_DATE},
            schedule_interval=INTERVAL)
        # The callable always selects branch_1, so branch_2 must be skipped.
        self.branch_op = BranchPythonOperator(
            task_id='make_choice',
            dag=self.dag,
            python_callable=lambda: 'branch_1')
        self.branch_1 = DummyOperator(task_id='branch_1', dag=self.dag)
        self.branch_2 = DummyOperator(task_id='branch_2', dag=self.dag)
        for downstream in (self.branch_1, self.branch_2):
            downstream.set_upstream(self.branch_op)
        self.dag.clear()

    def _assert_branch_states(self, task_instances):
        # Expected outcome shared by both tests: the chosen branch exists
        # with state None, the other branch is skipped.
        expected = {
            'make_choice': State.SUCCESS,
            'branch_1': State.NONE,
            'branch_2': State.SKIPPED,
        }
        for ti in task_instances:
            if ti.task_id not in expected:
                raise
            self.assertEqual(ti.state, expected[ti.task_id])

    def test_without_dag_run(self):
        """This checks the defensive against non existent tasks in a dag run"""
        self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        session = Session()
        tis = session.query(TI).filter(
            TI.dag_id == self.dag.dag_id,
            TI.execution_date == DEFAULT_DATE
        )
        session.close()

        self._assert_branch_states(tis)

    def test_with_dag_run(self):
        dr = self.dag.create_dagrun(
            run_id="manual__",
            start_date=datetime.datetime.now(),
            execution_date=DEFAULT_DATE,
            state=State.RUNNING
        )

        self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        self._assert_branch_states(dr.get_task_instances())
开发者ID:Nextdoor,项目名称:airflow,代码行数:59,代码来源:python_operator.py

示例4: BashOperatorTestCase

class BashOperatorTestCase(unittest.TestCase):
    def test_echo_env_variables(self):
        """
        Test that env variables are exported correctly to the
        task bash environment.

        AIRFLOW_HOME is temporarily replaced with a sentinel value and is
        restored in a ``finally`` block so a failing task run or assertion
        cannot leak the fake value into other tests in the same process.
        """
        now = datetime.utcnow()
        now = now.replace(tzinfo=timezone.utc)

        self.dag = DAG(
            dag_id='bash_op_test', default_args={
                'owner': 'airflow',
                'retries': 100,
                'start_date': DEFAULT_DATE
            },
            schedule_interval='@daily',
            dagrun_timeout=timedelta(minutes=60))

        self.dag.create_dagrun(
            run_id='manual__' + DEFAULT_DATE.isoformat(),
            execution_date=DEFAULT_DATE,
            start_date=now,
            state=State.RUNNING,
            external_trigger=False,
        )

        import tempfile
        with tempfile.NamedTemporaryFile() as f:
            fname = f.name
            # Each echo appends one env var to the temp file so we can
            # assert on the whole capture afterwards.
            t = BashOperator(
                task_id='echo_env_vars',
                dag=self.dag,
                bash_command='echo $AIRFLOW_HOME>> {0};'
                             'echo $PYTHONPATH>> {0};'
                             'echo $AIRFLOW_CTX_DAG_ID >> {0};'
                             'echo $AIRFLOW_CTX_TASK_ID>> {0};'
                             'echo $AIRFLOW_CTX_EXECUTION_DATE>> {0};'
                             'echo $AIRFLOW_CTX_DAG_RUN_ID>> {0};'.format(fname)
            )

            original_AIRFLOW_HOME = os.environ['AIRFLOW_HOME']
            os.environ['AIRFLOW_HOME'] = 'MY_PATH_TO_AIRFLOW_HOME'
            try:
                t.run(DEFAULT_DATE, DEFAULT_DATE,
                      ignore_first_depends_on_past=True, ignore_ti_state=True)

                with open(fname, 'r') as fr:
                    output = fr.read()
                    self.assertIn('MY_PATH_TO_AIRFLOW_HOME', output)
                    # exported in run_unit_tests.sh as part of PYTHONPATH
                    self.assertIn('tests/test_utils', output)
                    self.assertIn('bash_op_test', output)
                    self.assertIn('echo_env_vars', output)
                    self.assertIn(DEFAULT_DATE.isoformat(), output)
                    self.assertIn('manual__' + DEFAULT_DATE.isoformat(), output)
            finally:
                # Bug fix: previously only restored on success, leaving the
                # sentinel value in os.environ when the run/asserts failed.
                os.environ['AIRFLOW_HOME'] = original_AIRFLOW_HOME

示例5: setUp

    def setUp(self):
        """Build an @once DAG and a filesystem hook for the sensor tests."""
        configuration.load_test_config()
        from airflow.contrib.hooks.fs_hook import FSHook

        default_args = {
            "owner": "airflow",
            "start_date": DEFAULT_DATE,
            "provide_context": True,
        }
        dag = DAG(TEST_DAG_ID + "test_schedule_dag_once",
                  default_args=default_args)
        dag.schedule_interval = "@once"

        self.hook = FSHook()
        self.dag = dag
开发者ID:asnir,项目名称:airflow,代码行数:10,代码来源:fs_operator.py

示例6: test_without_dag_run

    def test_without_dag_run(self):
        """This checks the defensive against non existent tasks in a dag run"""
        value = False
        dag = DAG(
            'shortcircuit_operator_test_without_dag_run',
            default_args={
                 'owner': 'airflow',
                 'start_date': DEFAULT_DATE
            },
            schedule_interval=INTERVAL)
        # Pipeline shape: upstream -> make_choice -> branch_1 -> branch_2.
        short_op = ShortCircuitOperator(
            task_id='make_choice', dag=dag, python_callable=lambda: value)
        branch_1 = DummyOperator(task_id='branch_1', dag=dag)
        branch_2 = DummyOperator(task_id='branch_2', dag=dag)
        upstream = DummyOperator(task_id='upstream', dag=dag)
        branch_1.set_upstream(short_op)
        branch_2.set_upstream(branch_1)
        upstream.set_downstream(short_op)
        dag.clear()

        short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        session = Session()
        tis = session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.execution_date == DEFAULT_DATE
        )

        def check_states(expected_branch_state):
            # `tis` is a lazy query, so each call re-reads current DB state.
            for ti in tis:
                if ti.task_id == 'make_choice':
                    self.assertEqual(ti.state, State.SUCCESS)
                elif ti.task_id == 'upstream':
                    # upstream was never run, so no TI for it may exist
                    raise
                elif ti.task_id in ('branch_1', 'branch_2'):
                    self.assertEqual(ti.state, expected_branch_state)
                else:
                    raise

        # Callable returned False: everything downstream is skipped.
        check_states(State.SKIPPED)

        value = True
        dag.clear()

        short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
        # Callable returned True: downstream state reset to None (runnable).
        check_states(State.NONE)

        session.close()
开发者ID:Nextdoor,项目名称:airflow,代码行数:55,代码来源:python_operator.py

示例7: setUp

 def setUp(self):
     """Create an @once DAG plus a filesystem hook for each test."""
     configuration.load_test_config()
     from airflow.contrib.hooks.fs_hook import FSHook

     default_args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True,
     }
     dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
               default_args=default_args)
     dag.schedule_interval = '@once'

     self.hook = FSHook()
     self.dag = dag
开发者ID:cjquinon,项目名称:incubator-airflow,代码行数:13,代码来源:fs_operator.py

示例8: test_schedule_dag_once

    def test_schedule_dag_once(self):
        """
        Tests scheduling a dag scheduled for @once - should be scheduled the first time
        it is called, and not scheduled the second.
        """
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once')
        dag.schedule_interval = '@once'
        dag.tasks = [
            models.BaseOperator(
                task_id="faketastic",
                owner='Also fake',
                start_date=datetime(2015, 1, 2, 0, 0)),
        ]

        first_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)
        second_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)

        # @once fires exactly one run; the second attempt yields nothing.
        assert first_run is not None
        assert second_run is None
开发者ID:moritzpein,项目名称:airflow,代码行数:14,代码来源:core.py

示例9: setUp

 def setUp(self):
     """Create an @once DAG and an SSH hook with host-key checks disabled."""
     configuration.test_mode()
     from airflow.contrib.hooks.ssh_hook import SSHHook

     hook = SSHHook()
     # Tests talk to throwaway hosts; skip known_hosts verification.
     hook.no_host_key_check = True

     default_args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True,
     }
     dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
               default_args=default_args)
     dag.schedule_interval = '@once'

     self.hook = hook
     self.dag = dag
开发者ID:16522855,项目名称:airflow,代码行数:14,代码来源:ssh_execute_operator.py

示例10: test_schedule_dag_no_previous_runs

 def test_schedule_dag_no_previous_runs(self):
     """
     Tests scheduling a dag with no previous runs
     """
     dag = DAG(TEST_DAG_ID + 'test_schedule_dag_no_previous_runs')
     task_start = datetime(2015, 1, 2, 0, 0)
     dag.tasks = [
         models.BaseOperator(
             task_id="faketastic",
             owner='Also fake',
             start_date=task_start),
     ]

     dag_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)

     # First-ever run: scheduled at the task's start_date, marked RUNNING.
     assert dag_run is not None
     assert dag_run.dag_id == dag.dag_id
     assert dag_run.run_id is not None
     assert dag_run.run_id != ''
     assert dag_run.execution_date == task_start, (
             'dag_run.execution_date did not match expectation: {0}'
             .format(dag_run.execution_date))
     assert dag_run.state == models.State.RUNNING
     assert dag_run.external_trigger == False
开发者ID:moritzpein,项目名称:airflow,代码行数:17,代码来源:core.py

示例11: setUp

    def setUp(self):
        """Build an @once DAG and a fully mocked SSHHook (py2 only)."""
        if sys.version_info[0] == 3:
            raise unittest.SkipTest('SSHExecuteOperatorTest won\'t work with '
                                    'python3. No need to test anything here')

        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook

        # Mock out the hook entirely: canned stdout, returncode False
        # (i.e. success) so no real SSH connection is attempted.
        hook = mock.MagicMock(spec=SSHHook)
        hook.no_host_key_check = True
        hook.Popen.return_value.stdout = StringIO(u'stdout')
        hook.Popen.return_value.returncode = False

        default_args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True,
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
                  default_args=default_args)
        dag.schedule_interval = '@once'

        self.hook = hook
        self.dag = dag
开发者ID:SivaPandeti,项目名称:airflow,代码行数:21,代码来源:test_ssh_execute_operator.py

示例12: setUp

    def setUp(self):
        """Create the sensor test DAG and purge old run/TI rows."""
        configuration.load_test_config()
        self.dag = DAG(
            TEST_DAG_ID,
            default_args={
                'owner': 'airflow',
                'start_date': DEFAULT_DATE,
            })

        # Start every test from an empty DagRun / TaskInstance table.
        session = settings.Session()
        session.query(DagRun).delete()
        session.query(TaskInstance).delete()
        session.commit()
开发者ID:AdamUnger,项目名称:incubator-airflow,代码行数:12,代码来源:test_base_sensor.py

示例13: test_with_dag_run

    def test_with_dag_run(self):
        """Run the short-circuit pipeline inside a real DagRun, for both
        callable outcomes (False -> skip downstream, True -> run it)."""
        value = False
        dag = DAG(
            'shortcircuit_operator_test_with_dag_run',
            default_args={
                 'owner': 'airflow',
                 'start_date': DEFAULT_DATE
            },
            schedule_interval=INTERVAL)
        # Pipeline shape: upstream -> make_choice -> branch_1 -> branch_2.
        short_op = ShortCircuitOperator(
            task_id='make_choice', dag=dag, python_callable=lambda: value)
        branch_1 = DummyOperator(task_id='branch_1', dag=dag)
        branch_2 = DummyOperator(task_id='branch_2', dag=dag)
        upstream = DummyOperator(task_id='upstream', dag=dag)
        branch_1.set_upstream(short_op)
        branch_2.set_upstream(branch_1)
        upstream.set_downstream(short_op)
        dag.clear()

        logging.error("Tasks {}".format(dag.tasks))
        dr = dag.create_dagrun(
            run_id="manual__",
            start_date=datetime.datetime.now(),
            execution_date=DEFAULT_DATE,
            state=State.RUNNING
        )

        def run_and_check(expected_branch_state):
            upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
            short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

            instances = dr.get_task_instances()
            self.assertEqual(len(instances), 4)
            for ti in instances:
                if ti.task_id in ('make_choice', 'upstream'):
                    self.assertEqual(ti.state, State.SUCCESS)
                elif ti.task_id in ('branch_1', 'branch_2'):
                    self.assertEqual(ti.state, expected_branch_state)
                else:
                    raise

        # Callable returns False: downstream chain gets skipped.
        run_and_check(State.SKIPPED)

        value = True
        dag.clear()
        dr.verify_integrity()
        # Callable returns True: downstream state is reset to None.
        run_and_check(State.NONE)
开发者ID:Nextdoor,项目名称:airflow,代码行数:59,代码来源:python_operator.py

示例14: setUp

 def setUp(self):
     """Create the test DAG and freeze wall-clock time at FROZEN_NOW."""
     super().setUp()
     configuration.load_test_config()
     self.dag = DAG(
         'test_dag',
         default_args={
             'owner': 'airflow',
             'start_date': DEFAULT_DATE},
         schedule_interval=INTERVAL)
     self.addCleanup(self.dag.clear)
     # Pin "now" so LatestOnlyOperator decisions are deterministic;
     # unfrozen automatically when the test finishes.
     freezer = freeze_time(FROZEN_NOW)
     freezer.start()
     self.addCleanup(freezer.stop)
开发者ID:apache,项目名称:incubator-airflow,代码行数:13,代码来源:test_latest_only_operator.py

示例15: test_external_dag_sensor

    def test_external_dag_sensor(self):
        """Sensor with external_task_id=None waits on the whole DAG run."""
        # Create a second DAG with one successful run for the sensor to see.
        other_dag = DAG(
            'other_dag',
            default_args=self.args,
            end_date=DEFAULT_DATE,
            schedule_interval='@once')
        other_dag.create_dagrun(
            run_id='test',
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS)

        sensor = ExternalTaskSensor(
            task_id='test_external_dag_sensor_check',
            external_dag_id='other_dag',
            external_task_id=None,
            dag=self.dag
        )
        sensor.run(
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE,
            ignore_ti_state=True
        )
开发者ID:Fokko,项目名称:incubator-airflow,代码行数:23,代码来源:test_external_task_sensor.py


注:本文中的airflow.DAG类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。