当前位置: 首页>>代码示例>>Python>>正文


Python DAG.schedule_interval方法代码示例

本文整理汇总了Python中airflow.DAG.schedule_interval方法的典型用法代码示例。如果您正苦于以下问题:Python DAG.schedule_interval方法的具体用法?Python DAG.schedule_interval怎么用?Python DAG.schedule_interval使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在airflow.DAG的用法示例。


在下文中一共展示了DAG.schedule_interval方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: setUp

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
    def setUp(self):
        """Prepare an FSHook and a DAG scheduled @once for each test case."""
        configuration.load_test_config()
        from airflow.contrib.hooks.fs_hook import FSHook

        default_args = {
            "owner": "airflow",
            "start_date": DEFAULT_DATE,
            "provide_context": True,
        }
        self.hook = FSHook()
        self.dag = DAG(TEST_DAG_ID + "test_schedule_dag_once",
                       default_args=default_args)
        self.dag.schedule_interval = "@once"
开发者ID:asnir,项目名称:airflow,代码行数:12,代码来源:fs_operator.py

示例2: setUp

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
 def setUp(self):
     """Create an FSHook plus a @once-scheduled DAG used by the tests."""
     configuration.load_test_config()
     from airflow.contrib.hooks.fs_hook import FSHook

     fixture_args = {
         'owner': 'airflow',
         'start_date': DEFAULT_DATE,
         'provide_context': True,
     }
     self.hook = FSHook()
     self.dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
                    default_args=fixture_args)
     self.dag.schedule_interval = '@once'
开发者ID:cjquinon,项目名称:incubator-airflow,代码行数:15,代码来源:fs_operator.py

示例3: test_schedule_dag_once

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
    def test_schedule_dag_once(self):
        """
        A DAG with schedule_interval @once must be scheduled exactly once:
        the first schedule_dag call yields a run, the second yields nothing.
        """
        once_dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once')
        once_dag.schedule_interval = '@once'
        once_dag.tasks = [
            models.BaseOperator(
                task_id="faketastic",
                owner='Also fake',
                start_date=datetime(2015, 1, 2, 0, 0),
            )
        ]
        first_run = jobs.SchedulerJob(test_mode=True).schedule_dag(once_dag)
        second_run = jobs.SchedulerJob(test_mode=True).schedule_dag(once_dag)

        assert first_run is not None
        assert second_run is None
开发者ID:moritzpein,项目名称:airflow,代码行数:16,代码来源:core.py

示例4: setUp

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
 def setUp(self):
     """Build an SSHHook (host-key checking disabled) and a @once DAG."""
     configuration.test_mode()
     from airflow.contrib.hooks.ssh_hook import SSHHook

     ssh_hook = SSHHook()
     ssh_hook.no_host_key_check = True
     self.hook = ssh_hook
     self.dag = DAG(
         TEST_DAG_ID + 'test_schedule_dag_once',
         default_args={
             'owner': 'airflow',
             'start_date': DEFAULT_DATE,
             'provide_context': True,
         },
     )
     self.dag.schedule_interval = '@once'
开发者ID:16522855,项目名称:airflow,代码行数:16,代码来源:ssh_execute_operator.py

示例5: setUp

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
    def setUp(self):
        """Mock an SSHHook whose Popen emits 'stdout' with a falsy
        returncode, and pair it with a DAG scheduled @once.
        """
        # Suite targets Python 2 only; bail out immediately on Python 3.
        if sys.version_info[0] == 3:
            raise unittest.SkipTest('SSHExecuteOperatorTest won\'t work with '
                                    'python3. No need to test anything here')

        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook

        mocked_hook = mock.MagicMock(spec=SSHHook)
        mocked_hook.no_host_key_check = True
        mocked_hook.Popen.return_value.stdout = StringIO(u'stdout')
        mocked_hook.Popen.return_value.returncode = False

        self.hook = mocked_hook
        self.dag = DAG(
            TEST_DAG_ID + 'test_schedule_dag_once',
            default_args={
                'owner': 'airflow',
                'start_date': DEFAULT_DATE,
                'provide_context': True,
            },
        )
        self.dag.schedule_interval = '@once'
开发者ID:SivaPandeti,项目名称:airflow,代码行数:23,代码来源:test_ssh_execute_operator.py

示例6: setUp

# 需要导入模块: from airflow import DAG [as 别名]
# 或者: from airflow.DAG import schedule_interval [as 别名]
    def setUp(self):
        """Wire up a @once DAG and the GCS upload-session sensor under test."""
        configuration.load_test_config()
        fixture_args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True,
        }
        self.dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
                       default_args=fixture_args)
        self.dag.schedule_interval = '@once'

        self.sensor = gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor(
            task_id='sensor',
            bucket='test-bucket',
            prefix='test-prefix/path',
            inactivity_period=12,
            poke_interval=10,
            min_objects=1,
            allow_delete=False,
            previous_num_objects=0,
            dag=self.dag,
        )
        # Fixed timestamp — presumably substituted for "now" by the tests;
        # confirm against the sibling test methods.
        self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0)
开发者ID:apache,项目名称:incubator-airflow,代码行数:25,代码来源:test_gcs_upload_session_sensor.py


注:本文中的airflow.DAG.schedule_interval方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。