This article collects typical usage examples of the Python method airflow.DAG.schedule_interval. If you have been wondering what DAG.schedule_interval does, how to use it, or want working examples, the curated code samples below may help. You can also read more about its containing class, airflow.DAG.
Six code examples of DAG.schedule_interval are shown below, sorted by popularity by default.
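For context, schedule_interval is normally passed straight to the DAG constructor rather than assigned after construction as the examples below do. A minimal sketch, assuming an Airflow 1.x environment (the dag_id and dates are placeholders):

from datetime import datetime
from airflow import DAG

# Equivalent to assigning dag.schedule_interval = '@once' after construction:
# the DAG runs exactly once, at the first opportunity after start_date.
dag = DAG(
    dag_id='example_schedule_once',  # hypothetical dag_id
    schedule_interval='@once',
    start_date=datetime(2015, 1, 1),
    default_args={'owner': 'airflow'},
)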
Example 1: setUp
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def setUp(self):
    configuration.load_test_config()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    # DEFAULT_DATE and TEST_DAG_ID are module-level constants in the test file
    args = {"owner": "airflow", "start_date": DEFAULT_DATE, "provide_context": True}
    dag = DAG(TEST_DAG_ID + "test_schedule_dag_once", default_args=args)
    dag.schedule_interval = "@once"
    self.hook = hook
    self.dag = dag
Example 2: setUp
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def setUp(self):
    configuration.load_test_config()
    from airflow.contrib.hooks.fs_hook import FSHook
    hook = FSHook()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
Example 3: test_schedule_dag_once
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def test_schedule_dag_once(self):
    """
    Tests scheduling a dag scheduled for @once - should be scheduled the first time
    it is called, and not scheduled the second.
    """
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once')
    dag.schedule_interval = '@once'
    dag.tasks = [models.BaseOperator(task_id="faketastic", owner='Also fake',
                                     start_date=datetime(2015, 1, 2, 0, 0))]
    dag_run = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)
    dag_run2 = jobs.SchedulerJob(test_mode=True).schedule_dag(dag)
    assert dag_run is not None
    assert dag_run2 is None
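As a side note on the behavior this test asserts: a DAG whose schedule_interval is '@once' has no recurring interval, so it should also report no follow-up run date. A minimal sketch, assuming an Airflow 1.x environment where DAG.following_schedule is available:

from datetime import datetime
from airflow import DAG

# With '@once' there is no recurring interval, so following_schedule()
# yields no next run date (it returns None in Airflow 1.x).
dag = DAG('demo_once', schedule_interval='@once',
          start_date=datetime(2015, 1, 1))  # hypothetical dag_id
assert dag.following_schedule(datetime(2015, 1, 2)) is None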
Example 4: setUp
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def setUp(self):
    configuration.test_mode()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = SSHHook()
    hook.no_host_key_check = True
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
Example 5: setUp
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def setUp(self):
    if sys.version_info[0] == 3:
        raise unittest.SkipTest('SSHExecuteOperatorTest won\'t work with '
                                'python3. No need to test anything here')
    configuration.load_test_config()
    from airflow.contrib.hooks.ssh_hook import SSHHook
    hook = mock.MagicMock(spec=SSHHook)
    hook.no_host_key_check = True
    hook.Popen.return_value.stdout = StringIO(u'stdout')
    hook.Popen.return_value.returncode = False
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.hook = hook
    self.dag = dag
Example 6: setUp
# Required import: from airflow import DAG [as alias]
# Or: from airflow.DAG import schedule_interval [as alias]
def setUp(self):
    configuration.load_test_config()
    args = {
        'owner': 'airflow',
        'start_date': DEFAULT_DATE,
        'provide_context': True
    }
    dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
    dag.schedule_interval = '@once'
    self.dag = dag
    self.sensor = gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor(
        task_id='sensor',
        bucket='test-bucket',
        prefix='test-prefix/path',
        inactivity_period=12,
        poke_interval=10,
        min_objects=1,
        allow_delete=False,
        previous_num_objects=0,
        dag=self.dag
    )
    self.last_mocked_date = datetime(2019, 4, 24, 0, 0, 0)