本文整理汇总了Python中airflow.models.TaskInstance方法的典型用法代码示例。如果您正苦于以下问题:Python models.TaskInstance方法的具体用法?Python models.TaskInstance怎么用?Python models.TaskInstance使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.models的用法示例。
在下文中一共展示了models.TaskInstance方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_lineage_render
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_lineage_render(self):
    """Verify inlets/outlets added after operator init are template-rendered.

    ``pre_execute`` should render the ``{{ execution_date }}`` template in
    the lineage File URLs using the execution_date supplied via the context.
    """
    # tests inlets / outlets are rendered if they are added
    # after initialization
    dag = DAG(
        dag_id='test_lineage_render',
        start_date=DEFAULT_DATE
    )

    with dag:
        op1 = DummyOperator(task_id='task1')

    f1s = "/tmp/does_not_exist_1-{}"
    file1 = File(f1s.format("{{ execution_date }}"))

    op1.inlets.append(file1)
    op1.outlets.append(file1)

    # execution_date is set in the context in order to avoid creating
    # task instances in the database
    ctx1 = {"ti": TI(task=op1, execution_date=DEFAULT_DATE),
            "execution_date": DEFAULT_DATE}

    op1.pre_execute(ctx1)
    self.assertEqual(op1.inlets[0].url, f1s.format(DEFAULT_DATE))
    self.assertEqual(op1.outlets[0].url, f1s.format(DEFAULT_DATE))
示例2: test_extra_link_in_gantt_view
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_extra_link_in_gantt_view(self):
    """Check that operator extra links are serialized into the Gantt view.

    Persists a successful TaskInstance per task, then asserts the rendered
    page embeds an ``extraLinks`` JSON array containing the expected links.
    """
    exec_date = dates.days_ago(2)
    start_date = datetime(2020, 4, 10, 2, 0, 0)
    end_date = exec_date + timedelta(seconds=30)

    with create_session() as session:
        for task in self.dag.tasks:
            ti = TaskInstance(task=task, execution_date=exec_date, state="success")
            ti.start_date = start_date
            ti.end_date = end_date
            session.add(ti)

    url = 'gantt?dag_id={}&execution_date={}'.format(self.dag.dag_id, exec_date)
    resp = self.client.get(url, follow_redirects=True)

    self.check_content_in_response('"extraLinks":', resp)

    # Pull the serialized extraLinks array out of the page body and verify
    # both expected link names are present.
    extra_links_grps = re.search(r'extraLinks\": \[(\".*?\")\]', resp.get_data(as_text=True))
    extra_links = extra_links_grps.group(0)
    self.assertIn('airflow', extra_links)
    self.assertIn('github', extra_links)
示例3: verify_state
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def verify_state(self, dag, task_ids, execution_dates, state, old_tis, session=None):
    """Assert task-instance states after a bulk state change.

    Instances matching ``task_ids`` x ``execution_dates`` must be in
    ``state`` (with an end_date when the state is finished); all other
    instances must retain the state recorded in ``old_tis``.
    """
    TI = models.TaskInstance

    tis = session.query(TI).filter(
        TI.dag_id == dag.dag_id,
        TI.execution_date.in_(execution_dates)
    ).all()

    self.assertTrue(len(tis) > 0)

    for ti in tis:  # pylint: disable=too-many-nested-blocks
        self.assertEqual(ti.operator, dag.get_task(ti.task_id).__class__.__name__)
        if ti.task_id in task_ids and ti.execution_date in execution_dates:
            self.assertEqual(ti.state, state)
            if state in State.finished():
                # Finished states must have recorded a completion time.
                self.assertIsNotNone(ti.end_date)
        else:
            for old_ti in old_tis:
                if old_ti.task_id == ti.task_id and old_ti.execution_date == ti.execution_date:
                    # Untouched instances keep their previous state.
                    self.assertEqual(ti.state, old_ti.state)
示例4: test_read_nonexistent_log
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_read_nonexistent_log(self):
    """Reading logs for a TI with no ES document yields one empty log entry.

    The handler should report an empty log, ``end_of_log`` False, offset
    '0', and leave ``last_log_timestamp`` unchanged.
    """
    ts = pendulum.now()
    # In ElasticMock, search is going to return all documents with matching index
    # and doc_type regardless of match filters, so we delete the log entry instead
    # of making a new TaskInstance to query.
    self.es.delete(index=self.index_name, doc_type=self.doc_type, id=1)

    logs, metadatas = self.es_task_handler.read(self.ti,
                                                1,
                                                {'offset': 0,
                                                 'last_log_timestamp': str(ts),
                                                 'end_of_log': False})
    self.assertEqual(1, len(logs))
    self.assertEqual(len(logs), len(metadatas))
    self.assertEqual([''], logs)
    self.assertFalse(metadatas[0]['end_of_log'])
    self.assertEqual('0', metadatas[0]['offset'])
    # last_log_timestamp won't change if no log lines read.
    self.assertTrue(timezone.parse(metadatas[0]['last_log_timestamp']) == ts)
示例5: test_render_log_filename
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_render_log_filename(self):
    """``helpers.render_log_filename`` fills the Jinja template from the TI."""
    try_number = 1
    dag_id = 'test_render_log_filename_dag'
    task_id = 'test_render_log_filename_task'
    execution_date = datetime(2016, 1, 1)

    dag = DAG(dag_id, start_date=execution_date)
    task = DummyOperator(task_id=task_id, dag=dag)
    ti = TaskInstance(task=task, execution_date=execution_date)

    filename_template = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log"

    # Use the TI's own template context so the ``ts`` value matches exactly.
    ts = ti.get_template_context()['ts']
    expected_filename = "{dag_id}/{task_id}/{ts}/{try_number}.log".format(dag_id=dag_id,
                                                                          task_id=task_id,
                                                                          ts=ts,
                                                                          try_number=try_number)

    rendered_filename = helpers.render_log_filename(ti, try_number, filename_template)
    self.assertEqual(rendered_filename, expected_filename)
示例6: test_file_transfer_no_intermediate_dir_error_put
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_file_transfer_no_intermediate_dir_error_put(self):
    """SFTP PUT without ``create_intermediate_dirs`` fails on a missing dir.

    Running the task against a remote path whose parent directory does not
    exist must raise an error mentioning "No such file".
    """
    test_local_file_content = \
        b"This is local file content \n which is multiline " \
        b"continuing....with other character\nanother line here \n this is last line"
    # create a test file locally
    with open(self.test_local_filepath, 'wb') as file:
        file.write(test_local_file_content)

    # Try to put test file to remote.
    # This should raise an error with "No such file" as the directory
    # does not exist.
    with self.assertRaises(Exception) as error:
        put_test_task = SFTPOperator(
            task_id="test_sftp",
            ssh_hook=self.hook,
            local_filepath=self.test_local_filepath,
            remote_filepath=self.test_remote_filepath_int_dir,
            operation=SFTPOperation.PUT,
            create_intermediate_dirs=False,
            dag=self.dag
        )
        self.assertIsNotNone(put_test_task)
        ti2 = TaskInstance(task=put_test_task, execution_date=timezone.utcnow())
        ti2.run()
    self.assertIn('No such file', str(error.exception))
示例7: delete_remote_resource
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def delete_remote_resource(self):
    """Clean up remote test artifacts left behind by SFTP/SSH test cases.

    Removes the remote test file via an SSHOperator task run, then deletes
    the intermediate-dir file and the remote directory if they exist locally.
    """
    if os.path.exists(self.test_remote_filepath):
        # Remove the remote file through an actual operator run so the
        # SSH connection path is exercised as well.
        remove_file_task = SSHOperator(
            task_id="test_check_file",
            ssh_hook=self.hook,
            command="rm {0}".format(self.test_remote_filepath),
            do_xcom_push=True,
            dag=self.dag
        )
        self.assertIsNotNone(remove_file_task)
        ti3 = TaskInstance(task=remove_file_task, execution_date=timezone.utcnow())
        ti3.run()
    if os.path.exists(self.test_remote_filepath_int_dir):
        os.remove(self.test_remote_filepath_int_dir)
    if os.path.exists(self.test_remote_dir):
        os.rmdir(self.test_remote_dir)
示例8: test_mapred_job_name
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_mapred_job_name(self, mock_get_hook):
    """HiveOperator sets a descriptive MapReduce job name on its hook.

    The name must embed hostname, dag_id, task_id and the ISO execution
    date of the task instance passed in the context.
    """
    mock_hook = mock.MagicMock()
    mock_get_hook.return_value = mock_hook
    op = MockHiveOperator(
        task_id='test_mapred_job_name',
        hql=self.hql,
        dag=self.dag)

    fake_execution_date = timezone.datetime(2018, 6, 19)
    fake_ti = TaskInstance(task=op, execution_date=fake_execution_date)
    fake_ti.hostname = 'fake_hostname'
    fake_context = {'ti': fake_ti}

    op.execute(fake_context)
    self.assertEqual(
        "Airflow HiveOperator task for {}.{}.{}.{}"
        .format(fake_ti.hostname,
                self.dag.dag_id, op.task_id,
                fake_execution_date.isoformat()), mock_hook.mapred_job_name)
示例9: test_render_template
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_render_template(self):
    """SparkSubmitOperator templated fields render against the TI context."""
    # Given
    operator = SparkSubmitOperator(task_id='spark_submit_job',
                                   dag=self.dag, **self._config)
    ti = TaskInstance(operator, DEFAULT_DATE)

    # When
    ti.render_templates()

    # Then
    expected_application_args = ['-f', 'foo',
                                 '--bar', 'bar',
                                 '--start', (DEFAULT_DATE - timedelta(days=1))
                                 .strftime("%Y-%m-%d"),
                                 '--end', DEFAULT_DATE.strftime("%Y-%m-%d"),
                                 '--with-spaces',
                                 'args should keep embdedded spaces',
                                 ]
    expected_name = 'spark_submit_job'
    self.assertListEqual(expected_application_args,
                         getattr(operator, '_application_args'))
    self.assertEqual(expected_name, getattr(operator, '_name'))
示例10: test_poke_context
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_poke_context(self, mock_session_send):
    """HttpSensor passes the execution context into ``response_check``.

    The check callable receives ``execution_date`` and only succeeds when
    it matches DEFAULT_DATE; otherwise it raises, which would fail the test.
    """
    response = requests.Response()
    response.status_code = 200
    mock_session_send.return_value = response

    def resp_check(_, execution_date):
        # Succeed only if the sensor forwarded the expected execution_date.
        if execution_date == DEFAULT_DATE:
            return True
        raise AirflowException('AirflowException raised here!')

    task = HttpSensor(
        task_id='http_sensor_poke_exception',
        http_conn_id='http_default',
        endpoint='',
        request_params={},
        response_check=resp_check,
        timeout=5,
        poke_interval=1,
        dag=self.dag)

    task_instance = TaskInstance(task=task, execution_date=DEFAULT_DATE)
    task.execute(task_instance.get_template_context())
示例11: test_load_templated_yaml
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_load_templated_yaml(self):
    """A YAML build file referenced by path is loaded and Jinja-rendered.

    ``{{ params.name }}`` in the file must be substituted from the
    operator's ``params`` during template rendering.
    """
    dag = DAG(dag_id='example_cloudbuild_operator', start_date=TEST_DEFAULT_DATE)
    # Keep the temp file open for the whole test: NamedTemporaryFile
    # deletes it on close.
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+t') as build:
        build.writelines("""
        steps:
        - name: 'ubuntu'
          args: ['echo', 'Hello {{ params.name }}!']
        """)
        build.seek(0)
        body_path = build.name
        operator = CloudBuildCreateBuildOperator(
            body=body_path,
            task_id="task-id", dag=dag,
            params={'name': 'airflow'}
        )
        operator.prepare_template()
        ti = TaskInstance(operator, TEST_DEFAULT_DATE)
        ti.render_templates()
        expected_body = {'steps': [
            {'name': 'ubuntu',
             'args': ['echo', 'Hello airflow!']
             }
        ]
        }
        self.assertEqual(expected_body, operator.body)
示例12: test_templates
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_templates(self, _):
    """Templated fields of the transfer-job create operator are rendered."""
    dag_id = 'test_dag_id'
    # pylint: disable=attribute-defined-outside-init
    self.dag = DAG(dag_id, default_args={'start_date': DEFAULT_DATE})
    op = CloudDataTransferServiceCreateJobOperator(
        body={"description": "{{ dag.dag_id }}"},
        gcp_conn_id='{{ dag.dag_id }}',
        aws_conn_id='{{ dag.dag_id }}',
        task_id='task-id',
        dag=self.dag,
    )
    ti = TaskInstance(op, DEFAULT_DATE)
    ti.render_templates()
    # Every templated field should now hold the rendered dag_id.
    self.assertEqual(dag_id, getattr(op, 'body')[DESCRIPTION])
    self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
    self.assertEqual(dag_id, getattr(op, 'aws_conn_id'))
示例13: test_job_delete_with_templates
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_job_delete_with_templates(self, _):
    """Templated fields of the transfer-job delete operator are rendered."""
    dag_id = 'test_dag_id'
    args = {'start_date': DEFAULT_DATE}
    self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
    op = CloudDataTransferServiceDeleteJobOperator(
        job_name='{{ dag.dag_id }}',
        gcp_conn_id='{{ dag.dag_id }}',
        api_version='{{ dag.dag_id }}',
        task_id=TASK_ID,
        dag=self.dag,
    )
    ti = TaskInstance(op, DEFAULT_DATE)
    ti.render_templates()
    # Every templated field should now hold the rendered dag_id.
    self.assertEqual(dag_id, getattr(op, 'job_name'))
    self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
    self.assertEqual(dag_id, getattr(op, 'api_version'))
示例14: test_operation_pause_with_templates
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_operation_pause_with_templates(self, _):
    """Templated fields of the pause-operation operator are rendered."""
    dag_id = 'test_dag_id'
    args = {'start_date': DEFAULT_DATE}
    self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
    op = CloudDataTransferServicePauseOperationOperator(
        operation_name='{{ dag.dag_id }}',
        gcp_conn_id='{{ dag.dag_id }}',
        api_version='{{ dag.dag_id }}',
        task_id=TASK_ID,
        dag=self.dag,
    )
    ti = TaskInstance(op, DEFAULT_DATE)
    ti.render_templates()
    # Every templated field should now hold the rendered dag_id.
    self.assertEqual(dag_id, getattr(op, 'operation_name'))
    self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
    self.assertEqual(dag_id, getattr(op, 'api_version'))
示例15: test_operation_resume_with_templates
# Required import: from airflow import models
# Or: from airflow.models import TaskInstance
def test_operation_resume_with_templates(self, _):
    """Templated fields of the resume-operation operator are rendered."""
    dag_id = 'test_dag_id'
    args = {'start_date': DEFAULT_DATE}
    self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
    op = CloudDataTransferServiceResumeOperationOperator(
        operation_name='{{ dag.dag_id }}',
        gcp_conn_id='{{ dag.dag_id }}',
        api_version='{{ dag.dag_id }}',
        task_id=TASK_ID,
        dag=self.dag,
    )
    ti = TaskInstance(op, DEFAULT_DATE)
    ti.render_templates()
    # Every templated field should now hold the rendered dag_id.
    self.assertEqual(dag_id, getattr(op, 'operation_name'))
    self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
    self.assertEqual(dag_id, getattr(op, 'api_version'))