本文整理匯總了Python中airflow.operators.bash_operator.BashOperator.run方法的典型用法代碼示例。如果您正苦於以下問題:Python BashOperator.run方法的具體用法?Python BashOperator.run怎麽用?Python BashOperator.run使用的例子?那麽,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類airflow.operators.bash_operator.BashOperator的用法示例。
在下文中一共展示了BashOperator.run方法的2個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: test_echo_env_variables
# Required import: from airflow.operators.bash_operator import BashOperator
# Method under test: BashOperator.run
def test_echo_env_variables(self):
    """
    Test that env variables are exported correctly to the
    task bash environment.

    A BashOperator appends $AIRFLOW_HOME, $PYTHONPATH and the
    AIRFLOW_CTX_* context variables to a temporary file; the test
    then asserts the file contains the expected values.
    """
    now = datetime.utcnow()
    # Make the timestamp timezone-aware (UTC) before using it as the
    # dagrun start_date.
    now = now.replace(tzinfo=timezone.utc)
    self.dag = DAG(
        dag_id='bash_op_test', default_args={
            'owner': 'airflow',
            'retries': 100,
            'start_date': DEFAULT_DATE
        },
        schedule_interval='@daily',
        dagrun_timeout=timedelta(minutes=60))
    # Create a RUNNING dagrun so the task executes with a dag-run
    # context (its run_id is asserted on via AIRFLOW_CTX_DAG_RUN_ID).
    self.dag.create_dagrun(
        run_id='manual__' + DEFAULT_DATE.isoformat(),
        execution_date=DEFAULT_DATE,
        start_date=now,
        state=State.RUNNING,
        external_trigger=False,
    )
    import tempfile
    # NOTE: the temp file must stay alive until after t.run() and the
    # read-back below, hence everything nests inside this `with`.
    with tempfile.NamedTemporaryFile() as f:
        fname = f.name
        # Each echo appends one environment variable to the temp file
        # so its content can be inspected after the task runs.
        t = BashOperator(
            task_id='echo_env_vars',
            dag=self.dag,
            bash_command='echo $AIRFLOW_HOME>> {0};'
                         'echo $PYTHONPATH>> {0};'
                         'echo $AIRFLOW_CTX_DAG_ID >> {0};'
                         'echo $AIRFLOW_CTX_TASK_ID>> {0};'
                         'echo $AIRFLOW_CTX_EXECUTION_DATE>> {0};'
                         'echo $AIRFLOW_CTX_DAG_RUN_ID>> {0};'.format(fname)
        )
        # Override AIRFLOW_HOME with a sentinel value so its propagation
        # into the bash environment is detectable; restored at the end.
        original_AIRFLOW_HOME = os.environ['AIRFLOW_HOME']
        os.environ['AIRFLOW_HOME'] = 'MY_PATH_TO_AIRFLOW_HOME'
        t.run(DEFAULT_DATE, DEFAULT_DATE,
              ignore_first_depends_on_past=True, ignore_ti_state=True)
        with open(fname, 'r') as fr:
            output = ''.join(fr.readlines())
            self.assertIn('MY_PATH_TO_AIRFLOW_HOME', output)
            # exported in run_unit_tests.sh as part of PYTHONPATH
            self.assertIn('tests/test_utils', output)
            self.assertIn('bash_op_test', output)
            self.assertIn('echo_env_vars', output)
            self.assertIn(DEFAULT_DATE.isoformat(), output)
            self.assertIn('manual__' + DEFAULT_DATE.isoformat(), output)
        os.environ['AIRFLOW_HOME'] = original_AIRFLOW_HOME
示例2: test_external_task_sensor_fn_multiple_execution_dates
# Required import: from airflow.operators.bash_operator import BashOperator
# Method under test: BashOperator.run
def test_external_task_sensor_fn_multiple_execution_dates(self):
    """
    Test an ExternalTaskSensor whose execution_date_fn yields multiple
    execution dates.

    An external DAG is run for two consecutive seconds.  One of its
    tasks deliberately fails when the execution date's seconds value
    is 1, so a sensor watching the always-successful task should
    succeed while a sensor watching the failing task must time out
    with AirflowSensorTimeout.
    """
    # Jinja-templated bash script: exits 1 when the execution date's
    # seconds component is 1, otherwise exits 0.
    bash_command_code = """
{% set s=execution_date.time().second %}
echo "second is {{ s }}"
if [[ $(( {{ s }} % 60 )) == 1 ]]
then
exit 1
fi
exit 0
"""
    dag_external_id = TEST_DAG_ID + '_external'
    # One-second schedule so two execution dates fit between
    # DEFAULT_DATE and DEFAULT_DATE + 1s.
    dag_external = DAG(
        dag_external_id,
        default_args=self.args,
        schedule_interval=timedelta(seconds=1))
    task_external_with_failure = BashOperator(
        task_id="task_external_with_failure",
        bash_command=bash_command_code,
        retries=0,
        dag=dag_external)
    task_external_without_failure = DummyOperator(
        task_id="task_external_without_failure",
        retries=0,
        dag=dag_external)
    # Run the always-successful task for both execution dates.
    task_external_without_failure.run(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE + timedelta(seconds=1),
        ignore_ti_state=True)
    session = settings.Session()
    TI = TaskInstance
    try:
        task_external_with_failure.run(
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE + timedelta(seconds=1),
            ignore_ti_state=True)
        # The task_external_with_failure task is expected to fail
        # once per minute (the run on the first second of
        # each minute).
    except Exception as e:
        # Swallow the exception only when it corresponds to exactly the
        # single expected failure (the seconds==1 run); any other
        # failure pattern is re-raised.
        failed_tis = session.query(TI).filter(
            TI.dag_id == dag_external_id,
            TI.state == State.FAILED,
            TI.execution_date == DEFAULT_DATE + timedelta(seconds=1)).all()
        if len(failed_tis) == 1 and \
                failed_tis[0].task_id == 'task_external_with_failure':
            pass
        else:
            raise e
    dag_id = TEST_DAG_ID
    dag = DAG(
        dag_id,
        default_args=self.args,
        schedule_interval=timedelta(minutes=1))
    # Sensor watching the task that succeeded for both execution dates:
    # it should see two successes and finish without timing out.
    task_without_failure = ExternalTaskSensor(
        task_id='task_without_failure',
        external_dag_id=dag_external_id,
        external_task_id='task_external_without_failure',
        execution_date_fn=lambda dt: [dt + timedelta(seconds=i)
                                      for i in range(2)],
        allowed_states=['success'],
        retries=0,
        timeout=1,
        poke_interval=1,
        dag=dag)
    # Sensor watching the task that failed for one of the two execution
    # dates: it can never observe two successes and must time out.
    task_with_failure = ExternalTaskSensor(
        task_id='task_with_failure',
        external_dag_id=dag_external_id,
        external_task_id='task_external_with_failure',
        execution_date_fn=lambda dt: [dt + timedelta(seconds=i)
                                      for i in range(2)],
        allowed_states=['success'],
        retries=0,
        timeout=1,
        poke_interval=1,
        dag=dag)
    task_without_failure.run(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE,
        ignore_ti_state=True)
    with self.assertRaises(AirflowSensorTimeout):
        task_with_failure.run(
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE,
            ignore_ti_state=True)