This article collects typical usage examples of Python's airflow.exceptions.AirflowException. If you have been wondering how exactly exceptions.AirflowException is used, or what it looks like in practice, the curated code examples below may help. You can also explore further usage examples from the containing module, airflow.exceptions.
The 15 code examples of exceptions.AirflowException shown below are sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
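Before the test cases, here is a minimal sketch of the basic pattern the examples below exercise: importing AirflowException and raising it from an operator's execute() method to fail the task. The operator class, task_id, and file path here are hypothetical, chosen only for illustration.

import os

from airflow.exceptions import AirflowException
from airflow.models import BaseOperator


class EnsureFileOperator(BaseOperator):
    """Hypothetical operator that fails its task when an expected file is missing."""

    def execute(self, context):
        path = '/tmp/expected_file'  # hypothetical path, for illustration only
        if not os.path.exists(path):
            # Raising AirflowException marks the task instance as failed and
            # triggers on_failure_callback / retry handling.
            raise AirflowException("Expected file not found: %s" % path)
        return path

Instantiated in a DAG (for example, EnsureFileOperator(task_id='check_file', dag=dag)), a raised AirflowException is exactly what the tests below assert on via assertRaises / assertRaisesRegex.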
Example 1: test_on_failure_callback
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_on_failure_callback(self):
    # Annoying workaround for nonlocal not existing in python 2
    data = {'called': False}

    def check_failure(context, test_case=self):
        data['called'] = True
        error = context.get('exception')
        test_case.assertIsInstance(error, AirflowException)

    op = BashOperator(
        task_id='check_on_failure_callback',
        bash_command="exit 1",
        dag=self.dag,
        on_failure_callback=check_failure)

    self.assertRaises(
        AirflowException,
        op.run,
        start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
    self.assertTrue(data['called'])
Example 2: test_default_args
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_default_args(self):
    default_args = {'test_param': True}
    dummy_class = DummyClass(default_args=default_args)  # pylint: disable=no-value-for-parameter
    self.assertTrue(dummy_class.test_param)

    default_args = {'test_param': True, 'test_sub_param': True}
    dummy_subclass = DummySubClass(default_args=default_args)  # pylint: disable=no-value-for-parameter
    self.assertTrue(dummy_class.test_param)
    self.assertTrue(dummy_subclass.test_sub_param)

    default_args = {'test_param': True}
    dummy_subclass = DummySubClass(default_args=default_args, test_sub_param=True)
    self.assertTrue(dummy_class.test_param)
    self.assertTrue(dummy_subclass.test_sub_param)

    with self.assertRaisesRegex(AirflowException,
                                'Argument.*test_sub_param.*required'):
        DummySubClass(default_args=default_args)  # pylint: disable=no-value-for-parameter
Example 3: test_failed_service_raises_error
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_failed_service_raises_error(self, types_mock, client_class_mock):
    mock_obj = mock.Mock()

    client_mock = mock.Mock(spec=APIClient)
    client_mock.create_service.return_value = {'ID': 'some_id'}
    client_mock.images.return_value = []
    client_mock.pull.return_value = [b'{"status":"pull log"}']
    client_mock.tasks.return_value = [{'Status': {'State': 'failed'}}]
    types_mock.TaskTemplate.return_value = mock_obj
    types_mock.ContainerSpec.return_value = mock_obj
    types_mock.RestartPolicy.return_value = mock_obj
    types_mock.Resources.return_value = mock_obj

    client_class_mock.return_value = client_mock

    operator = DockerSwarmOperator(image='', auto_remove=False, task_id='unittest', enable_logging=False)

    msg = "Service failed: {'ID': 'some_id'}"
    with self.assertRaises(AirflowException) as error:
        operator.execute(None)
    self.assertEqual(str(error.exception), msg)
Example 4: test_execute_with_failures
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_execute_with_failures(self, aci_mock):
    expected_c_state = ContainerState(state='Terminated', exit_code=1, detail_status='test')
    expected_cg = make_mock_cg(expected_c_state)

    aci_mock.return_value.get_state.return_value = expected_cg
    aci_mock.return_value.exists.return_value = False

    aci = AzureContainerInstancesOperator(ci_conn_id=None,
                                          registry_conn_id=None,
                                          resource_group='resource-group',
                                          name='container-name',
                                          image='container-image',
                                          region='region',
                                          task_id='task')
    with self.assertRaises(AirflowException):
        aci.execute(None)

    self.assertEqual(aci_mock.return_value.delete.call_count, 1)
Example 5: test_execute_assertion_fail
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_execute_assertion_fail(self, mock_get_hook):
    mock_cmd = mock.Mock()
    mock_cmd.status = 'done'
    mock_cmd.id = 123
    mock_cmd.is_success = mock.Mock(
        return_value=HiveCommand.is_success(mock_cmd.status))

    mock_hook = mock.Mock()
    mock_hook.get_first.return_value = [11]
    mock_hook.cmd = mock_cmd
    mock_get_hook.return_value = mock_hook

    operator = self.__construct_operator('select value from tab1 limit 1;', 5, 1)

    with self.assertRaisesRegex(AirflowException,
                                'Qubole Command Id: ' + str(mock_cmd.id)):
        operator.execute()

    mock_cmd.is_success.assert_called_once_with(mock_cmd.status)
Example 6: test_execute_assert_query_fail
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_execute_assert_query_fail(self, mock_get_hook):
    mock_cmd = mock.Mock()
    mock_cmd.status = 'error'
    mock_cmd.id = 123
    mock_cmd.is_success = mock.Mock(
        return_value=HiveCommand.is_success(mock_cmd.status))

    mock_hook = mock.Mock()
    mock_hook.get_first.return_value = [11]
    mock_hook.cmd = mock_cmd
    mock_get_hook.return_value = mock_hook

    operator = self.__construct_operator('select value from tab1 limit 1;', 5, 1)

    with self.assertRaises(AirflowException) as cm:
        operator.execute()

    self.assertNotIn('Qubole Command Id: ', str(cm.exception))
    mock_cmd.is_success.assert_called_once_with(mock_cmd.status)
Example 7: test_poll_for_termination_fail
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_poll_for_termination_fail(self, mock_livy):
    state_list = 2 * [BatchState.RUNNING] + [BatchState.ERROR]

    def side_effect(_):
        if state_list:
            return state_list.pop(0)
        # fail if does not stop right before
        raise AssertionError()

    mock_livy.side_effect = side_effect

    task = LivyOperator(
        file='sparkapp',
        polling_interval=1,
        dag=self.dag,
        task_id='livy_example'
    )
    task._livy_hook = task.get_hook()

    with self.assertRaises(AirflowException):
        task.poll_for_termination(BATCH_ID)

    mock_livy.assert_called_with(BATCH_ID)
    self.assertEqual(mock_livy.call_count, 3)
Example 8: test_run_cli_failure_status_code
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_run_cli_failure_status_code(self, mock_popen):
    mock_proc = mock.MagicMock()
    mock_proc.returncode = 1
    mock_proc.stdout = io.BytesIO(b'')
    mock_popen.return_value = mock_proc

    self.db_hook.pinot_admin_system_exit = True
    params = ["foo", "bar", "baz"]
    with self.assertRaises(AirflowException):
        self.db_hook.run_cli(params)

    params.insert(0, self.conn.extra_dejson.get('cmd_path'))
    env = os.environ.copy()
    env.update({"JAVA_OPTS": "-Dpinot.admin.system.exit=true "})
    mock_popen.assert_called_once_with(params,
                                       stderr=subprocess.STDOUT,
                                       stdout=subprocess.PIPE,
                                       close_fds=True,
                                       env=env)
Example 9: test_spark_process_runcmd_and_fail
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_spark_process_runcmd_and_fail(self, mock_popen):
    # Given
    sql = 'SELECT 1'
    master = 'local'
    params = '--deploy-mode cluster'
    status = 1
    mock_popen.return_value.wait.return_value = status

    # When
    with self.assertRaises(AirflowException) as e:
        hook = SparkSqlHook(
            conn_id='spark_default',
            sql=sql,
            master=master,
        )
        hook.run_query(params)

    # Then
    self.assertEqual(
        str(e.exception),
        "Cannot execute '{}' on {} (additional parameters: '{}'). Process exit code: {}.".format(
            sql, master, params, status
        )
    )
Example 10: test_submit_gone_wrong
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_submit_gone_wrong(self, m):
    task_post = m.post(
        'http://druid-overlord:8081/druid/indexer/v1/task',
        text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
    )
    status_check = m.get(
        'http://druid-overlord:8081/druid/indexer/v1/task/'
        '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
        text='{"status":{"status": "FAILED"}}'
    )

    # The job failed for some reason
    with self.assertRaises(AirflowException):
        self.db_hook.submit_indexing_job('Long json file')

    self.assertTrue(task_post.called_once)
    self.assertTrue(status_check.called_once)
Example 11: test_submit_unknown_response
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_submit_unknown_response(self, m):
    task_post = m.post(
        'http://druid-overlord:8081/druid/indexer/v1/task',
        text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
    )
    status_check = m.get(
        'http://druid-overlord:8081/druid/indexer/v1/task/'
        '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
        text='{"status":{"status": "UNKNOWN"}}'
    )

    # An unknown error code
    with self.assertRaises(AirflowException):
        self.db_hook.submit_indexing_job('Long json file')

    self.assertTrue(task_post.called_once)
    self.assertTrue(status_check.called_once)
Example 12: test_submit_timeout
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_submit_timeout(self, m):
    self.db_hook.timeout = 1
    self.db_hook.max_ingestion_time = 5
    task_post = m.post(
        'http://druid-overlord:8081/druid/indexer/v1/task',
        text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
    )
    status_check = m.get(
        'http://druid-overlord:8081/druid/indexer/v1/task/'
        '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
        text='{"status":{"status": "RUNNING"}}'
    )
    shutdown_post = m.post(
        'http://druid-overlord:8081/druid/indexer/v1/task/'
        '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/shutdown',
        text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
    )

    # Because the job keeps running
    with self.assertRaises(AirflowException):
        self.db_hook.submit_indexing_job('Long json file')

    self.assertTrue(task_post.called_once)
    self.assertTrue(status_check.called)
    self.assertTrue(shutdown_post.called_once)
Example 13: test_do_api_call_waits_between_retries
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_do_api_call_waits_between_retries(self, mock_sleep):
    retry_delay = 5
    self.hook = DatabricksHook(retry_delay=retry_delay)

    for exception in [requests_exceptions.ConnectionError,
                      requests_exceptions.SSLError,
                      requests_exceptions.Timeout,
                      requests_exceptions.ConnectTimeout,
                      requests_exceptions.HTTPError]:
        with mock.patch('airflow.providers.databricks.hooks.databricks.requests') as mock_requests:
            with mock.patch.object(self.hook.log, 'error'):
                mock_sleep.reset_mock()
                setup_mock_requests(mock_requests, exception)

                with self.assertRaises(AirflowException):
                    self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                self.assertEqual(len(mock_sleep.mock_calls), self.hook.retry_limit - 1)
                calls = [
                    mock.call(retry_delay),
                    mock.call(retry_delay)
                ]
                mock_sleep.assert_has_calls(calls)
Example 14: test_poke_exception
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def test_poke_exception(self, mock_session_send):
    """
    An exception raised in the poke function should not be ignored.
    """
    response = requests.Response()
    response.status_code = 200
    mock_session_send.return_value = response

    def resp_check(_):
        raise AirflowException('AirflowException raised here!')

    task = HttpSensor(
        task_id='http_sensor_poke_exception',
        http_conn_id='http_default',
        endpoint='',
        request_params={},
        response_check=resp_check,
        timeout=5,
        poke_interval=1)

    with self.assertRaisesRegex(AirflowException, 'AirflowException raised here!'):
        task.execute(context={})
Example 15: resolve_logs_folder
# Required import: from airflow import exceptions [as alias]
# Or: from airflow.exceptions import AirflowException [as alias]
def resolve_logs_folder() -> str:
    """
    Returns the LOGS folder specified in the current Airflow config.
    """
    config_file = get_airflow_config(AIRFLOW_HOME)
    conf = AirflowConfigParser()
    conf.read(config_file)
    try:
        logs = conf.get("logging", "base_log_folder")
    except AirflowException:
        try:
            logs = conf.get("core", "base_log_folder")
        except AirflowException:
            logs = os.path.join(AIRFLOW_HOME, 'logs')
    return logs