本文整理汇总了Python中airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook._build_dataflow_job_name方法的典型用法代码示例。如果您正苦于以下问题:Python DataFlowHook._build_dataflow_job_name方法的具体用法?Python DataFlowHook._build_dataflow_job_name怎么用?Python DataFlowHook._build_dataflow_job_name使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook
的用法示例。
在下文中一共展示了DataFlowHook._build_dataflow_job_name方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: DataFlowHookTest
# 需要导入模块: from airflow.contrib.hooks.gcp_dataflow_hook import DataFlowHook [as 别名]
# 或者: from airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook import _build_dataflow_job_name [as 别名]
#.........这里部分代码省略.........
EXPECTED_CMD = ['java', '-cp', JAR_FILE, JOB_CLASS,
'--region=us-central1',
'--runner=DataflowRunner', '--project=test',
'--stagingLocation=gs://test/staging',
'--labels={"foo":"bar"}',
'--jobName={}-{}'.format(TASK_ID, MOCK_UUID)]
self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
sorted(EXPECTED_CMD))
@mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
@mock.patch('subprocess.Popen')
@mock.patch('select.select')
def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen, mock_logging):
    """Verify _Dataflow logs the launched command, re-emits stderr lines as
    warnings, and raises when the subprocess terminates unsuccessfully.

    ``subprocess.Popen`` and ``select.select`` are patched so no real
    process is spawned; the fake process yields two stderr lines and then
    reports termination.
    """
    mock_logging.info = MagicMock()
    mock_logging.warning = MagicMock()
    mock_proc = MagicMock()
    mock_proc.stderr = MagicMock()
    mock_proc.stderr.readlines = MagicMock(return_value=['test\n', 'error\n'])
    mock_stderr_fd = MagicMock()
    mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
    # select() must report the stderr fd as readable so the wait loop
    # drains the mocked stderr lines.
    mock_select.return_value = [[mock_stderr_fd]]
    # poll() returns None first (process still running), then a non-None
    # exit code to signal termination.  NOTE: the original code placed a
    # *function object* in side_effect; mock returns list items verbatim
    # without calling them, so that closure (which also assigned the
    # nonexistent ``return_code`` attribute instead of ``returncode``)
    # never executed.  A plain failing exit code is equivalent and honest.
    mock_proc.poll = MagicMock(side_effect=[None, 1])
    mock_proc.returncode = 1
    mock_popen.return_value = mock_proc
    dataflow = _Dataflow(['test', 'cmd'])
    mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
    self.assertRaises(Exception, dataflow.wait_for_done)
    mock_logging.warning.assert_has_calls([call('test'), call('error')])
def test_valid_dataflow_job_name(self):
    """A task_id that is already a valid Dataflow job name is returned
    unchanged when append_job_name is False."""
    job_name = self.dataflow_hook._build_dataflow_job_name(
        task_id=TASK_ID, append_job_name=False
    )
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(job_name, TASK_ID)
def test_fix_underscore_in_task_id(self):
    """Underscores in the task_id are replaced with hyphens, since
    Dataflow job names may not contain underscores."""
    task_id_with_underscore = 'test_example'
    expected_job_name = task_id_with_underscore.replace('_', '-')
    job_name = self.dataflow_hook._build_dataflow_job_name(
        task_id=task_id_with_underscore, append_job_name=False
    )
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(job_name, expected_job_name)
def test_invalid_dataflow_job_name(self):
    """A name that starts with a digit is rejected even after underscore
    sanitization, and the sanitized name is quoted in the error message."""
    bad_name = '9test_invalid_name'
    sanitized_name = bad_name.replace('_', '-')
    with self.assertRaises(AssertionError) as ctx:
        self.dataflow_hook._build_dataflow_job_name(
            task_id=bad_name, append_job_name=False
        )
    # The sanitized job name should be echoed back in the error text.
    self.assertIn('Invalid job_name ({})'.format(sanitized_name),
                  str(ctx.exception))
def test_dataflow_job_regex_check(self):
    """Names built from lowercase letters, digits, and hyphens that start
    with a letter are accepted verbatim; names starting with a digit or
    containing other characters raise AssertionError.

    The repeated call pattern is data-driven, and the deprecated
    ``assertEquals`` alias (removed in Python 3.12) is replaced with
    ``assertEqual``.
    """
    valid_names = ('df-job-1', 'df-job', 'dfjob', 'dfjob1')
    for name in valid_names:
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                task_id=name, append_job_name=False
            ),
            name,
        )
    invalid_names = ('1dfjob', '[email protected]', 'df^jo')
    for name in invalid_names:
        self.assertRaises(
            AssertionError,
            self.dataflow_hook._build_dataflow_job_name,
            task_id=name, append_job_name=False,
        )