This page collects typical usage examples of the Python function sahara.tests.unit.service.edp.edp_test_utils.create_cluster. If you are wondering how create_cluster is used in practice, the curated examples below should help.
A total of 15 code examples of the create_cluster function are shown, sorted by popularity.
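Every example below calls the helper as u.create_cluster(), i.e. the edp_test_utils module is imported as u. The helper itself is not reproduced on this page; a minimal sketch of a mock-based implementation, consistent with how the examples use the returned object (a cluster with a plugin name and version the EDP job manager can dispatch on), might look like the following. The attribute names and default values are assumptions for illustration, not the actual Sahara code.

# A minimal sketch of an edp_test_utils-style create_cluster helper.
# Attribute names and defaults here are assumptions; see
# sahara/tests/unit/service/edp/edp_test_utils.py for the real code.
from unittest import mock


def create_cluster(plugin_name='vanilla', hadoop_version='2.7.1'):
    cluster = mock.Mock()
    cluster.plugin_name = plugin_name
    cluster.hadoop_version = hadoop_version
    return cluster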

Example 1: test_build_workflow_for_job_pig
def test_build_workflow_for_job_pig(self, job_binary):
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
    job_binary.return_value = {"name": "script.pig"}

    input_data = u.create_data_source('swift://ex/i')
    output_data = u.create_data_source('swift://ex/o')
    data_source_urls = {input_data.id: input_data.url,
                        output_data.id: output_data.url}

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, 'hadoop', data_source_urls)

    self.assertIn("""
      <param>INPUT=swift://ex.sahara/i</param>
      <param>OUTPUT=swift://ex.sahara/o</param>""", res)

    self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

    self.assertIn("<script>script.pig</script>", res)

    # testing workflow creation with a proxy domain
    self.override_config('use_domain_for_proxy_users', True)
    self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, proxy=True)

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, 'hadoop', data_source_urls)

    self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.domain.name</name>
          <value>sahara_proxy_domain</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>55555555-6666-7777-8888-999999999999</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.trust.id</name>
          <value>0123456789abcdef0123456789abcdef</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>job_00000000-1111-2222-3333-4444444444444444</value>
        </property>
      </configuration>""", res)
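Example 1 (and several later examples) takes an extra job_binary argument: in the original test modules such arguments are injected by mock.patch decorators on the test methods, which this listing does not show. A hypothetical sketch of that wiring follows; the patch target and the base test class are assumptions, not copied from the Sahara source.

# Hypothetical wiring for the mock-injected argument used above.
# The patch target and the base class name are assumptions.
import mock

from sahara.tests.unit import base


class TestJobManager(base.SaharaTestCase):

    @mock.patch('sahara.conductor.API.job_binary_get')
    def test_build_workflow_for_job_pig(self, job_binary):
        job_binary.return_value = {"name": "script.pig"}
        # ... body as shown in Example 1 above ...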

Example 2: test_job_type_supported
def test_job_type_supported(self, job_get):
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
    job_get.return_value = job
    self.assertIsNotNone(job_manager._get_job_engine(u.create_cluster(),
                                                     job_exec))

    job.type = "unsupported_type"
    self.assertIsNone(job_manager._get_job_engine(u.create_cluster(),
                                                  job_exec))

Example 3: test_build_workflow_swift_configs
def test_build_workflow_swift_configs(self, job_binary):
    # Test that swift configs come from either input or output data sources
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
    job_binary.return_value = {"name": "script.pig"}

    input_data = u.create_data_source('swift://ex/i')
    output_data = u.create_data_source('hdfs://user/hadoop/out')

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec, input_data, output_data,
        'hadoop')

    self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

    input_data = u.create_data_source('hdfs://user/hadoop/in')
    output_data = u.create_data_source('swift://ex/o')

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec, input_data, output_data,
        'hadoop')

    self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

    job, job_exec = u.create_job_exec(
        edp.JOB_TYPE_PIG, configs={'configs': {'dummy': 'value'}})
    input_data = u.create_data_source('hdfs://user/hadoop/in')
    output_data = u.create_data_source('hdfs://user/hadoop/out')

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec, input_data, output_data,
        'hadoop')

    self.assertIn("""
      <configuration>
        <property>
          <name>dummy</name>
          <value>value</value>
        </property>
      </configuration>""", res)

Example 4: test_build_workflow_for_job_hive
def test_build_workflow_for_job_hive(self, job_binary):
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
    job_binary.return_value = {"name": "script.q"}

    input_data = u.create_data_source('swift://ex/i')
    output_data = u.create_data_source('swift://ex/o')
    data_source_urls = {input_data.id: input_data.url,
                        output_data.id: output_data.url}

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, 'hadoop', data_source_urls)

    doc = xml.parseString(res)
    hive = doc.getElementsByTagName('hive')[0]
    self.assertEqual('/user/hadoop/conf/hive-site.xml',
                     xmlutils.get_text_from_node(hive, 'job-xml'))

    configuration = hive.getElementsByTagName('configuration')
    properties = xmlutils.get_property_dict(configuration[0])
    self.assertEqual({'fs.swift.service.sahara.password': 'admin1',
                      'fs.swift.service.sahara.username': 'admin'},
                     properties)

    self.assertEqual('script.q',
                     xmlutils.get_text_from_node(hive, 'script'))

    params = xmlutils.get_param_dict(hive)
    self.assertEqual({'INPUT': 'swift://ex.sahara/i',
                      'OUTPUT': 'swift://ex.sahara/o'}, params)

    # testing workflow creation with a proxy domain
    self.override_config('use_domain_for_proxy_users', True)
    self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, 'hadoop', data_source_urls)

    doc = xml.parseString(res)
    hive = doc.getElementsByTagName('hive')[0]
    configuration = hive.getElementsByTagName('configuration')
    properties = xmlutils.get_property_dict(configuration[0])
    self.assertEqual({
        'fs.swift.service.sahara.domain.name':
            'sahara_proxy_domain',
        'fs.swift.service.sahara.trust.id':
            '0123456789abcdef0123456789abcdef',
        'fs.swift.service.sahara.password':
            '55555555-6666-7777-8888-999999999999',
        'fs.swift.service.sahara.username':
            'job_00000000-1111-2222-3333-4444444444444444'}, properties)

Example 5: test_build_workflow_for_job_hive
def test_build_workflow_for_job_hive(self, job_binary):
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
    job_binary.return_value = {"name": "script.q"}

    input_data = u.create_data_source("swift://ex/i")
    output_data = u.create_data_source("swift://ex/o")
    data_source_urls = {input_data.id: input_data.url,
                        output_data.id: output_data.url}

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, "hadoop", data_source_urls
    )

    doc = xml.parseString(res)
    hive = doc.getElementsByTagName("hive")[0]
    self.assertEqual("/user/hadoop/conf/hive-site.xml",
                     xmlutils.get_text_from_node(hive, "job-xml"))

    configuration = hive.getElementsByTagName("configuration")
    properties = xmlutils.get_property_dict(configuration[0])
    self.assertEqual(
        {"fs.swift.service.sahara.password": "admin1",
         "fs.swift.service.sahara.username": "admin"},
        properties
    )

    self.assertEqual("script.q", xmlutils.get_text_from_node(hive, "script"))

    params = xmlutils.get_param_dict(hive)
    self.assertEqual({"INPUT": "swift://ex.sahara/i",
                      "OUTPUT": "swift://ex.sahara/o"}, params)

    # testing workflow creation with a proxy domain
    self.override_config("use_domain_for_proxy_users", True)
    self.override_config("proxy_user_domain_name", "sahara_proxy_domain")
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs,
        input_data, output_data, "hadoop", data_source_urls
    )

    doc = xml.parseString(res)
    hive = doc.getElementsByTagName("hive")[0]
    configuration = hive.getElementsByTagName("configuration")
    properties = xmlutils.get_property_dict(configuration[0])
    self.assertEqual(
        {
            "fs.swift.service.sahara.domain.name": "sahara_proxy_domain",
            "fs.swift.service.sahara.trust.id":
                "0123456789abcdef0123456789abcdef",
            "fs.swift.service.sahara.password":
                "55555555-6666-7777-8888-999999999999",
            "fs.swift.service.sahara.username":
                "job_00000000-1111-2222-3333-4444444444444444",
        },
        properties,
    )

Example 6: test_build_workflow_for_job_java
def test_build_workflow_for_job_java(self):
    # If args include swift paths, user and password values
    # will have to be supplied via configs instead of being
    # lifted from input or output data sources
    configs = {sw.HADOOP_SWIFT_USERNAME: 'admin',
               sw.HADOOP_SWIFT_PASSWORD: 'admin1'}

    configs = {
        'configs': configs,
        'args': ['swift://ex/i',
                 'output_path']
    }

    job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec)

    self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>
      <main-class>%s</main-class>
      <java-opts>%s</java-opts>
      <arg>swift://ex.sahara/i</arg>
      <arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
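The Java example interpolates two module-level constants, _java_main_class and _java_opts, which are defined elsewhere in the original test module and are not shown in this listing. The values below are placeholders chosen purely for illustration; the real test module may use different ones.

# Placeholder definitions for the module-level constants referenced in
# Example 6; the actual values live in the original test module.
_java_main_class = "org.apache.hadoop.examples.WordCount"
_java_opts = "-Dparam1=val1"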

Example 7: test_build_workflow_for_job_hive
def test_build_workflow_for_job_hive(self, job_binary):
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE)
    job_binary.return_value = {"name": "script.q"}

    input_data = u.create_data_source('swift://ex/i')
    output_data = u.create_data_source('swift://ex/o')

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec, input_data, output_data,
        'hadoop')

    self.assertIn("""
      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>
      <script>script.q</script>
      <param>INPUT=swift://ex.sahara/i</param>
      <param>OUTPUT=swift://ex.sahara/o</param>""", res)

Example 8: test_run_job
def test_run_job(self, exec_get, job, data_source,
                 update, remote, wf_factory, get_ds_urls,
                 prepare_cluster):
    wf_factory.return_value = mock.MagicMock()

    remote_class = mock.MagicMock()
    remote_class.__exit__.return_value = 'closed'
    remote.return_value = remote_class

    job_class = mock.MagicMock()
    job.return_value = job_class
    job.name = "myJob"

    source = mock.MagicMock()
    source.url = "localhost"
    data_source.return_value = source
    get_ds_urls.return_value = ('url', 'url')

    oje = FakeOozieJobEngine(u.create_cluster())

    client_class = mock.MagicMock()
    client_class.add_job = mock.MagicMock(return_value=1)
    client_class.get_job_info = mock.MagicMock(
        return_value={'status': 'PENDING'})
    oje.get_client = mock.MagicMock(return_value=client_class)

    _, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
    update.return_value = job_exec

    self.assertEqual((1, 'PENDING', None), oje.run_job(job_exec))

Example 9: test_prepare_run_job
def test_prepare_run_job(self, job, data_source, update,
                         remote, wf_factory, get_ds_urls,
                         prepare_cluster):
    wf_factory.return_value = mock.MagicMock()

    remote_class = mock.MagicMock()
    remote_class.__exit__.return_value = 'closed'
    remote.return_value = remote_class

    job_class = mock.MagicMock()
    job_class.name = "myJob"
    job.return_value = job_class

    source = mock.MagicMock()
    source.url = "localhost"
    get_ds_urls.return_value = ('url', 'url')
    data_source.return_value = source

    oje = FakeOozieJobEngine(u.create_cluster())
    _, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
    update.return_value = job_exec

    res = oje._prepare_run_job(job_exec)

    self.assertEqual(ctx.ctx(), res['context'])
    self.assertEqual('hadoop', res['hdfs_user'])
    self.assertEqual(job_exec, res['job_execution'])
    self.assertEqual({}, res['oozie_params'])

Example 10: _build_workflow_with_conf_common
def _build_workflow_with_conf_common(self, job_type):
    input_data = u.create_data_source('swift://ex/i')
    output_data = u.create_data_source('swift://ex/o')

    job, job_exec = u.create_job_exec(job_type,
                                      configs={"configs": {'c': 'f'}})

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec, input_data, output_data,
        'hadoop')

    self.assertIn("""
        <property>
          <name>c</name>
          <value>f</value>
        </property>""", res)

    self.assertIn("""
        <property>
          <name>mapred.input.dir</name>
          <value>swift://ex.sahara/i</value>
        </property>""", res)

    self.assertIn("""
        <property>
          <name>mapred.output.dir</name>
          <value>swift://ex.sahara/o</value>
        </property>""", res)

Example 11: test_get_oozie_job_params
def test_get_oozie_job_params(self):
    oje = FakeOozieJobEngine(u.create_cluster())
    oozie_params = {'oozie.libpath': '/mylibpath',
                    'oozie.wf.application.path': '/wrong'}
    scheduled_params = {'start': '2015-06-10T06:05Z',
                        'end': '2015-06-10T06:50Z',
                        'frequency': '10'}
    job_dir = '/job_dir'
    job_execution_type = 'workflow'

    job_params = oje._get_oozie_job_params('hadoop',
                                           '/tmp', oozie_params, True,
                                           scheduled_params, job_dir,
                                           job_execution_type)

    self.assertEqual('http://localhost:50030', job_params["jobTracker"])
    self.assertEqual('hdfs://localhost:8020', job_params["nameNode"])
    self.assertEqual('hadoop', job_params["user.name"])
    self.assertEqual('hdfs://localhost:8020/tmp',
                     job_params['oozie.wf.application.path'])
    self.assertEqual("/mylibpath,hdfs://localhost:8020/user/"
                     "sahara-hbase-lib", job_params['oozie.libpath'])

    # Make sure this doesn't raise an exception
    job_params = oje._get_oozie_job_params('hadoop',
                                           '/tmp', {}, True)
    self.assertEqual("hdfs://localhost:8020/user/"
                     "sahara-hbase-lib", job_params['oozie.libpath'])

Example 12: test_upload_workflow_file
def test_upload_workflow_file(self, remote_get):
    oje = FakeOozieJobEngine(u.create_cluster())
    remote_class = mock.MagicMock()
    remote_class.__exit__.return_value = 'closed'
    remote_get.return_value = remote_class

    res = oje._upload_workflow_file(remote_get, "test", "hadoop.xml",
                                    'hdfs')
    self.assertEqual("test/workflow.xml", res)

Example 13: test_build_workflow_for_job_java_with_adapter
def test_build_workflow_for_job_java_with_adapter(self, edp_conf_mock):
    edp_conf_mock.return_value = True

    configs = {"configs": {"edp.java.main_class": "some_main"}}
    job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)

    res = workflow_factory.get_workflow_xml(
        job, u.create_cluster(), job_exec.job_configs)
    self.assertIn(
        "<main-class>org.openstack.sahara.edp.MainWrapper</main-class>",
        res)
    self.assertNotIn("some_main", res)

Example 14: test_hdfs_create_workflow_dir
def test_hdfs_create_workflow_dir(self, remote):
    remote_class = mock.MagicMock()
    remote_class.__exit__.return_value = 'closed'
    remote.return_value = remote_class

    oje = FakeOozieJobEngine(u.create_cluster())
    job, _ = u.create_job_exec(edp.JOB_TYPE_PIG)
    res = oje._create_hdfs_workflow_dir(mock.Mock(), job)
    self.assertIn('/user/hadoop/special_name/', res)

Example 15: test_run_job_unsupported_type
def test_run_job_unsupported_type(self, cluster_get, job_exec_get, job_get):
    job, job_exec = u.create_job_exec("unsupported_type")
    job_exec_get.return_value = job_exec
    job_get.return_value = job

    cluster = u.create_cluster()
    cluster.status = c_u.CLUSTER_STATUS_ACTIVE
    cluster_get.return_value = cluster

    with testtools.ExpectedException(ex.EDPError):
        job_manager._run_job(job_exec.id)