

Python edp_test_utils.create_job_exec Function Code Examples

This article collects typical usage examples of the Python function sahara.tests.unit.service.edp.edp_test_utils.create_job_exec. If you have been wondering what create_job_exec does, how to call it, or what real-world usage looks like, the curated examples below should help.


The following 15 code examples of the create_job_exec function are shown, ordered by popularity by default.
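Each snippet below is a single test method excerpted from a larger test class, so imports, the class definition, and the @mock.patch decorators that inject parameters such as job_binary are not shown. The following minimal sketch reconstructs that scaffolding; the import paths and the patch target are assumptions inferred from the call sites below, not copied from any one source file, and may differ between sahara releases.

    # Assumed scaffolding for the excerpts below; import paths and the
    # mock.patch target are reconstructions, not verified source code.
    from unittest import mock

    from sahara.tests.unit import base
    from sahara.tests.unit.service.edp import edp_test_utils as u
    from sahara.utils import edp


    class ExampleTestCase(base.SaharaWithDbTestCase):

        # Parameters like `job_binary` in the excerpts are injected by
        # @mock.patch decorators that were dropped during extraction.
        @mock.patch('sahara.conductor.API.job_binary_get')  # assumed target
        def test_create_job_exec_basics(self, job_binary):
            # create_job_exec builds a (job, job_execution) pair for the
            # given EDP job type; configs and proxy are optional keywords.
            job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
            self.assertEqual(edp.JOB_TYPE_PIG, job.type)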

Example 1: test_build_workflow_swift_configs

    def test_build_workflow_swift_configs(self, job_binary):

        # Test that swift configs come from either input or output data sources
        job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
        job_binary.return_value = {"name": "script.pig"}

        input_data = u.create_data_source('swift://ex/i')
        output_data = u.create_data_source('hdfs://user/hadoop/out')

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec, input_data, output_data,
            'hadoop')

        self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

        input_data = u.create_data_source('hdfs://user/hadoop/in')
        output_data = u.create_data_source('swift://ex/o')

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec, input_data, output_data,
            'hadoop')

        self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

        job, job_exec = u.create_job_exec(
            edp.JOB_TYPE_PIG, configs={'configs': {'dummy': 'value'}})
        input_data = u.create_data_source('hdfs://user/hadoop/in')
        output_data = u.create_data_source('hdfs://user/hadoop/out')

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec, input_data, output_data,
            'hadoop')

        self.assertIn("""
      <configuration>
        <property>
          <name>dummy</name>
          <value>value</value>
        </property>
      </configuration>""", res)
Author: degorenko, Project: sahara, Lines: 60, Source: test_job_manager.py

Example 2: test_build_workflow_for_job_pig

    def test_build_workflow_for_job_pig(self, job_binary):

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
        job_binary.return_value = {"name": "script.pig"}

        input_data = u.create_data_source('swift://ex/i')
        output_data = u.create_data_source('swift://ex/o')
        data_source_urls = {input_data.id: input_data.url,
                            output_data.id: output_data.url}

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs,
            input_data, output_data, 'hadoop', data_source_urls)

        self.assertIn("""
      <param>INPUT=swift://ex.sahara/i</param>
      <param>OUTPUT=swift://ex.sahara/o</param>""", res)

        self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>""", res)

        self.assertIn("<script>script.pig</script>", res)

        # testing workflow creation with a proxy domain
        self.override_config('use_domain_for_proxy_users', True)
        self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
        job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, proxy=True)

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs,
            input_data, output_data, 'hadoop', data_source_urls)

        self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.domain.name</name>
          <value>sahara_proxy_domain</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>55555555-6666-7777-8888-999999999999</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.trust.id</name>
          <value>0123456789abcdef0123456789abcdef</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>job_00000000-1111-2222-3333-4444444444444444</value>
        </property>
      </configuration>""", res)
Author: rogeryu27, Project: sahara, Lines: 60, Source: test_job_manager.py

Example 3: test_build_workflow_for_job_hive

    def test_build_workflow_for_job_hive(self, job_binary):

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
        job_binary.return_value = {"name": "script.q"}

        input_data = u.create_data_source('swift://ex/i')
        output_data = u.create_data_source('swift://ex/o')
        data_source_urls = {input_data.id: input_data.url,
                            output_data.id: output_data.url}

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs,
            input_data, output_data, 'hadoop', data_source_urls)

        doc = xml.parseString(res)
        hive = doc.getElementsByTagName('hive')[0]
        self.assertEqual('/user/hadoop/conf/hive-site.xml',
                         xmlutils.get_text_from_node(hive, 'job-xml'))

        configuration = hive.getElementsByTagName('configuration')
        properties = xmlutils.get_property_dict(configuration[0])
        self.assertEqual({'fs.swift.service.sahara.password': 'admin1',
                          'fs.swift.service.sahara.username': 'admin'},
                         properties)

        self.assertEqual('script.q',
                         xmlutils.get_text_from_node(hive, 'script'))

        params = xmlutils.get_param_dict(hive)
        self.assertEqual({'INPUT': 'swift://ex.sahara/i',
                          'OUTPUT': 'swift://ex.sahara/o'}, params)

        # testing workflow creation with a proxy domain
        self.override_config('use_domain_for_proxy_users', True)
        self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs,
            input_data, output_data, 'hadoop', data_source_urls)

        doc = xml.parseString(res)
        hive = doc.getElementsByTagName('hive')[0]
        configuration = hive.getElementsByTagName('configuration')
        properties = xmlutils.get_property_dict(configuration[0])
        self.assertEqual({
            'fs.swift.service.sahara.domain.name':
            'sahara_proxy_domain',

            'fs.swift.service.sahara.trust.id':
            '0123456789abcdef0123456789abcdef',

            'fs.swift.service.sahara.password':
            '55555555-6666-7777-8888-999999999999',

            'fs.swift.service.sahara.username':
            'job_00000000-1111-2222-3333-4444444444444444'}, properties)
Author: rogeryu27, Project: sahara, Lines: 58, Source: test_job_manager.py

Example 4: test_build_workflow_for_job_hive

    def test_build_workflow_for_job_hive(self, job_binary):

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
        job_binary.return_value = {"name": "script.q"}

        input_data = u.create_data_source("swift://ex/i")
        output_data = u.create_data_source("swift://ex/o")
        data_source_urls = {input_data.id: input_data.url, output_data.id: output_data.url}

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs, input_data, output_data, "hadoop", data_source_urls
        )

        doc = xml.parseString(res)
        hive = doc.getElementsByTagName("hive")[0]
        self.assertEqual("/user/hadoop/conf/hive-site.xml", xmlutils.get_text_from_node(hive, "job-xml"))

        configuration = hive.getElementsByTagName("configuration")
        properties = xmlutils.get_property_dict(configuration[0])
        self.assertEqual(
            {"fs.swift.service.sahara.password": "admin1", "fs.swift.service.sahara.username": "admin"}, properties
        )

        self.assertEqual("script.q", xmlutils.get_text_from_node(hive, "script"))

        params = xmlutils.get_param_dict(hive)
        self.assertEqual({"INPUT": "swift://ex.sahara/i", "OUTPUT": "swift://ex.sahara/o"}, params)

        # testing workflow creation with a proxy domain
        self.override_config("use_domain_for_proxy_users", True)
        self.override_config("proxy_user_domain_name", "sahara_proxy_domain")

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec.job_configs, input_data, output_data, "hadoop", data_source_urls
        )

        doc = xml.parseString(res)
        hive = doc.getElementsByTagName("hive")[0]
        configuration = hive.getElementsByTagName("configuration")
        properties = xmlutils.get_property_dict(configuration[0])
        self.assertEqual(
            {
                "fs.swift.service.sahara.domain.name": "sahara_proxy_domain",
                "fs.swift.service.sahara.trust.id": "0123456789abcdef0123456789abcdef",
                "fs.swift.service.sahara.password": "55555555-6666-7777-8888-999999999999",
                "fs.swift.service.sahara.username": "job_00000000-1111-2222-3333-4444444444444444",
            },
            properties,
        )
Author: thefuyang, Project: sahara, Lines: 51, Source: test_job_manager.py

Example 5: test_hdfs_upload_job_files

    def test_hdfs_upload_job_files(self, conductor_raw_data, remote_class,
                                   remote):
        remote_class.__exit__.return_value = 'closed'
        remote.return_value = remote_class
        conductor_raw_data.return_value = 'ok'

        oje = FakeOozieJobEngine(u.create_cluster())
        job, _ = u.create_job_exec(edp.JOB_TYPE_PIG)
        res = oje._upload_job_files_to_hdfs(mock.Mock(), 'job_prefix', job, {})
        self.assertEqual(['job_prefix/script.pig'], res)

        job, _ = u.create_job_exec(edp.JOB_TYPE_MAPREDUCE)
        res = oje._upload_job_files_to_hdfs(mock.Mock(), 'job_prefix', job, {})
        self.assertEqual(['job_prefix/lib/main.jar'], res)
Author: gongwayne, Project: Openstack, Lines: 14, Source: test_oozie.py

Example 6: test_prepare_run_job

    def test_prepare_run_job(self, job, data_source, update,
                             remote, wf_factory, get_ds_urls,
                             prepare_cluster):
        wf_factory.return_value = mock.MagicMock()

        remote_class = mock.MagicMock()
        remote_class.__exit__.return_value = 'closed'
        remote.return_value = remote_class

        job_class = mock.MagicMock()
        job_class.name = "myJob"
        job.return_value = job_class

        source = mock.MagicMock()
        source.url = "localhost"

        get_ds_urls.return_value = ('url', 'url')

        data_source.return_value = source
        oje = FakeOozieJobEngine(u.create_cluster())
        _, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
        update.return_value = job_exec

        res = oje._prepare_run_job(job_exec)
        self.assertEqual(ctx.ctx(), res['context'])
        self.assertEqual('hadoop', res['hdfs_user'])
        self.assertEqual(job_exec, res['job_execution'])
        self.assertEqual({}, res['oozie_params'])
Author: openstack, Project: sahara, Lines: 28, Source: test_oozie.py

Example 7: test_job_type_supported

    def test_job_type_supported(self, job_get):
        job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
        job_get.return_value = job
        self.assertIsNotNone(job_manager._get_job_engine(u.create_cluster(), job_exec))

        job.type = "unsupported_type"
        self.assertIsNone(job_manager._get_job_engine(u.create_cluster(), job_exec))
Author: thefuyang, Project: sahara, Lines: 7, Source: test_job_manager.py

Example 8: _build_workflow_with_conf_common

    def _build_workflow_with_conf_common(self, job_type):

        input_data = u.create_data_source('swift://ex/i')
        output_data = u.create_data_source('swift://ex/o')

        job, job_exec = u.create_job_exec(job_type,
                                          configs={"configs": {'c': 'f'}})

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec, input_data, output_data,
            'hadoop')

        self.assertIn("""
        <property>
          <name>c</name>
          <value>f</value>
        </property>""", res)

        self.assertIn("""
        <property>
          <name>mapred.input.dir</name>
          <value>swift://ex.sahara/i</value>
        </property>""", res)

        self.assertIn("""
        <property>
          <name>mapred.output.dir</name>
          <value>swift://ex.sahara/o</value>
        </property>""", res)
Author: stannie42, Project: sahara, Lines: 29, Source: test_job_manager.py

Example 9: test_build_workflow_for_job_java

    def test_build_workflow_for_job_java(self):
        # If args include swift paths, user and password values
        # will have to be supplied via configs instead of being
        # lifted from input or output data sources
        configs = {sw.HADOOP_SWIFT_USERNAME: 'admin',
                   sw.HADOOP_SWIFT_PASSWORD: 'admin1'}

        configs = {
            'configs': configs,
            'args': ['swift://ex/i',
                     'output_path']
        }

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec)

        self.assertIn("""
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>
      <main-class>%s</main-class>
      <java-opts>%s</java-opts>
      <arg>swift://ex.sahara/i</arg>
      <arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
Author: stannie42, Project: sahara, Lines: 32, Source: test_job_manager.py

Example 10: test_build_workflow_for_job_hive

    def test_build_workflow_for_job_hive(self, job_binary):

        job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE)
        job_binary.return_value = {"name": "script.q"}

        input_data = u.create_data_source('swift://ex/i')
        output_data = u.create_data_source('swift://ex/o')

        res = workflow_factory.get_workflow_xml(
            job, u.create_cluster(), job_exec, input_data, output_data,
            'hadoop')

        self.assertIn("""
      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
      <configuration>
        <property>
          <name>fs.swift.service.sahara.password</name>
          <value>admin1</value>
        </property>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>
      <script>script.q</script>
      <param>INPUT=swift://ex.sahara/i</param>
      <param>OUTPUT=swift://ex.sahara/o</param>""", res)
Author: stannie42, Project: sahara, Lines: 27, Source: test_job_manager.py

Example 11: test_run_job

    def test_run_job(self, exec_get, job, data_source,
                     update, remote, wf_factory, get_ds_urls,
                     prepare_cluster):
        wf_factory.return_value = mock.MagicMock()
        remote_class = mock.MagicMock()
        remote_class.__exit__.return_value = 'closed'
        remote.return_value = remote_class

        job_class = mock.MagicMock()
        job.return_value = job_class
        job.name = "myJob"

        source = mock.MagicMock()
        source.url = "localhost"
        data_source.return_value = source

        get_ds_urls.return_value = ('url', 'url')

        oje = FakeOozieJobEngine(u.create_cluster())
        client_class = mock.MagicMock()
        client_class.add_job = mock.MagicMock(return_value=1)
        client_class.get_job_info = mock.MagicMock(
            return_value={'status': 'PENDING'})
        oje.get_client = mock.MagicMock(return_value=client_class)

        _, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
        update.return_value = job_exec

        self.assertEqual((1, 'PENDING', None), oje.run_job(job_exec))
Author: openstack, Project: sahara, Lines: 29, Source: test_oozie.py

Example 12: test_hdfs_create_workflow_dir

    def test_hdfs_create_workflow_dir(self, remote):
        remote_class = mock.MagicMock()
        remote_class.__exit__.return_value = 'closed'
        remote.return_value = remote_class

        oje = FakeOozieJobEngine(u.create_cluster())
        job, _ = u.create_job_exec(edp.JOB_TYPE_PIG)
        res = oje._create_hdfs_workflow_dir(mock.Mock(), job)
        self.assertIn('/user/hadoop/special_name/', res)
Author: gongwayne, Project: Openstack, Lines: 9, Source: test_oozie.py

Example 13: test_build_workflow_for_job_java_with_adapter

    def test_build_workflow_for_job_java_with_adapter(self, edp_conf_mock):
        edp_conf_mock.return_value = True

        configs = {"configs": {"edp.java.main_class": "some_main"}}
        job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
        res = workflow_factory.get_workflow_xml(job, u.create_cluster(), job_exec.job_configs)

        self.assertIn("<main-class>org.openstack.sahara.edp.MainWrapper</main-class>", res)
        self.assertNotIn("some_main", res)
Author: thefuyang, Project: sahara, Lines: 9, Source: test_job_manager.py

Example 14: test_run_job_handles_exceptions

    def test_run_job_handles_exceptions(self, runjob, job_ex_upd):
        runjob.side_effect = ex.SwiftClientException("Unauthorised")
        job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
        job_manager.run_job(job_exec.id)

        self.assertEqual(1, job_ex_upd.call_count)

        new_status = job_ex_upd.call_args[0][2]["info"]["status"]
        self.assertEqual(edp.JOB_STATUS_FAILED, new_status)
Author: degorenko, Project: sahara, Lines: 9, Source: test_job_manager.py

Example 15: test_run_job_unsupported_type

    def test_run_job_unsupported_type(self, cluster_get, job_exec_get, job_get):
        job, job_exec = u.create_job_exec("unsupported_type")
        job_exec_get.return_value = job_exec
        job_get.return_value = job

        cluster = u.create_cluster()
        cluster.status = c_u.CLUSTER_STATUS_ACTIVE
        cluster_get.return_value = cluster
        with testtools.ExpectedException(ex.EDPError):
            job_manager._run_job(job_exec.id)
Author: thefuyang, Project: sahara, Lines: 10, Source: test_job_manager.py


Note: The sahara.tests.unit.service.edp.edp_test_utils.create_job_exec examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community open-source projects, and the copyright of each remains with its original author; consult the corresponding project's License before distributing or reusing the code. Do not reproduce this article without permission.