Python oozie_api_tests.OozieServerProvider class code examples

This article collects typical usage examples of the Python class liboozie.oozie_api_tests.OozieServerProvider. If you are wondering what the OozieServerProvider class does, how to use it, or what real-world usage looks like, the selected code examples below may help.


Seven code examples of the OozieServerProvider class are shown below, sorted by popularity by default. A minimal sketch of the pattern they all share precedes the examples.
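The sketch below is a hedged outline of that shared pattern, assuming a Hue test environment where liboozie.oozie_api_tests and Django's test client are available: subclass OozieServerProvider, call OozieServerProvider.setup_class() to bring up the test Oozie server and Hadoop cluster, submit a workflow, then block on OozieServerProvider.wait_until_completion(). The submit_sleep_workflow helper and the TestWithOozieServer class are hypothetical names modeled on Examples 1 and 3, not part of the OozieServerProvider API, and the reverse import path assumes the pre-2.0 Django that Hue used at the time.

  # Minimal sketch, assuming a Hue test environment (see note above).
  from django.core.urlresolvers import reverse  # assumption: Django < 2.0, as in Hue

  from liboozie.oozie_api_tests import OozieServerProvider


  def submit_sleep_workflow(client, design_id):
    # Hypothetical helper modeled on Examples 1 and 3: submit the MapReduce
    # sleep design through the oozie app and return the Oozie workflow id.
    response = client.post(reverse('oozie:submit_workflow', args=[design_id]),
                           data={u'form-MAX_NUM_FORMS': [u''],
                                 u'form-INITIAL_FORMS': [u'1'],
                                 u'form-0-name': [u'REDUCER_SLEEP_TIME'],
                                 u'form-0-value': [u'1'],
                                 u'form-TOTAL_FORMS': [u'1']},
                           follow=True)
    return response.context['oozie_workflow'].id


  class TestWithOozieServer(OozieServerProvider):

    @classmethod
    def setup_class(cls):
      # Starts (or attaches to) the shared test Oozie server and Hadoop
      # cluster; afterwards cls.cluster and cls.oozie are available,
      # which the examples below rely on.
      OozieServerProvider.setup_class()

    def run_and_wait(self, client, design_id):
      oozie_jobid = submit_sleep_workflow(client, design_id)
      # Poll until the workflow finishes; timeout and step are in seconds,
      # matching the call in Example 3.
      return OozieServerProvider.wait_until_completion(oozie_jobid, timeout=120, step=1)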

Example 1: setup_class

  def setup_class(cls):
    OozieServerProvider.setup_class()

    cls.username = 'hue_jobbrowser_test'
    cls.home_dir = '/user/%s' % cls.username
    cls.cluster.fs.do_as_user(cls.username, cls.cluster.fs.create_home_dir, cls.home_dir)

    cls.client = make_logged_in_client(username=cls.username, is_superuser=False, groupname='test')
    cls.user = User.objects.get(username=cls.username)
    grant_access(cls.username, 'test', 'jobsub')
    grant_access(cls.username, 'test', 'jobbrowser')
    grant_access(cls.username, 'test', 'oozie')
    add_to_group(cls.username)

    cls.prev_user = cls.cluster.fs.user
    cls.cluster.fs.setuser(cls.username)

    cls.install_examples()
    cls.design = cls.create_design()

    # Run the sleep example, since it doesn't require user home directory
    design_id = cls.design.id
    response = cls.client.post(reverse('oozie:submit_workflow',
                                args=[design_id]),
                                data={u'form-MAX_NUM_FORMS': [u''],
                                      u'form-INITIAL_FORMS': [u'1'],
                                      u'form-0-name': [u'REDUCER_SLEEP_TIME'],
                                      u'form-0-value': [u'1'],
                                      u'form-TOTAL_FORMS': [u'1']},
                                follow=True)
    oozie_jobid = response.context['oozie_workflow'].id
    OozieServerProvider.wait_until_completion(oozie_jobid)

    cls.hadoop_job_id = get_hadoop_job_id(cls.oozie, oozie_jobid, 1)
    cls.hadoop_job_id_short = views.get_shorter_id(cls.hadoop_job_id)
Developer: guoqinga, Project: hue, Lines: 35, Source: tests.py

Example 2: setUp

  def setUp(self):
    OozieServerProvider.setup_class()
    self.cluster.fs.do_as_user('jobsub_test', self.cluster.fs.create_home_dir, '/user/jobsub_test')
    self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/user/jobsub_test', 0777, True) # Hum?
    self.client = make_logged_in_client(username='jobsub_test')
    self.user = User.objects.get(username='jobsub_test')

    # Ensure access to MR folder.
    # Need to chmod because jobs are submitted as a
    # different user than what was previously used.
    for i in range(0,10):
      try:
        self.cluster.fs.do_as_superuser(self.cluster.fs.chmod, '/tmp', 0777, recursive=True)
        break
      except Exception, e:
        # chmod failure is likely due to async processing of resource deletion.
        # If the directory has improper permissions, should fail later in the test case.
        LOG.warn("Received the following exception while change mode attempt %d of /tmp: %s" % (i, str(e)))
        time.sleep(1)
Developer: cloudera, Project: hue, Lines: 19, Source: tests.py

Example 3: setUp

  def setUp(self):
    """
    To clean: creating test1, test2, test3...users
    """
    TestJobBrowserWithHadoop.user_count += 1
    self.username = 'test' + str(TestJobBrowserWithHadoop.user_count)
    self.home_dir = '/user/%s' % self.username
    self.cluster.fs.do_as_user(self.username, self.cluster.fs.create_home_dir, self.home_dir)

    self.client = make_logged_in_client(username=self.username, is_superuser=False, groupname='test')
    self.user = User.objects.get(username=self.username)
    grant_access(self.username, 'test', 'jobsub')
    grant_access(self.username, 'test', 'jobbrowser')
    grant_access(self.username, 'test', 'oozie')
    add_to_group(self.username)

    self.prev_user = self.cluster.fs.user
    self.cluster.fs.setuser(self.username)

    self.install_examples()
    self.design = self.create_design()

    raise SkipTest

    # Run the sleep example, since it doesn't require user home directory
    design_id = self.design.id
    response = self.client.post(reverse('oozie:submit_workflow',
                                args=[design_id]),
                                data={u'form-MAX_NUM_FORMS': [u''],
                                      u'form-INITIAL_FORMS': [u'1'],
                                      u'form-0-name': [u'REDUCER_SLEEP_TIME'],
                                      u'form-0-value': [u'1'],
                                      u'form-TOTAL_FORMS': [u'1']},
                                follow=True)
    oozie_jobid = response.context['oozie_workflow'].id
    OozieServerProvider.wait_until_completion(oozie_jobid, timeout=120, step=1)

    self.hadoop_job_id = get_hadoop_job_id(self.oozie, oozie_jobid, 1)
    self.hadoop_job_id_short = views.get_shorter_id(self.hadoop_job_id)
Developer: QLGu, Project: hue, Lines: 39, Source: tests.py

Example 4: setup_class

    def setup_class(cls):
        OozieServerProvider.setup_class()

        cls.username = "hue_jobbrowser_test"
        cls.home_dir = "/user/%s" % cls.username
        cls.cluster.fs.do_as_user(cls.username, cls.cluster.fs.create_home_dir, cls.home_dir)

        cls.client = make_logged_in_client(username=cls.username, is_superuser=False, groupname="test")
        cls.user = User.objects.get(username=cls.username)
        grant_access(cls.username, "test", "jobsub")
        grant_access(cls.username, "test", "jobbrowser")
        grant_access(cls.username, "test", "oozie")
        add_to_group(cls.username)

        cls.prev_user = cls.cluster.fs.user
        cls.cluster.fs.setuser(cls.username)

        cls.install_examples()
        cls.design = cls.create_design()

        # Run the sleep example, since it doesn't require user home directory
        design_id = cls.design.id
        response = cls.client.post(
            reverse("oozie:submit_workflow", args=[design_id]),
            data={
                u"form-MAX_NUM_FORMS": [u""],
                u"form-INITIAL_FORMS": [u"1"],
                u"form-0-name": [u"REDUCER_SLEEP_TIME"],
                u"form-0-value": [u"1"],
                u"form-TOTAL_FORMS": [u"1"],
            },
            follow=True,
        )
        oozie_jobid = response.context["oozie_workflow"].id
        OozieServerProvider.wait_until_completion(oozie_jobid)

        cls.hadoop_job_id = get_hadoop_job_id(cls.oozie, oozie_jobid, 1)
        cls.hadoop_job_id_short = views.get_shorter_id(cls.hadoop_job_id)
Developer: GorillaTester, Project: hue, Lines: 38, Source: tests.py

Example 5: test_failed_jobs

  def test_failed_jobs(self):
    """
    Test jobs with genuine failure, not just killed
    """

    if is_live_cluster():
      raise SkipTest('HUE-2902: Skipping because test is not reentrant')

    # Create design that will fail because the script file isn't there
    INPUT_DIR = TestJobBrowserWithHadoop.home_dir + '/input'
    OUTPUT_DIR = TestJobBrowserWithHadoop.home_dir + '/output'
    try:
      TestJobBrowserWithHadoop.cluster.fs.mkdir(TestJobBrowserWithHadoop.home_dir + "/jt-test_failed_jobs")
      TestJobBrowserWithHadoop.cluster.fs.mkdir(INPUT_DIR)
      TestJobBrowserWithHadoop.cluster.fs.rmtree(OUTPUT_DIR)
    except:
      LOG.exception('failed to teardown tests')

    job_name = '%s_%s' % (TestJobBrowserWithHadoop.username, 'test_failed_jobs-1')
    response = TestJobBrowserWithHadoop.client.post(reverse('jobsub.views.new_design', kwargs={'node_type': 'mapreduce'}), {
        'name': [job_name],
        'description': ['description test_failed_jobs-1'],
        'args': '',
        'jar_path': '/user/hue/oozie/workspaces/lib/hadoop-examples.jar',
        'prepares': '[]',
        'archives': '[]',
        'files': '[]',
        'job_properties': ['[{"name":"mapred.input.dir","value":"%s"},\
            {"name":"mapred.output.dir","value":"%s"},\
            {"name":"mapred.mapper.class","value":"org.apache.hadoop.mapred.lib.dne"},\
            {"name":"mapred.combiner.class","value":"org.apache.hadoop.mapred.lib.dne"},\
            {"name":"mapred.reducer.class","value":"org.apache.hadoop.mapred.lib.dne"}]' % (INPUT_DIR, OUTPUT_DIR)]
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest', follow=True)

    # Submit the job
    design_dict = json.loads(response.content)
    design_id = int(design_dict['id'])
    response = TestJobBrowserWithHadoop.client.post(reverse('oozie:submit_workflow',
                                args=[design_id]),
                                data={u'form-MAX_NUM_FORMS': [u''],
                                      u'form-INITIAL_FORMS': [u'1'],
                                      u'form-0-name': [u'REDUCER_SLEEP_TIME'],
                                      u'form-0-value': [u'1'],
                                      u'form-TOTAL_FORMS': [u'1']},
                                follow=True)
    oozie_jobid = response.context['oozie_workflow'].id
    job = OozieServerProvider.wait_until_completion(oozie_jobid)
    hadoop_job_id = get_hadoop_job_id(TestJobBrowserWithHadoop.oozie, oozie_jobid, 1)
    hadoop_job_id_short = views.get_shorter_id(hadoop_job_id)

    # Select only killed jobs (should be absent)
    # Taking advantage of the fact new jobs are at the top of the list!
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/?format=json&state=killed')
    assert_false(hadoop_job_id_short in response.content)

    # Select only failed jobs (should be present)
    # Map job should succeed. Reduce job should fail.
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/?format=json&state=failed')
    assert_true(hadoop_job_id_short in response.content)

    raise SkipTest # Not compatible with MR2

    # The single job view should have the failed task table
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/%s' % (hadoop_job_id,))
    html = response.content.lower()
    assert_true('failed task' in html, html)

    # The map task should say success (empty input)
    map_task_id = TestJobBrowserWithHadoop.hadoop_job_id.replace('job', 'task') + '_m_000000'
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/%s/tasks/%s' % (hadoop_job_id, map_task_id))
    assert_true('succeed' in response.content)
    assert_true('failed' not in response.content)

    # The reduce task should say failed
    reduce_task_id = hadoop_job_id.replace('job', 'task') + '_r_000000'
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/%s/tasks/%s' % (hadoop_job_id, reduce_task_id))
    assert_true('succeed' not in response.content)
    assert_true('failed' in response.content)

    # Selecting by failed state should include the failed reduce task, not the successful map
    response = TestJobBrowserWithHadoop.client.get('/jobbrowser/jobs/%s/tasks?taskstate=failed' % (hadoop_job_id,))
    assert_true('r_000000' in response.content)
    assert_true('m_000000' not in response.content)
Developer: guoqinga, Project: hue, Lines: 83, Source: tests.py

Example 6: setup_class

 def setup_class(cls):
   OozieServerProvider.setup_class()
Developer: 15580056814, Project: hue, Lines: 2, Source: tests.py

Example 7: test_failed_jobs

    def test_failed_jobs(self):
        """
    Test jobs with genuine failure, not just killed
    """

        if is_live_cluster():
            raise SkipTest("HUE-2902: Skipping because test is not reentrant")

        # Create design that will fail because the script file isn't there
        INPUT_DIR = TestJobBrowserWithHadoop.home_dir + "/input"
        OUTPUT_DIR = TestJobBrowserWithHadoop.home_dir + "/output"
        try:
            TestJobBrowserWithHadoop.cluster.fs.mkdir(TestJobBrowserWithHadoop.home_dir + "/jt-test_failed_jobs")
            TestJobBrowserWithHadoop.cluster.fs.mkdir(INPUT_DIR)
            TestJobBrowserWithHadoop.cluster.fs.rmtree(OUTPUT_DIR)
        except:
            LOG.exception("failed to teardown tests")

        job_name = "%s_%s" % (TestJobBrowserWithHadoop.username, "test_failed_jobs-1")
        response = TestJobBrowserWithHadoop.client.post(
            reverse("jobsub.views.new_design", kwargs={"node_type": "mapreduce"}),
            {
                "name": [job_name],
                "description": ["description test_failed_jobs-1"],
                "args": "",
                "jar_path": "/user/hue/oozie/workspaces/lib/hadoop-examples.jar",
                "prepares": "[]",
                "archives": "[]",
                "files": "[]",
                "job_properties": [
                    '[{"name":"mapred.input.dir","value":"%s"},\
            {"name":"mapred.output.dir","value":"%s"},\
            {"name":"mapred.mapper.class","value":"org.apache.hadoop.mapred.lib.dne"},\
            {"name":"mapred.combiner.class","value":"org.apache.hadoop.mapred.lib.dne"},\
            {"name":"mapred.reducer.class","value":"org.apache.hadoop.mapred.lib.dne"}]'
                    % (INPUT_DIR, OUTPUT_DIR)
                ],
            },
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
            follow=True,
        )

        # Submit the job
        design_dict = json.loads(response.content)
        design_id = int(design_dict["id"])
        response = TestJobBrowserWithHadoop.client.post(
            reverse("oozie:submit_workflow", args=[design_id]),
            data={
                u"form-MAX_NUM_FORMS": [u""],
                u"form-INITIAL_FORMS": [u"1"],
                u"form-0-name": [u"REDUCER_SLEEP_TIME"],
                u"form-0-value": [u"1"],
                u"form-TOTAL_FORMS": [u"1"],
            },
            follow=True,
        )
        oozie_jobid = response.context["oozie_workflow"].id
        job = OozieServerProvider.wait_until_completion(oozie_jobid)
        hadoop_job_id = get_hadoop_job_id(TestJobBrowserWithHadoop.oozie, oozie_jobid, 1)
        hadoop_job_id_short = views.get_shorter_id(hadoop_job_id)

        # Select only killed jobs (should be absent)
        # Taking advantage of the fact new jobs are at the top of the list!
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/?format=json&state=killed")
        assert_false(hadoop_job_id_short in response.content)

        # Select only failed jobs (should be present)
        # Map job should succeed. Reduce job should fail.
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/?format=json&state=failed")
        assert_true(hadoop_job_id_short in response.content)

        raise SkipTest  # Not compatible with MR2

        # The single job view should have the failed task table
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/%s" % (hadoop_job_id,))
        html = response.content.lower()
        assert_true("failed task" in html, html)

        # The map task should say success (empty input)
        map_task_id = TestJobBrowserWithHadoop.hadoop_job_id.replace("job", "task") + "_m_000000"
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/%s/tasks/%s" % (hadoop_job_id, map_task_id))
        assert_true("succeed" in response.content)
        assert_true("failed" not in response.content)

        # The reduce task should say failed
        reduce_task_id = hadoop_job_id.replace("job", "task") + "_r_000000"
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/%s/tasks/%s" % (hadoop_job_id, reduce_task_id))
        assert_true("succeed" not in response.content)
        assert_true("failed" in response.content)

        # Selecting by failed state should include the failed reduce task, not the successful map
        response = TestJobBrowserWithHadoop.client.get("/jobbrowser/jobs/%s/tasks?taskstate=failed" % (hadoop_job_id,))
        assert_true("r_000000" in response.content)
        assert_true("m_000000" not in response.content)
Developer: GorillaTester, Project: hue, Lines: 94, Source: tests.py


Note: The liboozie.oozie_api_tests.OozieServerProvider class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code belongs to the original authors. Please refer to each project's license before distributing or reusing the code, and do not reproduce this article without permission.