

Java Job Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.v2.app.job.Job. If you are wondering what the Job class is for, how to use it, or simply want concrete examples of it in action, the curated code samples below should help.


The Job class belongs to the org.apache.hadoop.mapreduce.v2.app.job package. Fifteen code examples of the Job class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
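Before the examples, here is a minimal, illustrative sketch (not taken from the collected projects) of how the Job interface is typically consumed: given an AppContext, jobs are looked up via getAllJobs() and their state and tasks are read through the Job and Task accessors. The JobInspector class and the printJobSummaries method are hypothetical names introduced only for illustration; the accessors used (getAllJobs, getName, getState, getTotalMaps, getTotalReduces, getTasks) are the standard ones exercised by the tests below.

import java.util.Map;

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

// Hypothetical helper, shown only to illustrate the common Job accessors.
public class JobInspector {

  // Walks every Job known to the given AppContext and prints a short
  // summary per job and per task.
  public static void printJobSummaries(AppContext appContext) {
    Map<JobId, Job> jobs = appContext.getAllJobs();
    for (Map.Entry<JobId, Job> entry : jobs.entrySet()) {
      Job job = entry.getValue();
      System.out.println("Job " + entry.getKey()
          + " name=" + job.getName()
          + " state=" + job.getState()
          + " maps=" + job.getTotalMaps()
          + " reduces=" + job.getTotalReduces());
      for (Map.Entry<TaskId, Task> t : job.getTasks().entrySet()) {
        System.out.println("  Task " + t.getKey()
            + " state=" + t.getValue().getState());
      }
    }
  }
}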

Example 1: testTaskId

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testTaskId() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("task");
      verifyHsSingleTask(info, task);
    }
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 21, Source: TestHsWebServicesTasks.java

Example 2: testJobCountersDefault

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters/").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobCounters");
    verifyAMJobCounters(info, jobsMap.get(id));
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 17, Source: TestAMWebServicesJobs.java

Example 3: testJobAttemptsSlash

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobAttemptsSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("jobattempts/")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobAttempts");
    verifyHsJobAttempts(info, appContext.getJob(id));
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 18, Source: TestHsWebServicesJobs.java

Example 4: testJobsSlash

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobsSlash() throws JSONException, Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("mapreduce")
      .path("jobs/").accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  JSONObject info = arr.getJSONObject(0);
  Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
  verifyAMJob(info, job);

}
 
Developer: naver, Project: hadoop, Lines of code: 17, Source: TestAMWebServicesJobs.java

Example 5: testTaskAttempts

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testTaskAttempts() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 19, Source: TestAMWebServicesAttempts.java

Example 6: testJobsSlash

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobsSlash() throws JSONException, Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs/").accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", 1, arr.length());
  JSONObject info = arr.getJSONObject(0);
  Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
  VerifyJobsUtils.verifyHsJobPartial(info, job);

}
 
Developer: naver, Project: hadoop, Lines of code: 18, Source: TestHsWebServicesJobs.java

Example 7: testJobCountersSlash

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobCountersSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("counters/")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobCounters");
    verifyHsJobCounters(info, appContext.getJob(id));
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 18, Source: TestHsWebServicesJobs.java

Example 8: testJobs

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobs() throws JSONException, Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("mapreduce")
      .path("jobs").accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  JSONObject info = arr.getJSONObject(0);
  Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
  verifyAMJob(info, job);

}
 
Developer: naver, Project: hadoop, Lines of code: 17, Source: TestAMWebServicesJobs.java

Example 9: testJobCountersXML

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobCountersXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("jobCounters");
    verifyAMJobCountersXML(info, jobsMap.get(id));
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 22, Source: TestAMWebServicesJobs.java

Example 10: getAllPartialJobs

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Override
public Map<JobId, Job> getAllPartialJobs() {
  LOG.debug("Called getAllPartialJobs()");
  SortedMap<JobId, Job> result = new TreeMap<JobId, Job>();
  try {
    for (HistoryFileInfo mi : hsManager.getAllFileInfo()) {
      if (mi != null) {
        JobId id = mi.getJobId();
        result.put(id, new PartialJob(mi.getJobIndexInfo(), id));
      }
    }
  } catch (IOException e) {
    LOG.warn("Error trying to scan for all FileInfos", e);
    throw new YarnRuntimeException(e);
  }
  return result;
}
 
Developer: naver, Project: hadoop, Lines of code: 18, Source: CachedHistoryStorage.java

Example 11: getJobTaskAttempts

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@GET
@Path("/jobs/{jobid}/tasks/{taskid}/attempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
    @PathParam("jobid") String jid, @PathParam("taskid") String tid) {

  init();
  TaskAttemptsInfo attempts = new TaskAttemptsInfo();
  Job job = getJobFromJobIdString(jid, appCtx);
  checkAccess(job, hsr);
  Task task = getTaskFromTaskIdString(tid, job);

  for (TaskAttempt ta : task.getAttempts().values()) {
    if (ta != null) {
      if (task.getType() == TaskType.REDUCE) {
        attempts.add(new ReduceTaskAttemptInfo(ta, task.getType()));
      } else {
        attempts.add(new TaskAttemptInfo(ta, task.getType(), true));
      }
    }
  }
  return attempts;
}
 
Developer: naver, Project: hadoop, Lines of code: 24, Source: AMWebServices.java

Example 12: testTimedOutTask

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
//All Task attempts are timed out, leading to Job failure
public void testTimedOutTask() throws Exception {
  MRApp app = new TimeOutTaskMRApp(1, 0);
  Configuration conf = new Configuration();
  int maxAttempts = 2;
  conf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, maxAttempts);
  // disable uberization (requires entire job to be reattempted, so max for
  // subtask attempts is overridden to 1)
  conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
  Job job = app.submit(conf);
  app.waitForState(job, JobState.FAILED);
  Map<TaskId,Task> tasks = job.getTasks();
  Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
  Task task = tasks.values().iterator().next();
  Assert.assertEquals("Task state not correct", TaskState.FAILED,
      task.getReport().getTaskState());
  Map<TaskAttemptId, TaskAttempt> attempts =
      tasks.values().iterator().next().getAttempts();
  Assert.assertEquals("Num attempts is not correct", maxAttempts,
      attempts.size());
  for (TaskAttempt attempt : attempts.values()) {
    Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
        attempt.getReport().getTaskAttemptState());
  }
}
 
Developer: naver, Project: hadoop, Lines of code: 27, Source: TestFail.java

Example 13: testJobIdXML

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobIdXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList job = dom.getElementsByTagName("job");
    verifyAMJobXML(job, appContext);
  }

}
 
Developer: naver, Project: hadoop, Lines of code: 23, Source: TestAMWebServicesJobs.java

Example 14: testJobId

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobId() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId)
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
  }

}
 
Developer: naver, Project: hadoop, Lines of code: 19, Source: TestHsWebServicesJobs.java

Example 15: testJobId

import org.apache.hadoop.mapreduce.v2.app.job.Job; // import the required package/class
@Test
public void testJobId() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    verifyAMJob(info, jobsMap.get(id));
  }

}
 
Developer: naver, Project: hadoop, Lines of code: 19, Source: TestAMWebServicesJobs.java


Note: The org.apache.hadoop.mapreduce.v2.app.job.Job class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please follow the corresponding project's license when distributing or using the code; do not reproduce without permission.