

Java MRApps Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.v2.util.MRApps. If you are wondering what the MRApps class is for, how to use it, or what real usage looks like, the curated class code examples below may help.


The MRApps class belongs to the org.apache.hadoop.mapreduce.v2.util package. The sections below present 15 code examples of the MRApps class, sorted by popularity by default.
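
Before the examples, here is a minimal sketch of the conversion helpers that recur throughout them: MRApps.toJobID parses a job ID string into a JobId record, MRApps.toString renders the record back to its canonical string form, and MRApps.taskType maps the web-service shorthand "m"/"r" to a TaskType. The import paths for JobId and TaskType and the sample job ID string are assumptions made for illustration and are not taken from the examples below.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class MRAppsQuickStart {
  public static void main(String[] args) {
    // Parse a job ID string into a JobId record (the sample ID is made up).
    JobId jobId = MRApps.toJobID("job_1234567890123_0001");

    // Render the record back to its canonical string form.
    System.out.println(MRApps.toString(jobId));

    // Map the web-service shorthand "m"/"r" to a TaskType enum value.
    TaskType mapType = MRApps.taskType("m");
    System.out.println(mapType);
  }
}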

Example 1: testTaskAttemptsSlash

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testTaskAttemptsSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 19, Source: TestAMWebServicesAttempts.java

Example 2: testJobCountersDefault

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testJobCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters/").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobCounters");
    verifyAMJobCounters(info, jobsMap.get(id));
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 17, Source: TestAMWebServicesJobs.java

Example 3: serviceInit

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Override
protected void serviceInit(Configuration conf) throws Exception {
  super.serviceInit(conf);
  commitThreadCancelTimeoutMs = conf.getInt(
      MRJobConfig.MR_AM_COMMITTER_CANCEL_TIMEOUT_MS,
      MRJobConfig.DEFAULT_MR_AM_COMMITTER_CANCEL_TIMEOUT_MS);
  commitWindowMs = conf.getLong(MRJobConfig.MR_AM_COMMIT_WINDOW_MS,
      MRJobConfig.DEFAULT_MR_AM_COMMIT_WINDOW_MS);
  try {
    fs = FileSystem.get(conf);
    JobID id = TypeConverter.fromYarn(context.getApplicationID());
    JobId jobId = TypeConverter.toYarn(id);
    String user = UserGroupInformation.getCurrentUser().getShortUserName();
    startCommitFile = MRApps.getStartJobCommitFile(conf, user, jobId);
    endCommitSuccessFile = MRApps.getEndJobCommitSuccessFile(conf, user, jobId);
    endCommitFailureFile = MRApps.getEndJobCommitFailureFile(conf, user, jobId);
  } catch (IOException e) {
    throw new YarnRuntimeException(e);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source: CommitterEventHandler.java

Example 4: requireJob

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
/**
 * Ensure that a JOB_ID was passed into the page.
 */
public void requireJob() {
  if ($(JOB_ID).isEmpty()) {
    badRequest("missing job ID");
    throw new RuntimeException("Bad Request: Missing job ID");
  }

  JobId jobID = MRApps.toJobID($(JOB_ID));
  app.setJob(app.context.getJob(jobID));
  if (app.getJob() == null) {
    notFound($(JOB_ID));
    throw new RuntimeException("Not Found: " + $(JOB_ID));
  }

  /* check for acl access */
  Job job = app.context.getJob(jobID);
  if (!checkAccess(job)) {
    accessDenied("User " + request().getRemoteUser() + " does not have " +
        "permission to view job " + $(JOB_ID));
    throw new RuntimeException("Access denied: User " +
        request().getRemoteUser() + " does not have permission to view job " +
        $(JOB_ID));
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 27, Source: AppController.java

Example 5: testJobsSlash

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testJobsSlash() throws JSONException, Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs/").accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", 1, arr.length());
  JSONObject info = arr.getJSONObject(0);
  Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
  VerifyJobsUtils.verifyHsJobPartial(info, job);

}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source: TestHsWebServicesJobs.java

Example 6: testJobIdDefault

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testJobIdDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
  }

}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source: TestHsWebServicesJobs.java

Example 7: verifyHsTask

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
public void verifyHsTask(JSONArray arr, Job job, String type)
    throws JSONException {
  for (Task task : job.getTasks().values()) {
    TaskId id = task.getID();
    String tid = MRApps.toString(id);
    Boolean found = false;
    if (type != null && task.getType() == MRApps.taskType(type)) {

      for (int i = 0; i < arr.length(); i++) {
        JSONObject info = arr.getJSONObject(i);
        if (tid.matches(info.getString("id"))) {
          found = true;
          verifyHsSingleTask(info, task);
        }
      }
      assertTrue("task with id: " + tid + " not in web service output", found);
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 20, Source: TestHsWebServicesTasks.java

Example 8: TaskInfo

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
public TaskInfo(Task task) {
  TaskType ttype = task.getType();
  this.type = ttype.toString();
  TaskReport report = task.getReport();
  this.startTime = report.getStartTime();
  this.finishTime = report.getFinishTime();
  this.state = report.getTaskState();
  this.elapsedTime = Times.elapsed(this.startTime, this.finishTime,
    this.state == TaskState.RUNNING);
  if (this.elapsedTime == -1) {
    this.elapsedTime = 0;
  }
  this.progress = report.getProgress() * 100;
  this.status = report.getStatus();
  this.id = MRApps.toString(task.getID());
  this.taskNum = task.getID().getId();
  this.successful = getSuccessfulAttempt(task);
  if (successful != null) {
    this.successfulAttempt = MRApps.toString(successful.getID());
  } else {
    this.successfulAttempt = "";
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 24, Source: TaskInfo.java

Example 9: testTasks

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testTasks() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 2, arr.length());

    verifyHsTask(arr, jobsMap.get(id), null);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 20, Source: TestHsWebServicesTasks.java

Example 10: getJobTasks

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@GET
@Path("/jobs/{jobid}/tasks")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
    @PathParam("jobid") String jid, @QueryParam("type") String type) {

  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  checkAccess(job, hsr);
  TasksInfo allTasks = new TasksInfo();
  for (Task task : job.getTasks().values()) {
    TaskType ttype = null;
    if (type != null && !type.isEmpty()) {
      try {
        ttype = MRApps.taskType(type);
      } catch (YarnRuntimeException e) {
        throw new BadRequestException("tasktype must be either m or r");
      }
    }
    if (ttype != null && task.getType() != ttype) {
      continue;
    }
    allTasks.add(new TaskInfo(task));
  }
  return allTasks;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 27, Source: AMWebServices.java

Example 11: getTaskAttempts

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Override
protected Collection<TaskAttempt> getTaskAttempts() {
  List<TaskAttempt> fewTaskAttemps = new ArrayList<TaskAttempt>();
  String taskTypeStr = $(TASK_TYPE);
  TaskType taskType = MRApps.taskType(taskTypeStr);
  String attemptStateStr = $(ATTEMPT_STATE);
  TaskAttemptStateUI neededState = MRApps
      .taskAttemptState(attemptStateStr);
  for (Task task : super.app.getJob().getTasks(taskType).values()) {
    Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
    for (TaskAttempt attempt : attempts.values()) {
      if (neededState.correspondsTo(attempt.getState())) {
        fewTaskAttemps.add(attempt);
      }
    }
  }
  return fewTaskAttemps;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 19, Source: AttemptsPage.java

Example 12: callWithJobClassLoader

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
/**
 * Executes the given action with the job classloader set as the configuration
 * classloader as well as the thread context class loader if the job
 * classloader is enabled. After the call, the original classloader is
 * restored.
 *
 * If the job classloader is enabled and the code needs to load user-supplied
 * classes via configuration or thread context classloader, this method should
 * be used in order to load them.
 *
 * @param conf the configuration on which the classloader will be set
 * @param action the callable action to be executed
 */
<T> T callWithJobClassLoader(Configuration conf, Action<T> action) {
  // if the job classloader is enabled, we may need it to load the (custom)
  // classes; we make the job classloader available and unset it once it is
  // done
  ClassLoader currentClassLoader = conf.getClassLoader();
  boolean setJobClassLoader =
      jobClassLoader != null && currentClassLoader != jobClassLoader;
  if (setJobClassLoader) {
    MRApps.setClassLoader(jobClassLoader, conf);
  }
  try {
    return action.call(conf);
  } finally {
    if (setJobClassLoader) {
      // restore the original classloader
      MRApps.setClassLoader(currentClassLoader, conf);
    }
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 33, Source: MRAppMaster.java

Example 13: testJobId

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testJobId() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId)
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
  }

}
 
Developer ID: naver, Project: hadoop, Lines of code: 19, Source: TestHsWebServicesJobs.java

Example 14: testTasksQueryMap

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testTasksQueryMap() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String type = "m";
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .queryParam("type", type).accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 1, arr.length());
    verifyHsTask(arr, jobsMap.get(id), type);
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source: TestHsWebServicesTasks.java

Example 15: testJobConfXML

import org.apache.hadoop.mapreduce.v2.util.MRApps; // import the required package/class
@Test
public void testJobConfXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
        .path("jobs").path(jobId).path("conf")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("conf");
    verifyHsJobConfXML(info, jobsMap.get(id));
  }
}
 
Developer ID: naver, Project: hadoop, Lines of code: 22, Source: TestHsWebServicesJobConf.java


Note: The org.apache.hadoop.mapreduce.v2.util.MRApps class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. Please refer to the corresponding project's License before distributing or using the code; do not reproduce this article without permission.