

Java AMInfo.getNodeManagerHost Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.mapreduce.v2.api.records.AMInfo.getNodeManagerHost. If you are wondering what AMInfo.getNodeManagerHost does and how it is used in practice, the examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.mapreduce.v2.api.records.AMInfo.


Five code examples of AMInfo.getNodeManagerHost are shown below, ordered by popularity.
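As a quick orientation before the examples, here is a minimal sketch of the typical pattern: getNodeManagerHost is combined with getNodeManagerHttpPort to form the NodeManager's web address, and with getNodeManagerPort to form its NodeId. This sketch is not taken from the projects below; it assumes a Job obtained from the MR application master context (as in the examples), and the class name AmNodeInfoSketch is purely illustrative.

import java.util.List;

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.yarn.api.records.NodeId;

public class AmNodeInfoSketch {

  // Returns "host:httpPort" for the NodeManager hosting the latest AM attempt.
  static String latestAmNodeHttpAddress(Job job) {
    List<AMInfo> amInfos = job.getAMInfos();
    AMInfo latest = amInfos.get(amInfos.size() - 1); // last entry = most recent attempt
    return latest.getNodeManagerHost() + ":" + latest.getNodeManagerHttpPort();
  }

  // Returns the NodeId ("host:rpcPort") of the NodeManager running a given AM attempt.
  static NodeId amNodeId(AMInfo amInfo) {
    return NodeId.newInstance(amInfo.getNodeManagerHost(), amInfo.getNodeManagerPort());
  }
}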

Example 1: verifyJobAttemptsGeneric

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; // import required for the AMInfo method used below
public void verifyJobAttemptsGeneric(Job job, String nodeHttpAddress,
    String nodeId, int id, long startTime, String containerId, String logsLink) {
  boolean attemptFound = false;
  for (AMInfo amInfo : job.getAMInfos()) {
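    // Find the AM attempt with the expected attempt id and verify its reported node and container fields.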
    if (amInfo.getAppAttemptId().getAttemptId() == id) {
      attemptFound = true;
      String nmHost = amInfo.getNodeManagerHost();
      int nmHttpPort = amInfo.getNodeManagerHttpPort();
      int nmPort = amInfo.getNodeManagerPort();
      WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
          + nmHttpPort, nodeHttpAddress);
      WebServicesTestUtils.checkStringMatch("nodeId",
          NodeId.newInstance(nmHost, nmPort).toString(), nodeId);
      assertTrue("startime not greater than 0", startTime > 0);
      WebServicesTestUtils.checkStringMatch("containerId", amInfo
          .getContainerId().toString(), containerId);

      String localLogsLink = ujoin("node", "containerlogs", containerId,
          job.getUserName());

      assertTrue("logsLink", logsLink.contains(localLogsLink));
    }
  }
  assertTrue("attempt: " + id + " was not found", attemptFound);
}
 
Developer: naver, Project: hadoop, Lines: 26, Source file: TestAMWebServicesJobs.java

Example 2: verifyHsJobAttemptsGeneric

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; // import required for the AMInfo method used below
public void verifyHsJobAttemptsGeneric(Job job, String nodeHttpAddress,
    String nodeId, int id, long startTime, String containerId, String logsLink) {
  boolean attemptFound = false;
  for (AMInfo amInfo : job.getAMInfos()) {
    if (amInfo.getAppAttemptId().getAttemptId() == id) {
      attemptFound = true;
      String nmHost = amInfo.getNodeManagerHost();
      int nmHttpPort = amInfo.getNodeManagerHttpPort();
      int nmPort = amInfo.getNodeManagerPort();
      WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
          + nmHttpPort, nodeHttpAddress);
      WebServicesTestUtils.checkStringMatch("nodeId",
          NodeId.newInstance(nmHost, nmPort).toString(), nodeId);
      assertTrue("startime not greater than 0", startTime > 0);
      WebServicesTestUtils.checkStringMatch("containerId", amInfo
          .getContainerId().toString(), containerId);

      String localLogsLink = join(
          "hsmockwebapp",
          ujoin("logs", nodeId, containerId, MRApps.toString(job.getID()),
              job.getUserName()));

      assertTrue("logsLink", logsLink.contains(localLogsLink));
    }
  }
  assertTrue("attempt: " + id + " was not found", attemptFound);
}
 
Developer: naver, Project: hadoop, Lines: 28, Source file: TestHsWebServicesJobs.java

Example 3: render

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; // import required for the AMInfo method used below
@Override protected void render(Block html) {
  String rmweb = $(RM_WEB);
  DIV<Hamlet> nav = html.
    div("#nav").
      h3("Cluster").
      ul().
        li().a(url(rmweb, "cluster", "cluster"), "About")._().
        li().a(url(rmweb, "cluster", "apps"), "Applications")._().
        li().a(url(rmweb, "cluster", "scheduler"), "Scheduler")._()._().
      h3("Application").
      ul().
        li().a(url("app/info"), "About")._().
        li().a(url("app"), "Jobs")._()._();
  if (app.getJob() != null) {
    String jobid = MRApps.toString(app.getJob().getID());
    List<AMInfo> amInfos = app.getJob().getAMInfos();
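    // The last entry in the AMInfos list corresponds to the current (most recent) application attempt.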
    AMInfo thisAmInfo = amInfos.get(amInfos.size()-1);
    String nodeHttpAddress = thisAmInfo.getNodeManagerHost() + ":" 
        + thisAmInfo.getNodeManagerHttpPort();
    nav.
      h3("Job").
      ul().
        li().a(url("job", jobid), "Overview")._().
        li().a(url("jobcounters", jobid), "Counters")._().
        li().a(url("conf", jobid), "Configuration")._().
        li().a(url("tasks", jobid, "m"), "Map tasks")._().
        li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
        li().a(".logslink", url(MRWebAppUtil.getYARNWebappScheme(),
            nodeHttpAddress, "node",
            "containerlogs", thisAmInfo.getContainerId().toString(), 
            app.getJob().getUserName()), 
            "AM Logs")._()._();
    if (app.getTask() != null) {
      String taskid = MRApps.toString(app.getTask().getID());
      nav.
        h3("Task").
        ul().
          li().a(url("task", taskid), "Task Overview")._().
          li().a(url("taskcounters", taskid), "Counters")._()._();
    }
  }
  nav.
    h3("Tools").
    ul().
      li().a("/conf", "Configuration")._().
      li().a("/logs", "Local logs")._().
      li().a("/stacks", "Server stacks")._().
      li().a("/jmx?qry=Hadoop:*", "Server metrics")._()._()._();
}
 
Developer: naver, Project: hadoop, Lines: 50, Source file: NavBlock.java

Example 4: render

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; // import required for the AMInfo method used below
@Override protected void render(Block html) {
  String rmweb = $(RM_WEB);
  DIV<Hamlet> nav = html.
    div("#nav").
      h3("Cluster").
      ul().
        li().a(url(rmweb, "cluster", "cluster"), "About")._().
        li().a(url(rmweb, "cluster", "apps"), "Applications")._().
        li().a(url(rmweb, "cluster", "scheduler"), "Scheduler")._()._().
      h3("Application").
      ul().
        li().a(url("app/info"), "About")._().
        li().a(url("app"), "Jobs")._()._();
  if (app.getJob() != null) {
    String jobid = MRApps.toString(app.getJob().getID());
    List<AMInfo> amInfos = app.getJob().getAMInfos();
    AMInfo thisAmInfo = amInfos.get(amInfos.size()-1);
    String nodeHttpAddress = thisAmInfo.getNodeManagerHost() + ":" 
        + thisAmInfo.getNodeManagerHttpPort();
    nav.
      h3("Job").
      ul().
        li().a(url("job", jobid), "Overview")._().
        li().a(url("jobcounters", jobid), "Counters")._().
        li().a(url("conf", jobid), "Configuration")._().
        li().a(url("tasks", jobid, "m"), "Map tasks")._().
        li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
        li().a(".logslink", url(MRWebAppUtil.getYARNWebappScheme(),
            nodeHttpAddress, "node",
            "containerlogs", thisAmInfo.getContainerId().toString(), 
            app.getJob().getUserName()), 
            "AM Logs")._()._();
    if (app.getTask() != null) {
      String taskid = MRApps.toString(app.getTask().getID());
      nav.
        h3("Task").
        ul().
          li().a(url("task", taskid), "Task Overview")._().
          li().a(url("taskcounters", taskid), "Counters")._()._();
    }
  }
  nav.
    h3("Tools").
    ul().
      li().a("/conf", "Configuration")._().
      li().a("/logs", "Local logs")._().
      li().a("/stacks", "Server stacks")._().
      li().a("/metrics", "Server metrics")._()._()._();
}
 
Developer: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines: 50, Source file: NavBlock.java

Example 5: render

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; // import required for the AMInfo method used below
@Override protected void render(Block html) {
  String rmweb = $(RM_WEB);
  DIV<Hamlet> nav = html.
    div("#nav").
      h3("Cluster").
      ul().
        li().a(url(rmweb, "cluster", "cluster"), "About")._().
        li().a(url(rmweb, "cluster", "apps"), "Applications")._().
        li().a(url(rmweb, "cluster", "scheduler"), "Scheduler")._()._().
      h3("Application").
      ul().
        li().a(url("app/info"), "About")._().
        li().a(url("app"), "Jobs")._()._();
  if (app.getJob() != null) {
    String jobid = MRApps.toString(app.getJob().getID());
    List<AMInfo> amInfos = app.getJob().getAMInfos();
    AMInfo thisAmInfo = amInfos.get(amInfos.size()-1);
    String nodeHttpAddress = thisAmInfo.getNodeManagerHost() + ":" 
        + thisAmInfo.getNodeManagerHttpPort();
    nav.
      h3("Job").
      ul().
        li().a(url("job", jobid), "Overview")._().
        li().a(url("jobcounters", jobid), "Counters")._().
        li().a(url("conf", jobid), "Configuration")._().
        li().a(url("tasks", jobid, "m"), "Map tasks")._().
        li().a(url("tasks", jobid, "r"), "Reduce tasks")._().
        li().a(".logslink", url(HttpConfig.getSchemePrefix(),
            nodeHttpAddress, "node",
            "containerlogs", thisAmInfo.getContainerId().toString(), 
            app.getJob().getUserName()), 
            "AM Logs")._()._();
    if (app.getTask() != null) {
      String taskid = MRApps.toString(app.getTask().getID());
      nav.
        h3("Task").
        ul().
          li().a(url("task", taskid), "Task Overview")._().
          li().a(url("taskcounters", taskid), "Counters")._()._();
    }
  }
  nav.
    h3("Tools").
    ul().
      li().a("/conf", "Configuration")._().
      li().a("/logs", "Local logs")._().
      li().a("/stacks", "Server stacks")._().
      li().a("/metrics", "Server metrics")._()._()._();
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 50, Source file: NavBlock.java


Note: The org.apache.hadoop.mapreduce.v2.api.records.AMInfo.getNodeManagerHost examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from community-contributed open-source projects; copyright of the source code remains with the original authors, and distribution and use are subject to each project's license. Please do not reproduce without permission.