

Java OutputRecord Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.metrics.spi.OutputRecord. If you are wondering what the OutputRecord class is for, how to use it, or what working OutputRecord code looks like, the curated class examples below should help.


The OutputRecord class belongs to the org.apache.hadoop.metrics.spi package. Nine code examples of the class are shown below, ordered by popularity by default.
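
Before the examples, here is a minimal, self-contained sketch of how OutputRecord instances are typically obtained and read. It follows the same pattern as the setUp and emitRecord examples below; the context name "demo", the record name "demoRecord", and the tag/metric values are illustrative only, and it assumes the legacy org.apache.hadoop.metrics API (metrics v1) is on the classpath.

import java.io.IOException;
import java.util.Collection;
import java.util.Map;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;

public class OutputRecordQuickStart {
  public static void main(String[] args) throws IOException {
    // An in-memory context that buffers records without sending them anywhere.
    NoEmitMetricsContext context = new NoEmitMetricsContext();
    context.init("demo", ContextFactory.getFactory());

    // Record one tag and one metric, then push the update into the context.
    MetricsRecord record = context.createRecord("demoRecord");
    record.setTag("host", "localhost");
    record.setMetric("requests", 42);
    record.update();

    // getAllRecords() exposes the buffered data as OutputRecord instances,
    // keyed by record name.
    Map<String, Collection<OutputRecord>> all = context.getAllRecords();
    for (Map.Entry<String, Collection<OutputRecord>> entry : all.entrySet()) {
      for (OutputRecord out : entry.getValue()) {
        for (String tagName : out.getTagNames()) {
          System.out.println(entry.getKey() + " tag " + tagName + "=" + out.getTag(tagName));
        }
        for (String metricName : out.getMetricNames()) {
          System.out.println(entry.getKey() + " metric " + metricName + "=" + out.getMetric(metricName));
        }
      }
    }
  }
}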

Example 1: makeMap

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
/**
 * Collects all metric data, and returns a map:
 *   contextName -> recordName -> [ (tag->tagValue), (metric->metricValue) ].
 * The values are either String or Number.  The final value is implemented
 * as a list of TagsMetricsPair.
 */
 Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
     Collection<MetricsContext> contexts) throws IOException {
  Map<String, Map<String, List<TagsMetricsPair>>> map = 
    new TreeMap<String, Map<String, List<TagsMetricsPair>>>();

  for (MetricsContext context : contexts) {
    Map<String, List<TagsMetricsPair>> records = 
      new TreeMap<String, List<TagsMetricsPair>>();
    map.put(context.getContextName(), records);
  
    for (Map.Entry<String, Collection<OutputRecord>> r : 
        context.getAllRecords().entrySet()) {
      List<TagsMetricsPair> metricsAndTags = 
        new ArrayList<TagsMetricsPair>();
      records.put(r.getKey(), metricsAndTags);
      for (OutputRecord outputRecord : r.getValue()) {
        TagMap tagMap = outputRecord.getTagsCopy();
        MetricMap metricMap = outputRecord.getMetricsCopy();
        metricsAndTags.add(new TagsMetricsPair(tagMap, metricMap));
      }
    }
  }
  return map;
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 31, Source: MetricsServlet.java
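
Besides the TagsMetricsPair wrapper used above, the same snapshot data can be read directly from an OutputRecord via getTagsCopy() and getMetricsCopy(). The sketch below is a minimal illustration of that; it assumes TagMap and MetricMap are the public nested classes of AbstractMetricsContext (the types MetricsServlet imports), and the dump helper name is made up for the example.

import java.util.Map;

import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
import org.apache.hadoop.metrics.spi.OutputRecord;

public class OutputRecordCopies {
  // Print the tag and metric snapshots held by a single OutputRecord.
  static void dump(OutputRecord outputRecord) {
    TagMap tags = outputRecord.getTagsCopy();          // defensive copy of the tags
    MetricMap metrics = outputRecord.getMetricsCopy(); // defensive copy of the metrics
    for (Map.Entry<String, Object> tag : tags.entrySet()) {
      System.out.println("tag    " + tag.getKey() + "=" + tag.getValue());
    }
    for (Map.Entry<String, Number> metric : metrics.entrySet()) {
      System.out.println("metric " + metric.getKey() + "=" + metric.getValue());
    }
  }
}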

Example 2: setUp

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
/**
 * Initializes, for testing, two NoEmitMetricsContext's, and adds one value 
 * to the first of them.
 */
@Override
public void setUp() throws IOException {
  nc1 = new NoEmitMetricsContext();
  nc1.init("test1", ContextFactory.getFactory());
  nc2 = new NoEmitMetricsContext();
  nc2.init("test2", ContextFactory.getFactory());
  contexts = new ArrayList<MetricsContext>();
  contexts.add(nc1);
  contexts.add(nc2);

  MetricsRecord r = nc1.createRecord("testRecord");
  
  r.setTag("testTag1", "testTagValue1");
  r.setTag("testTag2", "testTagValue2");
  r.setMetric("testMetric1", 1);
  r.setMetric("testMetric2", 33);
  r.update();

  Map<String, Collection<OutputRecord>> m = nc1.getAllRecords();
  assertEquals(1, m.size());
  assertEquals(1, m.values().size());
  Collection<OutputRecord> outputRecords = m.values().iterator().next();
  assertEquals(1, outputRecords.size());
  outputRecord = outputRecords.iterator().next();
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 30, Source: TestMetricsServlet.java
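
A follow-up check on this fixture might then read the tags and metrics back from the captured outputRecord. The test method below is a hypothetical illustration (not part of the original TestMetricsServlet), using only the getters shown in the other examples:

public void testOutputRecordContents() {
  // Tag values come back as Objects, metric values as Numbers.
  assertEquals("testTagValue1", outputRecord.getTag("testTag1"));
  assertEquals("testTagValue2", outputRecord.getTag("testTag2"));
  assertEquals(1, outputRecord.getMetric("testMetric1").intValue());
  assertEquals(33, outputRecord.getMetric("testMetric2").intValue());
}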

Example 3: emitRecord

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
@Override
public void emitRecord(String contextName, String recordName,
    OutputRecord outRec) {
  writer.print(iso8601());
  writer.print(" ");
  writer.print(contextName);
  writer.print(".");
  writer.print(recordName);
  String separator = ": ";
  for (String tagName : outRec.getTagNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(tagName);
    writer.print("=");
    writer.print(outRec.getTag(tagName));
  }
  for (String metricName : outRec.getMetricNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(metricName);
    writer.print("=");
    writer.print(outRec.getMetric(metricName));
  }
  writer.println();
}
 
Developer ID: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines of code: 26, Source: TimeStampingFileContext.java

Example 4: logMetrics

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
private static void logMetrics(ThriftMetrics metrics) throws Exception {
  if (!LOG.isDebugEnabled()) {
    return;
  }
  MetricsContext context = MetricsUtil.getContext( 
      ThriftMetrics.CONTEXT_NAME); 
  metrics.doUpdates(context); 
  for (String key : context.getAllRecords().keySet()) {
    for (OutputRecord record : context.getAllRecords().get(key)) {
      for (String name : record.getMetricNames()) {
        LOG.debug("metrics:" + name + " value:" +
            record.getMetric(name).intValue());
      }
    }
  }
}
 
Developer ID: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines of code: 17, Source: TestThriftHBaseServiceHandler.java

Example 5: emitRecord

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
/**
 * Emits a metrics record to a file.
 */
@InterfaceAudience.Private
public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
  writer.print(contextName);
  writer.print(".");
  writer.print(recordName);
  String separator = ": ";
  for (String tagName : outRec.getTagNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(tagName);
    writer.print("=");
    writer.print(outRec.getTag(tagName));
  }
  for (String metricName : outRec.getMetricNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(metricName);
    writer.print("=");
    writer.print(outRec.getMetric(metricName));
  }
  writer.println();
}
 
Developer ID: Nextzero, Project: hadoop-2.6.0-cdh5.4.3, Lines of code: 26, Source: FileContext.java
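
For reference, given the record built in the setUp examples (context test1, record testRecord), this method would write a single line roughly of the following shape, with tags first and metrics second, each group in the sorted order returned by getTagNames() and getMetricNames():

test1.testRecord: testTag1=testTagValue1, testTag2=testTagValue2, testMetric1=1, testMetric2=33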

Example 6: setUp

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
/**
 * Initializes, for testing, two NoEmitMetricsContext's, and adds one value 
 * to the first of them.
 */
public void setUp() throws IOException {
  nc1 = new NoEmitMetricsContext();
  nc1.init("test1", ContextFactory.getFactory());
  nc2 = new NoEmitMetricsContext();
  nc2.init("test2", ContextFactory.getFactory());
  contexts = new ArrayList<MetricsContext>();
  contexts.add(nc1);
  contexts.add(nc2);

  MetricsRecord r = nc1.createRecord("testRecord");
  
  r.setTag("testTag1", "testTagValue1");
  r.setTag("testTag2", "testTagValue2");
  r.setMetric("testMetric1", 1);
  r.setMetric("testMetric2", 33);
  r.update();

  Map<String, Collection<OutputRecord>> m = nc1.getAllRecords();
  assertEquals(1, m.size());
  assertEquals(1, m.values().size());
  Collection<OutputRecord> outputRecords = m.values().iterator().next();
  assertEquals(1, outputRecords.size());
  outputRecord = outputRecords.iterator().next();
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 29, Source: TestMetricsServlet.java

Example 7: emitRecord

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
public void emitRecord(String contextName, String recordName,
  OutputRecord outRec) 
throws IOException {
  // Setup so that the records have the proper leader names so they are
  // unambiguous at the ganglia level, and this prevents a lot of rework
  StringBuilder sb = new StringBuilder();
  sb.append(contextName);
  sb.append('.');
  sb.append(recordName);
  sb.append('.');
  int sbBaseLen = sb.length();

  // emit each metric in turn
  for (String metricName : outRec.getMetricNames()) {
    Object metric = outRec.getMetric(metricName);
    String type = typeTable.get(metric.getClass());
    if (type != null) {
      sb.append(metricName);
      emitMetric(sb.toString(), type, metric.toString());
      sb.setLength(sbBaseLen);
    } else {
      LOG.warn("Unknown metrics type: " + metric.getClass());
    }
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 26, Source: GangliaContext.java
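
The prefix-reuse idiom in this method (build "contextName.recordName." once, append each metric name, then trim the builder back with setLength) can be shown with a small standalone sketch; the context, record, and metric names below are made up for the illustration.

import java.util.Arrays;
import java.util.List;

public class MetricNamePrefixDemo {
  public static void main(String[] args) {
    // Build the "contextName.recordName." prefix once, then reuse the builder
    // for each metric by trimming it back to the prefix length.
    StringBuilder sb = new StringBuilder();
    sb.append("mapred").append('.').append("shuffleInput").append('.');
    int sbBaseLen = sb.length();

    List<String> metricNames = Arrays.asList("bytesRead", "failedFetches");
    for (String metricName : metricNames) {
      sb.append(metricName);
      System.out.println(sb);   // e.g. mapred.shuffleInput.bytesRead
      sb.setLength(sbBaseLen);  // drop the metric name, keep the prefix
    }
  }
}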

Example 8: emitRecord

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
@InterfaceAudience.Private
public void emitRecord(String contextName, String recordName,
  OutputRecord outRec) 
throws IOException {
  // Setup so that the records have the proper leader names so they are
  // unambiguous at the ganglia level, and this prevents a lot of rework
  StringBuilder sb = new StringBuilder();
  sb.append(contextName);
  sb.append('.');
  sb.append(recordName);
  sb.append('.');
  int sbBaseLen = sb.length();

  // emit each metric in turn
  for (String metricName : outRec.getMetricNames()) {
    Object metric = outRec.getMetric(metricName);
    String type = typeTable.get(metric.getClass());
    if (type != null) {
      sb.append(metricName);
      emitMetric(sb.toString(), type, metric.toString());
      sb.setLength(sbBaseLen);
    } else {
      LOG.warn("Unknown metrics type: " + metric.getClass());
    }
  }
}
 
Developer ID: Seagate, Project: hadoop-on-lustre, Lines of code: 27, Source: GangliaContext.java

Example 9: emitRecord

import org.apache.hadoop.metrics.spi.OutputRecord; // import the required package/class
/**
 * Emits a metrics record to a file.
 */
public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
  writer.print(contextName);
  writer.print(".");
  writer.print(recordName);
  String separator = ": ";
  for (String tagName : outRec.getTagNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(tagName);
    writer.print("=");
    writer.print(outRec.getTag(tagName));
  }
  for (String metricName : outRec.getMetricNames()) {
    writer.print(separator);
    separator = ", ";
    writer.print(metricName);
    writer.print("=");
    writer.print(outRec.getMetric(metricName));
  }
  writer.println();
}
 
Developer ID: thisisvoa, Project: hadoop-0.20, Lines of code: 25, Source: FileContext.java


Note: The org.apache.hadoop.metrics.spi.OutputRecord class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors, and copyright of the source code remains with those authors. For distribution and use, please refer to the License of the corresponding project; do not reproduce without permission.