

Java MetricsLongValue Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.metrics.util.MetricsLongValue. If you are wondering what the MetricsLongValue class is for, how to use it, or what it looks like in real code, the curated examples below may help.


The MetricsLongValue class belongs to the org.apache.hadoop.metrics.util package. Ten code examples of the class are shown below, sorted by popularity by default.
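
Before the examples, here is a minimal sketch of the typical MetricsLongValue lifecycle, assuming a plain MetricsRegistry and the standard Updater pattern from the old org.apache.hadoop.metrics API (the class name "ExampleMetrics" and the context/record names are illustrative, not from any of the projects below): the value is created against a registry, updated with set(), and pushed into a MetricsRecord on each metrics period.

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.util.MetricsLongValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;

// Minimal, self-contained sketch; names such as "example" and "exampleRecord" are illustrative.
public class ExampleMetrics implements Updater {
  private final MetricsRegistry registry = new MetricsRegistry();
  private final MetricsRecord metricsRecord;
  // A gauge-style long value registered under the name "lastProcessedTxId".
  private final MetricsLongValue lastProcessedTxId =
      new MetricsLongValue("lastProcessedTxId", registry);

  public ExampleMetrics() {
    MetricsContext context = MetricsUtil.getContext("example");
    metricsRecord = MetricsUtil.createRecord(context, "exampleRecord");
    context.registerUpdater(this); // doUpdates() is invoked once per metrics period
  }

  public void setLastProcessedTxId(long txId) {
    lastProcessedTxId.set(txId); // the gauge keeps the latest value until it is pushed
  }

  @Override
  public void doUpdates(MetricsContext unused) {
    synchronized (this) {
      lastProcessedTxId.pushMetric(metricsRecord); // writes the current value into the record
    }
    metricsRecord.update(); // emit the record to the configured metrics sink
  }
}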

Example 1: initParityMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private void initParityMetrics() {
  parityFiles = createParityMap();
  parityBlocks = createParityMap();
  parityBytes = createParityMap();
  parityLogical = createParityMap();
  for (Codec codec : Codec.getCodecs()) {
    String code = codec.id;
    String head = (code + "_parity_").toLowerCase();
    createParityMetrics(parityFiles, code, head + "files");
    createParityMetrics(parityBlocks, code, head + "blocks");
    createParityMetrics(parityBytes, code, head + "bytes");
    createParityMetrics(parityLogical, code, head + "logical");
    String savingName = ("saving_" + code).toLowerCase();
    savingForCode.put(code, new MetricsLongValue(savingName, registry));
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 17, Source: RaidNodeMetrics.java

Example 2: IPCLoggerChannelMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private IPCLoggerChannelMetrics(IPCLoggerChannel ch,
    MetricsRecord metricRecords, String name) {
  this.ch = ch;
  this.metricsRecord = metricRecords;

  writeEndToEndLatency = new MetricsTimeVaryingRate("writeEndToEndLatency_"
      + name, registry);
  writeRpcLatency = new MetricsTimeVaryingRate("writeRpcLatency_" + name,
      registry);

  currentQueuedEditsSizeBytes = new MetricsLongValue(
      "currentQueuedEditsSizeBytes_" + name, registry);
  currentLagTransactions = new MetricsLongValue("currentLagTransactions_"
      + name, registry);
  currentLagTimeMicros = new MetricsLongValue("currentLagTimeMicros_" + name,
      registry);
  isOutOfSync = new MetricsIntValue("isOutOfSync_" + name, registry);
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 19, Source: IPCLoggerChannelMetrics.java

Example 3: initPlacementMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private void initPlacementMetrics() {
  codecToMisplacedBlocks = new HashMap<String, Map<Integer, MetricsLongValue>>();
  for (Codec codec : Codec.getCodecs()) {
    Map<Integer, MetricsLongValue> m = new HashMap<Integer, MetricsLongValue>();
    for (int i = 0; i < MAX_MONITORED_MISPLACED_BLOCKS; ++i) {
      m.put(i, new MetricsLongValue(misplacedMetricHeader +
                                 "_" + codec.id + "_" + i, registry));
    }
    codecToMisplacedBlocks.put(codec.id, m);
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 12, Source: RaidNodeMetrics.java

Example 4: initCorruptFilesMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
public synchronized void initCorruptFilesMetrics(Configuration conf) {
  if (corruptFiles == null) {
    String[] dirs = DistBlockIntegrityMonitor.getCorruptMonitorDirs(conf);
    corruptFiles = new HashMap<String, MetricsLongValue>();
    for (String dir: dirs) {
      String name = dir + "_corrupt_files";
      corruptFiles.put(dir, new MetricsLongValue(name, registry));
    }
  }
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 11, Source: RaidNodeMetrics.java
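
Once the per-directory gauges exist, updating one is just a map lookup followed by set(). The following is a sketch with an illustrative helper name ("setCorruptFiles" is not taken from RaidNodeMetrics itself), showing how code elsewhere in the monitor would typically record a new count:

// Sketch: record the latest corrupt-file count for a monitored directory.
public synchronized void setCorruptFiles(String dir, long count) {
  MetricsLongValue metric = corruptFiles.get(dir);
  if (metric != null) {
    metric.set(count); // the gauge holds this value until the next pushMetric()
  }
}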

Example 5: createSourceMap

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private Map<String, Map<RaidState, MetricsLongValue>> createSourceMap() {
  Map<String, Map<RaidState, MetricsLongValue>> result =
      new HashMap<String, Map<RaidState, MetricsLongValue>>();
  for (Codec codec : Codec.getCodecs()) {
    Map<RaidState, MetricsLongValue> m =
        new HashMap<RaidState, MetricsLongValue>();
    for (RaidState state : RaidState.values()) {
      m.put(state, null);
    }
    m = new EnumMap<RaidState, MetricsLongValue>(m);
    result.put(codec.id, m);
  }
  return result;
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 15, Source: RaidNodeMetrics.java

Example 6: createParityMap

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private Map<String, MetricsLongValue> createParityMap() {
  Map<String, MetricsLongValue> m =
      new HashMap<String, MetricsLongValue>();
  for (Codec codec : Codec.getCodecs()) {
    m.put(codec.id, null);
  }
  return m;
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 9, Source: RaidNodeMetrics.java

Example 7: SepMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
public SepMetrics(String recordName) {
    this.recordName = recordName;
    metricsRegistry = new MetricsRegistry();
    sepProcessingRate = new MetricsTimeVaryingRate("sepProcessed", metricsRegistry);
    lastTimestampInputProcessed = new MetricsLongValue("lastSepTimestamp", metricsRegistry);

    context = MetricsUtil.getContext("repository");
    metricsRecord = MetricsUtil.createRecord(context, recordName);
    context.registerUpdater(this);
    mbean = new SepMetricsMXBean(this.metricsRegistry);
}
 
Developer ID: NGDATA, Project: hbase-indexer, Lines of code: 12, Source: SepMetrics.java
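
The constructor above registers the SepMetrics object as a metrics Updater, so its update callback is invoked once per metrics period. The following is a sketch based on the standard Updater contract, not the project's exact code, showing how lastTimestampInputProcessed and the rate metric would typically be pushed into the record:

// Sketch of the periodic update callback (field names taken from the constructor above;
// the actual body in hbase-indexer may differ).
public synchronized void doUpdates(MetricsContext unused) {
  sepProcessingRate.pushMetric(metricsRecord);
  lastTimestampInputProcessed.pushMetric(metricsRecord);
  metricsRecord.update(); // flush the record to the configured metrics context
}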

Example 8: createSourceMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private void createSourceMetrics(
    Map<String, Map<RaidState, MetricsLongValue>> m,
    String code, RaidState state, String name) {
  Map<RaidState, MetricsLongValue> innerMap = m.get(code);
  innerMap.put(state, new MetricsLongValue(name, registry));
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 7, Source: RaidNodeMetrics.java

Example 9: createParityMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
private void createParityMetrics(
    Map<String, MetricsLongValue> m,
    String code, String name) {
  m.put(code, new MetricsLongValue(name, registry));
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 6, Source: RaidNodeMetrics.java

Example 10: JournalMetrics

import org.apache.hadoop.metrics.util.MetricsLongValue; // import the required package/class
JournalMetrics(Journal journal) {
  this.journal = journal;

  // Create a record for NameNode metrics
  MetricsContext metricsContext = MetricsUtil.getContext("dfs");
  metricsRecord = MetricsUtil.createRecord(metricsContext, "journalnode");
  String journalId = journal.getJournalId();
  metricsRecord.setTag("journalid", journalId);
  metricsContext.registerUpdater(this);

  batchesWrittenWhileLagging = new MetricsTimeVaryingLong(
      "batchesWrittenWhileLagging_" + journalId, registry,
      "batchesWrittenWhileLagging");
  batchesWritten = new MetricsTimeVaryingLong("batchesWritten_" + journalId,
      registry, "batchesWritten");
  bytesWritten = new MetricsTimeVaryingLong("bytesWritten_" + journalId,
      registry, "bytesWritten");
  txnsWritten = new MetricsTimeVaryingLong("txnsWritten_" + journalId,
      registry, "txnsWritten");
  syncTime = new MetricsTimeVaryingRate("syncTimes_" + journalId, registry);

  lastWriterEpoch = new MetricsLongValue("lastWriterEpoch_" + journalId,
      registry);
  lastPromisedEpoch = new MetricsLongValue("lastPromisedEpoch_" + journalId,
      registry);
  lastWrittenTxId = new MetricsLongValue("lastWrittenTxId_" + journalId,
      registry);
  currentTxnsLag = new MetricsLongValue("currentTxnsLag_" + journalId,
      registry);
  
  // http related metrics
  numGetJournalDoGet = new MetricsTimeVaryingLong("numGetEditsServletDoGet_"
      + journalId, registry);
  numGetImageDoGet = new MetricsTimeVaryingLong("numGetImageServletDoGet_"
      + journalId, registry);
  sizeGetJournalDoGet = new MetricsTimeVaryingLong(
      "numListPathsServletDoGet_" + journalId, registry);

  LOG.info("Initializing JournalNodeMeterics using context object:"
      + metricsContext.getClass().getName());
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 42, Source: JournalMetrics.java
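
Because every metric in the constructor above is created against the shared registry, the periodic update can simply walk the registry instead of pushing each field by hand. The following is a sketch assuming the usual registerUpdater/doUpdates pattern; the actual hadoop-EAR implementation may differ:

// Sketch: push every metric registered above into the record, then emit it.
public void doUpdates(MetricsContext unused) {
  synchronized (this) {
    for (MetricsBase m : registry.getMetricsList()) {
      m.pushMetric(metricsRecord); // covers MetricsLongValue, MetricsTimeVaryingLong, etc.
    }
  }
  metricsRecord.update();
}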


Note: The org.apache.hadoop.metrics.util.MetricsLongValue class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.