本文整理汇总了Java中org.apache.hadoop.hbase.client.metrics.ScanMetrics.readFields方法的典型用法代码示例。如果您正苦于以下问题：Java ScanMetrics.readFields方法的具体用法？Java ScanMetrics.readFields怎么用？Java ScanMetrics.readFields使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hbase.client.metrics.ScanMetrics的用法示例。
在下文中一共展示了ScanMetrics.readFields方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: updateCounters
import org.apache.hadoop.hbase.client.metrics.ScanMetrics; //导入方法依赖的package包/类
/**
* If hbase runs on new version of mapreduce, RecordReader has access to
* counters thus can update counters based on scanMetrics.
* If hbase runs on old version of mapreduce, it won't be able to get
* access to counters and TableRecorderReader can't update counter values.
* @throws IOException
*/
/**
 * Pushes the scan metrics attached to the current Scan into the job's
 * MapReduce counters.
 *
 * <p>Counter access is only available on the new mapreduce API, where the
 * reflective {@code getCounter} method handle is non-null; on the old API
 * this method is a no-op. Metrics are carried as serialized bytes in the
 * {@code Scan.SCAN_ATTRIBUTES_METRICS_DATA} attribute and deserialized via
 * {@code ScanMetrics.readFields}. Reflection failures are logged at DEBUG
 * and otherwise ignored so a counter problem never fails the task.
 *
 * @throws IOException if deserializing the scan metrics fails
 */
private void updateCounters() throws IOException {
  // Without the reflective counter accessor (old mapreduce API) there is
  // nothing we can update.
  if (this.getCounter == null) {
    return;
  }
  byte[] metricsBytes =
      currentScan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
  if (metricsBytes == null || metricsBytes.length == 0) {
    // No metrics were attached to this scan; nothing to report.
    return;
  }
  // Rehydrate the ScanMetrics object from its Writable serialization.
  DataInputBuffer buffer = new DataInputBuffer();
  buffer.reset(metricsBytes, 0, metricsBytes.length);
  ScanMetrics metrics = new ScanMetrics();
  metrics.readFields(buffer);
  try {
    // Mirror each time-varying metric into a same-named job counter.
    for (MetricsTimeVaryingLong metric : metrics.getMetricsTimeVaryingLongArray()) {
      Counter counter = (Counter) this.getCounter.invoke(
          context, HBASE_COUNTER_GROUP_NAME, metric.getName());
      counter.increment(metric.getCurrentIntervalValue());
    }
    // Also record how often the scanner had to be restarted.
    Counter restarts = (Counter) this.getCounter.invoke(
        context, HBASE_COUNTER_GROUP_NAME, "NUM_SCANNER_RESTARTS");
    restarts.increment(numRestarts);
  } catch (Exception e) {
    // Best effort only: counter updates must never fail the task.
    LOG.debug("can't update counter." + StringUtils.stringifyException(e));
  }
}
示例2: getScanMetrics
import org.apache.hadoop.hbase.client.metrics.ScanMetrics; //导入方法依赖的package包/类
/**
 * Extracts and deserializes the {@link ScanMetrics} attached to the given
 * scan under the {@code Scan.SCAN_ATTRIBUTES_METRICS_DATA} attribute.
 *
 * @param scan the scan whose serialized metrics attribute is read
 * @return the deserialized scan metrics
 * @throws Exception if the attribute is missing (assertion failure) or
 *         deserialization fails
 */
private ScanMetrics getScanMetrics(Scan scan) throws Exception {
  byte[] rawBytes = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
  assertTrue("Serialized metrics were not found.", rawBytes != null);
  // Decode the Writable-serialized bytes back into a ScanMetrics instance.
  DataInputBuffer input = new DataInputBuffer();
  input.reset(rawBytes, 0, rawBytes.length);
  ScanMetrics result = new ScanMetrics();
  result.readFields(input);
  return result;
}
示例3: updateCounters
import org.apache.hadoop.hbase.client.metrics.ScanMetrics; //导入方法依赖的package包/类
/**
 * Copies the scan metrics serialized on the current Scan into the job's
 * counters via the reflectively-obtained {@code getCounter} method.
 *
 * <p>No-op when either the counter accessor is unavailable (old mapreduce
 * API) or the scan carries no serialized metrics. Any reflection error is
 * swallowed after a DEBUG log so counter bookkeeping never fails the task.
 *
 * @throws IOException if deserializing the scan metrics fails
 */
private void updateCounters() throws IOException {
  // Counters are reachable only through the new mapreduce API's accessor.
  if (this.getCounter == null) {
    return;
  }
  byte[] payload = currentScan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
  if (payload == null || payload.length == 0) {
    return;
  }
  // Deserialize the metrics from their Writable byte form.
  DataInputBuffer dataIn = new DataInputBuffer();
  dataIn.reset(payload, 0, payload.length);
  ScanMetrics deserialized = new ScanMetrics();
  deserialized.readFields(dataIn);
  MetricsTimeVaryingLong[] varyingLongs = deserialized.getMetricsTimeVaryingLongArray();
  try {
    // One counter per metric, keyed by the metric's own name.
    for (MetricsTimeVaryingLong varyingLong : varyingLongs) {
      ((Counter) this.getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
          varyingLong.getName())).increment(varyingLong.getCurrentIntervalValue());
    }
    // Track scanner restarts as a dedicated counter.
    ((Counter) this.getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
        "NUM_SCANNER_RESTARTS")).increment(numRestarts);
  } catch (Exception e) {
    // Deliberately best-effort: log and continue rather than fail the task.
    LOG.debug("can't update counter." + StringUtils.stringifyException(e));
  }
}