This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.Counter. If you have been wondering what the Counter class does and how to use it, the curated examples below should help.
The Counter class lives in the org.apache.hadoop.mapreduce package. Fifteen code examples are shown below, ordered by popularity by default.
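As a quick primer before the examples: the most common Counter pattern is a user-defined enum counter that tasks increment and the driver reads back after the job finishes. Below is a minimal sketch of that pattern; the enum, class, and method names are illustrative and not taken from any example in this article.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

public class CounterUsageSketch {

  // Hypothetical counter key; any enum constant can serve as one.
  enum MyCounters { EMPTY_LINES }

  static class LineMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      if (value.toString().trim().isEmpty()) {
        // getCounter(Enum) creates the counter on first use; the framework
        // aggregates increments from all tasks into the job-level total.
        context.getCounter(MyCounters.EMPTY_LINES).increment(1);
      }
    }
  }

  // After job.waitForCompletion(true), the driver reads the aggregated value:
  static long emptyLines(Job job) throws IOException {
    Counter c = job.getCounters().findCounter(MyCounters.EMPTY_LINES);
    return c.getValue();
  }
}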
Example 1: ReduceContextImpl
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
public ReduceContextImpl(Configuration conf, TaskAttemptID taskid,
                         RawKeyValueIterator input,
                         Counter inputKeyCounter,
                         Counter inputValueCounter,
                         RecordWriter<KEYOUT,VALUEOUT> output,
                         OutputCommitter committer,
                         StatusReporter reporter,
                         RawComparator<KEYIN> comparator,
                         Class<KEYIN> keyClass,
                         Class<VALUEIN> valueClass
                        ) throws InterruptedException, IOException {
  super(conf, taskid, output, committer, reporter);
  this.input = input;
  this.inputKeyCounter = inputKeyCounter;
  this.inputValueCounter = inputValueCounter;
  this.comparator = comparator;
  this.serializationFactory = new SerializationFactory(conf);
  this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
  this.keyDeserializer.open(buffer);
  this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
  this.valueDeserializer.open(buffer);
  hasMore = input.next();
  this.keyClass = keyClass;
  this.valueClass = valueClass;
  this.conf = conf;
  this.taskid = taskid;
}
Example 2: JobMetrics
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
public JobMetrics(Job job, String bytesReplicatedKey) {
  Builder<String, Long> builder = ImmutableMap.builder();
  if (job != null) {
    Counters counters;
    try {
      counters = job.getCounters();
    } catch (IOException e) {
      throw new CircusTrainException("Unable to get counters from job.", e);
    }
    if (counters != null) {
      for (CounterGroup group : counters) {
        for (Counter counter : group) {
          builder.put(DotJoiner.join(group.getName(), counter.getName()), counter.getValue());
        }
      }
    }
  }
  metrics = builder.build();
  Long bytesReplicatedValue = metrics.get(bytesReplicatedKey);
  if (bytesReplicatedValue != null) {
    bytesReplicated = bytesReplicatedValue;
  } else {
    bytesReplicated = 0L;
  }
}
Example 3: countersToJSON
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
@Private
public JsonNode countersToJSON(Counters counters) {
  ObjectMapper mapper = new ObjectMapper();
  ArrayNode nodes = mapper.createArrayNode();
  if (counters != null) {
    for (CounterGroup counterGroup : counters) {
      ObjectNode groupNode = nodes.addObject();
      groupNode.put("NAME", counterGroup.getName());
      groupNode.put("DISPLAY_NAME", counterGroup.getDisplayName());
      ArrayNode countersNode = groupNode.putArray("COUNTERS");
      for (Counter counter : counterGroup) {
        ObjectNode counterNode = countersNode.addObject();
        counterNode.put("NAME", counter.getName());
        counterNode.put("DISPLAY_NAME", counter.getDisplayName());
        counterNode.put("VALUE", counter.getValue());
      }
    }
  }
  return nodes;
}
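If you need the JSON as a string (for logging or an HTTP response, say), the node tree returned above can be serialized with the same Jackson ObjectMapper. A hedged sketch; note that countersToJSON is annotated @Private in the snippet, so calling it from outside its owning class is an assumption made here purely for illustration:

// Hypothetical caller; `job` is assumed to be a completed org.apache.hadoop.mapreduce.Job.
String countersAsJsonString(Job job) throws IOException {
  Counters counters = job.getCounters();       // may be null if the job reported none
  JsonNode nodes = countersToJSON(counters);   // the method from Example 3
  return new ObjectMapper().writeValueAsString(nodes);
}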
Example 4: write
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * GenericGroup ::= displayName #counter counter*
 */
@Override
public synchronized void write(DataOutput out) throws IOException {
  Text.writeString(out, displayName);
  WritableUtils.writeVInt(out, counters.size());
  for (Counter counter : counters.values()) {
    counter.write(out);
  }
}
Example 5: AbstractCounters
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * Construct from another counters object.
 * @param <C1> type of the other counter
 * @param <G1> type of the other counter group
 * @param counters the counters object to copy
 * @param groupFactory the factory for new groups
 */
@InterfaceAudience.Private
public <C1 extends Counter, G1 extends CounterGroupBase<C1>>
AbstractCounters(AbstractCounters<C1, G1> counters,
                 CounterGroupFactory<C, G> groupFactory) {
  this.groupFactory = groupFactory;
  for (G1 group : counters) {
    String name = group.getName();
    G newGroup = groupFactory.newGroup(name, group.getDisplayName(), limits);
    (isFrameworkGroup(name) ? fgroups : groups).put(name, newGroup);
    for (Counter counter : group) {
      newGroup.addCounter(counter.getName(), counter.getDisplayName(),
                          counter.getValue());
    }
  }
}
Example 6: toString
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * Return textual representation of the counter values.
 * @return the string
 */
@Override
public synchronized String toString() {
  StringBuilder sb = new StringBuilder("Counters: " + countCounters());
  for (G group : this) {
    sb.append("\n\t").append(group.getDisplayName());
    for (Counter counter : group) {
      sb.append("\n\t\t").append(counter.getDisplayName()).append("=")
        .append(counter.getValue());
    }
  }
  return sb.toString();
}
Example 7: write
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * FileSystemGroup ::= #scheme (scheme #counter (key value)*)*
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVInt(out, map.size()); // #scheme
  for (Map.Entry<String, Object[]> entry : map.entrySet()) {
    WritableUtils.writeString(out, entry.getKey()); // scheme
    // #counter for the above scheme
    WritableUtils.writeVInt(out, numSetCounters(entry.getValue()));
    for (Object counter : entry.getValue()) {
      if (counter == null) continue;
      @SuppressWarnings("unchecked")
      FSCounter c = (FSCounter) ((Counter)counter).getUnderlyingCounter();
      WritableUtils.writeVInt(out, c.key.ordinal()); // key
      WritableUtils.writeVLong(out, c.getValue()); // value
    }
  }
}
Example 8: toAvro
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.name = new Utf8(name);
  result.groups = new ArrayList<JhCounterGroup>(0);
  if (counters == null) return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.name = new Utf8(group.getName());
    g.displayName = new Utf8(group.getDisplayName());
    g.counts = new ArrayList<JhCounter>(group.size());
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.name = new Utf8(counter.getName());
      c.displayName = new Utf8(counter.getDisplayName());
      c.value = counter.getValue();
      g.counts.add(c);
    }
    result.groups.add(g);
  }
  return result;
}
Example 9: updateCounters
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts,
    Method getCounter, TaskAttemptContext context, long numStale) {
  // we can get access to counters only if hbase uses new mapreduce APIs
  if (getCounter == null) {
    return;
  }
  try {
    for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
      Counter ct = (Counter) getCounter.invoke(context,
          HBASE_COUNTER_GROUP_NAME, entry.getKey());
      ct.increment(entry.getValue());
    }
    ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
        "NUM_SCANNER_RESTARTS")).increment(numScannerRestarts);
    ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
        "NUM_SCAN_RESULTS_STALE")).increment(numStale);
  } catch (Exception e) {
    LOG.debug("can't update counter." + StringUtils.stringifyException(e));
  }
}
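On Hadoop 2, where the new-API TaskAttemptContext exposes getCounter(String, String) directly, the reflective dance above collapses to plain method calls. A minimal sketch under that assumption; HBASE_COUNTER_GROUP_NAME, scanMetrics, and numScannerRestarts are the same names used in the example:

// Direct, non-reflective equivalent; assumes a Hadoop 2 TaskAttemptContext.
for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
  context.getCounter(HBASE_COUNTER_GROUP_NAME, entry.getKey())
      .increment(entry.getValue());
}
context.getCounter(HBASE_COUNTER_GROUP_NAME, "NUM_SCANNER_RESTARTS")
    .increment(numScannerRestarts);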
Example 10: verifyExpectedValues
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * Verify the values in the Counters against the expected number of entries written.
 *
 * @param expectedReferenced
 *          Expected number of referenced entries
 * @param counters
 *          The Job's Counters object
 * @return True if the values match what's expected, false otherwise
 */
protected boolean verifyExpectedValues(long expectedReferenced, Counters counters) {
  final Counter referenced = counters.findCounter(Counts.REFERENCED);
  final Counter unreferenced = counters.findCounter(Counts.UNREFERENCED);
  boolean success = true;
  if (expectedReferenced != referenced.getValue()) {
    LOG.error("Expected referenced count does not match with actual referenced count. " +
        "expected referenced=" + expectedReferenced + " ,actual=" + referenced.getValue());
    success = false;
  }
  if (unreferenced.getValue() > 0) {
    final Counter multiref = counters.findCounter(Counts.EXTRAREFERENCES);
    boolean couldBeMultiRef = (multiref.getValue() == unreferenced.getValue());
    LOG.error("Unreferenced nodes were not expected. Unreferenced count=" + unreferenced.getValue()
        + (couldBeMultiRef ? "; could be due to duplicate random numbers" : ""));
    success = false;
  }
  return success;
}
Example 11: verifyUnexpectedValues
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
/**
 * Verify that the Counters don't contain values which indicate an outright failure from the
 * Reducers.
 *
 * @param counters
 *          The Job's counters
 * @return True if the "bad" counter objects are 0, false otherwise
 */
protected boolean verifyUnexpectedValues(Counters counters) {
  final Counter undefined = counters.findCounter(Counts.UNDEFINED);
  final Counter lostfamilies = counters.findCounter(Counts.LOST_FAMILIES);
  boolean success = true;
  if (undefined.getValue() > 0) {
    LOG.error("Found an undefined node. Undefined count=" + undefined.getValue());
    success = false;
  }
  if (lostfamilies.getValue() > 0) {
    LOG.error("Found nodes which lost big or tiny families, count=" + lostfamilies.getValue());
    success = false;
  }
  return success;
}
Example 12: TaskCounterGroupInfo
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
public TaskCounterGroupInfo(String name, CounterGroup group) {
  this.counterGroupName = name;
  this.counter = new ArrayList<TaskCounterInfo>();
  for (Counter c : group) {
    TaskCounterInfo cinfo = new TaskCounterInfo(c.getName(), c.getValue());
    this.counter.add(cinfo);
  }
}
Example 13: toAvro
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.setName(new Utf8(name));
  result.setGroups(new ArrayList<JhCounterGroup>(0));
  if (counters == null) return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.setName(new Utf8(group.getName()));
    g.setDisplayName(new Utf8(group.getDisplayName()));
    g.setCounts(new ArrayList<JhCounter>(group.size()));
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.setName(new Utf8(counter.getName()));
      c.setDisplayName(new Utf8(counter.getDisplayName()));
      c.setValue(counter.getValue());
      g.getCounts().add(c);
    }
    result.getGroups().add(g);
  }
  return result;
}
Example 14: bigItemCount
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
private boolean bigItemCount(String output)
    throws IOException, ClassNotFoundException, InterruptedException {
  Job job = Job.getInstance(this.getConf(), "Counting items from " + this.input);
  job.setJarByClass(TopPIoverHadoop.class);
  job.setInputFormatClass(TextInputFormat.class);
  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  job.setOutputKeyClass(IntWritable.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(this.input));
  FileOutputFormat.setOutputPath(job, new Path(output));
  job.setMapperClass(ItemBigCountingMapper.class);
  job.setReducerClass(ItemBigCountingReducer.class);
  boolean success = job.waitForCompletion(true);
  if (success) {
    Counter rebasingMaxID = job.getCounters().findCounter(TaskCounter.REDUCE_OUTPUT_RECORDS);
    this.getConf().setInt(KEY_REBASING_MAX_ID, (int) rebasingMaxID.getValue());
  }
  return success;
}
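The same lookup works for any built-in counter: once waitForCompletion returns, fetch the value via the corresponding TaskCounter constant. For instance, assuming the same completed `job` as above:

// Reading other framework counters the same way; TaskCounter is
// org.apache.hadoop.mapreduce.TaskCounter.
long mapIn  = job.getCounters().findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
long mapOut = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();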
Example 15: CounterGroupInfo
import org.apache.hadoop.mapreduce.Counter; // import the required package/class
public CounterGroupInfo(String name, CounterGroup group, CounterGroup mg,
    CounterGroup rg) {
  this.counterGroupName = name;
  this.counter = new ArrayList<CounterInfo>();
  for (Counter c : group) {
    Counter mc = mg == null ? null : mg.findCounter(c.getName());
    Counter rc = rg == null ? null : rg.findCounter(c.getName());
    CounterInfo cinfo = new CounterInfo(c, mc, rc);
    this.counter.add(cinfo);
  }
}