本文整理匯總了Java中org.apache.hadoop.mapred.Reducer.reduce方法的典型用法代碼示例。如果您正苦於以下問題:Java Reducer.reduce方法的具體用法?Java Reducer.reduce怎麽用?Java Reducer.reduce使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.mapred.Reducer
的用法示例。
在下文中一共展示了Reducer.reduce方法的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: combineAndSpill
import org.apache.hadoop.mapred.Reducer; //導入方法依賴的package包/類
/**
 * Feeds the sorted in-memory key/value stream through the user-supplied
 * combiner and spills the combined records via {@code combineCollector}.
 *
 * @param kvIter    sorted raw key/value records to be combined
 * @param inCounter counter incremented for each record consumed by the combiner
 * @throws IOException if the combiner, its cleanup, or the iterator fails
 */
private void combineAndSpill(
RawKeyValueIterator kvIter,
Counters.Counter inCounter) throws IOException {
JobConf job = jobConf;
// Keys are grouped with the combiner-specific grouping comparator,
// which may differ from the sort comparator.
RawComparator<K> groupComparator =
(RawComparator<K>) job.getCombinerKeyGroupingComparator();
Class<K> mapOutKeyClass = (Class<K>) job.getMapOutputKeyClass();
Class<V> mapOutValClass = (Class<V>) job.getMapOutputValueClass();
Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
try {
CombineValuesIterator combineInput = new CombineValuesIterator(
kvIter, groupComparator, mapOutKeyClass, mapOutValClass, job,
Reporter.NULL, inCounter);
// One reduce() call per distinct key; the iterator itself supplies
// that key's values and is advanced past them by nextKey().
while (combineInput.more()) {
combiner.reduce(combineInput.getKey(), combineInput,
combineCollector, Reporter.NULL);
combineInput.nextKey();
}
} finally {
// Always close so user cleanup in Reducer.close() runs even on failure.
combiner.close();
}
}
示例2: combineAndSpill
import org.apache.hadoop.mapred.Reducer; //導入方法依賴的package包/類
/**
 * Runs the user-supplied combiner over the sorted in-memory key/value
 * stream, spilling its output through {@code combineCollector}.
 *
 * @param kvIter    sorted raw key/value records to be combined
 * @param inCounter counter incremented for each record consumed by the combiner
 * @throws IOException if the combiner, its cleanup, or the iterator fails
 */
private void combineAndSpill(
RawKeyValueIterator kvIter,
Counters.Counter inCounter) throws IOException {
JobConf job = jobConf;
// This variant groups keys with the job's output-key (sort) comparator.
RawComparator<K> groupComparator =
(RawComparator<K>) job.getOutputKeyComparator();
Class<K> mapOutKeyClass = (Class<K>) job.getMapOutputKeyClass();
Class<V> mapOutValClass = (Class<V>) job.getMapOutputValueClass();
Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
try {
CombineValuesIterator combineInput = new CombineValuesIterator(
kvIter, groupComparator, mapOutKeyClass, mapOutValClass, job,
Reporter.NULL, inCounter);
// One reduce() call per distinct key; nextKey() advances the iterator
// past the values that reduce() just consumed.
while (combineInput.more()) {
combiner.reduce(combineInput.getKey(), combineInput,
combineCollector, Reporter.NULL);
combineInput.nextKey();
}
} finally {
// Always close so user cleanup in Reducer.close() runs even on failure.
combiner.close();
}
}
示例3: runOldCombiner
import org.apache.hadoop.mapred.Reducer; //導入方法依賴的package包/類
/**
 * Runs the old-API (mapred) combiner over the sorted raw iterator,
 * appending the combined records to the given IFile writer.
 *
 * @param rawIter sorted raw key/value input to combine
 * @param writer  sink receiving the combined output records
 * @throws IOException if combiner instantiation, reduction, or writing fails
 */
private void runOldCombiner(final TezRawKeyValueIterator rawIter, final Writer writer) throws IOException {
Class<? extends Reducer> reducerClazz = (Class<? extends Reducer>) conf.getClass("mapred.combiner.class", null, Reducer.class);
Reducer combiner = ReflectionUtils.newInstance(reducerClazz, conf);
OutputCollector collector = new OutputCollector() {
@Override
public void collect(Object key, Object value) throws IOException {
writer.append(key, value);
}
};
try {
CombinerValuesIterator values = new CombinerValuesIterator(rawIter, keyClass, valClass, comparator);
// One reduce() call per distinct key, with that key's grouped values.
while (values.moveToNext()) {
combiner.reduce(values.getKey(), values.getValues().iterator(), collector, reporter);
}
} finally {
// BUGFIX: the combiner was never closed. Old-API Reducer extends
// Closeable, and user combiners may flush or release resources in
// close() — mirror Hadoop's own combineAndSpill cleanup.
combiner.close();
}
}
示例4: runOldCombiner
import org.apache.hadoop.mapred.Reducer; //導入方法依賴的package包/類
/**
 * Runs the old-API (mapred) combiner over the sorted raw iterator,
 * appending combined records to the given IFile writer and counting
 * each emitted record.
 *
 * @param rawIter sorted raw key/value input to combine
 * @param writer  sink receiving the combined output records
 * @throws IOException if combiner instantiation, reduction, or writing fails
 */
private void runOldCombiner(final TezRawKeyValueIterator rawIter, final Writer writer) throws IOException {
Class<? extends Reducer> reducerClazz = (Class<? extends Reducer>) conf.getClass("mapred.combiner.class", null, Reducer.class);
Reducer combiner = ReflectionUtils.newInstance(reducerClazz, conf);
OutputCollector collector = new OutputCollector() {
@Override
public void collect(Object key, Object value) throws IOException {
writer.append(key, value);
// Track every record the combiner emits.
combineOutputRecordsCounter.increment(1);
}
};
try {
CombinerValuesIterator values = new CombinerValuesIterator(rawIter, keyClass, valClass, comparator);
// One reduce() call per distinct key, with that key's grouped values.
while (values.moveToNext()) {
combiner.reduce(values.getKey(), values.getValues().iterator(), collector, reporter);
}
} finally {
// BUGFIX: the combiner was never closed. Old-API Reducer extends
// Closeable, and user combiners may flush or release resources in
// close() — mirror Hadoop's own combineAndSpill cleanup.
combiner.close();
}
}
示例5: runOldReducer
import org.apache.hadoop.mapred.Reducer; //導入方法依賴的package包/類
/**
 * Drives the user's old-API (mapred) reducer over the grouped key/values
 * read from {@code input}, writing results through {@code output}.
 *
 * <p>Cleanup is deliberately NOT a plain finally block: on the success
 * path {@code reducer.close()} may itself throw (and that IOException
 * propagates); on the failure path close() is retried once with any
 * secondary IOException suppressed so the original error is rethrown.
 *
 * @param job        job configuration supplying the reducer class
 * @param reporter   progress/counter reporter passed to the reducer
 * @param input      source of grouped key/values
 * @param comparator key comparator (unused here — presumably consumed by
 *                   ReduceValuesIterator setup elsewhere; TODO confirm)
 * @param keyClass   reduce input key class (unused in this visible body)
 * @param valueClass reduce input value class (unused in this visible body)
 * @param output     sink for reducer output records
 * @throws IOException          if reduction, writing, or cleanup fails
 * @throws InterruptedException declared for callers; no visible wait here
 */
void runOldReducer(JobConf job,
final MRTaskReporter reporter,
KeyValuesReader input,
RawComparator comparator,
Class keyClass,
Class valueClass,
final KeyValueWriter output) throws IOException, InterruptedException {
Reducer reducer =
ReflectionUtils.newInstance(job.getReducerClass(), job);
// make output collector
OutputCollector collector =
new OutputCollector() {
public void collect(Object key, Object value)
throws IOException {
output.write(key, value);
}
};
// apply reduce function
try {
ReduceValuesIterator values =
new ReduceValuesIterator(
input, reporter, reduceInputValueCounter);
values.informReduceProgress();
// One reduce() call per distinct key; the iterator supplies that
// key's values and reports progress after each group.
while (values.more()) {
reduceInputKeyCounter.increment(1);
reducer.reduce(values.getKey(), values, collector, reporter);
values.informReduceProgress();
}
// Set progress to 1.0f if there was no exception,
reporter.setProgress(1.0f);
//Clean up: repeated in catch block below
reducer.close();
//End of clean up.
} catch (IOException ioe) {
try {
// Best-effort close; suppress so the original IOException wins.
reducer.close();
} catch (IOException ignored) {
}
throw ioe;
}
}