This article compiles typical usage examples of the Java class org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction. If you are unsure what the HadoopReduceFunction class does or how to use it, the curated code examples below should help.
The HadoopReduceFunction class belongs to the org.apache.flink.hadoopcompatibility.mapred package. Four code examples are shown below, sorted by popularity by default.
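
Before the examples, here is a minimal, self-contained sketch of the general pattern, assuming Flink's DataSet API with the flink-hadoop-compatibility dependency on the classpath; the ValueCntReducer class and the sample data are illustrative, not taken from the original examples. HadoopReduceFunction wraps a standard org.apache.hadoop.mapred.Reducer so Flink can run it as a GroupReduceFunction over a grouped DataSet of Tuple2<key, value> records.

import java.io.IOException;
import java.util.Iterator;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class HadoopReduceFunctionSketch {

    // A plain Hadoop Reducer (illustrative) that counts the values observed per key.
    public static class ValueCntReducer implements Reducer<IntWritable, Text, IntWritable, IntWritable> {

        @Override
        public void reduce(IntWritable key, Iterator<Text> values,
                OutputCollector<IntWritable, IntWritable> out, Reporter reporter) throws IOException {
            int cnt = 0;
            while (values.hasNext()) {
                values.next();
                cnt++;
            }
            out.collect(key, new IntWritable(cnt));
        }

        @Override
        public void configure(JobConf conf) {}

        @Override
        public void close() throws IOException {}
    }

    public static void main(String[] args) throws Exception {
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Illustrative input data: (key, value) pairs using Hadoop Writable types.
        DataSet<Tuple2<IntWritable, Text>> input = env.fromElements(
                new Tuple2<>(new IntWritable(1), new Text("a")),
                new Tuple2<>(new IntWritable(1), new Text("b")),
                new Tuple2<>(new IntWritable(2), new Text("c")));

        // Wrap the Hadoop Reducer; HadoopReduceFunction acts as a Flink GroupReduceFunction.
        DataSet<Tuple2<IntWritable, IntWritable>> counts = input
                .groupBy(0)
                .reduceGroup(new HadoopReduceFunction<IntWritable, Text, IntWritable, IntWritable>(
                        new ValueCntReducer()));

        counts.print();
    }
}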
Example 1: testStandardGrouping
import org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction; // import the featured package/class

@Test
public void testStandardGrouping() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple2<IntWritable, Text>> ds = HadoopTestData.getKVPairDataSet(env).
            map(new Mapper1());

    DataSet<Tuple2<IntWritable, IntWritable>> commentCnts = ds.
            groupBy(0).
            reduceGroup(new HadoopReduceFunction<IntWritable, Text, IntWritable, IntWritable>(new CommentCntReducer()));

    String resultPath = tempFolder.newFile().toURI().toString();
    commentCnts.writeAsText(resultPath);
    env.execute();

    String expected = "(0,0)\n" +
            "(1,3)\n" +
            "(2,5)\n" +
            "(3,5)\n" +
            "(4,2)\n";

    compareResultsByLinesInMemory(expected, resultPath);
}
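
The Mapper1 and CommentCntReducer helpers come from the surrounding test class and are not shown. As a hypothetical sketch, a CommentCntReducer consistent with the expected output could count, per key, how many values start with "Comment":

// Hypothetical sketch of the CommentCntReducer referenced above: counts,
// per key, how many values start with "Comment".
public static class CommentCntReducer
        implements Reducer<IntWritable, Text, IntWritable, IntWritable> {

    @Override
    public void reduce(IntWritable key, Iterator<Text> values,
            OutputCollector<IntWritable, IntWritable> out, Reporter reporter) throws IOException {
        int commentCnt = 0;
        while (values.hasNext()) {
            if (values.next().toString().startsWith("Comment")) {
                commentCnt++;
            }
        }
        out.collect(key, new IntWritable(commentCnt));
    }

    @Override
    public void configure(JobConf conf) {}

    @Override
    public void close() throws IOException {}
}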
Example 2: testConfigurationViaJobConf
import org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction; // import the featured package/class

@Test
public void testConfigurationViaJobConf() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    JobConf conf = new JobConf();
    conf.set("my.cntPrefix", "Hello");

    DataSet<Tuple2<IntWritable, Text>> ds = HadoopTestData.getKVPairDataSet(env).
            map(new Mapper2());

    DataSet<Tuple2<IntWritable, IntWritable>> helloCnts = ds.
            groupBy(0).
            reduceGroup(new HadoopReduceFunction<IntWritable, Text, IntWritable, IntWritable>(
                    new ConfigurableCntReducer(), conf));

    String resultPath = tempFolder.newFile().toURI().toString();
    helloCnts.writeAsText(resultPath);
    env.execute();

    String expected = "(0,0)\n" +
            "(1,0)\n" +
            "(2,1)\n" +
            "(3,1)\n" +
            "(4,1)\n";

    compareResultsByLinesInMemory(expected, resultPath);
}
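
The ConfigurableCntReducer is not shown in the snippet. A hypothetical sketch of what it might look like, assuming it reads the "my.cntPrefix" setting from the JobConf in configure() and counts values per key that start with that prefix:

// Hypothetical sketch of the ConfigurableCntReducer referenced above: picks up
// the prefix from the JobConf and counts matching values per key.
public static class ConfigurableCntReducer
        implements Reducer<IntWritable, Text, IntWritable, IntWritable> {

    private String countPrefix;

    @Override
    public void configure(JobConf conf) {
        // "my.cntPrefix" is set to "Hello" by the test before the job runs.
        this.countPrefix = conf.get("my.cntPrefix");
    }

    @Override
    public void reduce(IntWritable key, Iterator<Text> values,
            OutputCollector<IntWritable, IntWritable> out, Reporter reporter) throws IOException {
        int cnt = 0;
        while (values.hasNext()) {
            if (values.next().toString().startsWith(this.countPrefix)) {
                cnt++;
            }
        }
        out.collect(key, new IntWritable(cnt));
    }

    @Override
    public void close() throws IOException {}
}

The two-argument HadoopReduceFunction constructor passes the JobConf through to the wrapped reducer's configure() method, which is what makes this configuration pattern work.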
Example 3: testConfigurationViaJobConf
import org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction; // import the featured package/class

@Test
public void testConfigurationViaJobConf() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    JobConf conf = new JobConf();
    conf.set("my.cntPrefix", "Hello");

    DataSet<Tuple2<IntWritable, Text>> ds = HadoopTestData.getKVPairDataSet(env).
            map(new Mapper4());

    DataSet<Tuple2<IntWritable, IntWritable>> hellos = ds.
            groupBy(0).
            reduceGroup(new HadoopReduceFunction<IntWritable, Text, IntWritable, IntWritable>(
                    new ConfigurableCntReducer(), conf));

    String resultPath = tempFolder.newFile().toURI().toString();
    hellos.writeAsText(resultPath);
    env.execute();

    // expected result
    String expected = "(0,0)\n" +
            "(1,0)\n" +
            "(2,1)\n" +
            "(3,1)\n" +
            "(4,1)\n";

    compareResultsByLinesInMemory(expected, resultPath);
}
Example 4: testUngroupedHadoopReducer
import org.apache.flink.hadoopcompatibility.mapred.HadoopReduceFunction; // import the featured package/class

@Test
public void testUngroupedHadoopReducer() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple2<IntWritable, Text>> ds = HadoopTestData.getKVPairDataSet(env);

    DataSet<Tuple2<IntWritable, IntWritable>> commentCnts = ds.
            reduceGroup(new HadoopReduceFunction<IntWritable, Text, IntWritable, IntWritable>(new AllCommentCntReducer()));

    String resultPath = tempFolder.newFile().toURI().toString();
    commentCnts.writeAsText(resultPath);
    env.execute();

    String expected = "(42,15)\n";

    compareResultsByLinesInMemory(expected, resultPath);
}
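
Because the DataSet is not grouped here, the wrapped reducer processes the entire input as a single group. The AllCommentCntReducer is not shown; a hypothetical sketch consistent with the expected output "(42,15)\n" would count matching values across the whole input and emit the count under a fixed key:

// Hypothetical sketch of the AllCommentCntReducer referenced above: counts values
// starting with "Comment" over the whole (ungrouped) input and emits the total
// under a fixed key; the key 42 matches the expected output "(42,15)".
public static class AllCommentCntReducer
        implements Reducer<IntWritable, Text, IntWritable, IntWritable> {

    @Override
    public void reduce(IntWritable key, Iterator<Text> values,
            OutputCollector<IntWritable, IntWritable> out, Reporter reporter) throws IOException {
        int commentCnt = 0;
        while (values.hasNext()) {
            if (values.next().toString().startsWith("Comment")) {
                commentCnt++;
            }
        }
        out.collect(new IntWritable(42), new IntWritable(commentCnt));
    }

    @Override
    public void configure(JobConf conf) {}

    @Override
    public void close() throws IOException {}
}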