This page collects typical usage examples of the Java class org.apache.hadoop.mapreduce.MapReduceTestUtil.DataCopyReducer. If you are wondering what DataCopyReducer does, how to use it, or want to see it in context, the curated code samples below may help.
The DataCopyReducer class belongs to the org.apache.hadoop.mapreduce.MapReduceTestUtil package. Two code examples are shown below, ordered by popularity by default. Upvoting the examples you find useful helps the system recommend better Java code samples.
Example 1: testContextStatus
import org.apache.hadoop.mapreduce.MapReduceTestUtil.DataCopyReducer; // import the required package/class
/**
 * Verifies that a task status set via {@code context.setStatus} shows up
 * in the task reports returned for the job.
 * TODO fix testcase
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
@Test
@Ignore
public void testContextStatus()
throws IOException, InterruptedException, ClassNotFoundException {
  Path testDir = new Path(testRootTempDir, "testContextStatus");

  // Phase 1: a single map task and no reducers; the mapper sets a
  // custom status that should appear in the map task report.
  int mapCount = 1;
  Job job = MapReduceTestUtil.createJob(createJobConf(),
      new Path(testDir, "in"), new Path(testDir, "out"), mapCount, 0);
  job.setMapperClass(MyMapper.class);
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  TaskReport[] taskReports = job.getTaskReports(TaskType.MAP);
  assertEquals(mapCount, taskReports.length);
  assertEquals(myStatus, taskReports[0].getState());

  // Phase 2: one map plus one reduce task, leaving the task status at
  // the framework defaults.
  int reduceCount = 1;
  job = MapReduceTestUtil.createJob(createJobConf(),
      new Path(testDir, "in"), new Path(testDir, "out"), mapCount, reduceCount);
  job.setMapperClass(DataCopyMapper.class);
  job.setReducerClass(DataCopyReducer.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  // Keep attempt counts minimal so any task failure surfaces immediately.
  job.setMaxMapAttempts(1);
  job.setMaxReduceAttempts(0);
  // Run the job and block until it finishes.
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  // TODO fix testcase
  // Report checks disabled for now to keep builds running.
  /*
  taskReports = job.getTaskReports(TaskType.MAP);
  assertEquals(mapCount, taskReports.length);
  assertEquals("map > sort", taskReports[0].getState());
  // check reduce task reports
  taskReports = job.getTaskReports(TaskType.REDUCE);
  assertEquals(reduceCount, taskReports.length);
  assertEquals("reduce > reduce", taskReports[0].getState());
  */
}
Example 2: testContextStatus
import org.apache.hadoop.mapreduce.MapReduceTestUtil.DataCopyReducer; // import the required package/class
/**
 * Tests the {@code context.setStatus} method: checks both a user-supplied
 * status string and the framework's default per-phase status strings.
 *
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
@Test
public void testContextStatus()
throws IOException, InterruptedException, ClassNotFoundException {
  Path root = new Path(testRootTempDir, "testContextStatus");
  final int nMaps = 1;

  // Run a map-only job (one mapper, zero reducers) whose mapper sets a
  // custom status, then check the map report carries that status.
  Job job = MapReduceTestUtil.createJob(createJobConf(),
      new Path(root, "in"), new Path(root, "out"), nMaps, 0);
  job.setMapperClass(MyMapper.class);
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  TaskReport[] rpts = job.getTaskReports(TaskType.MAP);
  assertEquals(nMaps, rpts.length);
  assertEquals(myStatus, rpts[0].getState());

  // Now run one mapper plus one reducer without setting any custom
  // status, so the reports show the framework defaults.
  final int nReduces = 1;
  job = MapReduceTestUtil.createJob(createJobConf(),
      new Path(root, "in"), new Path(root, "out"), nMaps, nReduces);
  job.setMapperClass(DataCopyMapper.class);
  job.setReducerClass(DataCopyReducer.class);
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(Text.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  // Fail early: no retries, so a broken task aborts the test quickly.
  job.setMaxMapAttempts(1);
  job.setMaxReduceAttempts(0);
  // Run the job and block until completion.
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());

  // Default map status reflects the map task's phases.
  rpts = job.getTaskReports(TaskType.MAP);
  assertEquals(nMaps, rpts.length);
  assertEquals("map > sort", rpts[0].getState());
  // Default reduce status reflects the reduce task's phases.
  rpts = job.getTaskReports(TaskType.REDUCE);
  assertEquals(nReduces, rpts.length);
  assertEquals("reduce > reduce", rpts[0].getState());
}