Java MapReduceTestUtil Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mapreduce.MapReduceTestUtil. If you are wondering what MapReduceTestUtil is for or how to use it in practice, the curated examples below should help.


The MapReduceTestUtil class belongs to the org.apache.hadoop.mapreduce package. The sections below present 14 code examples for the class, ordered by popularity.
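
Before diving into the examples, here is a minimal, self-contained sketch of the helpers that recur below (createJob and readOutput; createFailJob, createCopyJob, and createDummyMapTaskAttemptContext appear later in the same spirit). MapReduceTestUtil ships with Hadoop's MapReduce test sources, so it is typically only available on a test classpath; the paths and input string here are made-up illustrations, not part of the original examples.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;

public class MapReduceTestUtilSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path inDir = new Path("/tmp/mrtu/input");    // hypothetical input dir
    Path outDir = new Path("/tmp/mrtu/output");  // hypothetical output dir

    // createJob builds a trivial pass-through job over the given input string,
    // with the requested numbers of map and reduce tasks (here 1 and 1).
    Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, "a\nb\n");
    job.waitForCompletion(true);

    // readOutput concatenates the job's text output files into one String,
    // which is convenient for assertions in tests.
    System.out.println(MapReduceTestUtil.readOutput(outDir, conf));
  }
}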

Example 1: launchWordCount

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
public static TestResult launchWordCount(JobConf conf,
                                         Path inDir,
                                         Path outDir,
                                         String input,
                                         int numMaps,
                                         int numReduces,
                                         String sysDir) throws IOException {
  FileSystem inFs = inDir.getFileSystem(conf);
  FileSystem outFs = outDir.getFileSystem(conf);
  outFs.delete(outDir, true);
  if (!inFs.mkdirs(inDir)) {
    throw new IOException("Mkdirs failed to create " + inDir.toString());
  }
  {
    DataOutputStream file = inFs.create(new Path(inDir, "part-0"));
    file.writeBytes(input);
    file.close();
  }
  conf.setJobName("wordcount");
  conf.setInputFormat(TextInputFormat.class);
  
  // the keys are words (strings)
  conf.setOutputKeyClass(Text.class);
  // the values are counts (ints)
  conf.setOutputValueClass(IntWritable.class);
  
  conf.setMapperClass(WordCount.MapClass.class);        
  conf.setCombinerClass(WordCount.Reduce.class);
  conf.setReducerClass(WordCount.Reduce.class);
  FileInputFormat.setInputPaths(conf, inDir);
  FileOutputFormat.setOutputPath(conf, outDir);
  conf.setNumMapTasks(numMaps);
  conf.setNumReduceTasks(numReduces);
  conf.set(JTConfig.JT_SYSTEM_DIR, "/tmp/subru/mapred/system");
  JobClient jobClient = new JobClient(conf);
  RunningJob job = jobClient.runJob(conf);
  // Checking that the Job Client system dir is not used
  assertFalse(FileSystem.get(conf).exists(
    new Path(conf.get(JTConfig.JT_SYSTEM_DIR)))); 
  // Check that the JobTracker system dir is propagated to the client
  assertFalse(sysDir.contains("/tmp/subru/mapred/system"));
  assertTrue(sysDir.contains("custom"));
  return new TestResult(job, MapReduceTestUtil.readOutput(outDir, conf));
}
 
Developer: naver; Project: hadoop; Lines: 45; Source: TestJobSysDirWithDFS.java

Example 2: testSuccessfulJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));

  // check that the files in the exclude set do not exist
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Developer: naver; Project: hadoop; Lines: 19; Source: TestJobOutputCommitter.java

Example 3: testFailedJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
private void testFailedJob(String fileName,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createFailJob(conf, outDir, inDir);
  job.setOutputFormatClass(output);

  assertFalse("Job did not fail!", job.waitForCompletion(true));

  if (fileName != null) {
    Path testFile = new Path(outDir, fileName);
    assertTrue("File " + testFile + " missing for failed job " + job.getJobID(),
        fs.exists(testFile));
  }

  // check that the files in the exclude set do not exist
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for failed job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Developer: naver; Project: hadoop; Lines: 22; Source: TestJobOutputCommitter.java

Example 4: testChainFail

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
/**
 * Tests one of the chained mappers throwing an exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Developer: naver; Project: hadoop; Lines: 25; Source: TestChainErrors.java
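
FailMap is referenced above but not defined on this page. As a hedged sketch (an assumption about the original, inferred from the addMapper call and the test's intent), a mapper that fails the task could look like this:

// Sketch of a FailMap-style mapper (assumption, not the original source):
// throwing from map() fails the task, which is what testChainFail expects.
public static class FailMap
    extends Mapper<LongWritable, Text, IntWritable, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    throw new IOException("deliberate failure for the chain test");
  }
}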

Example 5: testReducerFail

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
/**
 * Tests the chained Reducer throwing an exception.
 * 
 * @throws Exception
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Developer: naver; Project: hadoop; Lines: 25; Source: TestChainErrors.java

Example 6: testChainMapNoOuptut

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
/**
 * Tests one of the chained mappers consuming its input and emitting no output.
 * 
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Developer: naver; Project: hadoop; Lines: 24; Source: TestChainErrors.java
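
ConsumeMap is likewise not defined on this page. A plausible sketch (an assumption; the generic types are guessed from TextInputFormat's LongWritable keys) is a mapper that reads every record and writes nothing, which is why expectedOutput above is the empty string:

// Sketch of a ConsumeMap-style mapper (assumption, not the original source):
// it consumes each record and never calls context.write(), so nothing
// reaches the rest of the chain or the job output.
public static class ConsumeMap
    extends Mapper<LongWritable, Text, LongWritable, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // intentionally empty: the record is swallowed here
  }
}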

Example 7: testChainReduceNoOuptut

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
/**
 * Tests the chained reducer consuming its input and emitting no output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Developer: naver; Project: hadoop; Lines: 27; Source: TestChainErrors.java

Example 8: testNoChain

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Developer: naver; Project: hadoop; Lines: 23; Source: TestSingleElementChain.java

Example 9: readSplit

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
private static List<Text> readSplit(KeyValueTextInputFormat format, 
    InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  RecordReader<Text, Text> reader = format.createRecordReader(split, 
    MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
  MapContext<Text, Text, Text, Text> mcontext = 
    new MapContextImpl<Text, Text, Text, Text>(conf, 
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(), 
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();
  return result;
}
 
Developer: naver; Project: hadoop; Lines: 21; Source: TestMRKeyValueTextInputFormat.java
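
A hypothetical invocation of the helper above (the input path is illustrative, and the enclosing test is assumed to have readSplit in scope):

// Hedged usage sketch: generate splits for a configured job, then read each
// split back through readSplit() and assert on the returned values.
Job job = Job.getInstance(new Configuration());
FileInputFormat.setInputPaths(job, new Path("/tmp/kv-input")); // hypothetical path
KeyValueTextInputFormat format = new KeyValueTextInputFormat();
for (InputSplit split : format.getSplits(job)) {
  List<Text> values = readSplit(format, split, job);
  // assertions on 'values' would go here
}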

Example 10: readSplit

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
private static List<Text> readSplit(InputFormat<LongWritable,Text> format,
  InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  RecordReader<LongWritable, Text> reader = format.createRecordReader(split,
    MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
  MapContext<LongWritable,Text,LongWritable,Text> mcontext =
    new MapContextImpl<LongWritable,Text,LongWritable,Text>(conf,
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(),
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();
  return result;
}
 
Developer: naver; Project: hadoop; Lines: 20; Source: TestCombineTextInputFormat.java

Example 11: testJobControlWithFailJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
public void testJobControlWithFailJob() throws Exception {
  LOG.info("Starting testJobControlWithFailJob");
  Configuration conf = createJobConf();

  cleanupData(conf);
  
  // create a Fail job
  Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
  
  // create job dependencies
  JobControl theControl = createDependencies(conf, job1);
  
  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  
  assertTrue(cjob1.getJobState() == ControlledJob.State.FAILED);
  assertTrue(cjob2.getJobState() == ControlledJob.State.SUCCESS);
  assertTrue(cjob3.getJobState() == ControlledJob.State.DEPENDENT_FAILED);
  assertTrue(cjob4.getJobState() == ControlledJob.State.DEPENDENT_FAILED);

  theControl.stop();
}
 
Developer: naver; Project: hadoop; Lines: 23; Source: TestMapReduceJobControl.java
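
createDependencies, waitTillAllFinished, and the cjob1..cjob4 fields are helpers of the test class that this page does not show. As a hedged sketch consistent with the assertions above (cjob3 depending on cjob1 and cjob2, cjob4 on cjob3), the wiring could look roughly like this; the outdir_* and indir fields are assumed to exist in the test:

// Sketch of a createDependencies-style helper (assumption, not the original
// source). The dependency graph matches the DEPENDENT_FAILED assertions:
// cjob3 needs cjob1 and cjob2; cjob4 needs cjob3.
private JobControl createDependencies(Configuration conf, Job job1) throws Exception {
  cjob1 = new ControlledJob(job1, null);
  Job job2 = MapReduceTestUtil.createCopyJob(conf, outdir_2, indir);
  cjob2 = new ControlledJob(job2, null);
  Job job3 = MapReduceTestUtil.createCopyJob(conf, outdir_3, outdir_1, outdir_2);
  cjob3 = new ControlledJob(job3, Arrays.asList(cjob1, cjob2));
  Job job4 = MapReduceTestUtil.createCopyJob(conf, outdir_4, outdir_3);
  cjob4 = new ControlledJob(job4, Arrays.asList(cjob3));

  JobControl theControl = new JobControl("Test");
  theControl.addJob(cjob1);
  theControl.addJob(cjob2);
  theControl.addJob(cjob3);
  theControl.addJob(cjob4);
  new Thread(theControl).start(); // JobControl implements Runnable
  return theControl;
}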

Example 12: testControlledJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
@Test(timeout = 30000)
public void testControlledJob() throws Exception {
  LOG.info("Starting testControlledJob");

  Configuration conf = createJobConf();
  cleanupData(conf);
  Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
  JobControl theControl = createDependencies(conf, job1);
  while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      break;
    }
  }
  Assert.assertNotNull(cjob1.getMapredJobId());

  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  assertEquals("Some jobs failed", 0, theControl.getFailedJobList().size());
  theControl.stop();
}
 
Developer: naver; Project: hadoop; Lines: 23; Source: TestMapReduceJobControl.java

Example 13: testSplit

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
/**
 * Create a new reader from the split, and match the edits against the passed columns.
 */
private void testSplit(InputSplit split, byte[]... columns) throws Exception {
  final WALRecordReader reader = getReader();
  reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));

  for (byte[] column : columns) {
    assertTrue(reader.nextKeyValue());
    Cell cell = reader.getCurrentValue().getCells().get(0);
    if (!Bytes.equals(column, cell.getQualifier())) {
      assertTrue("expected [" + Bytes.toString(column) + "], actual ["
          + Bytes.toString(cell.getQualifier()) + "]", false);
    }
  }
  assertFalse(reader.nextKeyValue());
  reader.close();
}
 
Developer: fengchen8086; Project: ditb; Lines: 19; Source: TestWALRecordReader.java

Example 14: testJobControlWithFailJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; // import the required package/class
@Test
public void testJobControlWithFailJob() throws Exception {
  LOG.info("Starting testJobControlWithFailJob");
  Configuration conf = createJobConf();

  cleanupData(conf);
  
  // create a Fail job
  Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
  
  // create job dependencies
  JobControl theControl = createDependencies(conf, job1);
  
  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  
  assertTrue(cjob1.getJobState() == ControlledJob.State.FAILED);
  assertTrue(cjob2.getJobState() == ControlledJob.State.SUCCESS);
  assertTrue(cjob3.getJobState() == ControlledJob.State.DEPENDENT_FAILED);
  assertTrue(cjob4.getJobState() == ControlledJob.State.DEPENDENT_FAILED);

  theControl.stop();
}
 
Developer: hopshadoop; Project: hops; Lines: 24; Source: TestMapReduceJobControl.java


Note: The org.apache.hadoop.mapreduce.MapReduceTestUtil examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright remains with the original authors. Refer to each project's License before distributing or using the code, and do not reproduce this article without permission.