当前位置: 首页>>代码示例>>Java>>正文


Java MapReduceTestUtil.createJob方法代码示例

本文整理汇总了Java中org.apache.hadoop.mapreduce.MapReduceTestUtil.createJob方法的典型用法代码示例。如果您正苦于以下问题:Java MapReduceTestUtil.createJob方法的具体用法?Java MapReduceTestUtil.createJob怎么用?Java MapReduceTestUtil.createJob使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.mapreduce.MapReduceTestUtil的用法示例。


在下文中一共展示了MapReduceTestUtil.createJob方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: testSuccessfulJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Runs a job that is expected to succeed and verifies the committer's
 * output files.
 *
 * @param filename name of the marker file that must exist in the output dir
 * @param output   output format class to use for the job
 * @param exclude  file names that must NOT appear in the output dir
 * @throws Exception if job setup or the file-system checks fail
 */
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path jobOutDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, jobOutDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path doneFile = new Path(jobOutDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(),
      fs.exists(doneFile));

  // none of the excluded marker files may exist after a successful run
  for (String name : exclude) {
    Path excluded = new Path(jobOutDir, name);
    assertFalse("File " + excluded + " should not be present for successful job "
        + job.getJobID(), fs.exists(excluded));
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:19,代码来源:TestJobOutputCommitter.java

示例2: testChainFail

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests one of the mappers throwing exception.
 *
 * The chain is Mapper -> FailMap -> Mapper; FailMap throws, so the whole
 * job must complete unsuccessfully.
 *
 * @throws Exception on job setup failure
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  // identity mapper feeding into the failing mapper
  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // this mapper throws, which should fail the job
  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  // assertFalse is clearer than assertTrue on a negated condition
  assertFalse("Job Not failed", job.isSuccessful());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:25,代码来源:TestChainErrors.java

示例3: testReducerFail

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests Reducer throwing exception.
 *
 * The chain is Mapper -> FailReduce -> Mapper; the reducer throws, so
 * the job must complete unsuccessfully.
 *
 * @throws Exception on job setup failure
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // this reducer throws, which should fail the job
  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  // assertFalse is clearer than assertTrue on a negated condition
  assertFalse("Job Not failed", job.isSuccessful());
}
 
开发者ID:naver,项目名称:hadoop,代码行数:25,代码来源:TestChainErrors.java

示例4: testChainMapNoOuptut

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests one of the maps consuming output.
 *
 * ConsumeMap swallows every record, so the job must succeed with an
 * empty output.
 *
 * @throws Exception on job setup failure
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expected = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  // the first mapper consumes all records and emits nothing
  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  String actual = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesn't match", expected, actual);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:24,代码来源:TestChainErrors.java

示例5: testChainReduceNoOuptut

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests reducer consuming output.
 *
 * ConsumeReduce swallows every record, so the job must succeed with an
 * empty output.
 *
 * @throws Exception on job setup failure
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expected = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // the reducer consumes all records and emits nothing
  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  String actual = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesn't match", expected, actual);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:27,代码来源:TestChainErrors.java

示例6: testNoChain

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Runs a single-element chain (one token-counting mapper, one summing
 * reducer) and verifies the word-count style output.
 *
 * @throws Exception on job setup failure
 */
public void testNoChain() throws Exception {
  Path chainIn = new Path(localPathRoot, "testing/chain/input");
  Path chainOut = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expected = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, chainIn, chainOut, 1, 1, input);
  job.setJobName("chain");

  // single mapper stage: tokenize and count
  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  // single reducer stage: sum the counts per token
  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  String actual = MapReduceTestUtil.readOutput(chainOut, conf);
  assertEquals("Outputs doesn't match", expected, actual);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:23,代码来源:TestSingleElementChain.java

示例7: testSuccessfulJob

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Runs a job that is expected to succeed and verifies the committer's
 * output files.
 *
 * @param filename marker file that must exist in the output directory
 * @param output   output format class for the job
 * @param exclude  file names that must be absent from the output directory
 * @throws Exception if job setup or the file-system checks fail
 */
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path jobOutDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, jobOutDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path marker = new Path(jobOutDir, filename);
  assertTrue("Done file missing for job ", fs.exists(marker));

  // verify that none of the excluded files were produced
  for (String name : exclude) {
    Path absent = new Path(jobOutDir, name);
    assertFalse("File " + absent + " should not be present for successful job ",
        fs.exists(absent));
  }
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:20,代码来源:TestJobOutputCommitter.java

示例8: testChainReduceNoOuptut

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests reducer consuming output.
 *
 * ConsumeReduce swallows every record, so the job must succeed with an
 * empty output.
 *
 * @throws Exception on job setup failure
 */
@Test
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expected = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // the reducer consumes all records and emits nothing
  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  String actual = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesn't match", expected, actual);
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:28,代码来源:TestChainErrors.java

示例9: testChainFail

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Tests one of the mappers throwing exception.
 *
 * The chain is Mapper -> FailMap -> Mapper; FailMap throws, so the whole
 * job must complete unsuccessfully.
 *
 * @throws Exception on job setup failure
 */
@Test
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  // identity mapper feeding into the failing mapper
  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // this mapper throws, which should fail the job
  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  // assertFalse is clearer than assertTrue on a negated condition
  assertFalse("Job Not failed", job.isSuccessful());
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:26,代码来源:TestChainErrors.java

示例10: launch

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Builds and runs a field-selection job end to end and compares the
 * produced output with the expected data.
 *
 * @throws Exception if the job or file-system operations fail
 */
public static void launch() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  int lineCount = 10;

  Path outDir = new Path(testDir, "output_for_field_selection_test");
  Path inDir = new Path(testDir, "input_for_field_selection_test");

  // the helper fills both buffers with matching input / expected data
  // (StringBuffer is required by constructInputOutputData's signature)
  StringBuffer inputData = new StringBuffer();
  StringBuffer expectedOutput = new StringBuffer();
  constructInputOutputData(inputData, expectedOutput, lineCount);

  // field separator and key/value selection specs for map and reduce
  conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
  conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
  conf.set(
    FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
    1, 1, inputData.toString());
  job.setMapperClass(FieldSelectionMapper.class);
  job.setReducerClass(FieldSelectionReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  job.setNumReduceTasks(1);

  job.waitForCompletion(true);
  assertTrue("Job Failed!", job.isSuccessful());

  // compare the job's output against the precomputed answer key
  String outdata = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesnt match.", expectedOutput.toString(), outdata);
  fs.delete(outDir, true);
}
 
开发者ID:naver,项目名称:hadoop,代码行数:37,代码来源:TestMRFieldSelection.java

示例11: run

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Runs a multithreaded-mapper job, optionally configured to raise an
 * IOException and/or a RuntimeException inside the mapper, and checks
 * that the job fails exactly when an exception was requested.
 *
 * @param ioEx whether the mapper should throw an IOException
 * @param rtEx whether the mapper should throw a RuntimeException
 * @throws Exception on job setup failure
 */
private void run(boolean ioEx, boolean rtEx) throws Exception {
  String localPathRoot = System.getProperty("test.build.data", "/tmp");
  Path mtInDir = new Path(localPathRoot, "testing/mt/input");
  Path mtOutDir = new Path(localPathRoot, "testing/mt/output");

  Configuration conf = createJobConf();
  // flags read by the test mapper to decide which exception to throw
  if (ioEx) {
    conf.setBoolean("multithreaded.ioException", true);
  }
  if (rtEx) {
    conf.setBoolean("multithreaded.runtimeException", true);
  }

  Job job = MapReduceTestUtil.createJob(conf, mtInDir, mtOutDir, 1, 1);
  job.setJobName("mt");

  // run IDMap on two concurrent threads via MultithreadedMapper
  job.setMapperClass(MultithreadedMapper.class);
  MultithreadedMapper.setMapperClass(job, IDMap.class);
  MultithreadedMapper.setNumberOfThreads(job, 2);
  job.setReducerClass(Reducer.class);

  job.waitForCompletion(true);

  // the job may only succeed when no exception was injected,
  // and may only fail when at least one was
  boolean exceptionInjected = ioEx || rtEx;
  if (job.isSuccessful()) {
    assertFalse(exceptionInjected);
  } else {
    assertTrue(exceptionInjected);
  }
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:32,代码来源:TestMultithreadedMapper.java

示例12: testComparator

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * Runs a job sorted with KeyFieldBasedComparator using the given key spec
 * and verifies the relative order of the two known input lines in the
 * output.
 *
 * @param keySpec comparator options to sort the map output keys with
 * @param expect  1 if line1 must sort first, 2 if line2 must sort first
 * @throws Exception if the job or the output validation fails
 */
private void testComparator(String keySpec, int expect) 
    throws Exception {
  String root = System.getProperty("test.build.data", "/tmp");
  Path inDir = new Path(root, "test_cmp/in");
  Path outDir = new Path(root, "test_cmp/out");
  
  conf.set("mapreduce.partition.keycomparator.options", keySpec);
  conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
  conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1,
              line1 +"\n" + line2 + "\n"); 
  job.setMapperClass(InverseMapper.class);
  job.setReducerClass(Reducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  job.setSortComparatorClass(KeyFieldBasedComparator.class);
  job.setPartitionerClass(KeyFieldBasedPartitioner.class);

  job.waitForCompletion(true);
  assertTrue(job.isSuccessful());

  // validate output
  Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(outDir,
      new Utils.OutputFileUtils.OutputFilesFilter()));
  if (outputFiles.length > 0) {
    // try-with-resources: the original leaked the reader (and underlying
    // stream) whenever an assertion failed before reader.close()
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(getFileSystem().open(outputFiles[0])))) {
      String line = reader.readLine();
      //make sure we get what we expect as the first line, and also
      //that we have two lines (both the lines must end up in the same
      //reducer since the partitioner takes the same key spec for all
      //lines
      if (expect == 1) {
        assertTrue(line.startsWith(line1));
      } else if (expect == 2) {
        assertTrue(line.startsWith(line2));
      }
      line = reader.readLine();
      if (expect == 1) {
        assertTrue(line.startsWith(line2));
      } else if (expect == 2) {
        assertTrue(line.startsWith(line1));
      }
    }
  }
}
 
开发者ID:naver,项目名称:hadoop,代码行数:48,代码来源:TestMRKeyFieldBasedComparator.java

示例13: testChain

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * End-to-end test of a full chain: three chained mappers (AMap, BMap,
 * CMap), a chained reducer (RReduce), and three post-reduce mappers
 * (DMap, EMap, FMap). The expected output and the per-stage flag names
 * show each stage appending its letter to the record value; flags
 * written into the shared conf verify that setup, per-record processing,
 * and cleanup ran for every stage.
 */
public void testChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "1\n2\n";
  // each stage A,B,C,R,D,E,F appends its tag to every value, in order
  String expectedOutput = "0\t1ABCRDEF\n2\t2ABCRDEF\n";

  Configuration conf = createJobConf();
  cleanFlags(conf);
  // base value of "a"; per-stage configs below override it where set
  conf.set("a", "X");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  // AMap gets a private config with a=A
  Configuration mapAConf = new Configuration(false);
  mapAConf.set("a", "A");
  ChainMapper.addMapper(job, AMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapAConf);

  ChainMapper.addMapper(job, BMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, CMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // reducer config sets a=C (the value is "C", not "R", in this test)
  Configuration reduceConf = new Configuration(false);
  reduceConf.set("a", "C");
  ChainReducer.setReducer(job, RReduce.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, reduceConf);

  ChainReducer.addMapper(job, DMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // EMap gets a private config with a=E
  Configuration mapEConf = new Configuration(false);
  mapEConf.set("a", "E");
  ChainReducer.addMapper(job, EMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapEConf);

  ChainReducer.addMapper(job, FMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());

  // every stage must have recorded its setup flag
  String str = "flag not set";
  assertTrue(str, getFlag(conf, "map.setup.A"));
  assertTrue(str, getFlag(conf, "map.setup.B"));
  assertTrue(str, getFlag(conf, "map.setup.C"));
  assertTrue(str, getFlag(conf, "reduce.setup.R"));
  assertTrue(str, getFlag(conf, "map.setup.D"));
  assertTrue(str, getFlag(conf, "map.setup.E"));
  assertTrue(str, getFlag(conf, "map.setup.F"));

  // per-record flags: the suffix shows the value as seen by that stage,
  // i.e. the tags accumulated from all earlier stages
  assertTrue(str, getFlag(conf, "map.A.value.1"));
  assertTrue(str, getFlag(conf, "map.A.value.2"));
  assertTrue(str, getFlag(conf, "map.B.value.1A"));
  assertTrue(str, getFlag(conf, "map.B.value.2A"));
  assertTrue(str, getFlag(conf, "map.C.value.1AB"));
  assertTrue(str, getFlag(conf, "map.C.value.2AB"));
  assertTrue(str, getFlag(conf, "reduce.R.value.1ABC"));
  assertTrue(str, getFlag(conf, "reduce.R.value.2ABC"));
  assertTrue(str, getFlag(conf, "map.D.value.1ABCR"));
  assertTrue(str, getFlag(conf, "map.D.value.2ABCR"));
  assertTrue(str, getFlag(conf, "map.E.value.1ABCRD"));
  assertTrue(str, getFlag(conf, "map.E.value.2ABCRD"));
  assertTrue(str, getFlag(conf, "map.F.value.1ABCRDE"));
  assertTrue(str, getFlag(conf, "map.F.value.2ABCRDE"));

  // every stage must have recorded its cleanup flag
  assertTrue(getFlag(conf, "map.cleanup.A"));
  assertTrue(getFlag(conf, "map.cleanup.B"));
  assertTrue(getFlag(conf, "map.cleanup.C"));
  assertTrue(getFlag(conf, "reduce.cleanup.R"));
  assertTrue(getFlag(conf, "map.cleanup.D"));
  assertTrue(getFlag(conf, "map.cleanup.E"));
  assertTrue(getFlag(conf, "map.cleanup.F"));

  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
开发者ID:naver,项目名称:hadoop,代码行数:79,代码来源:TestMapReduceChain.java

示例14: testChain

import org.apache.hadoop.mapreduce.MapReduceTestUtil; //导入方法依赖的package包/类
/**
 * End-to-end test of a full chain: three chained mappers (AMap, BMap,
 * CMap), a chained reducer (RReduce), and three post-reduce mappers
 * (DMap, EMap, FMap). The expected output and the per-stage flag names
 * show each stage appending its letter to the record value; flags
 * written into the shared conf verify that setup, per-record processing,
 * and cleanup ran for every stage.
 */
@Test
public void testChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "1\n2\n";
  // each stage A,B,C,R,D,E,F appends its tag to every value, in order
  String expectedOutput = "0\t1ABCRDEF\n2\t2ABCRDEF\n";

  Configuration conf = createJobConf();
  cleanFlags(conf);
  // base value of "a"; per-stage configs below override it where set
  conf.set("a", "X");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  // AMap gets a private config with a=A
  Configuration mapAConf = new Configuration(false);
  mapAConf.set("a", "A");
  ChainMapper.addMapper(job, AMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapAConf);

  ChainMapper.addMapper(job, BMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, CMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // reducer config sets a=C (the value is "C", not "R", in this test)
  Configuration reduceConf = new Configuration(false);
  reduceConf.set("a", "C");
  ChainReducer.setReducer(job, RReduce.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, reduceConf);

  ChainReducer.addMapper(job, DMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  // EMap gets a private config with a=E
  Configuration mapEConf = new Configuration(false);
  mapEConf.set("a", "E");
  ChainReducer.addMapper(job, EMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapEConf);

  ChainReducer.addMapper(job, FMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());

  // every stage must have recorded its setup flag
  String str = "flag not set";
  assertTrue(str, getFlag(conf, "map.setup.A"));
  assertTrue(str, getFlag(conf, "map.setup.B"));
  assertTrue(str, getFlag(conf, "map.setup.C"));
  assertTrue(str, getFlag(conf, "reduce.setup.R"));
  assertTrue(str, getFlag(conf, "map.setup.D"));
  assertTrue(str, getFlag(conf, "map.setup.E"));
  assertTrue(str, getFlag(conf, "map.setup.F"));

  // per-record flags: the suffix shows the value as seen by that stage,
  // i.e. the tags accumulated from all earlier stages
  assertTrue(str, getFlag(conf, "map.A.value.1"));
  assertTrue(str, getFlag(conf, "map.A.value.2"));
  assertTrue(str, getFlag(conf, "map.B.value.1A"));
  assertTrue(str, getFlag(conf, "map.B.value.2A"));
  assertTrue(str, getFlag(conf, "map.C.value.1AB"));
  assertTrue(str, getFlag(conf, "map.C.value.2AB"));
  assertTrue(str, getFlag(conf, "reduce.R.value.1ABC"));
  assertTrue(str, getFlag(conf, "reduce.R.value.2ABC"));
  assertTrue(str, getFlag(conf, "map.D.value.1ABCR"));
  assertTrue(str, getFlag(conf, "map.D.value.2ABCR"));
  assertTrue(str, getFlag(conf, "map.E.value.1ABCRD"));
  assertTrue(str, getFlag(conf, "map.E.value.2ABCRD"));
  assertTrue(str, getFlag(conf, "map.F.value.1ABCRDE"));
  assertTrue(str, getFlag(conf, "map.F.value.2ABCRDE"));

  // every stage must have recorded its cleanup flag
  assertTrue(getFlag(conf, "map.cleanup.A"));
  assertTrue(getFlag(conf, "map.cleanup.B"));
  assertTrue(getFlag(conf, "map.cleanup.C"));
  assertTrue(getFlag(conf, "reduce.cleanup.R"));
  assertTrue(getFlag(conf, "map.cleanup.D"));
  assertTrue(getFlag(conf, "map.cleanup.E"));
  assertTrue(getFlag(conf, "map.cleanup.F"));

  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:80,代码来源:TestMapReduceChain.java


注:本文中的org.apache.hadoop.mapreduce.MapReduceTestUtil.createJob方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。