

Java MapDriver.run Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.mrunit.mapreduce.MapDriver.run. If you are wondering exactly how to use MapDriver.run, how to call it, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.mrunit.mapreduce.MapDriver.


Seven code examples of the MapDriver.run method are shown below, sorted by popularity by default.
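Before the examples, here is a minimal, self-contained sketch of the typical MapDriver.run workflow: build a driver around a Mapper, feed it input pairs, call run(), and inspect the returned key/value pairs. Unlike runTest(), which asserts the actual output against expectations registered via withOutput(), run() simply hands the output list back to the caller. The EchoMapper class below is hypothetical, written only for illustration.

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.types.Pair;
import org.junit.Test;

public class MapDriverRunSketch {

  /** Hypothetical mapper that writes each input pair back out unchanged. */
  public static class EchoMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      context.write(key, value);
    }
  }

  @Test
  public void runReturnsMapperOutput() throws Exception {
    MapDriver<LongWritable, Text, LongWritable, Text> mapDriver =
        MapDriver.newMapDriver(new EchoMapper());
    mapDriver.withInput(new LongWritable(0L), new Text("hello"));

    // run() executes the mapper and returns the raw output pairs,
    // leaving all assertions to the caller.
    List<Pair<LongWritable, Text>> output = mapDriver.run();

    assertEquals(1, output.size());
    assertEquals("hello", output.get(0).getSecond().toString());
  }
}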

Example 1: testMapper

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
@Test
public void testMapper() throws Exception {
  MorphlineMapper mapper = new MorphlineMapper();
  MapDriver<LongWritable, Text, Text, SolrInputDocumentWritable> mapDriver = MapDriver.newMapDriver(mapper);

  Configuration config = mapDriver.getConfiguration();
  setupHadoopConfig(config);

  mapDriver.withInput(new LongWritable(0L), new Text("hdfs://localhost/" + DOCUMENTS_DIR + "/sample-statuses-20120906-141433.avro"));

  SolrInputDocument sid = new SolrInputDocument();
  sid.addField("id", "uniqueid1");
  sid.addField("user_name", "user1");
  sid.addField("text", "content of record one");
  SolrInputDocumentWritable sidw = new SolrInputDocumentWritable(sid);

  mapDriver
    .withCacheArchive(solrHomeZip.getAbsolutePath())
    .withOutput(new Text("0"), sidw);
  //mapDriver.runTest();
  List<Pair<Text, SolrInputDocumentWritable>> result = mapDriver.run();
  for (Pair<Text, SolrInputDocumentWritable> p: result) {
    System.out.println(p.getFirst());
    System.out.println(p.getSecond());
  }
}
 
Developer: europeana, Project: search, Lines: 27, Source: MorphlineMapperTest.java
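The commented-out mapDriver.runTest() above points at the alternative style: runTest() would compare the mapper's actual output against the pair registered via withOutput(new Text("0"), sidw) and fail the test on any mismatch, while run() returns the output for manual inspection (here, printing). A sketch of the assertion-based variant, assuming the same driver setup as in the example:

  // runTest() executes the mapper and verifies its output against the
  // expectations registered with withOutput(); no output list is returned.
  mapDriver
    .withCacheArchive(solrHomeZip.getAbsolutePath())
    .withOutput(new Text("0"), sidw)
    .runTest();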

Example 2: verifyMapReduce

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
public static void verifyMapReduce(SmartMapper mapper, SmartReducer reducer, Object key, Object input)
    throws Exception
{
  MapDriver mapDriver = new MapDriver();
  mapDriver.setMapper(mapper);
  MapReduceDriver mapReduceDriver = new MapReduceDriver();
  mapReduceDriver.setMapper(mapper);
  Object writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  Object writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapDriver.withInput(writableKey, writableValue);
  List results = mapDriver.run();
  Collections.sort(results, PairComparer.INSTANCE);
  mapReduceDriver = new MapReduceDriver<LongWritable, Text, Text, LongWritable, Text, LongWritable>();
  writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapReduceDriver.withInput(writableKey, writableValue);
  mapReduceDriver.setMapper(mapper);
  mapReduceDriver.setReducer(reducer);
  List finalResults = mapReduceDriver.run();
  String text = String.format("[%s]\n\n -> maps via %s to -> \n\n%s\n\n -> reduces via %s to -> \n\n%s", input,
      mapper.getClass().getSimpleName(), ArrayUtils.toString(results, Echo.INSTANCE),
      reducer.getClass().getSimpleName(), ArrayUtils.toString(finalResults, Echo.INSTANCE));
  Approvals.verify(text);
}
 
Developer: approvals, Project: ApprovalTests.Java, Lines: 25, Source: HadoopApprovals.java
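As a usage sketch: the caller supplies a mapper/reducer pair plus one raw key and value; the helper wraps them into Writables, runs a map-only pass and a full map-reduce pass, and submits the formatted transcript to Approvals.verify() for approval testing. WordCountMapper and WordCountReducer below are hypothetical SmartMapper/SmartReducer implementations, named only for illustration:

  // Hypothetical SmartMapper/SmartReducer implementations.
  SmartMapper mapper = new WordCountMapper();
  SmartReducer reducer = new WordCountReducer();
  // Verifies both the intermediate map output and the final reduce output
  // against the previously approved transcript.
  HadoopApprovals.verifyMapReduce(mapper, reducer, 0L, "the quick brown fox");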

Example 3: generateAndSerializeStarTree

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
private void generateAndSerializeStarTree() throws IOException
{
  List<Pair<BytesWritable, BytesWritable>> inputRecords = new ArrayList<Pair<BytesWritable, BytesWritable>>();
  String[] combination1 = {"A1", "B1", "C1"};
  DimensionKey dimKey = new DimensionKey(combination1);
  MetricTimeSeries timeSeries = TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  Pair<BytesWritable, BytesWritable> record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination2 = {"A1", "B1", "C2"};
  dimKey = new DimensionKey(combination2);
  timeSeries = TestStarTreeBootstrapPhase2.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination3 = {"A1", "B1", "C3"};
  dimKey = new DimensionKey(combination3);
  timeSeries = TestStarTreeBootstrapPhase2.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination4 = {"A2", "B1", "C3"};
  dimKey = new DimensionKey(combination4);
  timeSeries = TestStarTreeBootstrapPhase2.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  MapDriver<BytesWritable, BytesWritable, BytesWritable, BytesWritable> mapDriver;
  StarTreeGenerationMapper mapper = new StarTreeGenerationMapper();
  mapDriver = MapDriver.newMapDriver(mapper);
  Configuration config = mapDriver.getConfiguration();
  config.set(StarTreeGenerationConstants.STAR_TREE_GEN_CONFIG_PATH.toString(), ClassLoader.getSystemResource(CONF_FILE).toString());
  config.set(StarTreeGenerationConstants.STAR_TREE_GEN_OUTPUT_PATH.toString(), thirdEyeRoot + File.separator + "startree_generation");
  mapDriver.addAll(inputRecords);
  mapDriver.run();
}
 
Developer: Hanmourang, Project: Pinot, Lines: 41, Source: TestStarTreeBootstrapPhase2.java

Example 4: testThrowingErrorsWorks

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
/**
 * Tests that an error thrown from JavaScript works as expected.
 *
 * @throws Exception if anything goes wrong
 */
@Test
public void testThrowingErrorsWorks() throws Exception {
    final MapDriver<WritableComparable<?>, Writable, WritableComparable<?>, Writable> driver =
            new MapDriver<>();
    final LembosMapper mapper = new LembosMapper();
    final String moduleName = "LembosMapReduceEnvironmentTest-testThrowingErrorsWorks";

    driver.withMapper(mapper);

    driver.getConfiguration().set(LembosConstants.MR_MODULE_NAME, moduleName);
    driver.getConfiguration().set(LembosConstants.MR_MODULE_PATH, TestUtils.getModulePath(moduleName));

    final List<Pair<WritableComparable<?>, Writable>> inputs = ImmutableList.of(
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Alice")),
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Bob")),
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Sally")),
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Bob")),
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Alice"))
    );

    driver.withAll(inputs);

    try {
        driver.run();

        fail("The line above should had failed");
    } catch (RuntimeException e) {
        assertEquals("Error: This is an Error!", e.getMessage());
    }
}
 
Developer: apigee, Project: lembos, Lines: 41, Source: LembosMapReduceEnvironmentTest.java

Example 5: testUsingNodeJSModule

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
/**
 * Tests using a Node.js module works as expected.
 *
 * @throws Exception if anything goes wrong
 */
@Test
public void testUsingNodeJSModule() throws Exception {
    final MapDriver<WritableComparable<?>, Writable, WritableComparable<?>, Writable> driver =
            new MapDriver<>();
    final LembosMapper mapper = new LembosMapper();
    final String moduleName = "LembosMapReduceEnvironmentTest-testUsingNodeJSModule";

    driver.withMapper(mapper);

    driver.getConfiguration().set(LembosConstants.MR_MODULE_NAME, moduleName);
    driver.getConfiguration().set(LembosConstants.MR_MODULE_PATH, TestUtils.getModulePath(moduleName));

    final List<Pair<WritableComparable<?>, Writable>> inputs = ImmutableList.of(
            new Pair<WritableComparable<?>, Writable>(new Text(Long.toString(new Date().getTime())),
                                                             new Text("Alice"))
    );

    driver.withAll(inputs);

    final List<Pair<WritableComparable<?>, Writable>> outputs = driver.run();

    assertEquals(1, outputs.size());

    final Pair<WritableComparable<?>, Writable> output = outputs.get(0);
    final String key = output.getFirst().toString();
    final String val = output.getSecond().toString();

    assertEquals("ip", key);
    assertTrue(InetAddressValidator.getInstance().isValidInet4Address(val));
}
 
Developer: apigee, Project: lembos, Lines: 36, Source: LembosMapReduceEnvironmentTest.java

Example 6: verifyMapping

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
public static void verifyMapping(SmartMapper mapper, Object key, Object input) throws Exception
{
  MapDriver mapDriver = new MapDriver();
  mapDriver.setMapper(mapper);
  Object writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  Object writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapDriver.withInput(writableKey, writableValue);
  List results = mapDriver.run();
  Collections.sort(results, PairComparer.INSTANCE);
  String header = String.format("[%s]\n\n -> maps via %s to -> \n", input, mapper.getClass().getSimpleName());
  Approvals.verifyAll(header, results, Echo.INSTANCE);
}
 
Developer: approvals, Project: ApprovalTests.Java, Lines: 13, Source: HadoopApprovals.java

Example 7: generateAndSerializeStarTree

import org.apache.hadoop.mrunit.mapreduce.MapDriver; // import the class on which the method depends
private void generateAndSerializeStarTree() throws IOException
{
  List<Pair<BytesWritable, BytesWritable>> inputRecords = new ArrayList<Pair<BytesWritable, BytesWritable>>();
  String[] combination1 = {"A1", "B1", "C1"};
  DimensionKey dimKey = new DimensionKey(combination1);
  MetricTimeSeries timeSeries = TestStarTreeBootstrapPhase1.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  Pair<BytesWritable, BytesWritable> record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination2 = {"A1", "B1", "C2"};
  dimKey = new DimensionKey(combination2);
  timeSeries = TestStarTreeBootstrapPhase1.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination3 = {"A1", "B1", "C3"};
  dimKey = new DimensionKey(combination3);
  timeSeries = TestStarTreeBootstrapPhase1.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  String[] combination4 = {"A2", "B1", "C3"};
  dimKey = new DimensionKey(combination4);
  timeSeries = TestStarTreeBootstrapPhase1.TestHelper.generateRandomMetricTimeSeries(starTreeBootstrapConfig);
  record = new Pair<BytesWritable, BytesWritable>(
      new BytesWritable(dimKey.toBytes()), new BytesWritable(timeSeries.toBytes()));
  inputRecords.add(record);

  MapDriver<BytesWritable, BytesWritable, BytesWritable, BytesWritable> mapDriver;
  StarTreeGenerationMapper mapper = new StarTreeGenerationMapper();
  mapDriver = MapDriver.newMapDriver(mapper);
  Configuration config = mapDriver.getConfiguration();
  config.set(StarTreeGenerationConstants.STAR_TREE_GEN_CONFIG_PATH.toString(), ClassLoader.getSystemResource(CONF_FILE).toString());
  config.set(StarTreeGenerationConstants.STAR_TREE_GEN_OUTPUT_PATH.toString(), thirdEyeRoot + File.separator + "startree_generation");
  mapDriver.addAll(inputRecords);
  mapDriver.run();
}
 
开发者ID:Hanmourang,项目名称:Pinot,代码行数:42,代码来源:TestStarTreeBootstrapPhase1.java


Note: The org.apache.hadoop.mrunit.mapreduce.MapDriver.run method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by many developers; copyright in the source code remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.