

Java Pair Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.mrunit.types.Pair. If you have been asking yourself what exactly the Pair class does, how to use it, or where to find working examples, the curated code samples below should help.


The Pair class belongs to the org.apache.hadoop.mrunit.types package. A total of 15 code examples of the Pair class are presented below, sorted by popularity by default.
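Before diving into the examples, here is a minimal sketch of the class itself: in MRUnit, Pair<F, S> is a simple key/value holder with getFirst() and getSecond() accessors, and lists of Pairs are the currency of the driver APIs (withInput, withAll, run(), and so on). The values below are illustrative only.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.types.Pair;

public class PairBasics {
  public static void main(String[] args) {
    // Construct a Pair and read both components back.
    Pair<LongWritable, Text> record =
        new Pair<LongWritable, Text>(new LongWritable(0), new Text("first line"));
    System.out.println(record.getFirst());   // prints: 0
    System.out.println(record.getSecond());  // prints: first line

    // Lists of Pairs feed MRUnit's withAll()/addAll() and are returned by run().
    List<Pair<LongWritable, Text>> inputs = new ArrayList<Pair<LongWritable, Text>>();
    inputs.add(record);
  }
}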

Example 1: assertOutputs

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
public static <K, V, T extends TestDriver<K, V, T>> void assertOutputs(
    TestDriver<K, V, T> driver, List<Pair<K, V>> actuals) {

  List<Pair<K, V>> expected = driver.getExpectedOutputs();

  assertEquals("Number of expected records don't match actual number",
      expected.size(), actuals.size());

  // make sure all actual outputs are in the expected set,
  // and at the proper position.
  for (int i = 0; i < expected.size(); i++) {
    Pair<K, V> actual = actuals.get(i);
    Pair<K, V> expect = expected.get(i);
    assertEquals("Records don't match at position " + i,
        expect, actual);
  }
}
 
Developer: Hanmourang, Project: hiped2, Lines: 18, Source: MRUnitJUnitAsserts.java
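A minimal sketch of how this helper might be invoked, assuming a hypothetical WordCountMapper and a MapDriver configured for it (both names are placeholders, not from the original project):

MapDriver<LongWritable, Text, Text, IntWritable> mapDriver =
    MapDriver.newMapDriver(new WordCountMapper()); // hypothetical mapper
mapDriver.withInput(new LongWritable(0), new Text("hello"));
mapDriver.withOutput(new Text("hello"), new IntWritable(1));

// run() returns the actual outputs; the helper then compares them, in order,
// against the expected outputs registered above.
List<Pair<Text, IntWritable>> actuals = mapDriver.run();
MRUnitJUnitAsserts.assertOutputs(mapDriver, actuals);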

Example 2: testReduce

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testReduce() throws IOException 
{
	List<Pair<Text,List<Text>>> inputs = new LinkedList<Pair<Text,List<Text>>>();
	List<Pair<Text,Text>> outputs = new LinkedList<Pair<Text,Text>>();

	List<Text> jasonTriples = new LinkedList<Text>();
	List<Text> sufjanTriples = new LinkedList<Text>();
	jasonTriples.add(new Text("<http://ex.com/jason> foaf:firstName \"Jason\" ."));
	jasonTriples.add(new Text("<http://ex.com/jason> foaf:lastName \"Slepicka\" ."));
	jasonTriples.add(new Text("<http://ex.com/jason> foaf:lastName \"Slepicka\" ."));

	sufjanTriples.add(new Text("<http://ex.com/sufjan> foaf:firstName \"Sufjan\" ."));
	sufjanTriples.add(new Text("<http://ex.com/sufjan> foaf:firstName \"Sufjan\" ."));
	sufjanTriples.add(new Text("<http://ex.com/sufjan> foaf:lastName \"Slepicka\" ."));


	inputs.add(new Pair<Text, List<Text>>(new Text("<http://ex.com/jason>"), jasonTriples));
	inputs.add(new Pair<Text, List<Text>>(new Text("<http://ex.com/sufjan>"), sufjanTriples));
	reduceDriver.withAll(inputs);
	outputs.add(new Pair<Text, Text>(new Text("<http://ex.com/jason>"), new Text("<http://ex.com/jason> foaf:lastName \"Slepicka\" .\n<http://ex.com/jason> foaf:firstName \"Jason\" .\n")));
	outputs.add(new Pair<Text, Text>(new Text("<http://ex.com/sufjan>"), new Text("<http://ex.com/sufjan> foaf:lastName \"Slepicka\" .\n<http://ex.com/sufjan> foaf:firstName \"Sufjan\" .\n")));
	reduceDriver.addAllOutput(outputs);
	reduceDriver.runTest();

}
 
Developer: therelaxist, Project: spring-usc, Lines: 27, Source: TestN3MapReduce.java
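This test relies on a reduceDriver field initialized elsewhere, presumably in an @Before method. A sketch of the wiring it implies, with N3Reducer as a placeholder name for the reducer under test:

private ReduceDriver<Text, Text, Text, Text> reduceDriver;

@Before
public void setUp() {
  // N3Reducer is a placeholder; the original project supplies its own reducer class.
  reduceDriver = ReduceDriver.newReduceDriver(new N3Reducer());
}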

Example 3: testGrokFail

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testGrokFail() throws Exception {
  Path path = new Path(GrokIngestMapperTest.class.getClassLoader()
      .getResource("grok" + File.separator + "IP-WORD.conf").getPath());
  jobConf.set(GrokIngestMapper.GROK_URI, path.toString());
  mapper.getFixture().init(jobConf);
  mapDriver.withConfiguration(jobConf);
  String splitFilePath = "/path/to/log";
  mapDriver.setMapInputPath(new Path(splitFilePath));

  LongWritable lineNumb = new LongWritable(10);

  String message = "non matching string";
  mapDriver.withInput(lineNumb, new Text(message));

  List<Pair<Text, LWDocumentWritable>> run = mapDriver.run();
  Assert.assertEquals(1, run.size());

  Pair<Text, LWDocumentWritable> pair = run.get(0);
  LWDocument doc = pair.getSecond().getLWDocument();
  // TODO: Check Fields
}
 
Developer: lucidworks, Project: hadoop-solr, Lines: 23, Source: GrokIngestMapperTest.java
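Examples 3 through 6 share jobConf, mapper, and mapDriver fields that the snippets do not show. Judging from the calls they make, the fixture presumably looks roughly like this sketch (inferred, not taken from the original test class):

private GrokIngestMapper mapper;
private MapDriver<LongWritable, Text, Text, LWDocumentWritable> mapDriver;
private JobConf jobConf;

@Before
public void setUp() {
  mapper = new GrokIngestMapper();
  mapDriver = MapDriver.newMapDriver(mapper);
  // JobConf extends Configuration, so it can be passed to withConfiguration().
  jobConf = new JobConf();
}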

Example 4: testMonthDayYearGreedy

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testMonthDayYearGreedy() throws Exception {
  Path path = new Path(GrokIngestMapperTest.class.getClassLoader()
      .getResource("grok" + File.separator + "Month-Day-Year-Greedy.conf").getPath());
  jobConf.set(GrokIngestMapper.GROK_URI, path.toString());
  mapper.getFixture().init(jobConf);
  mapDriver.withConfiguration(jobConf);
  String splitFilePath = "/path/to/log";
  mapDriver.setMapInputPath(new Path(splitFilePath));

  LongWritable lineNumb = new LongWritable(10);

  String message = "Jan 05 2014 key1=value1 key2=value2 key3=value3";
  mapDriver.withInput(lineNumb, new Text(message));

  List<Pair<Text, LWDocumentWritable>> run = mapDriver.run();
  Assert.assertEquals(1, run.size());

  Pair<Text, LWDocumentWritable> pair = run.get(0);
  LWDocument doc = pair.getSecond().getLWDocument();
  Assert.assertNotNull(doc);
  // TODO: Check Fields
}
 
Developer: lucidworks, Project: hadoop-solr, Lines: 24, Source: GrokIngestMapperTest.java

Example 5: testGrok2Fail

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testGrok2Fail() throws Exception {

  Path path = new Path(GrokIngestMapperTest.class.getClassLoader()
      .getResource("grok" + File.separator + "Month-Day-Year-Greedy.conf").getPath());
  jobConf.set(GrokIngestMapper.GROK_URI, path.toString());
  mapper.getFixture().init(jobConf);
  mapDriver.withConfiguration(jobConf);
  String splitFilePath = "/path/to/log";
  mapDriver.setMapInputPath(new Path(splitFilePath));

  LongWritable lineNumb = new LongWritable(10);

  String message = "non matching string";
  mapDriver.withInput(lineNumb, new Text(message));

  List<Pair<Text, LWDocumentWritable>> run = mapDriver.run();
  Assert.assertEquals(1, run.size());

  Pair<Text, LWDocumentWritable> pair = run.get(0);
  LWDocument doc = pair.getSecond().getLWDocument();
  // TODO: Check Fields
}
 
Developer: lucidworks, Project: hadoop-solr, Lines: 24, Source: GrokIngestMapperTest.java

Example 6: testFirewall

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testFirewall() throws Exception {

  // Adding configuration file
  Path path = new Path(GrokIngestMapperTest.class.getClassLoader()
      .getResource("grok" + File.separator + "firewall.conf").getPath());
  jobConf.set(GrokIngestMapper.GROK_URI, path.toString());
  mapper.getFixture().init(jobConf);
  mapDriver.withConfiguration(jobConf);
  String splitFilePath = "/path/to/log";
  mapDriver.setMapInputPath(new Path(splitFilePath));

  LongWritable lineNumb = new LongWritable(10);

  String message = "Mar 31 2014 18:02:36: %ASA-5-106100: access-list inbound denied tcp outside/128.241.220.82(3154) -> asp3/62.84.96.19(32005) hit-cnt 1 first hit [0x91c26a3, 0x0]";
  mapDriver.withInput(lineNumb, new Text(message));

  List<Pair<Text, LWDocumentWritable>> run = mapDriver.run();
  Assert.assertEquals(1, run.size());

  Pair<Text, LWDocumentWritable> pair = run.get(0);
  LWDocument doc = pair.getSecond().getLWDocument();
  Assert.assertNotNull(doc);
  // TODO: Check Fields
}
 
Developer: lucidworks, Project: hadoop-solr, Lines: 26, Source: GrokIngestMapperTest.java

Example 7: testStrategy

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testStrategy() throws Exception {
  Configuration conf = mapDriver.getConfiguration();
  conf.set(CSVIngestMapper.CSV_IGNORE_FIRST_LINE_COMMENT, "false");
  conf.set(CSVIngestMapper.CSV_DELIMITER, ",");
  conf.set(CSVIngestMapper.CSV_FIELD_MAPPING, "0=id,1=count,2=body, 3=title,4=footer");
  conf.set(CSVIngestMapper.CSV_STRATEGY, CSVIngestMapper.EXCEL_STRATEGY);
  LongWritable key = new LongWritable(0);// not skipped
  Text value = new Text("id-0,bar,junk,zen,hockey");
  LWDocument doc;
  mapDriver.withInput(key, value);
  List<Pair<Text, LWDocumentWritable>> run = mapDriver.run(false);
  Assert.assertEquals(1, run.size());
  doc = run.get(0).getSecond().getLWDocument();

  Assert.assertNotNull(doc);
  // TODO: check fields
}
 
Developer: lucidworks, Project: hadoop-solr, Lines: 19, Source: CSVIngestMapperTest.java

Example 8: runMyTest

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test @SuppressWarnings({ "rawtypes", "unchecked" })
public void runMyTest() {

	List<Pair<LongWritable, Text>> inputs = new ArrayList<>();
	inputs.add(new Pair<>(
			new LongWritable(1), new Text("the quick brown fox jumped over the lazy dog.")));

	MapReduceDriver driver = getTestDriver();
	driver.addAll(inputs);

	try {
		driver.runTest();
	} catch (IOException e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Developer: conversant, Project: mara, Lines: 18, Source: DistributedWordCountMapReduceTest.java

Example 9: testMapper

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testMapper() throws Exception {
  MorphlineMapper mapper = new MorphlineMapper();
  MapDriver<LongWritable, Text, Text, SolrInputDocumentWritable> mapDriver = MapDriver.newMapDriver(mapper);

  Configuration config = mapDriver.getConfiguration();
  setupHadoopConfig(config);

  mapDriver.withInput(new LongWritable(0L), new Text("hdfs://localhost/" + DOCUMENTS_DIR + "/sample-statuses-20120906-141433.avro"));

  SolrInputDocument sid = new SolrInputDocument();
  sid.addField("id", "uniqueid1");
  sid.addField("user_name", "user1");
  sid.addField("text", "content of record one");
  SolrInputDocumentWritable sidw = new SolrInputDocumentWritable(sid);

  mapDriver
    .withCacheArchive(solrHomeZip.getAbsolutePath())
    .withOutput(new Text("0"), sidw);
  //mapDriver.runTest();
  List<Pair<Text, SolrInputDocumentWritable>> result = mapDriver.run();
  for (Pair<Text, SolrInputDocumentWritable> p: result) {
    System.out.println(p.getFirst());
    System.out.println(p.getSecond());
  }
}
 
Developer: europeana, Project: search, Lines: 27, Source: MorphlineMapperTest.java
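The commented-out runTest() call highlights a general MRUnit distinction: runTest() executes the mapper and asserts the results against the outputs registered with withOutput(), whereas run() performs no such assertion and simply returns the actual pairs for manual inspection, as this test does. In sketch form:

// Asserts that the actual output equals the pairs registered via withOutput():
mapDriver.runTest();

// No built-in assertion; returns the actual output pairs for inspection instead:
List<Pair<Text, SolrInputDocumentWritable>> result = mapDriver.run();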

Example 10: testMapReduce4Items

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testMapReduce4Items() throws IOException {
  mapReduceDriver.withAll(Arrays.asList(
      new Pair<>(new LongWritable(0), new Text("hello")),
      new Pair<>(new LongWritable(1), new Text("world")),
      new Pair<>(new LongWritable(2), new Text("nadja")),
      new Pair<>(new LongWritable(3), new Text("basti"))
      ));
  mapReduceDriver.withAllOutput(Arrays.asList(
      new Pair<>(new Text("nadja"), NullWritable.get()),
      new Pair<>(new Text("world"), NullWritable.get()),
      new Pair<>(new Text("basti"), NullWritable.get()),
      new Pair<>(new Text("hello"), NullWritable.get())
      ));
  mapReduceDriver.runTest();
}
 
Developer: europeana, Project: search, Lines: 17, Source: LineRandomizerMapperReducerTest.java

Example 11: testStarTreeGeneration

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testStarTreeGeneration() throws Exception
{
  List<Pair<BytesWritable,  BytesWritable>> input = generateTestData();
  mapDriver.addAll(input);
  mapDriver.run();

  // verify that the tree can be deserialized appropriately
  String starTreeOutputPath = mapDriver.getConfiguration().get(StarTreeGenerationConstants.STAR_TREE_GEN_OUTPUT_PATH.toString());
  String collectionName = starTreeGenerationConfig.getCollectionName();
  Path pathToTree = new Path(starTreeOutputPath,  "tree.bin");
  FileSystem dfs = FileSystem.get(mapDriver.getConfiguration());
  InputStream is = dfs.open(pathToTree);
  StarTreeNode starTreeRootNode = StarTreePersistanceUtil.loadStarTree(is);
  Assert.assertNotNull(starTreeRootNode);
  Assert.assertFalse(starTreeRootNode.isLeaf());

  LinkedList<StarTreeNode> leafNodes = new LinkedList<StarTreeNode>();
  StarTreeUtils.traverseAndGetLeafNodes(leafNodes, starTreeRootNode);
  Assert.assertEquals(5, leafNodes.size());
}
 
Developer: Hanmourang, Project: Pinot, Lines: 22, Source: TestStarTreeGeneration.java

Example 12: generateTestDataMapper

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
private List<Pair<GenericRecord,NullWritable>> generateTestDataMapper() throws Exception
{
  Schema schema = new Schema.Parser().parse(ClassLoader.getSystemResourceAsStream(SCHEMA_FILE));
  List<Pair<GenericRecord, NullWritable>> inputRecords = new ArrayList<Pair<GenericRecord, NullWritable>>();

  GenericRecord input = new GenericData.Record(schema);
  input.put("d1", "A1");
  input.put("d2", "B1");
  input.put("d3", "C1");
  input.put("time", TestStarTreeBootstrapPhase1.TestHelper.generateRandomTime());
  input.put("hoursSinceEpoch", TestStarTreeBootstrapPhase1.TestHelper.generateRandomHoursSinceEpoch());
  input.put("m1", 10);
  input.put("m2", 20);
  inputRecords.add(new Pair<GenericRecord, NullWritable>(input, NullWritable.get()));

  return inputRecords;
}
 
Developer: Hanmourang, Project: Pinot, Lines: 18, Source: TestStarTreeBootstrapPhase1.java

Example 13: generateTestDataReducer

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
private List<Pair<BytesWritable,List<BytesWritable>>> generateTestDataReducer() throws Exception
{

  List<Pair<BytesWritable,List<BytesWritable>>> inputRecords = new ArrayList<Pair<BytesWritable,List<BytesWritable>>>();
  List<BytesWritable> list = new ArrayList<BytesWritable>();
  String[] combination = {"?", "?", "?"};
  DimensionKey key = new DimensionKey(combination);
  UUID uuid = UUID.randomUUID();
  BootstrapPhaseMapOutputKey outputKey = new BootstrapPhaseMapOutputKey(uuid, key.toMD5());
  BootstrapPhaseMapOutputValue outputValue1 = new BootstrapPhaseMapOutputValue(key, TestStarTreeBootstrapPhase1.TestHelper.generateMetricTimeSeries(starTreeBootstrapConfig, 10));
  list.add(new BytesWritable(outputValue1.toBytes()));
  BootstrapPhaseMapOutputValue outputValue2 = new BootstrapPhaseMapOutputValue(key, TestStarTreeBootstrapPhase1.TestHelper.generateMetricTimeSeries(starTreeBootstrapConfig, 20));
  list.add(new BytesWritable(outputValue2.toBytes()));

  inputRecords.add(new Pair<BytesWritable, List<BytesWritable>>(new BytesWritable(outputKey.toBytes()), list));
  return inputRecords;
}
 
Developer: Hanmourang, Project: Pinot, Lines: 18, Source: TestStarTreeBootstrapPhase1.java

Example 14: testStarTreeBootstrapPhase1

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
@Test
public void testStarTreeBootstrapPhase1() throws Exception
{
  List<Pair<GenericRecord,NullWritable>> inputRecords = generateTestDataMapper();
  generateAndSerializeStarTree();
  for(Pair<GenericRecord,NullWritable> p : inputRecords){
    AvroKey<GenericRecord> inKey = new AvroKey<GenericRecord>();
    inKey.datum(p.getFirst());
    mapDriver.addInput(new Pair<AvroKey<GenericRecord>, NullWritable> (inKey, NullWritable.get()));
  }
  List<Pair<BytesWritable, BytesWritable>> result = mapDriver.run();
  // should give two records for {"A1", "B1", "C1"} and {"A1", "B1", "?"}
  Assert.assertEquals(2, result.size());

  List<Pair<BytesWritable,List<BytesWritable>>> input =  generateTestDataReducer();
  reduceDriver.addAll(input);
  result = reduceDriver.run();
  BootstrapPhaseMapOutputValue output = BootstrapPhaseMapOutputValue.fromBytes(result.get(0).getSecond().getBytes(), TestHelper.getMetricSchema(starTreeBootstrapConfig));
  Assert.assertEquals(TestStarTreeBootstrapPhase1.TestHelper.generateMetricTimeSeries(starTreeBootstrapConfig, 30),output.getMetricTimeSeries());
}
 
Developer: Hanmourang, Project: Pinot, Lines: 21, Source: TestStarTreeBootstrapPhase1.java

Example 15: generateTestDataReducer

import org.apache.hadoop.mrunit.types.Pair; // import the required package/class
private List<Pair<BytesWritable,List<BytesWritable>>> generateTestDataReducer(StarTreeNode root) throws IOException
{
  List<Pair<BytesWritable,List<BytesWritable>>> inputRecords = new ArrayList<Pair<BytesWritable,List<BytesWritable>>>();
  List<BytesWritable> list = new ArrayList<BytesWritable>();

  UUID uuid = null;
  LinkedList<StarTreeNode> leafNodes = new LinkedList<StarTreeNode>();
  StarTreeUtils.traverseAndGetLeafNodes(leafNodes, root);
  for (StarTreeNode node : leafNodes) {
    if(node.getDimensionValue().equals("C1")){
      uuid = node.getId();
      break;
    }
  }

  String[] combination1 = {"A1", "B1", "C1"};
  DimensionKey dimKey = new DimensionKey(combination1);
  BootstrapPhaseMapOutputValue outputValue1 = new BootstrapPhaseMapOutputValue(dimKey, TestStarTreeBootstrapPhase2.TestHelper.generateMetricTimeSeries(starTreeBootstrapConfig, 10));
  list.add(new BytesWritable(outputValue1.toBytes()));

  BootstrapPhaseMapOutputValue outputValue2 = new BootstrapPhaseMapOutputValue(dimKey, TestStarTreeBootstrapPhase2.TestHelper.generateMetricTimeSeries(starTreeBootstrapConfig, 10));
  list.add(new BytesWritable(outputValue2.toBytes()));

  inputRecords.add(new Pair<BytesWritable,List<BytesWritable>>(new BytesWritable(uuid.toString().getBytes()),list));
  return inputRecords;
}
 
Developer: Hanmourang, Project: Pinot, Lines: 27, Source: TestStarTreeBootstrapPhase2.java


Note: The org.apache.hadoop.mrunit.types.Pair examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Refer to each project's License before distributing or using the code, and do not reproduce this article without permission.