

Java LongWritable Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.LongWritable. If you have been wondering what the LongWritable class does, how to use it, or where to find working examples of it, the curated class code examples below should help.


The LongWritable class belongs to the org.apache.hadoop.io package. Below are 15 code examples of the LongWritable class, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
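Before the project-sourced examples, here is a minimal, self-contained sketch of the core LongWritable API: boxing a long, reading it back, comparing values, and round-tripping through Hadoop's Writable serialization. The LongWritableBasics class name is made up for illustration and does not come from any of the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.LongWritable;

public class LongWritableBasics {
  public static void main(String[] args) throws Exception {
    // LongWritable is Hadoop's mutable Writable box for a Java long.
    LongWritable a = new LongWritable(42L);
    LongWritable b = new LongWritable();
    b.set(7L);

    // get() unwraps the primitive value.
    System.out.println(a.get() + b.get());   // 49

    // As a WritableComparable, LongWritable sorts by its long value.
    System.out.println(a.compareTo(b) > 0);  // true, since 42 > 7

    // Writable serialization: this is how Hadoop ships keys and
    // values between map and reduce tasks.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    a.write(new DataOutputStream(bytes));

    LongWritable copy = new LongWritable();
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(copy.get());          // 42
  }
}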

Example 1: run

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Override
public int run(String[] args) throws Exception {
  if (args.length != 2) {
    System.err.println("Usage: wordmean <in> <out>");
    return 0;
  }

  Configuration conf = getConf();

  Job job = Job.getInstance(conf, "word mean");
  job.setJarByClass(WordMean.class);
  job.setMapperClass(WordMeanMapper.class);
  job.setCombinerClass(WordMeanReducer.class);
  job.setReducerClass(WordMeanReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  FileInputFormat.addInputPath(job, new Path(args[0]));
  Path outputpath = new Path(args[1]);
  FileOutputFormat.setOutputPath(job, outputpath);
  boolean result = job.waitForCompletion(true);
  mean = readAndCalcMean(outputpath, conf);

  return (result ? 0 : 1);
}
 
Developer: naver, Project: hadoop, Lines: 25, Source: WordMean.java
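The run method above follows Hadoop's Tool contract (in the upstream project, WordMean extends Configured and implements Tool). A minimal driver sketch, assuming that class is on the classpath, shows how run(String[]) is typically invoked; the WordMeanDriver name is made up for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class WordMeanDriver {
  public static void main(String[] args) throws Exception {
    // ToolRunner parses generic Hadoop options (-D, -fs, -files, ...)
    // and passes the remaining arguments to WordMean.run(String[]).
    int exitCode = ToolRunner.run(new Configuration(), new WordMean(), args);
    System.exit(exitCode);
  }
}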

Example 2: map

import org.apache.hadoop.io.LongWritable; // import the required package/class
public void map(LongWritable key, Text val,
    OutputCollector<LongWritable, Text> output, Reporter reporter)
    throws IOException {
  String str = val.toString();
  LOG.debug("MAP key:" +key +"  value:" + str);
  if(MAPPER_BAD_RECORDS.get(0).equals(str)) {
    LOG.warn("MAP Encountered BAD record");
    System.exit(-1);
  }
  else if(MAPPER_BAD_RECORDS.get(1).equals(str)) {
    LOG.warn("MAP Encountered BAD record");
    throw new RuntimeException("Bad record "+str);
  }
  else if(MAPPER_BAD_RECORDS.get(2).equals(str)) {
    try {
      LOG.warn("MAP Encountered BAD record");
      Thread.sleep(15*60*1000);
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
  }
  output.collect(key, val);
}
 
Developer: naver, Project: hadoop, Lines: 24, Source: TestBadRecords.java

Example 3: testStandAloneClient

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Test(timeout=60000)
public void testStandAloneClient() throws IOException {
  Client client = new Client(LongWritable.class, conf);
  InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
  try {
    client.call(new LongWritable(RANDOM.nextLong()),
            address, null, null, 0, conf);
    fail("Expected an exception to have been thrown");
  } catch (IOException e) {
    String message = e.getMessage();
    String addressText = address.getHostName() + ":" + address.getPort();
    assertTrue("Did not find "+addressText+" in "+message,
            message.contains(addressText));
    Throwable cause=e.getCause();
    assertNotNull("No nested exception in "+e,cause);
    String causeText=cause.getMessage();
    assertTrue("Did not find " + causeText + " in " + message,
            message.contains(causeText));
  }
}
 
Developer: naver, Project: hadoop, Lines: 21, Source: TestIPC.java

Example 4: reduce

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Override
protected void reduce(LongWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {

    int k = context.getConfiguration().getInt("k", -1);

    double[] result = new double[k];

    for (Text value : values) {
        String[] ai = value.toString().split(",");
        for (int j = 0; j < k; j++) {
            result[j] += Double.parseDouble(ai[j]);
        }
    }

    StringBuilder res = new StringBuilder(prefix);

    for (int i = 0; i < k; i++) {
        res.append(result[i]);
        if (i < k - 1) {
            res.append(",");
        }
    }
    context.write(key, new Text(res.toString()));
}
 
Developer: Romm17, Project: MRNMF, Lines: 25, Source: MM2.java

Example 5: map

import org.apache.hadoop.io.LongWritable; // import the required package/class
public void map(LongWritable key, Text val, Context c)
    throws IOException, InterruptedException {

  // Create a whole bunch of objects.
  List<Integer> lst = new ArrayList<Integer>();
  for (int i = 0; i < 20000; i++) {
    lst.add(new Integer(i));
  }

  // Actually use this list, to ensure that it isn't just optimized away.
  int sum = 0;
  for (int x : lst) {
    sum += x;
  }

  // throw away the list and run a GC.
  lst = null;
  System.gc();

  c.write(new LongWritable(sum), val);
}
 
Developer: naver, Project: hadoop, Lines: 22, Source: TestLocalRunner.java

Example 6: runIOTest

import org.apache.hadoop.io.LongWritable; // import the required package/class
private void runIOTest(
        Class<? extends Mapper<Text, LongWritable, Text, Text>> mapperClass, 
        Path outputDir) throws IOException {
  JobConf job = new JobConf(config, TestDFSIO.class);

  FileInputFormat.setInputPaths(job, getControlDir(config));
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(mapperClass);
  job.setReducerClass(AccumulatingReducer.class);

  FileOutputFormat.setOutputPath(job, outputDir);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: TestDFSIO.java

Example 7: run

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Override
public void run() {
  for (int i = 0; i < count; i++) {
    try {
      final long param = RANDOM.nextLong();
      LongWritable value = call(client, param, server, conf);
      if (value.get() != param) {
        LOG.fatal("Call failed!");
        failed = true;
        break;
      }
    } catch (Exception e) {
      LOG.fatal("Caught: " + StringUtils.stringifyException(e));
      failed = true;
    }
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 18, Source: TestIPC.java

Example 8: testStandAloneClient

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Test(timeout=60000)
public void testStandAloneClient() throws IOException {
  Client client = new Client(LongWritable.class, conf);
  InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
  try {
    call(client, RANDOM.nextLong(), address, conf);
    fail("Expected an exception to have been thrown");
  } catch (IOException e) {
    String message = e.getMessage();
    String addressText = address.getHostName() + ":" + address.getPort();
    assertTrue("Did not find "+addressText+" in "+message,
            message.contains(addressText));
    Throwable cause=e.getCause();
    assertNotNull("No nested exception in "+e,cause);
    String causeText=cause.getMessage();
    assertTrue("Did not find " + causeText + " in " + message,
            message.contains(causeText));
  } finally {
    client.stop();
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 22, Source: TestIPC.java

Example 9: testIpcConnectTimeout

import org.apache.hadoop.io.LongWritable; // import the required package/class
@Test(timeout=60000)
public void testIpcConnectTimeout() throws IOException {
  // start server
  Server server = new TestServer(1, true);
  InetSocketAddress addr = NetUtils.getConnectAddress(server);
  //Intentionally do not start server to get a connection timeout

  // start client
  Client.setConnectTimeout(conf, 100);
  Client client = new Client(LongWritable.class, conf);
  // set the rpc timeout to twice the MIN_SLEEP_TIME
  try {
    call(client, new LongWritable(RANDOM.nextLong()), addr,
        MIN_SLEEP_TIME * 2, conf);
    fail("Expected an exception to have been thrown");
  } catch (SocketTimeoutException e) {
    LOG.info("Get a SocketTimeoutException ", e);
  }
  client.stop();
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 21, Source: TestIPC.java

Example 10: getSplits

import org.apache.hadoop.io.LongWritable; // import the required package/class
public List<InputSplit> getSplits(JobContext job)
    throws IOException {

  Configuration conf = job.getConfiguration();
  Path src = new Path(conf.get(INDIRECT_INPUT_FILE, null));
  FileSystem fs = src.getFileSystem(conf);

  List<InputSplit> splits = new ArrayList<InputSplit>();
  LongWritable key = new LongWritable();
  Text value = new Text();
  for (SequenceFile.Reader sl = new SequenceFile.Reader(fs, src, conf);
       sl.next(key, value);) {
    splits.add(new IndirectSplit(new Path(value.toString()), key.get()));
  }

  return splits;
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: GenericMRLoadGenerator.java

Example 11: testChainReduceNoOuptut

import org.apache.hadoop.io.LongWritable; // import the required package/class
/**
 * Tests a reducer that consumes its input and produces no output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Developer: naver, Project: hadoop, Lines: 27, Source: TestChainErrors.java

Example 12: testChainFail

import org.apache.hadoop.io.LongWritable; // import the required package/class
/**
 * Tests one of the mappers throwing an exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Developer: naver, Project: hadoop, Lines: 25, Source: TestChainErrors.java

Example 13: seekTest

import org.apache.hadoop.io.LongWritable; // import the required package/class
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job,CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
 
Developer: naver, Project: hadoop, Lines: 21, Source: TestFileSystem.java

Example 14: testNestedIterable

import org.apache.hadoop.io.LongWritable; // import the required package/class
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: TestJoinTupleWritable.java

Example 15: map

import org.apache.hadoop.io.LongWritable; // import the required package/class
/**
 * Emits a random word sequence of the desired size. Note that the desired
 * output size is passed in as the value parameter to this map.
 */
@Override
public void map(NullWritable key, LongWritable value, Context context)
throws IOException, InterruptedException {
  //TODO Control the extra data written ..
  //TODO Should the key\tvalue\n be considered for measuring size?
  //     Can counters like BYTES_WRITTEN be used? What will be the value of
  //     such counters in LocalJobRunner?
  for (long bytes = value.get(); bytes > 0;) {
    String randomKey = rtg.getRandomWord();
    String randomValue = rtg.getRandomWord();
    context.write(new Text(randomKey), new Text(randomValue));
    bytes -= (randomValue.getBytes(charsetUTF8).length +
        randomKey.getBytes(charsetUTF8).length);
  }
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: CompressionEmulationUtil.java


Note: The org.apache.hadoop.io.LongWritable class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are drawn from open-source projects contributed by various developers, and copyright in the source code remains with the original authors; consult each project's license before distributing or using the code. Do not reproduce without permission.