

Java VLongWritable Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.VLongWritable. If you are wondering what VLongWritable is for or how to use it, the curated class examples below should help.


The VLongWritable class belongs to the org.apache.hadoop.io package. Fifteen code examples of the class are shown below, sorted by popularity.
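
Before diving into the examples, here is a minimal orientation sketch (not taken from any of the projects below; the class name is illustrative): VLongWritable serializes a long using a variable-length encoding, so small values cost as little as one byte instead of LongWritable's fixed eight.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.VLongWritable;

public class VLongWritableDemo {
    public static void main(String[] args) throws IOException {
        for (long value : new long[] {1L, 1000L, 1000000000L}) {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            // write() uses Hadoop's vlong encoding: 1 to 9 bytes depending on magnitude
            new VLongWritable(value).write(new DataOutputStream(bytes));
            System.out.println(value + " -> " + bytes.size() + " byte(s)");
        }
    }
}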

Example 1: init

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Override
public void init() throws IOException {
  registerKey(NullWritable.class.getName(), NullWritableSerializer.class);
  registerKey(Text.class.getName(), TextSerializer.class);
  registerKey(LongWritable.class.getName(), LongWritableSerializer.class);
  registerKey(IntWritable.class.getName(), IntWritableSerializer.class);
  registerKey(Writable.class.getName(), DefaultSerializer.class);
  registerKey(BytesWritable.class.getName(), BytesWritableSerializer.class);
  registerKey(BooleanWritable.class.getName(), BoolWritableSerializer.class);
  registerKey(ByteWritable.class.getName(), ByteWritableSerializer.class);
  registerKey(FloatWritable.class.getName(), FloatWritableSerializer.class);
  registerKey(DoubleWritable.class.getName(), DoubleWritableSerializer.class);
  registerKey(VIntWritable.class.getName(), VIntWritableSerializer.class);
  registerKey(VLongWritable.class.getName(), VLongWritableSerializer.class);

  LOG.info("Hadoop platform inited");
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 18, Source: HadoopPlatform.java

Example 2: compute

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Override
public void compute(Iterable<DoubleWritable> messages) throws IOException {
    if (getSuperstep() == 0) {
        setValue(new DoubleWritable(Double.MAX_VALUE));
    }

    double minDist = isSource() ? 0d : Double.MAX_VALUE;
    for (DoubleWritable message : messages) {
        minDist = Math.min(minDist, message.get());
    }

    if (minDist < getValue().get()) {
        setValue(new DoubleWritable(minDist));
        for (Edge<VLongWritable, DoubleWritable> edge : getEdges()) {
            double distance = minDist + edge.getValue().get();
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(distance));
        }
    }

    voteToHalt();
}
 
Developer: pregelix, Project: pregelix, Lines: 22, Source: ShortestPathsVertex.java

Example 3: buildJob2

import org.apache.hadoop.io.VLongWritable; // import the required package/class
private Job buildJob2(Path input, Path output) throws Exception {
    Configuration conf = getConf();
    conf.setInt(Constants.PROP_BLOCK_SIZE, blockSize);

    Job job = new Job(conf, "data-piqid.piqconnect.IterationStage2");
    job.setJarByClass(Runner.class);

    job.setMapperClass(Mapper.class);
    job.setReducerClass(IterationStage2._Reducer.class);
    job.setNumReduceTasks(numberOfReducers);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setMapOutputKeyClass(VLongWritable.class);
    job.setMapOutputValueClass(BlockWritable.class);
    job.setOutputKeyClass(BlockIndexWritable.class);
    job.setOutputValueClass(BlockWritable.class);
    job.setSortComparatorClass(VLongWritableComparator.class);

    SequenceFileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);

    setCompression(job);
    return job;
}
 
Developer: placeiq, Project: piqconnect, Lines: 26, Source: Runner.java

Example 4: buildJob3

import org.apache.hadoop.io.VLongWritable; // import the required package/class
private Job buildJob3(Path input, Path output) throws Exception {
    Configuration conf = getConf();
    conf.setInt(Constants.PROP_BLOCK_SIZE, blockSize);

    Job job = new Job(conf, "data-piqid.piqconnect.FinalResultBuilder");
    job.setJarByClass(Runner.class);

    job.setMapperClass(FinalResultBuilder._Mapper.class);
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(VLongWritable.class);
    job.setOutputValueClass(VLongWritable.class);

    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);

    setCompression(job);
    return job;
}
 
Developer: placeiq, Project: piqconnect, Lines: 21, Source: Runner.java

Example 5: reduce

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Override
public void reduce(VLongWritable key, Iterable<Text> values, Context ctx) throws IOException, InterruptedException {
    // KEY (a VLongWritable) and VALUE (a Text) are presumably reusable output instances
    // declared as fields on the reducer; each input value holds a "start\tend" node range.
    long     start_node;
    long     end_node;
    String[] line;

    for (Text value : values) {
        line = value.toString().split("\t");
        start_node = Long.parseLong(line[0]);
        end_node = Long.parseLong(line[1]);
        for (long i = start_node; i <= end_node; i++) {
            KEY.set(i);
            VALUE.set(Long.toString(i));
            ctx.write(KEY, VALUE);
        }
    }
}
 
Developer: placeiq, Project: piqconnect, Lines: 18, Source: InitialVectorGenerator.java

Example 6: buildJob

import org.apache.hadoop.io.VLongWritable; // import the required package/class
private Job buildJob() throws Exception {
    Configuration conf = getConf();
    conf.setLong("numberOfNodes", numberOfNodes);

    Job job = new Job(conf, "data-piqid.piqconnect.ConCmptIVGen_Stage1");
    job.setJarByClass(InitialVectorGenerator.class);
    job.setMapperClass(_Mapper.class);
    job.setReducerClass(_Reducer.class);
    job.setNumReduceTasks(numberOfReducers);
    job.setOutputKeyClass(VLongWritable.class);
    job.setOutputValueClass(Text.class);

    FileInputFormat.setInputPaths(job, pathBitmask);
    FileOutputFormat.setOutputPath(job, pathVector);
    FileOutputFormat.setCompressOutput(job, true);

    return job;
}
 
Developer: placeiq, Project: piqconnect, Lines: 19, Source: InitialVectorGenerator.java

Example 7: reduce1

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Test
public void reduce1() throws IOException {
    reduceDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 2);

    reduceDriver.addInput(new VLongWritable(0), Arrays.asList(
            blockVector(TYPE.VECTOR_INITIAL, 0, 1),
            blockVector(TYPE.VECTOR_INCOMPLETE, 0, 0),
            blockVector(TYPE.VECTOR_INCOMPLETE, 2, 2)));

    reduceDriver.addInput(new VLongWritable(1), Arrays.asList(
            blockVector(TYPE.VECTOR_INITIAL, 2, 3),
            blockVector(TYPE.VECTOR_INCOMPLETE, 0, 1),
            blockVector(TYPE.VECTOR_INCOMPLETE, 2, 3)));

    reduceDriver.addOutput(blockIndex(0), blockVector(TYPE.VECTOR_INCOMPLETE, 0, 0));
    reduceDriver.addOutput(blockIndex(1), blockVector(TYPE.VECTOR_INCOMPLETE, 0, 1));

    reduceDriver.runTest();
}
 
Developer: placeiq, Project: piqconnect, Lines: 20, Source: Runner2Test.java

Example 8: reduce

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Test
public void reduce() throws IOException {
    reduceDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 3);

    int block_col = 0;

    BlockWritable e1 = blockVector(0, 1, 2);
    BlockWritable e2 = blockMatrix(block_col, 0, 1, 1, 0, 1, 2, 2, 1);

    reduceDriver.addInput(new IterationStage1.JoinKey(true, block_col), Arrays.asList(e1, e2));

    BlockWritable v1 = blockVector(BlockWritable.TYPE.VECTOR_INITIAL, 0, 1, 2);
    BlockWritable v2 = blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, 1, 0, 1);

    reduceDriver.addOutput(new VLongWritable(block_col), v1); // initial vector
    reduceDriver.addOutput(new VLongWritable(block_col), v2); // after multiplication
    reduceDriver.runTest();
}
 
Developer: placeiq, Project: piqconnect, Lines: 19, Source: Runner1Test.java

Example 9: reduce2

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Test
public void reduce2() throws IOException {
    reduceDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 3);

    int block_col = 0;

    BlockWritable e1 = blockVector(1, -1, -1);
    BlockWritable e2 = blockMatrix(block_col, 2, 0);

    reduceDriver.addInput(new IterationStage1.JoinKey(true, block_col), Arrays.asList(e1, e2));

    BlockWritable v1 = blockVector(BlockWritable.TYPE.VECTOR_INITIAL, 1, -1, -1);
    BlockWritable v2 = blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, -1, -1, 1);

    reduceDriver.addOutput(new VLongWritable(block_col), v1); // initial vector
    reduceDriver.addOutput(new VLongWritable(block_col), v2); // after multiplication
    reduceDriver.runTest();
}
 
Developer: placeiq, Project: piqconnect, Lines: 19, Source: Runner1Test.java

Example 10: mapReduce2

import org.apache.hadoop.io.VLongWritable; // import the required package/class
@Test
public void mapReduce2() throws IOException {
    mrDriver.getConfiguration().setInt(Constants.PROP_BLOCK_SIZE, 2);

    mrDriver.addInput(blockIndex(0), blockVector(0, 1));
    mrDriver.addInput(blockIndex(1), blockVector(2, 3));

    mrDriver.addInput(blockIndex(0, 0), blockMatrix(0L, 0, 1, 1, 0));
    mrDriver.addInput(blockIndex(0, 1), blockMatrix(1L, 1, 0));
    mrDriver.addInput(blockIndex(1, 0), blockMatrix(0L, 0, 1));
    mrDriver.addInput(blockIndex(1, 1), blockMatrix(1L, 1, 1));

    mrDriver.addOutput(new VLongWritable(0), blockVector(BlockWritable.TYPE.VECTOR_INITIAL, 0, 1));
    mrDriver.addOutput(new VLongWritable(0), blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, 1, 0));
    mrDriver.addOutput(new VLongWritable(1), blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, 1, -1));
    mrDriver.addOutput(new VLongWritable(1), blockVector(BlockWritable.TYPE.VECTOR_INITIAL, 2, 3));
    mrDriver.addOutput(new VLongWritable(0), blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, -1, 2));
    mrDriver.addOutput(new VLongWritable(1), blockVector(BlockWritable.TYPE.VECTOR_INCOMPLETE, -1, 3));

    mrDriver.runTest();
}
 
Developer: placeiq, Project: piqconnect, Lines: 22, Source: Runner1Test.java

Example 11: getValue

import org.apache.hadoop.io.VLongWritable; // import the required package/class
public static Value getValue(final VLongWritable w) {
  if (w == null) {
    throw new IllegalArgumentException("Writable cannot be null");
  }
  ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(byteStream);
  
  // We could also close it, but we know that VLongWritable and BAOS don't need it.
  try {
    w.write(out);
  } catch (IOException e) {
    // If this ever happens, something is seriously screwed up, or someone subclassed VLongWritable
    // and made it do crazy stuff.
    throw new RuntimeException(e);
  }
  
  return new Value(byteStream.toByteArray());
}
 
Developer: joshelser, Project: cosmos, Lines: 19, Source: GroupByRowSuffixIterator.java

Example 12: getWritable

import org.apache.hadoop.io.VLongWritable; // import the required package/class
public static VLongWritable getWritable(final Value v) {
  if (null == v) {
    throw new IllegalArgumentException("Value cannot be null");
  }
  
  ByteArrayInputStream bais = new ByteArrayInputStream(v.get());
  DataInputStream in = new DataInputStream(bais);
  
  VLongWritable writable = new VLongWritable();
  try {
    writable.readFields(in); 
  } catch (IOException e) {
    // If this ever happens, something is seriously screwed up, or someone subclassed Value
    // and made it do crazy stuff.
    throw new RuntimeException(e);
  }
  
  return writable;
}
 
Developer: joshelser, Project: cosmos, Lines: 20, Source: GroupByRowSuffixIterator.java
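
Examples 11 and 12 are the two halves of a round trip between VLongWritable and Accumulo's Value. A minimal usage sketch, assuming Value is org.apache.accumulo.core.data.Value (cosmos builds on Accumulo) and that both helpers are reachable as statics on GroupByRowSuffixIterator:

import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.VLongWritable;

public class RoundTripDemo {
    public static void main(String[] args) {
        VLongWritable original = new VLongWritable(1234L);
        // Writable -> Value -> Writable, using the two static helpers shown above
        Value encoded = GroupByRowSuffixIterator.getValue(original);
        VLongWritable decoded = GroupByRowSuffixIterator.getWritable(encoded);
        System.out.println(decoded.get()); // 1234
    }
}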

Example 13: write

import org.apache.hadoop.io.VLongWritable; // import the required package/class
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}
 
Developer: naver, Project: hadoop, Lines: 36, Source: TypedBytesWritableOutput.java

Example 14: readType

import org.apache.hadoop.io.VLongWritable; // import the required package/class
public Class<? extends Writable> readType() throws IOException {
  Type type = in.readType();
  if (type == null) {
    return null;
  }
  switch (type) {
  case BYTES:
    return BytesWritable.class;
  case BYTE:
    return ByteWritable.class;
  case BOOL:
    return BooleanWritable.class;
  case INT:
    return VIntWritable.class;
  case LONG:
    return VLongWritable.class;
  case FLOAT:
    return FloatWritable.class;
  case DOUBLE:
    return DoubleWritable.class;
  case STRING:
    return Text.class;
  case VECTOR:
    return ArrayWritable.class;
  case MAP:
    return MapWritable.class;
  case WRITABLE:
    return Writable.class;
  default:
    throw new RuntimeException("unknown type");
  }
}
 
Developer: naver, Project: hadoop, Lines: 33, Source: TypedBytesWritableInput.java
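
Examples 13 and 14 are two sides of the same typed-bytes stream: write() dispatches a VLongWritable to writeVLong, and readType() maps the LONG type tag back to VLongWritable.class. A minimal round-trip sketch, assuming the DataOutput/DataInput constructors and the read() method of these classes from Hadoop's org.apache.hadoop.typedbytes package:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.VLongWritable;
import org.apache.hadoop.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.typedbytes.TypedBytesWritableOutput;

public class TypedBytesRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        TypedBytesWritableOutput out = new TypedBytesWritableOutput(new DataOutputStream(buffer));
        out.write(new VLongWritable(42L)); // hits the VLongWritable branch of Example 13

        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        TypedBytesWritableInput in = new TypedBytesWritableInput(dis);
        System.out.println(in.read()); // reads back a VLongWritable holding 42
    }
}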

Example 15: updateObject

import org.apache.hadoop.io.VLongWritable; // import the required package/class
public static void updateObject(Writable obj, byte[] seed) {
  if (obj instanceof IntWritable) {
    ((IntWritable)obj).set(Ints.fromByteArray(seed));
  } else if (obj instanceof FloatWritable) {
    ((FloatWritable)obj).set(r.nextFloat());
  } else if (obj instanceof DoubleWritable) {
    ((DoubleWritable)obj).set(r.nextDouble());
  } else if (obj instanceof LongWritable) {
    ((LongWritable)obj).set(Longs.fromByteArray(seed));
  } else if (obj instanceof VIntWritable) {
    ((VIntWritable)obj).set(Ints.fromByteArray(seed));
  } else if (obj instanceof VLongWritable) {
    ((VLongWritable)obj).set(Longs.fromByteArray(seed));
  } else if (obj instanceof BooleanWritable) {
    ((BooleanWritable)obj).set(seed[0] % 2 == 1);
  } else if (obj instanceof Text) {
    ((Text)obj).set(BytesUtil.toStringBinary(seed));
  } else if (obj instanceof ByteWritable) {
    ((ByteWritable)obj).set(seed.length > 0 ? seed[0] : 0);
  } else if (obj instanceof BytesWritable) {
    ((BytesWritable)obj).set(seed, 0, seed.length);
  } else if (obj instanceof UTF8) {
    ((UTF8)obj).set(BytesUtil.toStringBinary(seed));
  } else if (obj instanceof MockValueClass) {
    ((MockValueClass)obj).set(seed);
  } else {
    throw new IllegalArgumentException("unknown writable: " +
                                       obj.getClass().getName());
  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 31, Source: BytesFactory.java
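
A minimal usage sketch for the factory above, assuming updateObject is reachable as a static on BytesFactory and that Guava's Longs (already used inside the method) is on the classpath. Note the VLongWritable branch needs a seed of at least 8 bytes, since it calls Longs.fromByteArray:

import com.google.common.primitives.Longs;

import org.apache.hadoop.io.VLongWritable;

public class BytesFactoryDemo {
    public static void main(String[] args) {
        VLongWritable w = new VLongWritable();
        BytesFactory.updateObject(w, Longs.toByteArray(42L)); // 8-byte big-endian seed
        System.out.println(w.get()); // 42
    }
}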


Note: The org.apache.hadoop.io.VLongWritable class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors; copyright belongs to the original authors, and any use or redistribution should follow each project's License. Do not repost without permission.