Java RandomDatum Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.RandomDatum, drawn from open-source projects. If you are wondering what RandomDatum is for, or how to use it in practice, the curated examples below should help.


RandomDatum belongs to the org.apache.hadoop.io package. Five code examples of the class are shown below, ordered by popularity.
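Before the individual examples, here is a minimal, self-contained sketch of the workflow they all share: generate key/value pairs with RandomDatum.Generator, serialize them into a DataOutputBuffer, and read them back. (The seed and record count are arbitrary; RandomDatum is a test utility shipped in Hadoop's test artifacts, not the main jar.)

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.RandomDatum;

public class RandomDatumRoundTrip {
  public static void main(String[] args) throws IOException {
    final int count = 100;                       // arbitrary record count
    final DataOutputBuffer out = new DataOutputBuffer();
    final RandomDatum.Generator generator = new RandomDatum.Generator(42);
    for (int i = 0; i < count; i++) {
      generator.next();                          // advance to the next random pair
      generator.getKey().write(out);             // serialize key
      generator.getValue().write(out);           // serialize value
    }

    // Read the same records back; DataInputBuffer implements DataInput.
    final DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    for (int i = 0; i < count; i++) {
      final RandomDatum key = new RandomDatum();
      final RandomDatum value = new RandomDatum();
      key.readFields(in);
      value.readFields(in);
    }
    System.out.println("Round-tripped " + count + " records");
  }
}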

Example 1: setUp

import org.apache.hadoop.io.RandomDatum; // import the package/class this example depends on
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();
    
    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines of code: 19, Source file: CryptoStreamsTestBase.java
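Note that count, data, dataLen, and LOG in the setUp() above are fields of the enclosing CryptoStreamsTestBase class, not locals. Their exact declarations are not shown on this page; the following is only a plausible sketch, with assumed types and initializers:

// Hypothetical declarations of the base-class fields used by setUp();
// the names match the snippet, but types and values are assumptions.
protected static final org.slf4j.Logger LOG =
    org.slf4j.LoggerFactory.getLogger(CryptoStreamsTestBase.class);
protected static int count = 10000;  // number of key/value records to generate
protected byte[] data;               // the serialized records
protected int dataLen;               // valid length of data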

Example 2: testSequenceFileSync

import org.apache.hadoop.io.RandomDatum; // import the package/class this example depends on
/** Test hsync via SequenceFiles */
@Test
public void testSequenceFileSync() throws Exception {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();

  final FileSystem fs = cluster.getFileSystem();
  final Path p = new Path("/testSequenceFileSync/foo");
  final int len = 1 << 16;
  FSDataOutputStream out = fs.create(p, FsPermission.getDefault(),
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK),
      4096, (short) 1, len, null);
  Writer w = SequenceFile.createWriter(new Configuration(),
      Writer.stream(out),
      Writer.keyClass(RandomDatum.class),
      Writer.valueClass(RandomDatum.class),
      Writer.compression(CompressionType.NONE, new DefaultCodec()));
  w.hflush();                    // hflush pushes data to the datanodes but does not fsync
  checkSyncMetric(cluster, 0);
  w.hsync();                     // hsync forces an fsync on the datanode
  checkSyncMetric(cluster, 1);
  int seed = new Random().nextInt();
  RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  generator.next();
  w.append(generator.getKey(), generator.getValue());
  w.hsync();
  checkSyncMetric(cluster, 2);
  w.close();                     // closing the writer adds no extra fsync
  checkSyncMetric(cluster, 2);
  out.close();                   // CreateFlag.SYNC_BLOCK triggers a final fsync on close
  checkSyncMetric(cluster, 3);
  cluster.shutdown();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 34, Source file: TestHSync.java
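To confirm what the test wrote, the file can be read back with SequenceFile.Reader. A short sketch, assuming the same fs, p, and conf as in the test and a still-running cluster (the path is qualified so the reader resolves the MiniDFSCluster filesystem); this readback step is not part of the original test:

SequenceFile.Reader reader = new SequenceFile.Reader(conf,
    SequenceFile.Reader.file(fs.makeQualified(p)));
try {
  RandomDatum key = new RandomDatum();
  RandomDatum value = new RandomDatum();
  while (reader.next(key, value)) {
    // each call deserializes one key/value pair, in write order
  }
} finally {
  reader.close();
}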

Example 3: testSequenceFileSync

import org.apache.hadoop.io.RandomDatum; // import the package/class this example depends on
/**
 * Test hsync via SequenceFiles
 */
@Test
public void testSequenceFileSync() throws Exception {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();

  final FileSystem fs = cluster.getFileSystem();
  final Path p = new Path("/testSequenceFileSync/foo");
  final int len = 1 << 16;
  FSDataOutputStream out = fs.create(p, FsPermission.getDefault(),
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK),
      4096, (short) 1, len, null);
  Writer w = SequenceFile.createWriter(new Configuration(),
      Writer.stream(out),
      Writer.keyClass(RandomDatum.class),
      Writer.valueClass(RandomDatum.class),
      Writer.compression(CompressionType.NONE, new DefaultCodec()));
  w.hflush();
  checkSyncMetric(cluster, 0);
  w.hsync();
  checkSyncMetric(cluster, 1);
  int seed = new Random().nextInt();
  RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  generator.next();
  w.append(generator.getKey(), generator.getValue());
  w.hsync();
  checkSyncMetric(cluster, 2);
  w.close();
  checkSyncMetric(cluster, 2);
  out.close();
  checkSyncMetric(cluster, 3);
  cluster.shutdown();
}
 
Developer ID: hopshadoop, Project: hops, Lines of code: 36, Source file: TestHSync.java

Example 4: codecTest

import org.apache.hadoop.io.RandomDatum; // import the package/class this example depends on
private static void codecTest(Configuration conf, int seed, int count, 
                              String codecClass) 
  throws IOException {
  
  // Create the codec
  CompressionCodec codec = null;
  try {
    codec = (CompressionCodec)
      ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // chain the cause instead of swallowing it
    throw new IOException("Illegal codec " + codecClass, cnfe);
  }
  LOG.info("Created a Codec object of type: " + codecClass);

  // Generate data
  DataOutputBuffer data = new DataOutputBuffer();
  RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i=0; i < count; ++i) {
    generator.next();
    RandomDatum key = generator.getKey();
    RandomDatum value = generator.getValue();
    
    key.write(data);
    value.write(data);
  }
  DataInputBuffer originalData = new DataInputBuffer();
  DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
  originalData.reset(data.getData(), 0, data.getLength());
  
  LOG.info("Generated " + count + " records");
  
  // Compress data
  DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
  CompressionOutputStream deflateFilter = 
    codec.createOutputStream(compressedDataBuffer);
  DataOutputStream deflateOut = 
    new DataOutputStream(new BufferedOutputStream(deflateFilter));
  deflateOut.write(data.getData(), 0, data.getLength());
  deflateOut.flush();
  deflateFilter.finish();
  LOG.info("Finished compressing data");
  
  // De-compress data
  DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
  deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, 
                               compressedDataBuffer.getLength());
  CompressionInputStream inflateFilter = 
    codec.createInputStream(deCompressedDataBuffer);
  DataInputStream inflateIn = 
    new DataInputStream(new BufferedInputStream(inflateFilter));

  // Check
  for(int i=0; i < count; ++i) {
    RandomDatum k1 = new RandomDatum();
    RandomDatum v1 = new RandomDatum();
    k1.readFields(originalIn);
    v1.readFields(originalIn);
    
    RandomDatum k2 = new RandomDatum();
    RandomDatum v2 = new RandomDatum();
    k2.readFields(inflateIn);
    v2.readFields(inflateIn);
    // Verify the round trip; the original snippet deserialized both streams
    // but never compared them, so the "checking" below checked nothing.
    assertTrue("original and compressed-then-decompressed-output not equal",
               k1.equals(k2) && v1.equals(v2));
  }
  LOG.info("SUCCESS! Completed checking " + count + " records");
}
 
Developer ID: rhli, Project: hadoop-EAR, Lines of code: 66, Source file: TestCodec.java
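For context, this helper is typically driven once per codec class name, mirroring how Hadoop's TestCodec exercises it. A sketch of such a driver (the seed and count here are arbitrary):

Configuration conf = new Configuration();
int seed = new Random().nextInt();
int count = 10000;  // arbitrary number of records
// DefaultCodec (zlib) and GzipCodec ship with Hadoop, so these names always resolve.
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");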

Example 5: codecTest

import org.apache.hadoop.io.RandomDatum; // import the package/class this example depends on
private static void codecTest(Configuration conf, int seed, int count, 
                              String codecClass) 
  throws IOException {
  
  // Create the codec
  CompressionCodec codec = null;
  try {
    codec = (CompressionCodec)
      ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // chain the cause instead of swallowing it
    throw new IOException("Illegal codec " + codecClass, cnfe);
  }
  LOG.info("Created a Codec object of type: " + codecClass);

  // Generate data
  DataOutputBuffer data = new DataOutputBuffer();
  RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for(int i=0; i < count; ++i) {
    generator.next();
    RandomDatum key = generator.getKey();
    RandomDatum value = generator.getValue();
    
    key.write(data);
    value.write(data);
  }
  DataInputBuffer originalData = new DataInputBuffer();
  DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
  originalData.reset(data.getData(), 0, data.getLength());
  
  LOG.info("Generated " + count + " records");
  
  // Compress data
  DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
  CompressionOutputStream deflateFilter = 
    codec.createOutputStream(compressedDataBuffer);
  DataOutputStream deflateOut = 
    new DataOutputStream(new BufferedOutputStream(deflateFilter));
  deflateOut.write(data.getData(), 0, data.getLength());
  deflateOut.flush();
  deflateFilter.finish();
  LOG.info("Finished compressing data");
  
  // De-compress data
  DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
  deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, 
                               compressedDataBuffer.getLength());
  CompressionInputStream inflateFilter = 
    codec.createInputStream(deCompressedDataBuffer);
  DataInputStream inflateIn = 
    new DataInputStream(new BufferedInputStream(inflateFilter));

  // Check
  for(int i=0; i < count; ++i) {
    RandomDatum k1 = new RandomDatum();
    RandomDatum v1 = new RandomDatum();
    k1.readFields(originalIn);
    v1.readFields(originalIn);
    
    RandomDatum k2 = new RandomDatum();
    RandomDatum v2 = new RandomDatum();
    k2.readFields(inflateIn);
    v2.readFields(inflateIn);
    assertTrue("original and compressed-then-decompressed-output not equal",
               k1.equals(k2) && v1.equals(v2));
  }
  LOG.info("SUCCESS! Completed checking " + count + " records");
}
 
开发者ID:Seagate,项目名称:hadoop-on-lustre,代码行数:68,代码来源:TestCodec.java


Note: The org.apache.hadoop.io.RandomDatum class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any use or redistribution must follow the corresponding project's license. Do not repost without permission.