

Java AbstractHFileWriter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter. If you are wondering what AbstractHFileWriter is for and how to use it, the curated class examples below should help.


The AbstractHFileWriter class belongs to the org.apache.hadoop.hbase.io.hfile package. Six code examples of the class are shown below, ordered by popularity.
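All six examples funnel through the same static helper, AbstractHFileWriter.compressionByName(String), which resolves a codec name to a Compression.Algorithm. Here is a minimal sketch of that lookup, assuming an HBase 0.98/1.x classpath (where the method still lives on AbstractHFileWriter); the class name and codec string are illustrative:

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;

public class CompressionByNameSketch {
  public static void main(String[] args) {
    // Valid names include "none", "gz", "lzo", "snappy", and "lz4";
    // an unknown name raises an IllegalArgumentException.
    String codec = args.length > 0 ? args[0] : "gz";
    Compression.Algorithm algorithm = AbstractHFileWriter.compressionByName(codec);
    System.out.println(codec + " -> " + algorithm);
  }
}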

Example 1: setUp

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
@Override
void setUp() throws Exception {

  HFileContextBuilder builder = new HFileContextBuilder()
      .withCompression(AbstractHFileWriter.compressionByName(codec))
      .withBlockSize(RFILE_BLOCKSIZE);
  
  if ("aes".equals(cipher)) {
    byte[] cipherKey = new byte[AES.KEY_LENGTH];
    new SecureRandom().nextBytes(cipherKey);
    builder.withEncryptionContext(Encryption.newContext(conf)
        .setCipher(Encryption.getCipher(conf, cipher))
        .setKey(cipherKey));
  } else if (!"none".equals(cipher)) {
    throw new IOException("Cipher " + cipher + " not supported.");
  }
  
  HFileContext hFileContext = builder.build();

  writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, mf)
      .withFileContext(hFileContext)
      .withComparator(new KeyValue.RawBytesComparator())
      .create();
}
 
Author: fengchen8086, Project: ditb, Lines: 26, Source: HFilePerformanceEvaluation.java
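The setUp above leans on fields declared elsewhere in HFilePerformanceEvaluation (conf, fs, mf, codec, cipher, writer, RFILE_BLOCKSIZE). A hedged sketch of how that fixture might look; the values and the output path are illustrative assumptions, not the benchmark's actual defaults:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class SetUpFixtureSketch {
  static final int RFILE_BLOCKSIZE = 64 * 1024;  // assumed HFile block size
  Configuration conf = HBaseConfiguration.create();
  FileSystem fs;                                 // e.g. FileSystem.get(conf)
  Path mf = new Path("/tmp/perf.hfile");         // assumed output file
  String codec = "gz";                           // compression codec under test
  String cipher = "none";                        // set to "aes" to exercise encryption
  HFile.Writer writer;                           // created by setUp()
}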

Example 2: doSmokeTest

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
                         .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
  writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
  writer.close();

  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
  reader.loadFileInfo();
  byte[] key = reader.getFirstKey();
  boolean rc = Bytes.toString(key).equals("testkey");
  reader.close();

  if (!rc) {
    throw new Exception("Read back incorrect result: " +
                        Bytes.toStringBinary(key));
  }
}
 
Author: tenggyut, Project: HIndex, Lines: 25, Source: CompressionTest.java
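In upstream HBase this method lives in org.apache.hadoop.hbase.util.CompressionTest, so it can be driven as shown below; the path and codec are illustrative assumptions:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.CompressionTest;

public class SmokeTestDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    // Writes a tiny HFile with the given codec and reads it back;
    // any failure surfaces as an exception.
    CompressionTest.doSmokeTest(fs, new Path("/tmp/codec-smoke.hfile"), "snappy");
  }
}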

Example 3: createFamilyCompressionMap

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize column family to compression algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      COMPRESSION_FAMILIES_CONF_KEY);
  Map<byte[], Algorithm> compressionMap =
      new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}
 
Author: fengchen8086, Project: ditb, Lines: 22, Source: HFileOutputFormat2.java
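createFamilyCompressionMap only reads the mapping; something must have written it into the job configuration first. A hypothetical sketch of that inverse step follows, using the URL-encoded family=value&family=value layout that HFileOutputFormat2 uses upstream. The key name matches HFileOutputFormat2's constant, but treat the helper itself as an assumption, not the project's actual code:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import org.apache.hadoop.conf.Configuration;

public class CompressionConfSketch {
  // Key name as defined in upstream HFileOutputFormat2.
  static final String COMPRESSION_FAMILIES_CONF_KEY =
      "hbase.hfileoutputformat.families.compression";

  // Hypothetical helper: append one family -> codec-name pair to the
  // serialized mapping that createFamilyCompressionMap later decodes.
  static void setFamilyCompression(Configuration conf, String family, String codec)
      throws UnsupportedEncodingException {
    String entry = URLEncoder.encode(family, "UTF-8") + "="
        + URLEncoder.encode(codec, "UTF-8");
    String existing = conf.get(COMPRESSION_FAMILIES_CONF_KEY);
    conf.set(COMPRESSION_FAMILIES_CONF_KEY,
        existing == null ? entry : existing + "&" + entry);
  }
}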

Example 4: doSmokeTest

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
                         .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  // Write any-old Cell...
  final byte [] rowKey = Bytes.toBytes("compressiontestkey");
  Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
  writer.append(c);
  writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
  writer.close();
  Cell cc = null;
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
  try {
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, true);
    scanner.seekTo(); // position to the start of file
    // Scanner does not do Cells yet. Do below for now till fixed.
    cc = scanner.getKeyValue();
    if (CellComparator.compareRows(c, cc) != 0) {
      throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
    }
  } finally {
    reader.close();
  }
}
 
Author: fengchen8086, Project: ditb, Lines: 31, Source: CompressionTest.java

Example 5: createFamilyCompressionMap

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize column family to compression
 * algorithm map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Compression.Algorithm> createFamilyCompressionMap(Configuration conf) {
    Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
            COMPRESSION_FAMILIES_CONF_KEY);
    Map<byte[], Compression.Algorithm> compressionMap =
            new TreeMap<byte[], Compression.Algorithm>(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
        Compression.Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
        compressionMap.put(e.getKey(), algorithm);
    }
    return compressionMap;
}
 
Author: htools, Project: htools, Lines: 19, Source: BulkOutputFormat.java

Example 6: createFamilyCompressionMap

import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize column family to compression algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
    Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
            COMPRESSION_FAMILIES_CONF_KEY);
    Map<byte[], Algorithm> compressionMap =
            new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
        Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
        compressionMap.put(e.getKey(), algorithm);
    }
    return compressionMap;
}
 
Author: apache, Project: kylin, Lines: 21, Source: HFileOutputFormat3.java


Note: The org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their original authors; copyright remains with those authors, and distribution and use are subject to each project's license. Do not reproduce without permission.