

Java BloomType.valueOf Method Code Examples

This article collects and summarizes typical usage examples of the Java method org.apache.hadoop.hbase.regionserver.BloomType.valueOf. If you are unsure what BloomType.valueOf does, how to call it, or where to find sample code for it, the curated method examples below should help. You can also read further about other usages of the enclosing class, org.apache.hadoop.hbase.regionserver.BloomType.


The sections below show 12 code examples of the BloomType.valueOf method, ordered by popularity by default.
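Before diving into the project examples, here is a minimal, self-contained sketch (written for this article, not taken from any of the projects below; the class name BloomTypeValueOfDemo is illustrative only) of what BloomType.valueOf does: BloomType is a plain Java enum, so valueOf maps an exact constant name to the corresponding enum value, is case-sensitive, and throws IllegalArgumentException for unknown names.

import org.apache.hadoop.hbase.regionserver.BloomType;

public class BloomTypeValueOfDemo {
  public static void main(String[] args) {
    // Exact constant names resolve directly.
    BloomType row = BloomType.valueOf("ROW");
    System.out.println(row);                          // ROW

    // valueOf is case-sensitive, which is why callers such as
    // HColumnDescriptor.getBloomFilterType() (Example 9 below) upper-case first.
    BloomType rowCol = BloomType.valueOf("rowcol".toUpperCase());
    System.out.println(rowCol);                       // ROWCOL

    // An unknown name fails fast rather than returning null.
    try {
      BloomType.valueOf("NOT_A_BLOOM_TYPE");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}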

Example 1: colDescFromThrift

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * This utility method creates a new HBase HColumnDescriptor object based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in
 *          Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument
 */
static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
    Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  BloomType bt =
    BloomType.valueOf(in.bloomFilterType);

  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte [] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
      .setBloomFilterType(bt);
  return col;
}
 
Author: fengchen8086, Project: ditb, Lines: 30, Source: ThriftUtilities.java

Example 2: parseColumnFamilyOptions

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? BloomType.ROW :
      BloomType.valueOf(bloomStr);

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }

}
 
Author: fengchen8086, Project: ditb, Lines: 20, Source: LoadTestTool.java

Example 3: colDescFromThrift

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * This utility method creates a new HBase HColumnDescriptor object based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in
 *          Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument
 */
static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
    Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  BloomType bt =
    BloomType.valueOf(in.bloomFilterType);

  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte [] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(in.timeToLive)
      .setBloomFilterType(bt);
  return col;
}
 
Author: tenggyut, Project: HIndex, Lines: 30, Source: ThriftUtilities.java

Example 4: parseColumnFamilyOptions

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? null :
      BloomType.valueOf(bloomStr);

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }
}
 
Author: tenggyut, Project: HIndex, Lines: 19, Source: LoadTestTool.java

Example 5: colDescFromThrift

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * This utility method creates a new HBase HColumnDescriptor object based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
    Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt =
    BloomType.valueOf(in.bloomFilterType);

  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte [] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
      .setBloomFilterType(bt);
  return col;
}
 
Author: apache, Project: hbase, Lines: 29, Source: ThriftUtilities.java

Example 6: parseColumnFamilyOptions

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? BloomType.ROW :
      BloomType.valueOf(bloomStr);

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }

}
 
Author: apache, Project: hbase, Lines: 20, Source: LoadTestTool.java

Example 7: parseColumnFamilyOptions

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? null :
      BloomType.valueOf(bloomStr);

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  
}
 
Author: cloud-software-foundation, Project: c5, Lines: 17, Source: LoadTestTool.java

Example 8: createFamilyBloomTypeMap

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * Runs inside the task to deserialize column family to bloom filter type
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured bloom filter type
 */
@VisibleForTesting
static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      BLOOM_TYPE_FAMILIES_CONF_KEY);
  Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[],
      BloomType>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    BloomType bloomType = BloomType.valueOf(e.getValue());
    bloomTypeMap.put(e.getKey(), bloomType);
  }
  return bloomTypeMap;
}
 
Author: fengchen8086, Project: ditb, Lines: 20, Source: HFileOutputFormat2.java

Example 9: getBloomFilterType

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * @return bloom filter type used for new StoreFiles in ColumnFamily
 */
public BloomType getBloomFilterType() {
  String n = getValue(BLOOMFILTER);
  if (n == null) {
    n = DEFAULT_BLOOMFILTER;
  }
  return BloomType.valueOf(n.toUpperCase());
}
 
Author: fengchen8086, Project: ditb, Lines: 11, Source: HColumnDescriptor.java
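As a short usage sketch for Example 9 (written for this article and assuming the HBase 1.x HColumnDescriptor API shown above; the class name BloomFilterTypeRoundTrip is illustrative only): setting a bloom filter type on a column family stores it as a string, and getBloomFilterType() upper-cases that string before handing it to BloomType.valueOf.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.regionserver.BloomType;

public class BloomFilterTypeRoundTrip {
  public static void main(String[] args) {
    // The bloom filter type is stored internally as the string "ROWCOL".
    HColumnDescriptor cf = new HColumnDescriptor("d")
        .setBloomFilterType(BloomType.ROWCOL);

    // getBloomFilterType() reads the stored string, upper-cases it,
    // and resolves it with BloomType.valueOf.
    System.out.println(cf.getBloomFilterType());      // ROWCOL
  }
}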

Example 10: createFamilyBloomTypeMap

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * Runs inside the task to deserialize column family to bloom filter type
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured bloom filter type
 */
@VisibleForTesting
static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
    Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
            BLOOM_TYPE_FAMILIES_CONF_KEY);
    Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[], BloomType>(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
        BloomType bloomType = BloomType.valueOf(e.getValue());
        bloomTypeMap.put(e.getKey(), bloomType);
    }
    return bloomTypeMap;
}
 
Author: htools, Project: htools, Lines: 19, Source: BulkOutputFormat.java

Example 11: createFamilyBloomTypeMap

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * Runs inside the task to deserialize column family to bloom filter type
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured bloom filter type
 */
@VisibleForTesting
static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
    Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
            BLOOM_TYPE_FAMILIES_CONF_KEY);
    Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[],
            BloomType>(Bytes.BYTES_COMPARATOR);
    for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
        BloomType bloomType = BloomType.valueOf(e.getValue());
        bloomTypeMap.put(e.getKey(), bloomType);
    }
    return bloomTypeMap;
}
 
Author: apache, Project: kylin, Lines: 20, Source: HFileOutputFormat3.java

Example 12: createFamilyBloomTypeMap

import org.apache.hadoop.hbase.regionserver.BloomType; // import the package/class this method depends on
/**
 * Runs inside the task to deserialize column family to bloom filter type
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured bloom filter type
 */
@VisibleForTesting
static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      BLOOM_TYPE_FAMILIES_CONF_KEY);
  Map<byte[], BloomType> bloomTypeMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    BloomType bloomType = BloomType.valueOf(e.getValue());
    bloomTypeMap.put(e.getKey(), bloomType);
  }
  return bloomTypeMap;
}
 
Author: apache, Project: hbase, Lines: 19, Source: HFileOutputFormat2.java


Note: The org.apache.hadoop.hbase.regionserver.BloomType.valueOf examples above were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. For distribution and use, please refer to the license of the corresponding project; do not reproduce without permission.