This article collects typical usage examples of the Java class org.apache.hadoop.hbase.util.BloomFilterWriter. If you are unsure what BloomFilterWriter is for or how to use it, the curated class code examples below may help.
The BloomFilterWriter class belongs to the org.apache.hadoop.hbase.util package. Nine code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: addBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

private void addBloomFilter(final BloomFilterWriter bfw,
    final BlockType blockType) {
  if (bfw.getKeyCount() <= 0)
    return;

  if (blockType != BlockType.GENERAL_BLOOM_META &&
      blockType != BlockType.DELETE_FAMILY_BLOOM_META) {
    throw new RuntimeException("Block Type: " + blockType.toString() +
        " is not supported");
  }

  additionalLoadOnOpenData.add(new BlockWritable() {
    @Override
    public BlockType getBlockType() {
      return blockType;
    }

    @Override
    public void writeToBlock(DataOutput out) throws IOException {
      bfw.getMetaWriter().write(out);
      Writable dataWriter = bfw.getDataWriter();
      if (dataWriter != null)
        dataWriter.write(out);
    }
  });
}
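The anonymous BlockWritable above only defers the serialization until the load-on-open section is written out: the Bloom filter's meta Writable always goes first, followed by the data Writable when one is present (the code guards for null). Below is a minimal standalone sketch of the same serialization, using only the accessors shown above; the helper name serializeBloom is hypothetical.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.util.BloomFilterWriter;
import org.apache.hadoop.io.Writable;

// Hypothetical helper: serialize a BloomFilterWriter the same way writeToBlock does.
private static byte[] serializeBloom(BloomFilterWriter bfw) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  bfw.getMetaWriter().write(out);           // meta part is always present
  Writable dataWriter = bfw.getDataWriter();
  if (dataWriter != null) {                 // may be null when there is no separate data part
    dataWriter.write(out);
  }
  out.flush();
  return baos.toByteArray();
}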
Example 2: addBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

@Override
public void addBloomFilter(final BloomFilterWriter bfw) {
  if (bfw.getKeyCount() <= 0)
    return;

  additionalLoadOnOpenData.add(new BlockWritable() {
    @Override
    public BlockType getBlockType() {
      return BlockType.BLOOM_META;
    }

    @Override
    public void writeToBlock(DataOutput out) throws IOException {
      bfw.getMetaWriter().write(out);
      Writable dataWriter = bfw.getDataWriter();
      if (dataWriter != null)
        dataWriter.write(out);
    }
  });
}
Example 3: closeBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

private boolean closeBloomFilter(BloomFilterWriter bfw) throws IOException {
  // Only keep the Bloom filter if it actually received keys; compact it to its final size.
  boolean haveBloom = (bfw != null && bfw.getKeyCount() > 0);
  if (haveBloom) {
    bfw.compactBloom();
  }
  return haveBloom;
}
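A plausible call site, modeled on the HFile writer API implemented in examples 4 and 7; the names generalBloomFilterWriter and writer are assumptions:

// At store-file close time: compact the Bloom filter, then register it with the
// HFile writer only if it actually contains keys.
if (closeBloomFilter(generalBloomFilterWriter)) {
  writer.addGeneralBloomFilter(generalBloomFilterWriter);
}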
Example 4: addGeneralBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

/**
 * Version 1 general Bloom filters are stored in two meta blocks with two
 * different keys.
 */
@Override
public void addGeneralBloomFilter(BloomFilterWriter bfw) {
  appendMetaBlock(BLOOM_FILTER_META_KEY, bfw.getMetaWriter());
  Writable dataWriter = bfw.getDataWriter();
  if (dataWriter != null) {
    appendMetaBlock(BLOOM_FILTER_DATA_KEY, dataWriter);
  }
}
Example 5: closeBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

private boolean closeBloomFilter(BloomFilterWriter bfw) throws IOException {
  boolean haveBloom = (bfw != null && bfw.getKeyCount() > 0);
  if (haveBloom) {
    bfw.compactBloom();
  }
  return haveBloom;
}
Example 6: addBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

/**
 * Version 1 Bloom filters are stored in two meta blocks with two different
 * keys.
 */
@Override
public void addBloomFilter(BloomFilterWriter bfw) {
  appendMetaBlock(BLOOM_FILTER_META_KEY, bfw.getMetaWriter());
  Writable dataWriter = bfw.getDataWriter();
  if (dataWriter != null) {
    appendMetaBlock(BLOOM_FILTER_DATA_KEY, dataWriter);
  }
}
Example 7: addGeneralBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

@Override
public void addGeneralBloomFilter(final BloomFilterWriter bfw) {
  this.addBloomFilter(bfw, BlockType.GENERAL_BLOOM_META);
}
Example 8: addDeleteFamilyBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

@Override
public void addDeleteFamilyBloomFilter(final BloomFilterWriter bfw) {
  this.addBloomFilter(bfw, BlockType.DELETE_FAMILY_BLOOM_META);
}
Example 9: addDeleteFamilyBloomFilter
import org.apache.hadoop.hbase.util.BloomFilterWriter; // import the required package/class

@Override
public void addDeleteFamilyBloomFilter(BloomFilterWriter bfw)
    throws IOException {
  // Delete-family Bloom filters require HFile format version 2 or later.
  throw new IOException("Delete Bloom filter is not supported in HFile V1");
}