This article collects typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.getTagsLength: what the method does, how to call it, and how it appears in real code. For further context, see the enclosing class org.apache.hadoop.hbase.KeyValue.
Four code examples of KeyValue.getTagsLength follow, ordered by popularity.
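Before the examples, a minimal sketch of the method in isolation may help. It assumes a 1.x-era HBase API, in which Tag is a concrete class and KeyValue has a tag-accepting constructor; the class name and row/value literals here are illustrative, not from the examples below:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class GetTagsLengthDemo {
  public static void main(String[] args) {
    // Attach a single tag (one type byte plus a payload) to a cell.
    Tag tag = new Tag((byte) 1, Bytes.toBytes("acl-entry"));
    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), System.currentTimeMillis(),
        Bytes.toBytes("value"), new Tag[] { tag });
    // getTagsLength() reports the serialized length of the tags section
    // in bytes; it is 0 for a cell that carries no tags.
    System.out.println("tags length = " + kv.getTagsLength());
  }
}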
Example 1: testCompressUncompressTags1
import org.apache.hadoop.hbase.KeyValue; // import the class this method depends on
@Test
public void testCompressUncompressTags1() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  KeyValue kv1 = createKVWithTags(2);
  int tagsLength1 = kv1.getTagsLength();
  ByteBuffer ib = ByteBuffer.wrap(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1);
  context.compressTags(baos, ib, tagsLength1);
  KeyValue kv2 = createKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  ib = ByteBuffer.wrap(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2);
  context.compressTags(baos, ib, tagsLength2);
  context.clear();
  // Uncompress into fresh buffers and verify that both tag sections round-trip.
  byte[] dest = new byte[tagsLength1];
  ByteBuffer ob = ByteBuffer.wrap(baos.toByteArray());
  context.uncompressTags(ob, dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1, dest, 0,
      tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(ob, dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2, dest, 0,
      tagsLength2));
}
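Both this test and the next rely on a createKVWithTags helper that the excerpt does not show. A plausible reconstruction, assuming the same 1.x-era Tag API (this is a sketch, not the actual HBase test code; it additionally needs java.util.ArrayList, java.util.List, and org.apache.hadoop.hbase.Tag imports):

// Hypothetical reconstruction of the helper used above: builds a KeyValue
// carrying `noOfTags` tags so that getTagsLength() returns a non-zero value.
private KeyValue createKVWithTags(int noOfTags) {
  List<Tag> tags = new ArrayList<Tag>();
  for (int i = 0; i < noOfTags; i++) {
    tags.add(new Tag((byte) i, "tagValue" + i));
  }
  return new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
      Bytes.toBytes("q"), 1234L, Bytes.toBytes("value"), tags);
}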
Example 2: testCompressUncompressTags2
import org.apache.hadoop.hbase.KeyValue; // import the class this method depends on
@Test
public void testCompressUncompressTags2() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  KeyValue kv1 = createKVWithTags(1);
  int tagsLength1 = kv1.getTagsLength();
  context.compressTags(baos, kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1);
  KeyValue kv2 = createKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  context.compressTags(baos, kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2);
  context.clear();
  // Read the compressed bytes back through an InputStream and verify the round-trip.
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  byte[] dest = new byte[tagsLength1];
  context.uncompressTags(bais, dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1, dest, 0,
      tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(bais, dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2, dest, 0,
      tagsLength2));
}
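Taken together, Examples 1 and 2 cover both overload pairs of TagCompressionContext: the first compresses from and uncompresses into a ByteBuffer, while the second compresses from a byte[] plus offset and uncompresses from an InputStream. In both cases getTagsLength supplies the byte count that bounds the tags region.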
Example 3: writeCell
import org.apache.hadoop.hbase.KeyValue; // import the class this method depends on
static void writeCell(PositionedByteRange pbr, KeyValue kv) throws Exception {
  pbr.putInt(kv.getKeyLength());
  pbr.putInt(kv.getValueLength());
  pbr.put(kv.getBuffer(), kv.getKeyOffset(), kv.getKeyLength());
  pbr.put(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
  int tagsLen = kv.getTagsLength();
  // Write the tags length as two big-endian bytes, then the tags themselves.
  pbr.put((byte) (tagsLen >> 8 & 0xff));
  pbr.put((byte) (tagsLen & 0xff));
  pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
  pbr.putVLong(kv.getMvccVersion());
}
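writeCell lays the cell out as: 4-byte key length, 4-byte value length, key bytes, value bytes, a 2-byte big-endian tags length, the tags, and finally the MVCC version as a varint. For context, a sketch of the matching read side, assuming exactly that layout (illustrative, not part of the original test):

// Hypothetical counterpart to writeCell: reads the fields back in the
// same order they were written.
static void readCell(PositionedByteRange pbr) {
  int keyLen = pbr.getInt();
  int valueLen = pbr.getInt();
  byte[] key = new byte[keyLen];
  pbr.get(key);
  byte[] value = new byte[valueLen];
  pbr.get(value);
  // The tags length was written as two big-endian bytes.
  int tagsLen = ((pbr.get() & 0xff) << 8) | (pbr.get() & 0xff);
  byte[] tags = new byte[tagsLen];
  pbr.get(tags);
  long mvcc = pbr.getVLong();
  // A real implementation would reassemble a Cell from these pieces.
}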
Example 4: checkStatistics
import org.apache.hadoop.hbase.KeyValue; // import the class this method depends on
/**
 * Check statistics for the given HFile across the different data block encoders.
 * @param scanner scanner over the file whose blocks will be compressed
 * @param kvLimit maximum number of KeyValues to process
 * @throws IOException if the scanner is invalid
 */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  scanner.seek(KeyValue.LOWESTKEY);
  KeyValue currentKV;
  byte[] previousKey = null;
  byte[] currentKey;
  DataBlockEncoding[] encodings = DataBlockEncoding.values();
  ByteArrayOutputStream uncompressedOutputStream = new ByteArrayOutputStream();
  int j = 0;
  while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterates through key/value pairs
    j++;
    currentKey = currentKV.getKey();
    if (previousKey != null) {
      for (int i = 0; i < previousKey.length && i < currentKey.length
          && previousKey[i] == currentKey[i]; ++i) {
        totalKeyRedundancyLength++;
      }
    }
    uncompressedOutputStream.write(currentKV.getBuffer(),
        currentKV.getOffset(), currentKV.getLength());
    previousKey = currentKey;
    int kLen = currentKV.getKeyLength();
    int vLen = currentKV.getValueLength();
    int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset());
    int restLen = currentKV.getLength() - kLen - vLen;
    totalKeyLength += kLen;
    totalValueLength += vLen;
    totalPrefixLength += restLen;
    totalCFLength += cfLen;
  }
  rawKVs = uncompressedOutputStream.toByteArray();
  // Guard against an exhausted scanner: currentKV is null when the loop
  // ended because scanner.next() returned no more cells.
  boolean useTag = (currentKV != null && currentKV.getTagsLength() > 0);
  for (DataBlockEncoding encoding : encodings) {
    if (encoding == DataBlockEncoding.NONE) {
      continue;
    }
    DataBlockEncoder d = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withCompression(Compression.Algorithm.NONE)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTag).build();
    codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta));
  }
}
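Note that checkStatistics depends on fields of its enclosing class that this excerpt omits: the totalKeyLength/totalValueLength/totalPrefixLength/totalCFLength counters, totalKeyRedundancyLength, rawKVs, includesMemstoreTS, and the codecs list that collects one EncodedDataBlock per encoding. Here getTagsLength serves a configuration role: a non-zero tags length on the last cell read switches tag support on in the HFileContext handed to each encoder.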