This article collects and summarizes typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.getBuffer. If you have been wondering what KeyValue.getBuffer does and how it is used in practice, the curated code samples below may help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.hbase.KeyValue.
Below are 5 code examples of KeyValue.getBuffer, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
Example 1: writeKV
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Compresses and writes the given KeyValue to out, a DataOutput.
 *
 * @param out the DataOutput to write to
 * @param keyVal the KeyValue to compress and write
 * @param writeContext the CompressionContext whose dictionaries to use
 * @throws IOException if writing to out fails
 */
public static void writeKV(final DataOutput out, KeyValue keyVal,
    CompressionContext writeContext) throws IOException {
  byte[] backingArray = keyVal.getBuffer();
  int offset = keyVal.getOffset();

  // We first write the KeyValue infrastructure (the component lengths) as VInts.
  WritableUtils.writeVInt(out, keyVal.getKeyLength());
  WritableUtils.writeVInt(out, keyVal.getValueLength());
  WritableUtils.writeVInt(out, keyVal.getTagsLength());

  // Now write the row key, as the row key is likely to be repeated.
  // We save space only if we attempt to compress elements with duplicates.
  Compressor.writeCompressed(keyVal.getBuffer(), keyVal.getRowOffset(),
      keyVal.getRowLength(), out, writeContext.rowDict);

  // Now the family, if it exists; if it doesn't, a 0-length array is written.
  Compressor.writeCompressed(keyVal.getBuffer(), keyVal.getFamilyOffset(),
      keyVal.getFamilyLength(), out, writeContext.familyDict);

  // Qualifier next.
  Compressor.writeCompressed(keyVal.getBuffer(), keyVal.getQualifierOffset(),
      keyVal.getQualifierLength(), out, writeContext.qualifierDict);

  // Write the rest (timestamp, type, value, tags) uncompressed.
  int pos = keyVal.getTimestampOffset();
  int remainingLength = keyVal.getLength() + offset - pos;
  out.write(backingArray, pos, remainingLength);
}
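A minimal calling sketch for the method above, assuming writeKV is the static helper from HBase's KeyValueCompression WAL class, and assuming the 0.98/1.x-era CompressionContext constructor and LRUDictionary location (these signatures and package paths vary across HBase versions, so treat everything here as a hedged illustration):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
import org.apache.hadoop.hbase.regionserver.wal.KeyValueCompression;
import org.apache.hadoop.hbase.regionserver.wal.LRUDictionary;
import org.apache.hadoop.hbase.util.Bytes;

public class WriteKVSketch {
  public static void main(String[] args) throws Exception {
    // Build a small KeyValue: row "r1", family "cf", qualifier "q", value "v".
    KeyValue kv = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));

    // CompressionContext keeps one dictionary per KV component; the
    // three-argument constructor shape is an assumption (HBase 1.x branch).
    CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    KeyValueCompression.writeKV(out, kv, ctx); // the method from Example 1
    out.flush();
    System.out.println("compressed KV size: " + bytes.size() + " bytes");
  }
}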
Example 2: verifyCodecs
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Verify that all data block encoders are working properly.
 *
 * @param scanner scanner over the file that was compressed
 * @param kvLimit maximum number of KeyValues to process
 * @return true if all data block encoders compressed/decompressed correctly
 * @throws IOException if the scanner is invalid
 */
public boolean verifyCodecs(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  KeyValue currentKv;

  scanner.seek(KeyValue.LOWESTKEY);
  List<Iterator<Cell>> codecIterators = new ArrayList<Iterator<Cell>>();
  for (EncodedDataBlock codec : codecs) {
    codecIterators.add(codec.getIterator(HFileBlock.headerSize(useHBaseChecksum)));
  }

  int j = 0;
  while ((currentKv = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterate through key/value pairs.
    ++j;
    for (Iterator<Cell> it : codecIterators) {
      Cell c = it.next();
      KeyValue codecKv = KeyValueUtil.ensureKeyValue(c);
      if (codecKv == null || 0 != Bytes.compareTo(
          codecKv.getBuffer(), codecKv.getOffset(), codecKv.getLength(),
          currentKv.getBuffer(), currentKv.getOffset(), currentKv.getLength())) {
        if (codecKv == null) {
          LOG.error("There is a bug in codec " + it + ": it returned a null KeyValue");
        } else {
          int prefix = 0;
          int limitLength = 2 * Bytes.SIZEOF_INT
              + Math.min(codecKv.getLength(), currentKv.getLength());
          while (prefix < limitLength
              && codecKv.getBuffer()[prefix + codecKv.getOffset()] ==
                 currentKv.getBuffer()[prefix + currentKv.getOffset()]) {
            prefix++;
          }
          LOG.error("There is a bug in codec " + it.toString() +
              "\n on element " + j +
              "\n codecKv.getKeyLength() " + codecKv.getKeyLength() +
              "\n codecKv.getValueLength() " + codecKv.getValueLength() +
              "\n codecKv.getLength() " + codecKv.getLength() +
              "\n currentKv.getKeyLength() " + currentKv.getKeyLength() +
              "\n currentKv.getValueLength() " + currentKv.getValueLength() +
              "\n currentKv.getLength() " + currentKv.getLength() +
              "\n currentKv rowLength " + currentKv.getRowLength() +
              " familyName " + currentKv.getFamilyLength() +
              " qualifier " + currentKv.getQualifierLength() +
              "\n prefix " + prefix +
              "\n codecKv '" + Bytes.toStringBinary(codecKv.getBuffer(),
                  codecKv.getOffset(), prefix) + "' diff '" +
              Bytes.toStringBinary(codecKv.getBuffer(),
                  codecKv.getOffset() + prefix, codecKv.getLength() - prefix) + "'" +
              "\n currentKv '" + Bytes.toStringBinary(currentKv.getBuffer(),
                  currentKv.getOffset(), prefix) + "' diff '" +
              Bytes.toStringBinary(currentKv.getBuffer(),
                  currentKv.getOffset() + prefix, currentKv.getLength() - prefix) + "'");
        }
        return false;
      }
    }
  }

  LOG.info("Verification was successful!");
  return true;
}
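The byte-by-byte prefix scan used for the diagnostic above also appears in Examples 3 and 5 below. Factored out, it is a small standalone helper; this is a sketch for illustration, not an HBase API:

/**
 * Length of the common byte prefix of two (buffer, offset, length) slices.
 * A standalone version of the diagnostic loop used in Examples 2, 3 and 5.
 */
static int commonPrefix(byte[] a, int aOff, int aLen,
    byte[] b, int bOff, int bLen) {
  int limit = Math.min(aLen, bLen);
  int p = 0;
  while (p < limit && a[aOff + p] == b[bOff + p]) {
    p++;
  }
  return p;
}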
Example 3: testFirstKeyInBlockOnSample
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Test whether decompression of the first key is implemented correctly.
 * @throws IOException on encoding failure
 */
@Test
public void testFirstKeyInBlockOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding));
    ByteBuffer keyBuffer = encoder.getFirstKeyInBlock(encodedBuffer);
    KeyValue firstKv = sampleKv.get(0);
    if (0 != Bytes.compareTo(keyBuffer.array(), keyBuffer.arrayOffset(), keyBuffer.limit(),
        firstKv.getBuffer(), firstKv.getKeyOffset(), firstKv.getKeyLength())) {
      int commonPrefix = 0;
      int length = Math.min(keyBuffer.limit(), firstKv.getKeyLength());
      while (commonPrefix < length
          && keyBuffer.array()[keyBuffer.arrayOffset() + commonPrefix]
              == firstKv.getBuffer()[firstKv.getKeyOffset() + commonPrefix]) {
        commonPrefix++;
      }
      fail(String.format("Bug in '%s' commonPrefix %d", encoder.toString(), commonPrefix));
    }
  }
}
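Note that getFirstKeyInBlock returns only the key part of the first cell, which is why the test compares against the firstKv.getKeyOffset()/getKeyLength() slice rather than the whole KeyValue. Extracting that key slice into its own array is a one-liner; a sketch reusing the firstKv variable from the test above:

// Key portion only: row, family, qualifier, timestamp and type byte.
byte[] firstKey = java.util.Arrays.copyOfRange(firstKv.getBuffer(),
    firstKv.getKeyOffset(), firstKv.getKeyOffset() + firstKv.getKeyLength());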
Example 4: IndexKeyValue
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
public IndexKeyValue(KeyValue kv) {
  this(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength(),
      kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength(),
      kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
}
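All three (buffer, offset, length) triples passed to the delegate constructor point into the same backing array: getBuffer() returns one contiguous byte[] holding every component of the KeyValue. The sketch below (class and variable names are illustrative) makes those views explicit by copying each component out:

import java.util.Arrays;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class KvSliceSketch {
  public static void main(String[] args) {
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("col"), Bytes.toBytes("val"));
    byte[] buf = kv.getBuffer(); // single backing array for all components

    byte[] row = Arrays.copyOfRange(buf, kv.getRowOffset(),
        kv.getRowOffset() + kv.getRowLength());
    byte[] qualifier = Arrays.copyOfRange(buf, kv.getQualifierOffset(),
        kv.getQualifierOffset() + kv.getQualifierLength());
    byte[] value = Arrays.copyOfRange(buf, kv.getValueOffset(),
        kv.getValueOffset() + kv.getValueLength());

    System.out.println(Bytes.toString(row) + "/" + Bytes.toString(qualifier)
        + " = " + Bytes.toString(value));
  }
}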
Example 5: testNextOnSample
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Test
public void testNextOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding));
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(includesTags)
        .withCompression(Compression.Algorithm.NONE)
        .build();
    DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
        encoder.newDataBlockDecodingContext(meta));
    seeker.setCurrentBuffer(encodedBuffer);

    int i = 0;
    do {
      KeyValue expectedKeyValue = sampleKv.get(i);
      KeyValue keyValue = KeyValueUtil.copyToNewKeyValue(seeker.getKeyValue());
      if (0 != Bytes.compareTo(keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength(),
          expectedKeyValue.getBuffer(), expectedKeyValue.getOffset(),
          expectedKeyValue.getLength())) {
        int commonPrefix = 0;
        byte[] left = keyValue.getBuffer();
        byte[] right = expectedKeyValue.getBuffer();
        int leftOff = keyValue.getOffset();
        int rightOff = expectedKeyValue.getOffset();
        int length = Math.min(keyValue.getLength(), expectedKeyValue.getLength());
        while (commonPrefix < length
            && left[commonPrefix + leftOff] == right[commonPrefix + rightOff]) {
          commonPrefix++;
        }
        fail(String.format("next() produces wrong results "
            + "encoder: %s i: %d commonPrefix: %d\n expected %s\n actual %s",
            encoder.toString(), i, commonPrefix,
            Bytes.toStringBinary(expectedKeyValue.getBuffer(),
                expectedKeyValue.getOffset(), expectedKeyValue.getLength()),
            Bytes.toStringBinary(keyValue.getBuffer())));
      }
      i++;
    } while (seeker.next());
  }
}
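One detail worth noting: the test first materializes the seeker's current cell with KeyValueUtil.copyToNewKeyValue, which copies it into a KeyValue backed by its own contiguous byte[]; only after that copy is the getBuffer()/getOffset()/getLength() triple meaningful on it. As a general rule with this API, always pass the offset and length alongside the buffer, since a KeyValue need not start at index 0 of its backing array. A minimal sketch, with kvA and kvB standing in for any two KeyValues:

// Compare two KeyValues' full serialized form; always pair getBuffer()
// with getOffset()/getLength() -- never assume the KV starts at index 0.
boolean sameBytes = 0 == Bytes.compareTo(
    kvA.getBuffer(), kvA.getOffset(), kvA.getLength(),
    kvB.getBuffer(), kvB.getOffset(), kvB.getLength());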