This article collects typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.getKey. If you are wondering what KeyValue.getKey does, how to call it, or what it looks like in real code, the hand-picked examples below may help. You can also browse further usage examples of its enclosing class, org.apache.hadoop.hbase.KeyValue.
Three code examples of the KeyValue.getKey method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
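Before the examples, here is a minimal, self-contained sketch (assuming the same HBase 0.98/1.x-era API that the examples below use; class and variable names are illustrative) of what KeyValue.getKey returns: a copy of the serialized key portion of a cell, that is row, family, qualifier, timestamp and type, without the value bytes.

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class GetKeyBasics {
  public static void main(String[] args) {
    // Build a full cell, then extract only its serialized key.
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), HConstants.LATEST_TIMESTAMP,
        KeyValue.Type.Put, Bytes.toBytes("value"));
    byte[] key = kv.getKey();
    System.out.println("key length:  " + key.length);     // same as kv.getKeyLength()
    System.out.println("cell length: " + kv.getLength()); // key + value + length fields
  }
}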
Example 1: createBloomKey
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Prepare an ordered pair of row and qualifier to be compared using
 * KeyValue.KeyComparator. This is only used for row-column Bloom
 * filters.
 */
@Override
public byte[] createBloomKey(byte[] row, int roffset, int rlength,
    byte[] qualifier, int qoffset, int qlength) {
  if (qualifier == null)
    qualifier = DUMMY;
  // Make sure this does not specify a timestamp so that the default maximum
  // (most recent) timestamp is used.
  KeyValue kv = KeyValueUtil.createFirstOnRow(row, roffset, rlength, DUMMY, 0, 0,
      qualifier, qoffset, qlength);
  return kv.getKey();
}
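For context, here is a small standalone sketch of how such a row-column Bloom key can be produced. It is illustrative only (the class and variable names are made up), but the KeyValueUtil.createFirstOnRow call mirrors the one above: the family is passed with length 0 and no timestamp is set, so the resulting key depends only on the row and the qualifier.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class RowColBloomKeySketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row-0001");
    byte[] qualifier = Bytes.toBytes("qual");
    byte[] emptyFamily = new byte[0];
    // First-on-row key: empty family, no explicit timestamp,
    // mirroring the createBloomKey call above.
    KeyValue kv = KeyValueUtil.createFirstOnRow(row, 0, row.length,
        emptyFamily, 0, 0, qualifier, 0, qualifier.length);
    byte[] bloomKey = kv.getKey();
    System.out.println(Bytes.toStringBinary(bloomKey));
  }
}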
Example 2: checkStatistics
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Check statistics for the given HFile for different data block encoders.
 * @param scanner Scanner over the file which will be compressed.
 * @param kvLimit Maximum number of KeyValues which will be processed.
 * @throws IOException thrown if scanner is invalid
 */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  scanner.seek(KeyValue.LOWESTKEY);

  KeyValue currentKV;
  byte[] previousKey = null;
  byte[] currentKey;

  DataBlockEncoding[] encodings = DataBlockEncoding.values();

  ByteArrayOutputStream uncompressedOutputStream =
      new ByteArrayOutputStream();

  int j = 0;
  while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterates through key/value pairs
    j++;
    currentKey = currentKV.getKey();
    if (previousKey != null) {
      for (int i = 0; i < previousKey.length && i < currentKey.length &&
          previousKey[i] == currentKey[i]; ++i) {
        totalKeyRedundancyLength++;
      }
    }

    uncompressedOutputStream.write(currentKV.getBuffer(),
        currentKV.getOffset(), currentKV.getLength());

    previousKey = currentKey;

    int kLen = currentKV.getKeyLength();
    int vLen = currentKV.getValueLength();
    int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset());
    int restLen = currentKV.getLength() - kLen - vLen;

    totalKeyLength += kLen;
    totalValueLength += vLen;
    totalPrefixLength += restLen;
    totalCFLength += cfLen;
  }

  rawKVs = uncompressedOutputStream.toByteArray();
  boolean useTag = (currentKV.getTagsLength() > 0);
  for (DataBlockEncoding encoding : encodings) {
    if (encoding == DataBlockEncoding.NONE) {
      continue;
    }
    DataBlockEncoder d = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withCompression(Compression.Algorithm.NONE)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTag).build();
    codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta));
  }
}
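The loop above uses getKey() to count how many leading bytes consecutive keys share, which is the redundancy a prefix-based data block encoding can exploit. Below is a minimal standalone sketch of just that measurement (hypothetical row values and class name, same API assumptions as above).

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyRedundancySketch {
  public static void main(String[] args) {
    KeyValue a = new KeyValue(Bytes.toBytes("row-001"), Bytes.toBytes("f"),
        Bytes.toBytes("q1"), 1L, KeyValue.Type.Put);
    KeyValue b = new KeyValue(Bytes.toBytes("row-002"), Bytes.toBytes("f"),
        Bytes.toBytes("q1"), 1L, KeyValue.Type.Put);
    byte[] previousKey = a.getKey();
    byte[] currentKey = b.getKey();
    // Count the shared key prefix, exactly as the inner for-loop above does.
    int common = 0;
    while (common < previousKey.length && common < currentKey.length
        && previousKey[common] == currentKey[common]) {
      common++;
    }
    System.out.println("shared key prefix bytes: " + common);
  }
}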
Example 3: getSomeKey
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
private byte[] getSomeKey(int rowId) {
  KeyValue kv = new KeyValue(String.format(localFormatter, Integer.valueOf(rowId)).getBytes(),
      Bytes.toBytes("family"), Bytes.toBytes("qual"), HConstants.LATEST_TIMESTAMP, Type.Put);
  return kv.getKey();
}
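For reference, a runnable variant of Example 3 is sketched below. The localFormatter field is not shown in the excerpt, so "%010d" is used here purely as a stand-in format string; everything else follows the snippet.

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class SomeKeySketch {
  private static byte[] getSomeKey(int rowId) {
    // LATEST_TIMESTAMP matches the original snippet: the key carries the
    // most recent possible timestamp rather than a concrete one.
    KeyValue kv = new KeyValue(Bytes.toBytes(String.format("%010d", rowId)),
        Bytes.toBytes("family"), Bytes.toBytes("qual"),
        HConstants.LATEST_TIMESTAMP, KeyValue.Type.Put);
    return kv.getKey();
  }

  public static void main(String[] args) {
    System.out.println(Bytes.toStringBinary(getSomeKey(42)));
  }
}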