This article collects typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.getKey: what KeyValue.getKey does, how it is used, and concrete code samples. The curated examples below may help, and you can also explore further usage examples of the containing class, org.apache.hadoop.hbase.KeyValue.
Three code examples of the KeyValue.getKey method are shown below.
Example 1: createBloomKey
import org.apache.hadoop.hbase.KeyValue; // import the package/class the method depends on
/**
 * Prepare an ordered pair of row and qualifier to be compared using
 * KeyValue.KeyComparator. This is only used for row-column Bloom
 * filters.
 */
@Override
public byte[] createBloomKey(byte[] row, int roffset, int rlength,
    byte[] qualifier, int qoffset, int qlength) {
  if (qualifier == null) {
    qualifier = DUMMY;
  }
  // Make sure this does not specify a timestamp so that the default maximum
  // (most recent) timestamp is used.
  KeyValue kv = KeyValueUtil.createFirstOnRow(row, roffset, rlength, DUMMY, 0, 0,
      qualifier, qoffset, qlength);
  return kv.getKey();
}
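For context, here is a minimal usage sketch (not part of the original source; the row and qualifier values are invented, and an HBase 1.x classpath is assumed) showing how such a Bloom key can be built and inspected:

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class BloomKeyDemo {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row-1");
    byte[] qual = Bytes.toBytes("q1");
    // Empty column family (the role DUMMY plays above); no timestamp is set,
    // so the key carries HConstants.LATEST_TIMESTAMP and sorts first on its row.
    KeyValue kv = KeyValueUtil.createFirstOnRow(row, 0, row.length,
        HConstants.EMPTY_BYTE_ARRAY, 0, 0, qual, 0, qual.length);
    byte[] bloomKey = kv.getKey(); // serialized key: row + family + qualifier + timestamp + type
    System.out.println(Bytes.toStringBinary(bloomKey));
  }
}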
Example 2: checkStatistics
import org.apache.hadoop.hbase.KeyValue; // import the package/class the method depends on
/**
 * Check statistics for the given HFile for different data block encoders.
 * @param scanner Of file which will be compressed.
 * @param kvLimit Maximum number of KeyValues which will be processed.
 * @throws IOException thrown if scanner is invalid
 */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  scanner.seek(KeyValue.LOWESTKEY);

  KeyValue currentKV;
  byte[] previousKey = null;
  byte[] currentKey;

  DataBlockEncoding[] encodings = DataBlockEncoding.values();
  ByteArrayOutputStream uncompressedOutputStream = new ByteArrayOutputStream();

  int j = 0;
  while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterate through key/value pairs.
    j++;
    currentKey = currentKV.getKey();
    if (previousKey != null) {
      // Count the leading bytes this key shares with the previous one.
      for (int i = 0; i < previousKey.length && i < currentKey.length
          && previousKey[i] == currentKey[i]; ++i) {
        totalKeyRedundancyLength++;
      }
    }

    uncompressedOutputStream.write(currentKV.getBuffer(),
        currentKV.getOffset(), currentKV.getLength());

    previousKey = currentKey;

    int kLen = currentKV.getKeyLength();
    int vLen = currentKV.getValueLength();
    int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset());
    int restLen = currentKV.getLength() - kLen - vLen;

    totalKeyLength += kLen;
    totalValueLength += vLen;
    totalPrefixLength += restLen;
    totalCFLength += cfLen;
  }

  rawKVs = uncompressedOutputStream.toByteArray();
  // Guard against a null currentKV: the loop leaves it null when the scanner
  // is exhausted before kvLimit is reached.
  boolean useTag = (currentKV != null && currentKV.getTagsLength() > 0);
  for (DataBlockEncoding encoding : encodings) {
    if (encoding == DataBlockEncoding.NONE) {
      continue;
    }
    DataBlockEncoder d = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withCompression(Compression.Algorithm.NONE)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(useTag).build();
    codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta));
  }
}
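The inner for loop above is what drives totalKeyRedundancyLength: it counts how many leading bytes consecutive serialized keys share, which is exactly the redundancy that prefix-based block encoders (such as PREFIX or DIFF) can eliminate. As a standalone sketch of that measurement (a hypothetical helper, not part of the tool itself):

// Counts the leading bytes two serialized keys have in common.
static int commonPrefixLength(byte[] previousKey, byte[] currentKey) {
  int limit = Math.min(previousKey.length, currentKey.length);
  int i = 0;
  while (i < limit && previousKey[i] == currentKey[i]) {
    i++;
  }
  return i;
}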
Example 3: getSomeKey
import org.apache.hadoop.hbase.KeyValue; // import the package/class the method depends on
private byte[] getSomeKey(int rowId) {
  KeyValue kv = new KeyValue(String.format(localFormatter, Integer.valueOf(rowId)).getBytes(),
      Bytes.toBytes("family"), Bytes.toBytes("qual"), HConstants.LATEST_TIMESTAMP, Type.Put);
  return kv.getKey();
}
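As a quick illustration (assuming localFormatter is a zero-padding pattern such as "%010d", as in the HFile tests this helper comes from), getKey() returns only the serialized key portion of the cell, that is row, family, qualifier, timestamp and type, without the value:

KeyValue kv = new KeyValue(Bytes.toBytes(String.format("%010d", 7)),
    Bytes.toBytes("family"), Bytes.toBytes("qual"),
    HConstants.LATEST_TIMESTAMP, KeyValue.Type.Put, Bytes.toBytes("value"));
byte[] key = kv.getKey();          // key bytes only
int keyLen = kv.getKeyLength();    // equals key.length
int valLen = kv.getValueLength();  // the value is not included in getKey()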