This article collects typical usage examples of the Java method org.apache.hadoop.io.WritableUtils.decodeVIntSize. If you are wondering what WritableUtils.decodeVIntSize does and how to use it in practice, the hand-picked code samples below may help. You can also read further usage examples of the class the method belongs to, org.apache.hadoop.io.WritableUtils.
Below are 7 code examples of the WritableUtils.decodeVIntSize method, sorted by popularity by default. Upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
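Before the project code, here is a minimal, self-contained sketch (my own, not taken from any of the projects below) showing what decodeVIntSize actually reports: given only the first byte of a value written with WritableUtils.writeVLong, it returns the total number of bytes (1 to 9) the encoded value occupies, and isNegativeVInt tells you whether that value was negative.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class DecodeVIntSizeDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    WritableUtils.writeVLong(out, 300L); // variable-length encoding: 300 does not fit in a single byte

    byte[] encoded = baos.toByteArray();
    // The first byte alone is enough to know how long the whole vint is (1..9 bytes).
    int size = WritableUtils.decodeVIntSize(encoded[0]);
    System.out.println("encoded bytes = " + encoded.length + ", decodeVIntSize = " + size);
    System.out.println("negative? " + WritableUtils.isNegativeVInt(encoded[0]));
  }
}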
Example 1: _readMvccVersion
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Actually do the mvcc read. Does no checks.
 * @param position position in the block buffer at which the mvcc vint starts
 */
private void _readMvccVersion(final int position) {
  // This is Bytes#bytesToVint inlined so we can save a few instructions in this hot method;
  // i.e. previously, for a one-byte vint, we would redo the vint call just to find its size.
  // The method is also kept small so it can be inlined.
  byte firstByte = blockBuffer.array()[position];
  int len = WritableUtils.decodeVIntSize(firstByte);
  if (len == 1) {
    this.currMemstoreTS = firstByte;
  } else {
    long i = 0;
    for (int idx = 0; idx < len - 1; idx++) {
      byte b = blockBuffer.array()[position + 1 + idx];
      i = i << 8;
      i = i | (b & 0xFF);
    }
    currMemstoreTS = (WritableUtils.isNegativeVInt(firstByte) ? ~i : i);
  }
  this.currMemstoreTSLen = len;
}
Example 2: bytesToVint
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * @param buffer buffer to convert
 * @return the vint at the start of the buffer, decoded as a long.
 */
public static long bytesToVint(final byte [] buffer) {
  int offset = 0;
  byte firstByte = buffer[offset++];
  int len = WritableUtils.decodeVIntSize(firstByte);
  if (len == 1) {
    return firstByte;
  }
  long i = 0;
  for (int idx = 0; idx < len - 1; idx++) {
    byte b = buffer[offset++];
    i = i << 8;
    i = i | (b & 0xFF);
  }
  return (WritableUtils.isNegativeVInt(firstByte) ? ~i : i);
}
Example 3: compare
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public int compare(byte[] b1, int s1, int l1,
                   byte[] b2, int s2, int l2) {
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.size() == 0) {
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }
  int[] lengthIndicesFirst =
      keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int[] lengthIndicesSecond =
      keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);
  for (KeyDescription keySpec : allKeySpecs) {
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    int result;
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2,
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
Example 4: compare
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
  // n1/n2 are the byte lengths of the leading vints; getVIntSize(n) is 1 for n in 1..9,
  // so the comparison below starts (vint size - 1) bytes into each buffer.
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  n1 -= WritableUtils.getVIntSize(n1);
  n2 -= WritableUtils.getVIntSize(n2);
  return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
}
Example 5: readVLong
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Similar to {@link WritableUtils#readVLong(DataInput)} but reads from a
 * {@link ByteBuffer}.
 */
public static long readVLong(ByteBuffer in) {
  byte firstByte = in.get();
  int len = WritableUtils.decodeVIntSize(firstByte);
  if (len == 1) {
    return firstByte;
  }
  long i = 0;
  for (int idx = 0; idx < len - 1; idx++) {
    byte b = in.get();
    i = i << 8;
    i = i | (b & 0xFF);
  }
  return (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
}
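A quick usage sketch (again my own, under the assumption that the readVLong(ByteBuffer) helper above is exposed as a static method on a hypothetical class named VLongUtils): encode a value with the standard WritableUtils.writeVLong, wrap the bytes in a ByteBuffer, and read it back.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.io.WritableUtils;

public class ReadVLongFromByteBufferDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    WritableUtils.writeVLong(out, -123456789L); // negative values are stored one's-complemented

    ByteBuffer buf = ByteBuffer.wrap(baos.toByteArray());
    long decoded = VLongUtils.readVLong(buf); // hypothetical home of the helper shown above
    System.out.println(decoded); // prints -123456789
  }
}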
Example 6: readAsVLong
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
/**
 * Reads a zero-compressed encoded long from input buffer and returns it.
 * @param buffer Binary array
 * @param offset Offset into array at which vint begins.
 * @return deserialized long from buffer.
 */
public static long readAsVLong(final byte [] buffer, final int offset) {
  byte firstByte = buffer[offset];
  int len = WritableUtils.decodeVIntSize(firstByte);
  if (len == 1) {
    return firstByte;
  }
  long i = 0;
  for (int idx = 0; idx < len - 1; idx++) {
    byte b = buffer[offset + 1 + idx];
    i = i << 8;
    i = i | (b & 0xFF);
  }
  return (WritableUtils.isNegativeVInt(firstByte) ? ~i : i);
}
Example 7: compare
import org.apache.hadoop.io.WritableUtils; // import the package/class the method depends on
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
  // Skip each record's vint length prefix, then negate the raw byte comparison
  // so that keys sort in descending order.
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  return -1 * WritableComparator.compareBytes(b1, s1 + n1, l1 - n1,
      b2, s2 + n2, l2 - n2);
}