This article collects typical usages of the Java method org.apache.hadoop.hbase.util.ByteBufferUtils.moveBufferToStream. If you are unsure what ByteBufferUtils.moveBufferToStream does or how to call it, the curated code examples below may help. You can also read further about the containing class, org.apache.hadoop.hbase.util.ByteBufferUtils.
The following shows 5 code examples of ByteBufferUtils.moveBufferToStream, ordered by popularity by default.
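Before the examples, a minimal standalone sketch may help make the method's contract concrete: moveBufferToStream(out, in, length) writes length bytes starting at the buffer's current position to the stream and advances the position past them. The class name and buffer contents below are illustrative assumptions, not taken from the examples.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.ByteBufferUtils;

public class MoveBufferToStreamSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative payload; any ByteBuffer positioned at the data to copy works.
    ByteBuffer in = ByteBuffer.wrap("key:value".getBytes("UTF-8"));

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);

    // Copy every remaining byte of the buffer to the stream;
    // the buffer's position ends up at its limit afterwards.
    ByteBufferUtils.moveBufferToStream(out, in, in.remaining());
    out.flush();

    System.out.println("copied " + bytes.size() + " bytes, position now " + in.position());
  }
}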
Example 1: compressKeyValues
import org.apache.hadoop.hbase.util.ByteBufferUtils; // import the dependent package/class for this method
@Override
public void compressKeyValues(DataOutputStream out,
    ByteBuffer in, boolean includesMemstoreTS) throws IOException {
  in.rewind();
  // No actual compression: write the buffer length, then copy the bytes verbatim.
  ByteBufferUtils.putInt(out, in.limit());
  ByteBufferUtils.moveBufferToStream(out, in, in.limit());
}
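The block written by this encoder is simply a 4-byte length (written by putInt, big-endian like DataOutputStream.writeInt) followed by the raw bytes copied by moveBufferToStream. A hedged sketch of a matching reader (the class and method names are assumptions for illustration, not the HBase decoder):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class CopyBlockReaderSketch {
  // Reads back a block laid out as: int length, then 'length' raw bytes.
  static ByteBuffer readCopyBlock(byte[] encoded) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(encoded));
    int length = in.readInt();   // the value written by ByteBufferUtils.putInt
    byte[] raw = new byte[length];
    in.readFully(raw);           // the bytes written by moveBufferToStream
    return ByteBuffer.wrap(raw);
  }
}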
Example 2: addKV
import org.apache.hadoop.hbase.util.ByteBufferUtils; // import the dependent package/class for this method
private int addKV(int prevKeyOffset, DataOutputStream out,
    ByteBuffer in, int prevKeyLength) throws IOException {
  int keyLength = in.getInt();
  int valueLength = in.getInt();
  if (prevKeyOffset == -1) {
    // first KeyValue: there is no previous key to share a prefix with, so copy it whole
    ByteBufferUtils.putCompressedInt(out, keyLength);
    ByteBufferUtils.putCompressedInt(out, valueLength);
    ByteBufferUtils.putCompressedInt(out, 0);
    ByteBufferUtils.moveBufferToStream(out, in, keyLength + valueLength);
  } else {
    // find the common prefix with the previous key and skip it
    int common = ByteBufferUtils.findCommonPrefix(
        in, prevKeyOffset + KeyValue.ROW_OFFSET,
        in.position(),
        Math.min(prevKeyLength, keyLength));
    ByteBufferUtils.putCompressedInt(out, keyLength - common);
    ByteBufferUtils.putCompressedInt(out, valueLength);
    ByteBufferUtils.putCompressedInt(out, common);
    ByteBufferUtils.skip(in, common);
    ByteBufferUtils.moveBufferToStream(out, in, keyLength - common
        + valueLength);
  }
  return keyLength;
}
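To make the prefix step above concrete, the following standalone sketch (the class name and the two sample keys are assumptions, not taken from the example) lays two keys out in one buffer the way the encoder sees them, asks findCommonPrefix how many leading bytes they share, and then writes only the suffix of the new key:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.ByteBufferUtils;

public class PrefixCopySketch {
  public static void main(String[] args) throws Exception {
    byte[] prevKey = "row-0001".getBytes("UTF-8");
    byte[] currKey = "row-0002".getBytes("UTF-8");

    // Lay both keys out in one buffer, as the encoder sees them.
    ByteBuffer in = ByteBuffer.allocate(prevKey.length + currKey.length);
    in.put(prevKey).put(currKey);
    in.position(prevKey.length); // current position = start of the new key

    int common = ByteBufferUtils.findCommonPrefix(
        in, 0,                // offset of the previous key
        in.position(),        // offset of the current key
        Math.min(prevKey.length, currKey.length));

    DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
    ByteBufferUtils.putCompressedInt(out, currKey.length - common); // suffix length
    ByteBufferUtils.putCompressedInt(out, common);                  // shared prefix length
    ByteBufferUtils.skip(in, common);                               // skip the shared bytes
    ByteBufferUtils.moveBufferToStream(out, in, currKey.length - common);

    System.out.println("common prefix bytes: " + common); // 7 for these sample keys
  }
}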
Example 3: internalEncodeKeyValues
import org.apache.hadoop.hbase.util.ByteBufferUtils; // import the dependent package/class for this method
@Override
public void internalEncodeKeyValues(DataOutputStream out,
    ByteBuffer in, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  in.rewind();
  ByteBufferUtils.putInt(out, in.limit());
  ByteBufferUtils.moveBufferToStream(out, in, in.limit());
}
Example 4: afterEncodingKeyValue
import org.apache.hadoop.hbase.util.ByteBufferUtils; // import the dependent package/class for this method
protected final void afterEncodingKeyValue(ByteBuffer in,
    DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLength = in.getShort();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    // There are some tags to be written
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = encodingCtx.getTagCompressionContext();
      // When tag compression is enabled, tagCompressionContext will be non-null. In that case
      // write the tags using Dictionary compression.
      if (tagCompressionContext != null) {
        tagCompressionContext.compressTags(out, in, tagsLength);
      } else {
        ByteBufferUtils.moveBufferToStream(out, in, tagsLength);
      }
    }
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // Copy memstore timestamp from the byte buffer to the output stream.
    long memstoreTS = -1;
    try {
      memstoreTS = ByteBufferUtils.readVLong(in);
      WritableUtils.writeVLong(out, memstoreTS);
    } catch (IOException ex) {
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after encoding a key/value");
    }
  }
}
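The memstore timestamp branch above relies on two compatible variable-length long codecs: ByteBufferUtils.readVLong pulls the value out of the source buffer, and WritableUtils.writeVLong writes it to the output stream in the same encoding. A minimal round-trip sketch (the class name and sample timestamp are assumptions for illustration):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.io.WritableUtils;

public class MemstoreTsVLongSketch {
  public static void main(String[] args) throws Exception {
    long memstoreTS = 123456789L; // illustrative value

    // Encode the timestamp the way the encoder writes it to the block.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    WritableUtils.writeVLong(out, memstoreTS);
    out.flush();

    // Reading it back from a ByteBuffer mirrors what afterEncodingKeyValue consumes.
    ByteBuffer in = ByteBuffer.wrap(bytes.toByteArray());
    long decoded = ByteBufferUtils.readVLong(in);
    System.out.println(decoded == memstoreTS); // true
  }
}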
Example 5: internalEncodeKeyValues
import org.apache.hadoop.hbase.util.ByteBufferUtils; // import the dependent package/class for this method
@Override
public void internalEncodeKeyValues(DataOutputStream out,
    ByteBuffer in, boolean includesMemstoreTS) throws IOException {
  in.rewind();
  ByteBufferUtils.putInt(out, in.limit());
  ByteBufferUtils.moveBufferToStream(out, in, in.limit());
}