当前位置: 首页>>代码示例>>Java>>正文


Java ByteBufferUtils.putCompressedInt方法代码示例

本文整理汇总了Java中org.apache.hadoop.hbase.util.ByteBufferUtils.putCompressedInt方法的典型用法代码示例。如果您正苦于以下问题:Java ByteBufferUtils.putCompressedInt方法的具体用法?Java ByteBufferUtils.putCompressedInt怎么用?Java ByteBufferUtils.putCompressedInt使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hbase.util.ByteBufferUtils的用法示例。


在下文中一共展示了ByteBufferUtils.putCompressedInt方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: afterEncodingKeyValue

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
/**
 * Appends the optional trailing fields (tags and MVCC/sequence id) of {@code cell}
 * to the output stream, honoring the flags in the encoding context.
 *
 * @param cell the cell whose trailing fields are written
 * @param out destination stream
 * @param encodingCtx context indicating whether tags and/or mvcc are included
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int unencodedSize = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final int tagsLen = cell.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLen);
    if (tagsLen > 0) {
      // A non-null TagCompressionContext means dictionary compression of tags is
      // enabled; otherwise the raw tag bytes are copied through.
      TagCompressionContext tagCtx = encodingCtx.getTagCompressionContext();
      if (tagCtx == null) {
        out.write(cell.getTagsArray(), cell.getTagsOffset(), tagsLen);
      } else {
        tagCtx.compressTags(out, cell.getTagsArray(), cell.getTagsOffset(), tagsLen);
      }
    }
    unencodedSize += tagsLen + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The memstore timestamp (sequence id) is appended as a variable-length long.
    final long seqId = cell.getSequenceId();
    WritableUtils.writeVLong(out, seqId);
    // TODO use a writeVLong which returns the #bytes written so that 2 time parsing can be
    // avoided.
    unencodedSize += WritableUtils.getVIntSize(seqId);
  }
  return unencodedSize;
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:38,代码来源:BufferedDataBlockEncoder.java

示例2: internalEncode

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
@Override
public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
    DataOutputStream out) throws IOException {
  final int keyLen = KeyValueUtil.keyLength(cell);
  final int valueLen = cell.getValueLength();
  EncodingState state = encodingContext.getEncodingState();
  if (state.prevCell != null) {
    // Emit only the suffix of the key that differs from the previous cell's key.
    final int commonLen = CellUtil.findCommonPrefixInFlatKey(cell, state.prevCell, true, true);
    ByteBufferUtils.putCompressedInt(out, keyLen - commonLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, commonLen);
    writeKeyExcludingCommon(cell, commonLen, out);
  } else {
    // First cell of the block: no previous key, so the shared-prefix length is 0
    // and the whole flat key is written.
    ByteBufferUtils.putCompressedInt(out, keyLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, 0);
    CellUtil.writeFlatKey(cell, out);
  }
  // The value bytes are always stored verbatim.
  out.write(cell.getValueArray(), cell.getValueOffset(), valueLen);
  int total = keyLen + valueLen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
  total += afterEncodingKeyValue(cell, out, encodingContext);
  state.prevCell = cell;
  return total;
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:28,代码来源:PrefixKeyDeltaEncoder.java

示例3: addKV

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
/**
 * Prefix-delta encodes a single KeyValue read from {@code in} onto {@code out}.
 * The key and value lengths are read as two ints from the buffer's current
 * position before the key/value bytes themselves.
 *
 * @param prevKeyOffset buffer offset of the previous key, or -1 for the first KV
 * @param out destination stream
 * @param in source buffer positioned at the start of this KV
 * @param prevKeyLength length of the previous key (ignored when prevKeyOffset == -1)
 * @return the length of this KV's key, for use as the next call's prevKeyLength
 * @throws IOException if writing to {@code out} fails
 */
private int addKV(int prevKeyOffset, DataOutputStream out,
    ByteBuffer in, int prevKeyLength) throws IOException {
  final int keyLength = in.getInt();
  final int valueLength = in.getInt();

  if (prevKeyOffset != -1) {
    // Diff against the previous key: compute the shared-prefix length, then
    // write only the differing key suffix plus the value.
    final int common = ByteBufferUtils.findCommonPrefix(
        in, prevKeyOffset + KeyValue.ROW_OFFSET,
        in.position(),
        Math.min(prevKeyLength, keyLength));

    ByteBufferUtils.putCompressedInt(out, keyLength - common);
    ByteBufferUtils.putCompressedInt(out, valueLength);
    ByteBufferUtils.putCompressedInt(out, common);

    ByteBufferUtils.skip(in, common);
    ByteBufferUtils.moveBufferToStream(out, in, keyLength - common
        + valueLength);
  } else {
    // First KV in the block: nothing to diff against, copy key+value verbatim
    // with a shared-prefix length of 0.
    ByteBufferUtils.putCompressedInt(out, keyLength);
    ByteBufferUtils.putCompressedInt(out, valueLength);
    ByteBufferUtils.putCompressedInt(out, 0);
    ByteBufferUtils.moveBufferToStream(out, in, keyLength + valueLength);
  }

  return keyLength;
}
 
开发者ID:fengchen8086,项目名称:LCIndex-HBase-0.94.16,代码行数:30,代码来源:PrefixKeyDeltaEncoder.java

示例4: afterEncodingKeyValue

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
/**
 * Copies the optional trailing fields (tags and MVCC timestamp) of the current
 * key/value from {@code in} to {@code out}, as dictated by the encoding context.
 *
 * @param in source buffer positioned at the tags-length field (if tags enabled)
 * @param out destination stream
 * @param encodingCtx context indicating whether tags and/or mvcc are included
 * @throws IOException if writing to {@code out} fails
 */
protected final void afterEncodingKeyValue(ByteBuffer in,
    DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLength = in.getShort();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    // There are some tags to be written
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = encodingCtx.getTagCompressionContext();
      // When tag compression is enabled, tagCompressionContext will have a not null value. Write
      // the tags using Dictionary compression in such a case
      if (tagCompressionContext != null) {
        tagCompressionContext.compressTags(out, in, tagsLength);
      } else {
        ByteBufferUtils.moveBufferToStream(out, in, tagsLength);
      }
    }
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // Copy memstore timestamp from the byte buffer to the output stream.
    long memstoreTS = -1;
    try {
      memstoreTS = ByteBufferUtils.readVLong(in);
      WritableUtils.writeVLong(out, memstoreTS);
    } catch (IOException ex) {
      // Fix: propagate the original IOException as the cause so the real
      // failure (and its stack trace) is not discarded.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after encoding a key/value", ex);
    }
  }
}
 
开发者ID:tenggyut,项目名称:HIndex,代码行数:30,代码来源:BufferedDataBlockEncoder.java

示例5: afterEncodingKeyValue

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
/**
 * Writes the trailing tag and MVCC/sequence-id fields of {@code cell} to the
 * stream when the encoding context says those fields are included.
 *
 * @param cell the cell whose trailing fields are written
 * @param out destination stream
 * @param encodingCtx context indicating whether tags and/or mvcc are included
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int unencodedSize = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final int tagsLen = cell.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLen);
    if (tagsLen > 0) {
      // A non-null TagCompressionContext means dictionary compression of tags
      // is enabled; otherwise the tag bytes are copied through unchanged.
      TagCompressionContext tagCtx = encodingCtx.getTagCompressionContext();
      if (tagCtx == null) {
        PrivateCellUtil.writeTags(out, cell, tagsLen);
      } else {
        // Not passing tagsLength considering that parsing of the tagsLength is not costly
        PrivateCellUtil.compressTags(out, cell, tagCtx);
      }
    }
    unencodedSize += tagsLen + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The memstore timestamp (sequence id) is appended as a variable-length long.
    final long seqId = cell.getSequenceId();
    WritableUtils.writeVLong(out, seqId);
    // TODO use a writeVLong which returns the #bytes written so that 2 time parsing can be
    // avoided.
    unencodedSize += WritableUtils.getVIntSize(seqId);
  }
  return unencodedSize;
}
 
开发者ID:apache,项目名称:hbase,代码行数:38,代码来源:BufferedDataBlockEncoder.java

示例6: internalEncode

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
@Override
public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
    DataOutputStream out) throws IOException {
  final int keyLen = KeyValueUtil.keyLength(cell);
  final int valueLen = cell.getValueLength();
  EncodingState state = encodingContext.getEncodingState();
  if (state.prevCell != null) {
    // Emit only the suffix of the key that differs from the previous cell's key.
    final int commonLen =
        PrivateCellUtil.findCommonPrefixInFlatKey(cell, state.prevCell, true, true);
    ByteBufferUtils.putCompressedInt(out, keyLen - commonLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, commonLen);
    writeKeyExcludingCommon(cell, commonLen, out);
  } else {
    // First cell of the block: no previous key, shared-prefix length is 0 and
    // the entire flat key is written.
    ByteBufferUtils.putCompressedInt(out, keyLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, 0);
    PrivateCellUtil.writeFlatKey(cell, (DataOutput) out);
  }
  // The value bytes are always stored verbatim.
  PrivateCellUtil.writeValue(out, cell, valueLen);
  int total = keyLen + valueLen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
  total += afterEncodingKeyValue(cell, out, encodingContext);
  state.prevCell = cell;
  return total;
}
 
开发者ID:apache,项目名称:hbase,代码行数:28,代码来源:PrefixKeyDeltaEncoder.java

示例7: afterEncodingKeyValue

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
/**
 * Appends the trailing tag and MVCC fields of {@code kv} to the stream when
 * the encoding context includes them.
 *
 * @param kv the key/value whose trailing fields are written
 * @param out destination stream
 * @param encodingCtx context indicating whether tags and/or mvcc are included
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(KeyValue kv, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int unencodedSize = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final short tagsLen = kv.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLen);
    if (tagsLen > 0) {
      // A non-null TagCompressionContext means dictionary compression of tags
      // is enabled; otherwise the raw tag bytes are copied through.
      TagCompressionContext tagCtx = encodingCtx.getTagCompressionContext();
      if (tagCtx == null) {
        out.write(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
      } else {
        tagCtx.compressTags(out, kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
      }
    }
    unencodedSize += tagsLen + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The memstore timestamp (mvcc version) is appended as a variable-length long.
    final long mvcc = kv.getMvccVersion();
    WritableUtils.writeVLong(out, mvcc);
    // TODO use a writeVLong which returns the #bytes written so that 2 time parsing can be
    // avoided.
    unencodedSize += WritableUtils.getVIntSize(mvcc);
  }
  return unencodedSize;
}
 
开发者ID:shenli-uiuc,项目名称:PyroDB,代码行数:38,代码来源:BufferedDataBlockEncoder.java

示例8: internalEncode

import org.apache.hadoop.hbase.util.ByteBufferUtils; //导入方法依赖的package包/类
@Override
public int internalEncode(KeyValue kv, HFileBlockDefaultEncodingContext encodingContext,
    DataOutputStream out) throws IOException {
  final byte[] buf = kv.getBuffer();
  final int keyLen = kv.getKeyLength();
  final int valueLen = kv.getValueLength();
  EncodingState state = encodingContext.getEncodingState();
  if (state.prevKv != null) {
    // Diff against the previous key and write only the differing key suffix
    // followed by the value.
    final int commonLen = ByteBufferUtils.findCommonPrefix(state.prevKv.getBuffer(),
        state.prevKv.getKeyOffset(), state.prevKv.getKeyLength(), buf, kv.getKeyOffset(),
        kv.getKeyLength());
    ByteBufferUtils.putCompressedInt(out, keyLen - commonLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, commonLen);
    out.write(buf, kv.getKeyOffset() + commonLen, keyLen - commonLen + valueLen);
  } else {
    // First KV of the block: no previous key, so shared-prefix length is 0 and
    // key+value are written verbatim.
    ByteBufferUtils.putCompressedInt(out, keyLen);
    ByteBufferUtils.putCompressedInt(out, valueLen);
    ByteBufferUtils.putCompressedInt(out, 0);
    out.write(buf, kv.getKeyOffset(), keyLen + valueLen);
  }
  int total = keyLen + valueLen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
  total += afterEncodingKeyValue(kv, out, encodingContext);
  state.prevKv = kv;
  return total;
}
 
开发者ID:shenli-uiuc,项目名称:PyroDB,代码行数:29,代码来源:PrefixKeyDeltaEncoder.java


注:本文中的org.apache.hadoop.hbase.util.ByteBufferUtils.putCompressedInt方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。