當前位置: 首頁>>代碼示例>>Java>>正文


Java KeyValue.getOffset方法代碼示例

本文整理匯總了Java中org.apache.hadoop.hbase.KeyValue.getOffset方法的典型用法代碼示例。如果您正苦於以下問題:Java KeyValue.getOffset方法的具體用法?Java KeyValue.getOffset怎麽用?Java KeyValue.getOffset使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.hbase.KeyValue的用法示例。


在下文中一共展示了KeyValue.getOffset方法的3個代碼示例，這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚，您的評價將有助於系統推薦出更棒的Java代碼示例。

示例1: writeKV

import org.apache.hadoop.hbase.KeyValue; //導入方法依賴的package包/類
/**
 * Compresses and writes the given KeyValue to {@code out}.
 * <p>
 * Output layout: key length, value length and tags length as VInts, then the
 * dictionary-compressed row, family and qualifier, then the remainder of the
 * KeyValue (from the timestamp offset onward) written uncompressed.
 *
 * @param out the DataOutput to write to
 * @param keyVal the KeyValue to compress and write
 * @param writeContext the CompressionContext holding the row/family/qualifier
 *          dictionaries to compress against
 * @throws IOException if writing to {@code out} fails
 */
public static void writeKV(final DataOutput out, KeyValue keyVal,
    CompressionContext writeContext) throws IOException {
  // Cache the backing array once; every slice below comes out of it.
  byte[] backingArray = keyVal.getBuffer();
  int offset = keyVal.getOffset();

  // We first write the KeyValue infrastructure as VInts.
  WritableUtils.writeVInt(out, keyVal.getKeyLength());
  WritableUtils.writeVInt(out, keyVal.getValueLength());
  WritableUtils.writeVInt(out, keyVal.getTagsLength());

  // Now we write the row key, as the row key is likely to be repeated.
  // We save space only if we attempt to compress elements with duplicates.
  Compressor.writeCompressed(backingArray, keyVal.getRowOffset(),
      keyVal.getRowLength(), out, writeContext.rowDict);

  // Now family, if it exists. If it doesn't, a 0-length array is written.
  Compressor.writeCompressed(backingArray, keyVal.getFamilyOffset(),
      keyVal.getFamilyLength(), out, writeContext.familyDict);

  // Qualifier next.
  Compressor.writeCompressed(backingArray, keyVal.getQualifierOffset(),
      keyVal.getQualifierLength(), out,
      writeContext.qualifierDict);

  // Now we write the rest (timestamp, type, value, tags) uncompressed.
  int pos = keyVal.getTimestampOffset();
  // Remaining bytes = end of KV (offset + total length) minus current position.
  int remainingLength = keyVal.getLength() + offset - pos;
  out.write(backingArray, pos, remainingLength);
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:39,代碼來源:KeyValueCompression.java

示例2: verifyCodecs

import org.apache.hadoop.hbase.KeyValue; //導入方法依賴的package包/類
/**
 * Verify if all data block encoders are working properly.
 * <p>
 * Walks the scanner and, for every KeyValue (up to {@code kvLimit}), checks
 * that each configured codec round-trips the exact same bytes. On the first
 * mismatch a detailed diff is logged and {@code false} is returned.
 *
 * @param scanner Of file which was compressed.
 * @param kvLimit Maximal count of KeyValue which will be processed.
 * @return true if all data block encoders compressed/decompressed correctly.
 * @throws IOException thrown if scanner is invalid
 */
public boolean verifyCodecs(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  KeyValue currentKv;

  scanner.seek(KeyValue.LOWESTKEY);
  // One decoding iterator per codec; all are advanced in lockstep with the scanner.
  List<Iterator<Cell>> codecIterators =
      new ArrayList<Iterator<Cell>>();
  for(EncodedDataBlock codec : codecs) {
    codecIterators.add(codec.getIterator(HFileBlock.headerSize(useHBaseChecksum)));
  }

  int j = 0;
  while ((currentKv = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterates through key/value pairs
    ++j;
    for (Iterator<Cell> it : codecIterators) {
      Cell c = it.next();
      KeyValue codecKv = KeyValueUtil.ensureKeyValue(c);
      if (codecKv == null || 0 != Bytes.compareTo(
          codecKv.getBuffer(), codecKv.getOffset(), codecKv.getLength(),
          currentKv.getBuffer(), currentKv.getOffset(),
          currentKv.getLength())) {
        if (codecKv == null) {
          LOG.error("There is a bug in codec " + it +
              " it returned null KeyValue,");
        } else {
          // Find the length of the common byte prefix so the diff below can
          // show exactly where the decoded KV diverges from the original.
          int prefix = 0;
          int limitLength = 2 * Bytes.SIZEOF_INT +
              Math.min(codecKv.getLength(), currentKv.getLength());
          while (prefix < limitLength &&
              codecKv.getBuffer()[prefix + codecKv.getOffset()] ==
              currentKv.getBuffer()[prefix + currentKv.getOffset()]) {
            prefix++;
          }

          LOG.error("There is bug in codec " + it.toString() +
              "\n on element " + j +
              "\n codecKv.getKeyLength() " + codecKv.getKeyLength() +
              "\n codecKv.getValueLength() " + codecKv.getValueLength() +
              "\n codecKv.getLength() " + codecKv.getLength() +
              "\n currentKv.getKeyLength() " + currentKv.getKeyLength() +
              "\n currentKv.getValueLength() " + currentKv.getValueLength() +
              // BUGFIX: this value is currentKv's length; it was mislabeled
              // "codecKv.getLength()" (duplicating the label three lines up).
              "\n currentKv.getLength() " + currentKv.getLength() +
              "\n currentKV rowLength " + currentKv.getRowLength() +
              " familyName " + currentKv.getFamilyLength() +
              " qualifier " + currentKv.getQualifierLength() +
              "\n prefix " + prefix +
              "\n codecKv   '" + Bytes.toStringBinary(codecKv.getBuffer(),
                  codecKv.getOffset(), prefix) + "' diff '" +
                  Bytes.toStringBinary(codecKv.getBuffer(),
                      codecKv.getOffset() + prefix, codecKv.getLength() -
                      prefix) + "'" +
              "\n currentKv '" + Bytes.toStringBinary(
                 currentKv.getBuffer(),
                 currentKv.getOffset(), prefix) + "' diff '" +
                 Bytes.toStringBinary(currentKv.getBuffer(),
                     currentKv.getOffset() + prefix, currentKv.getLength() -
                     prefix) + "'"
              );
        }
        return false;
      }
    }
  }

  LOG.info("Verification was successful!");

  return true;
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:78,代碼來源:DataBlockEncodingTool.java

示例3: testNextOnSample

import org.apache.hadoop.hbase.KeyValue; //導入方法依賴的package包/類
@Test
public void testNextOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  // Round-trip the sample KVs through every available encoder and check that
  // seeker.next() reproduces each KV byte-for-byte.
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      // Some enum entries (e.g. NONE) have no encoder implementation.
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding));
    HFileContext meta = new HFileContextBuilder()
                        .withHBaseCheckSum(false)
                        .withIncludesMvcc(includesMemstoreTS)
                        .withIncludesTags(includesTags)
                        .withCompression(Compression.Algorithm.NONE)
                        .build();
    DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
        encoder.newDataBlockDecodingContext(meta));
    seeker.setCurrentBuffer(encodedBuffer);
    int i = 0;
    do {
      KeyValue expectedKeyValue = sampleKv.get(i);
      KeyValue keyValue = KeyValueUtil.copyToNewKeyValue(seeker.getKeyValue());
      if (0 != Bytes.compareTo(keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength(),
          expectedKeyValue.getBuffer(), expectedKeyValue.getOffset(),
          expectedKeyValue.getLength())) {

        // Compute the common prefix length so the failure message pinpoints
        // the first diverging byte.
        int commonPrefix = 0;
        byte[] left = keyValue.getBuffer();
        byte[] right = expectedKeyValue.getBuffer();
        int leftOff = keyValue.getOffset();
        int rightOff = expectedKeyValue.getOffset();
        int length = Math.min(keyValue.getLength(), expectedKeyValue.getLength());
        while (commonPrefix < length
            && left[commonPrefix + leftOff] == right[commonPrefix + rightOff]) {
          commonPrefix++;
        }

        // BUGFIX: print the actual KV with its (offset, length) slice, like the
        // expected KV; previously the whole backing array was dumped, which is
        // misleading when the KV does not start at offset 0.
        fail(String.format("next() produces wrong results "
            + "encoder: %s i: %d commonPrefix: %d" + "\n expected %s\n actual      %s", encoder
            .toString(), i, commonPrefix, Bytes.toStringBinary(expectedKeyValue.getBuffer(),
            expectedKeyValue.getOffset(), expectedKeyValue.getLength()), Bytes
            .toStringBinary(keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength())));
      }
      i++;
    } while (seeker.next());
  }
}
 
開發者ID:fengchen8086,項目名稱:ditb,代碼行數:50,代碼來源:TestDataBlockEncoders.java


注:本文中的org.apache.hadoop.hbase.KeyValue.getOffset方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。