当前位置: 首页>>代码示例>>Java>>正文


Java Codec.getDecoder方法代码示例

本文整理汇总了Java中org.apache.hadoop.hbase.codec.Codec.getDecoder方法的典型用法代码示例。如果您正苦于以下问题:Java Codec.getDecoder方法的具体用法?Java Codec.getDecoder怎么用?Java Codec.getDecoder使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hbase.codec.Codec的用法示例。


在下文中一共展示了Codec.getDecoder方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: doCodec

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
/**
 * Round-trips the supplied cells through the given codec, exercising encode and
 * decode separately so each phase can be measured on its own.
 *
 * @param codec codec under test
 * @param cells cells to serialize and read back
 * @param cycles number of times to repeat each phase
 * @param count expected number of cells per decode pass
 * @param initialBufferSize starting capacity for the encode buffer
 * @throws IOException if the codec fails to encode or decode
 */
static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
    final int initialBufferSize)
throws IOException {
  byte [] encoded = null;
  Cell [] roundTripped = null;
  // Encode phase: run `cycles` iterations; the bytes from the last iteration feed the decode phase.
  for (int cycle = 0; cycle < cycles; cycle++) {
    ByteArrayOutputStream sink = new ByteArrayOutputStream(initialBufferSize);
    Codec.Encoder enc = codec.getEncoder(sink);
    encoded = runEncoderTest(cycle, initialBufferSize, sink, enc, cells);
  }
  // Decode phase: read the encoded bytes back the same number of times.
  for (int cycle = 0; cycle < cycles; cycle++) {
    ByteArrayInputStream source = new ByteArrayInputStream(encoded);
    Codec.Decoder dec = codec.getDecoder(source);
    roundTripped = CodecPerformance.runDecoderTest(cycle, count, dec);
  }
  // Sanity check: what came out must match what went in.
  verifyCells(cells, roundTripped);
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:18,代码来源:CodecPerformance.java

示例2: testEmptyWorks

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
@Test
public void testEmptyWorks() throws IOException {
  // An encoder that is flushed without writing any cell must emit zero bytes,
  // and a decoder over that empty stream must immediately report end-of-stream.
  ByteArrayOutputStream sink = new ByteArrayOutputStream();
  CountingOutputStream countingOut = new CountingOutputStream(sink);
  DataOutputStream out = new DataOutputStream(countingOut);
  Codec codec = new CellCodec();
  Codec.Encoder encoder = codec.getEncoder(out);
  encoder.flush();
  out.close();
  long bytesWritten = countingOut.getCount();
  assertEquals(0, bytesWritten);
  CountingInputStream countingIn =
    new CountingInputStream(new ByteArrayInputStream(sink.toByteArray()));
  DataInputStream in = new DataInputStream(countingIn);
  Codec.Decoder decoder = codec.getDecoder(in);
  // No cells were written, so the very first advance() returns false.
  assertFalse(decoder.advance());
  in.close();
  assertEquals(0, countingIn.getCount());
}
 
开发者ID:tenggyut,项目名称:HIndex,代码行数:20,代码来源:TestCellCodec.java

示例3: testOne

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
@Test
public void testOne() throws IOException {
  // Encode a single KeyValue carrying the maximum mvcc version, then verify the
  // decoder yields exactly one cell and consumes exactly the bytes that were written.
  ByteArrayOutputStream sink = new ByteArrayOutputStream();
  CountingOutputStream countingOut = new CountingOutputStream(sink);
  DataOutputStream out = new DataOutputStream(countingOut);
  Codec codec = new CellCodec();
  Codec.Encoder encoder = codec.getEncoder(out);
  final KeyValue kv =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
  // Extreme mvcc value exercises version serialization.
  kv.setMvccVersion(Long.MAX_VALUE);
  encoder.write(kv);
  encoder.flush();
  out.close();
  final long bytesWritten = countingOut.getCount();
  CountingInputStream countingIn =
    new CountingInputStream(new ByteArrayInputStream(sink.toByteArray()));
  DataInputStream in = new DataInputStream(countingIn);
  Codec.Decoder decoder = codec.getDecoder(in);
  // One advance for the KV; the next one hits the end-of-stream marker.
  assertTrue(decoder.advance());
  assertFalse(decoder.advance());
  in.close();
  // The decoder must have consumed every byte the encoder produced.
  assertEquals(bytesWritten, countingIn.getCount());
}
 
开发者ID:tenggyut,项目名称:HIndex,代码行数:25,代码来源:TestCellCodec.java

示例4: testOne

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
@Test
public void testOne() throws IOException {
  // Round-trip one KeyValue through CellCodec and check both the cell count
  // and the exact byte count read back on decode.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  CountingOutputStream countedWrites = new CountingOutputStream(buffer);
  DataOutputStream encodeStream = new DataOutputStream(countedWrites);
  Codec codec = new CellCodec();
  Codec.Encoder encoder = codec.getEncoder(encodeStream);
  final KeyValue kv =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
  encoder.write(kv);
  encoder.flush();
  encodeStream.close();
  final long encodedLength = countedWrites.getCount();
  CountingInputStream countedReads =
    new CountingInputStream(new ByteArrayInputStream(buffer.toByteArray()));
  DataInputStream decodeStream = new DataInputStream(countedReads);
  Codec.Decoder decoder = codec.getDecoder(decodeStream);
  // First advance pulls in the KV; second trips the end-of-stream marker.
  assertTrue(decoder.advance());
  assertFalse(decoder.advance());
  decodeStream.close();
  // Bytes read back must equal bytes originally written.
  assertEquals(encodedLength, countedReads.getCount());
}
 
开发者ID:cloud-software-foundation,项目名称:c5,代码行数:24,代码来源:TestCellCodec.java

示例5: testThree

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
@Test
public void testThree() throws IOException {
  // Encode three KeyValues, then verify they decode in order, compare equal to
  // the originals, and that the stream is fully consumed afterwards.
  ByteArrayOutputStream sink = new ByteArrayOutputStream();
  CountingOutputStream countingOut = new CountingOutputStream(sink);
  DataOutputStream out = new DataOutputStream(countingOut);
  Codec codec = new CellCodec();
  Codec.Encoder encoder = codec.getEncoder(out);
  final KeyValue kv1 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
  final KeyValue kv2 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
  final KeyValue kv3 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
  final KeyValue [] written = new KeyValue [] { kv1, kv2, kv3 };
  for (KeyValue kv : written) {
    encoder.write(kv);
  }
  encoder.flush();
  out.close();
  long bytesWritten = countingOut.getCount();
  CountingInputStream countingIn =
    new CountingInputStream(new ByteArrayInputStream(sink.toByteArray()));
  DataInputStream in = new DataInputStream(countingIn);
  Codec.Decoder decoder = codec.getDecoder(in);
  // Cells must come back in write order and compare equal to the originals.
  for (KeyValue expected : written) {
    assertTrue(decoder.advance());
    Cell c = decoder.current();
    assertTrue(CellComparator.equals(c, expected));
  }
  assertFalse(decoder.advance());
  in.close();
  assertEquals(bytesWritten, countingIn.getCount());
}
 
开发者ID:tenggyut,项目名称:HIndex,代码行数:37,代码来源:TestCellCodec.java

示例6: createCellScanner

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
/**
 * Creates a client-side {@link CellScanner} over an encoded cell block.
 *
 * @param codec codec used to decode the cell block
 * @param compressor optional compression codec; may be null when the block is uncompressed
 * @param cellBlock encoded (and possibly compressed) cell bytes, typically an RPC response
 * @return CellScanner to work against the content of <code>cellBlock</code>
 * @throws IOException if decompression or decoder creation fails
 */
public CellScanner createCellScanner(final Codec codec, final CompressionCodec compressor,
    final byte[] cellBlock) throws IOException {
  // Use this method from Client side to create the CellScanner
  if (compressor != null) {
    ByteBuffer decompressed = decompress(compressor, cellBlock);
    return codec.getDecoder(new ByteBufferInputStream(decompressed));
  }
  // Deliberately wrap the raw byte[] in a stream instead of handing the buffer to a
  // ByteBuffer-based decoder: a BB decoder would build Cells directly over the RPC
  // response bytes, and any Cell cached at the application level would then pin the
  // whole response byte[] and prevent it from being garbage-collected.
  return codec.getDecoder(new ByteArrayInputStream(cellBlock));
}
 
开发者ID:apache,项目名称:hbase,代码行数:20,代码来源:CellBlockBuilder.java

示例7: createCellScannerReusingBuffers

import org.apache.hadoop.hbase.codec.Codec; //导入方法依赖的package包/类
/**
 * Creates a server-side (HRS) {@link CellScanner} whose cells share the backing buffer.
 *
 * @param codec to use for cellblock
 * @param compressor optional compression codec; may be null when the block is uncompressed
 * @param cellBlock ByteBuffer containing the cells written by the Codec. The buffer should be
 *          position()'ed at the start of the cell block and limit()'ed at the end.
 * @return CellScanner to work against the content of <code>cellBlock</code>. All cells created
 *         out of the CellScanner will share the same ByteBuffer being passed.
 * @throws IOException if decompression or cell decoding fails
 */
public CellScanner createCellScannerReusingBuffers(final Codec codec,
    final CompressionCodec compressor, ByteBuff cellBlock) throws IOException {
  // Decompress up front rather than layering a decompressing stream: if callers
  // failed to close such a stream properly, compression resources would leak.
  ByteBuff block = compressor == null ? cellBlock : decompress(compressor, cellBlock);
  return codec.getDecoder(block);
}
 
开发者ID:apache,项目名称:hbase,代码行数:19,代码来源:CellBlockBuilder.java


注:本文中的org.apache.hadoop.hbase.codec.Codec.getDecoder方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。