

Java KeyValueCodec Class Code Examples

This article compiles typical usage examples of the Java class org.apache.hadoop.hbase.codec.KeyValueCodec. If you are wondering what the KeyValueCodec class is for, how to use it, or where to find usage examples, the curated class code examples below should help.


The KeyValueCodec class belongs to the org.apache.hadoop.hbase.codec package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
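
Before the individual examples, here is a minimal, self-contained sketch of the encode/decode round trip that the examples below exercise, assuming the Codec API used in Examples 2, 3 and 10 (the class name KeyValueCodecRoundTrip is just an illustrative placeholder):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueCodecRoundTrip {
  public static void main(String[] args) throws IOException {
    KeyValueCodec codec = new KeyValueCodec();

    // Encode one KeyValue into an in-memory byte array.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    Codec.Encoder encoder = codec.getEncoder(dos);
    encoder.write(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v")));
    encoder.flush();
    dos.close();

    // Decode the same bytes back into Cells and print them.
    DataInputStream dis =
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    Codec.Decoder decoder = codec.getDecoder(dis);
    while (decoder.advance()) {
      Cell cell = decoder.current();
      System.out.println(cell);
    }
    dis.close();
  }
}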

Example 1: main

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
/**
 * For running a few tests of methods herein.
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg: args) {
    if (arg.startsWith(COUNT)) {
      count = Integer.parseInt(arg.replace(COUNT, ""));
    } else if (arg.startsWith(SIZE)) {
      size = Integer.parseInt(arg.replace(SIZE, ""));
    } else {
      usage(1);
    }
  }
  IPCUtil util = new IPCUtil(HBaseConfiguration.create());
  ((Log4JLogger)IPCUtil.LOG).getLogger().setLevel(Level.ALL);
  timerTests(util, count, size,  new KeyValueCodec(), null);
  timerTests(util, count, size,  new KeyValueCodec(), new DefaultCodec());
  timerTests(util, count, size,  new KeyValueCodec(), new GzipCodec());
}
 
Developer: fengchen8086, Project: ditb, Lines: 24, Source: TestIPCUtil.java

Example 2: testEmptyWorks

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Test
public void testEmptyWorks() throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  CountingOutputStream cos = new CountingOutputStream(baos);
  DataOutputStream dos = new DataOutputStream(cos);
  KeyValueCodec kvc = new KeyValueCodec();
  Codec.Encoder encoder = kvc.getEncoder(dos);
  encoder.flush();
  dos.close();
  long offset = cos.getCount();
  assertEquals(0, offset);
  CountingInputStream cis =
    new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
  DataInputStream dis = new DataInputStream(cis);
  Codec.Decoder decoder = kvc.getDecoder(dis);
  assertFalse(decoder.advance());
  dis.close();
  assertEquals(0, cis.getCount());
}
 
Developer: tenggyut, Project: HIndex, Lines: 20, Source: TestKeyValueCodec.java

Example 3: testOne

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Test
public void testOne() throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  CountingOutputStream cos = new CountingOutputStream(baos);
  DataOutputStream dos = new DataOutputStream(cos);
  KeyValueCodec kvc = new KeyValueCodec();
  Codec.Encoder encoder = kvc.getEncoder(dos);
  final KeyValue kv =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
  final long length = kv.getLength() + Bytes.SIZEOF_INT; 
  encoder.write(kv);
  encoder.flush();
  dos.close();
  long offset = cos.getCount();
  assertEquals(length, offset);
  CountingInputStream cis =
    new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
  DataInputStream dis = new DataInputStream(cis);
  Codec.Decoder decoder = kvc.getDecoder(dis);
  assertTrue(decoder.advance()); // First read should pull in the KV
  // Second read should trip over the end-of-stream  marker and return false
  assertFalse(decoder.advance());
  dis.close();
  assertEquals(length, cis.getCount());
}
 
Developer: tenggyut, Project: HIndex, Lines: 26, Source: TestKeyValueCodec.java

Example 4: beforeClass

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@BeforeClass public static void beforeClass() throws Exception {
  // Uncomment the following lines if more verbosity is needed for
  // debugging (see HBASE-12285 for details).
  //((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
  //((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
  //((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
  UTIL.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
      KeyValueCodec.class.getCanonicalName());
  UTIL.getConfiguration().setBoolean(LoadBalancer.TABLES_ON_MASTER, true);
  // We used to ask for system tables on Master exclusively but not needed by test and doesn't
  // work anyways -- so commented out.
  // UTIL.getConfiguration().setBoolean(LoadBalancer.SYSTEM_TABLES_ON_MASTER, true);
  UTIL.getConfiguration()
      .set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, MyMasterObserver.class.getName());
  UTIL.startMiniCluster(slaves);
  Table t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
  UTIL.waitTableEnabled(TEST_TABLE);
  t.close();
  CONNECTION = ConnectionFactory.createConnection(UTIL.getConfiguration());
  assertTrue(MyMasterObserver.start.get());
}
 
Developer: apache, Project: hbase, Lines: 22, Source: TestMultiParallel.java

Example 5: main

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
/**
 * For running a few tests of methods herein.
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
    if (arg.startsWith(COUNT)) {
      count = Integer.parseInt(arg.replace(COUNT, ""));
    } else if (arg.startsWith(SIZE)) {
      size = Integer.parseInt(arg.replace(SIZE, ""));
    } else {
      usage(1);
    }
  }
  CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
  timerTests(builder, count, size, new KeyValueCodec(), null);
  timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
}
 
Developer: apache, Project: hbase, Lines: 23, Source: TestCellBlockBuilder.java

Example 6: beforeClass

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@BeforeClass public static void beforeClass() throws Exception {
  // Uncomment the following lines if more verbosity is needed for
  // debugging (see HBASE-12285 for details).
  //((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
  //((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
  //((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
  UTIL.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
      KeyValueCodec.class.getCanonicalName());
  UTIL.startMiniCluster(slaves);
  HTable t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
  UTIL.waitTableEnabled(TEST_TABLE);
  t.close();
  CONNECTION = ConnectionFactory.createConnection(UTIL.getConfiguration());
}
 
Developer: fengchen8086, Project: ditb, Lines: 15, Source: TestMultiParallel.java

Example 7: main

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
public static void main(String[] args) throws IOException {
  // How many Cells to encode/decode on each cycle.
  final int count = 100000;
  // How many times to do an operation; repeat gives hotspot chance to warm up.
  final int cycles = 30;

  Cell [] cells = getCells(count);
  int size = getRoughSize(cells);
  int initialBufferSize = 2 * size; // Multiply by 2 to ensure we don't have to grow buffer

  // Test KeyValue codec.
  doCodec(new KeyValueCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new CellCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new MessageCodec(), cells, cycles, count, initialBufferSize);
}
 
Developer: fengchen8086, Project: ditb, Lines: 16, Source: CodecPerformance.java

Example 8: getDefaultCodec

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@VisibleForTesting
public static String getDefaultCodec(final Configuration c) {
  // If "hbase.client.default.rpc.codec" is empty string -- you can't set it to null because
  // Configuration will complain -- then no default codec (and we'll pb everything).  Else
  // default is KeyValueCodec
  return c.get(DEFAULT_CODEC_CLASS, KeyValueCodec.class.getCanonicalName());
}
 
Developer: fengchen8086, Project: ditb, Lines: 8, Source: AbstractRpcClient.java
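
The comment in getDefaultCodec above implies a simple client-side switch: setting "hbase.client.default.rpc.codec" to the empty string disables the default codec, and the client sends everything as protobuf instead of cell blocks. A minimal hedged sketch of that configuration, assuming only what the comment states:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

Configuration conf = HBaseConfiguration.create();
// Empty string => no default codec; the client falls back to pure protobuf.
// (Setting null is not allowed; Configuration would complain.)
conf.set("hbase.client.default.rpc.codec", "");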

Example 9: getDefaultCodec

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@VisibleForTesting
public static String getDefaultCodec(final Configuration c) {
  // If "hbase.client.default.rpc.codec" is empty string -- you can't set it to null because
  // Configuration will complain -- then no default codec (and we'll pb everything).  Else
  // default is KeyValueCodec
  return c.get("hbase.client.default.rpc.codec", KeyValueCodec.class.getCanonicalName());
}
 
Developer: tenggyut, Project: HIndex, Lines: 8, Source: RpcClient.java

Example 10: testThree

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Test
public void testThree() throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  CountingOutputStream cos = new CountingOutputStream(baos);
  DataOutputStream dos = new DataOutputStream(cos);
  KeyValueCodec kvc = new KeyValueCodec();
  Codec.Encoder encoder = kvc.getEncoder(dos);
  final KeyValue kv1 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
  final KeyValue kv2 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
  final KeyValue kv3 =
    new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
  final long length = kv1.getLength() + Bytes.SIZEOF_INT; 
  encoder.write(kv1);
  encoder.write(kv2);
  encoder.write(kv3);
  encoder.flush();
  dos.close();
  long offset = cos.getCount();
  assertEquals(length * 3, offset);
  CountingInputStream cis =
    new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
  DataInputStream dis = new DataInputStream(cis);
  Codec.Decoder decoder = kvc.getDecoder(dis);
  assertTrue(decoder.advance());
  KeyValue kv = (KeyValue)decoder.current();
  assertTrue(kv1.equals(kv));
  assertTrue(decoder.advance());
  kv = (KeyValue)decoder.current();
  assertTrue(kv2.equals(kv));
  assertTrue(decoder.advance());
  kv = (KeyValue)decoder.current();
  assertTrue(kv3.equals(kv));
  assertFalse(decoder.advance());
  dis.close();
  assertEquals((length * 3), cis.getCount());
}
 
Developer: tenggyut, Project: HIndex, Lines: 39, Source: TestKeyValueCodec.java

Example 11: getDefaultCodec

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@VisibleForTesting
public static String getDefaultCodec(final Configuration c) {
  // If "hbase.client.default.rpc.codec" is empty string -- you can't set it to null because
  // Configuration will complain -- then no default codec (and we'll pb everything). Else
  // default is KeyValueCodec
  return c.get(DEFAULT_CODEC_CLASS, KeyValueCodec.class.getCanonicalName());
}
 
Developer: apache, Project: hbase, Lines: 8, Source: AbstractRpcClient.java

Example 12: testBuildCellBlock

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Test
public void testBuildCellBlock() throws IOException {
  doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), null);
  doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), new DefaultCodec());
  doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), new GzipCodec());
}
 
Developer: fengchen8086, Project: ditb, Lines: 7, Source: TestIPCUtil.java

Example 13: getDecoder

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Override
public Decoder getDecoder(InputStream is) {
  return
      (compression == null) ? new KeyValueCodec.KeyValueDecoder((DataInputStream) is)
          : new KeyValueCompression.CompressedKvDecoder((DataInputStream) is, compression);
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 7, Source: WALEditCodec.java

Example 14: getEncoder

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Override
public Encoder getEncoder(OutputStream os) {
  return
      (compression == null) ? new KeyValueCodec.KeyValueEncoder((DataOutputStream) os)
      : new CompressedKvEncoder((DataOutputStream) os, compression);
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 7, Source: WALEditCodec.java

Example 15: getDecoder

import org.apache.hadoop.hbase.codec.KeyValueCodec; // import the required package/class
@Override
public Decoder getDecoder(InputStream is) {
  return (compression == null)
      ? new KeyValueCodec.KeyValueDecoder(is) : new CompressedKvDecoder(is, compression);
}
 
Developer: grokcoder, Project: pbase, Lines: 6, Source: WALCellCodec.java


Note: The org.apache.hadoop.hbase.codec.KeyValueCodec class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and use or redistribution should follow each project's license. Please do not republish without permission.