当前位置: 首页>>代码示例>>Java>>正文


Java ByteBufferOutputStream类代码示例

本文整理汇总了Java中org.apache.hadoop.hbase.io.ByteBufferOutputStream的典型用法代码示例。如果您正苦于以下问题:Java ByteBufferOutputStream类的具体用法?Java ByteBufferOutputStream怎么用?Java ByteBufferOutputStream使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


ByteBufferOutputStream类属于org.apache.hadoop.hbase.io包,在下文中一共展示了ByteBufferOutputStream类的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: doRawSaslReply

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * No protobuf encoding of raw sasl messages
 */
/**
 * Sends a raw SASL reply to the client; raw here means no protobuf framing
 * is applied to the message.
 * @param status SASL exchange status; its {@code state} int is written first
 * @param rv payload written only on SUCCESS
 * @param errorClass exception class name written on failure
 * @param error error detail written on failure
 * @throws IOException if serializing the reply fails
 */
protected final void doRawSaslReply(SaslStatus status, Writable rv,
    String errorClass, String error) throws IOException {
  final BufferChain chain;
  // SASL messages observed in testing are usually 100-200 bytes, so an
  // initial capacity of 256 avoids a resize in the common case.
  try (ByteBufferOutputStream buf = new ByteBufferOutputStream(256);
      DataOutputStream dataOut = new DataOutputStream(buf)) {
    dataOut.writeInt(status.state); // status code leads the frame
    if (status == SaslStatus.SUCCESS) {
      rv.write(dataOut);
    } else {
      WritableUtils.writeString(dataOut, errorClass);
      WritableUtils.writeString(dataOut, error);
    }
    chain = new BufferChain(buf.getByteBuffer());
  }
  doRespond(() -> chain);
}
 
开发者ID:apache,项目名称:hbase,代码行数:22,代码来源:ServerRpcConnection.java

示例2: responseConnectionHeader

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * Send the response for connection header
 */
/**
 * Sends the connection-header response to the client when Crypto AES is
 * enabled; the Crypto AES cipher meta data is encrypted with the SASL server
 * before being sent.
 * @param chrBuilder builder holding the connection header response
 * @throws FatalConnectionException wrapping any IO failure as an
 *         UnsupportedCryptoException
 */
private void responseConnectionHeader(RPCProtos.ConnectionHeaderResponse.Builder chrBuilder)
    throws FatalConnectionException {
  // Nothing to send unless Crypto AES cipher meta is present.
  if (!chrBuilder.hasCryptoCipherMeta()) {
    return;
  }
  try {
    byte[] headerBytes = chrBuilder.build().toByteArray();
    // Prefix the serialized header with its 4-byte length, then SASL-wrap
    // the whole thing so the cipher meta travels encrypted.
    byte[] plain = new byte[headerBytes.length + 4];
    Bytes.putBytes(plain, 0, Bytes.toBytes(headerBytes.length), 0, 4);
    Bytes.putBytes(plain, 4, headerBytes, 0, headerBytes.length);
    byte[] wrapped = saslServer.wrap(plain, 0, plain.length);
    final BufferChain chain;
    try (ByteBufferOutputStream bbos = new ByteBufferOutputStream(wrapped.length + 4);
        DataOutputStream dataOut = new DataOutputStream(bbos)) {
      dataOut.writeInt(wrapped.length);
      dataOut.write(wrapped);
      chain = new BufferChain(bbos.getByteBuffer());
    }
    doRespond(() -> chain);
  } catch (IOException ex) {
    throw new UnsupportedCryptoException(ex.getMessage(), ex);
  }
}
 
开发者ID:apache,项目名称:hbase,代码行数:27,代码来源:ServerRpcConnection.java

示例3: testVarInt

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * Round-trips zig-zag varints through ByteBufferOutputStream and
 * ByteBufferInputStream across a range of magnitudes, including negatives
 * and a value beyond Integer.MAX_VALUE, printing encoded lengths.
 */
@Test
public void testVarInt() throws Exception {
  long[] samples = {1, 20, 200, 1024, 2048, 4000, 10000, 50000,
      100000, 1024 * 1024,
      ((long) Integer.MAX_VALUE) * 100,
      -1, -200, -5000};

  for (long sample : samples) {
    // Encode: shift the value left four bits and tag the low nibble with 8.
    ByteBufferOutputStream encodeSink = new ByteBufferOutputStream(12);
    CodedOutputStream encoder = CodedOutputStream.newInstance(encodeSink);
    long tagged = (sample << 4) | 8;
    encoder.writeSInt64NoTag(tagged);
    encoder.flush();

    ByteBuffer encoded = encodeSink.getByteBuffer();
    System.out.println("value: " + sample + ", length: " + encoded.remaining());

    // Decode and split back into value and tag to eyeball the round trip.
    ByteBufferInputStream decodeSource = new ByteBufferInputStream(encoded);
    CodedInputStream decoder = CodedInputStream.newInstance(decodeSource);
    long roundTripped = decoder.readSInt64();
    long recovered = roundTripped >> 4;
    long nibble = roundTripped & 0x0F;
    System.out.println("  transformed we are: " + roundTripped + " actual: " + recovered + " tag: " + nibble);
  }
}
 
开发者ID:cloud-software-foundation,项目名称:c5,代码行数:31,代码来源:EncodeTest.java

示例4: buildCellBlock

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * Puts CellScanner Cells into a cell block using passed in <code>codec</code> and/or
 * <code>compressor</code>.
 * @param codec
 * @param compressor
 * @param cellScanner
 * @return Null or byte buffer filled with a cellblock filled with passed-in Cells encoded using
 * passed in <code>codec</code> and/or <code>compressor</code>; the returned buffer has been
 * flipped and is ready for reading.  Use limit to find total size.
 * @throws IOException
 */
@SuppressWarnings("resource")
public ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor,
  final CellScanner cellScanner)
throws IOException {
  if (cellScanner == null) return null;
  if (codec == null) throw new CellScannerButNoCodecException();
  int bufferSize = this.cellBlockBuildingInitialBufferSize;
  if (cellScanner instanceof HeapSize) {
    long longSize = ((HeapSize)cellScanner).heapSize();
    // Just make sure we don't have a size bigger than an int.
    if (longSize > Integer.MAX_VALUE) {
      throw new IOException("Size " + longSize + " > " + Integer.MAX_VALUE);
    }
    bufferSize = ClassSize.align((int)longSize);
  } // TODO: Else, get estimate on size of buffer rather than have the buffer resize.
  // See TestIPCUtil main for experiment where we spin through the Cells getting estimate of
  // total size before creating the buffer.  It costs somw small percentage.  If we are usually
  // within the estimated buffer size, then the cost is not worth it.  If we are often well
  // outside the guesstimated buffer size, the processing can be done in half the time if we
  // go w/ the estimated size rather than let the buffer resize.
  ByteBufferOutputStream baos = new ByteBufferOutputStream(bufferSize);
  OutputStream os = baos;
  Compressor poolCompressor = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) ((Configurable)compressor).setConf(this.conf);
      poolCompressor = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, poolCompressor);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    int count = 0;
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
      count++;
    }
    encoder.flush();
    // If no cells, don't mess around.  Just return null (could be a bunch of existence checking
    // gets or something -- stuff that does not return a cell).
    if (count == 0) return null;
  } finally {
    os.close();
    if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor);
  }
  if (LOG.isTraceEnabled()) {
    if (bufferSize < baos.size()) {
      LOG.trace("Buffer grew from initial bufferSize=" + bufferSize + " to " + baos.size() +
        "; up hbase.ipc.cellblock.building.initial.buffersize?");
    }
  }
  return baos.getByteBuffer();
}
 
开发者ID:grokcoder,项目名称:pbase,代码行数:63,代码来源:IPCUtil.java

示例5: buildCellBlock

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * Puts CellScanner Cells into a cell block using passed in <code>codec</code> and/or
 * <code>compressor</code>.
 * @param codec
 * @param compressor
 * @Param cellScanner
 * @return Null or byte buffer filled with a cellblock filled with passed-in Cells encoded using
 * passed in <code>codec</code> and/or <code>compressor</code>; the returned buffer has been
 * flipped and is ready for reading.  Use limit to find total size.
 * @throws IOException
 */
@SuppressWarnings("resource")
ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor,
  final CellScanner cellScanner)
throws IOException {
  if (cellScanner == null) return null;
  if (codec == null) throw new CellScannerButNoCodecException();
  int bufferSize = this.cellBlockBuildingInitialBufferSize;
  if (cellScanner instanceof HeapSize) {
    long longSize = ((HeapSize)cellScanner).heapSize();
    // Just make sure we don't have a size bigger than an int.
    if (longSize > Integer.MAX_VALUE) {
      throw new IOException("Size " + longSize + " > " + Integer.MAX_VALUE);
    }
    bufferSize = ClassSize.align((int)longSize);
  } // TODO: Else, get estimate on size of buffer rather than have the buffer resize.
  // See TestIPCUtil main for experiment where we spin through the Cells getting estimate of
  // total size before creating the buffer.  It costs somw small percentage.  If we are usually
  // within the estimated buffer size, then the cost is not worth it.  If we are often well
  // outside the guesstimated buffer size, the processing can be done in half the time if we
  // go w/ the estimated size rather than let the buffer resize.
  ByteBufferOutputStream baos = new ByteBufferOutputStream(bufferSize);
  OutputStream os = baos;
  Compressor poolCompressor = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) ((Configurable)compressor).setConf(this.conf);
      poolCompressor = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, poolCompressor);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    int count = 0;
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
      count++;
    }
    encoder.flush();
    // If no cells, don't mess around.  Just return null (could be a bunch of existence checking
    // gets or something -- stuff that does not return a cell).
    if (count == 0) return null;
  } finally {
    os.close();
    if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor);
  }
  if (LOG.isTraceEnabled()) {
    if (bufferSize < baos.size()) {
      LOG.trace("Buffer grew from initial bufferSize=" + bufferSize + " to " + baos.size() +
        "; up hbase.ipc.cellblock.building.initial.buffersize?");
    }
  }
  return baos.getByteBuffer();
}
 
开发者ID:tenggyut,项目名称:HIndex,代码行数:63,代码来源:IPCUtil.java

示例6: get

import org.apache.hadoop.hbase.io.ByteBufferOutputStream; //导入依赖的package包/类
/**
 * Supplies a ByteBuffer-backed output stream presized to
 * {@code expectedSize}, recording it in {@code baos} so the written buffer
 * can be retrieved later.
 */
@Override
public OutputStream get(int expectedSize) {
  ByteBufferOutputStream stream = new ByteBufferOutputStream(expectedSize);
  baos = stream;
  return stream;
}
 
开发者ID:apache,项目名称:hbase,代码行数:6,代码来源:CellBlockBuilder.java


注:本文中的org.apache.hadoop.hbase.io.ByteBufferOutputStream类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。