

Java ClassSize.is32BitJVM Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hbase.util.ClassSize.is32BitJVM. If you are wondering what ClassSize.is32BitJVM does, how to call it, or how it is used in practice, the curated examples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.hbase.util.ClassSize.


Five code examples of ClassSize.is32BitJVM are shown below, ordered by popularity.
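Before diving into the examples, here is a minimal, self-contained sketch of the typical reason to call ClassSize.is32BitJVM: the JVM's pointer width determines the object and array header sizes that heap-size accounting must use. This sketch is not taken from any of the projects below; the class name JvmLayoutProbe is made up, and it assumes the hbase-common artifact that provides ClassSize is on the classpath.

import org.apache.hadoop.hbase.util.ClassSize;

public class JvmLayoutProbe {
  public static void main(String[] args) {
    // On a 32-bit JVM the object header is 8 bytes; on a 64-bit JVM it is
    // 12 bytes (with compressed oops) or 16 bytes (without), which is exactly
    // what testObjectSize below asserts.
    if (ClassSize.is32BitJVM()) {
      System.out.println("32-bit JVM, object header = " + ClassSize.OBJECT);
    } else {
      System.out.println("64-bit JVM, object header = " + ClassSize.OBJECT);
    }
    System.out.println("array header = " + ClassSize.ARRAY);
  }
}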

Example 1: testObjectSize

import org.apache.hadoop.hbase.util.ClassSize; // import the package/class that the method depends on
@Test
public void testObjectSize() throws IOException {
  LOG.info("header:" + ClassSize.OBJECT);
  LOG.info("array header:" + ClassSize.ARRAY);

  if (ClassSize.is32BitJVM()) {
    assertEquals(ClassSize.OBJECT, 8);
  } else {
    assertTrue(ClassSize.OBJECT == 12 || ClassSize.OBJECT == 16); // depending on CompressedOops
  }
  if (ClassSize.useUnsafeLayout()) {
    assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 4);
  } else {
    assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
  }
}
 
Developer: apache, Project: hbase, Lines of code: 17, Source file: TestHeapSize.java

Example 2: testBlockHeapSizeInternals

import org.apache.hadoop.hbase.util.ClassSize; // import the package/class that the method depends on
protected void testBlockHeapSizeInternals() {
  if (ClassSize.is32BitJVM()) {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 64);
  } else {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 80);
  }

  for (int size : new int[] { 100, 256, 12345 }) {
    byte[] byteArr = new byte[HConstants.HFILEBLOCK_HEADER_SIZE + size];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileContext meta = new HFileContextBuilder()
                        .withIncludesMvcc(includesMemstoreTS)
                        .withIncludesTags(includesTag)
                        .withHBaseCheckSum(false)
                        .withCompression(Algorithm.NONE)
                        .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                        .withChecksumType(ChecksumType.NULL).build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, -1, 0, meta);
    long byteBufferExpectedSize =
        ClassSize.align(ClassSize.estimateBase(buf.getClass(), true)
            + HConstants.HFILEBLOCK_HEADER_SIZE + size);
    long hfileMetaSize =  ClassSize.align(ClassSize.estimateBase(HFileContext.class, true));
    long hfileBlockExpectedSize =
        ClassSize.align(ClassSize.estimateBase(HFileBlock.class, true));
    long expected = hfileBlockExpectedSize + byteBufferExpectedSize + hfileMetaSize;
    assertEquals("Block data size: " + size + ", byte buffer expected " +
        "size: " + byteBufferExpectedSize + ", HFileBlock class expected " +
        "size: " + hfileBlockExpectedSize + ";", expected,
        block.heapSize());
  }
}
 
Developer: fengchen8086, Project: ditb, Lines of code: 34, Source file: TestHFileBlock.java
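Examples 2 through 5 all follow the same pattern: the block's reported heapSize() is compared against an expectation assembled from ClassSize.estimateBase and ClassSize.align. The following is a rough, hypothetical sketch of that estimation pattern in isolation (the class name HeapSizeEstimateDemo is made up; it assumes hbase-common is on the classpath and is not part of any of the quoted projects).

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.util.ClassSize;

public class HeapSizeEstimateDemo {
  public static void main(String[] args) {
    // Estimate the shallow footprint of a heap ByteBuffer the same way the
    // tests build byteBufferExpectedSize: estimateBase sums the header plus
    // the declared fields of the class, and align rounds the total up to the
    // JVM's object alignment.
    ByteBuffer buf = ByteBuffer.wrap(new byte[128], 0, 64);
    long base = ClassSize.estimateBase(buf.getClass(), true); // the tests above also pass true here
    long estimated = ClassSize.align(base + 128);             // account for the backing byte[]
    System.out.println("estimated heap size: " + estimated);
  }
}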

Example 3: testBlockHeapSize

import org.apache.hadoop.hbase.util.ClassSize; // import the package/class that the method depends on
@Test
public void testBlockHeapSize() {
  if (ClassSize.is32BitJVM()) {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 64);
  } else {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 80);
  }

  for (int size : new int[] { 100, 256, 12345 }) {
    byte[] byteArr = new byte[HFileBlock.HEADER_SIZE_WITH_CHECKSUMS + size];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, -1, includesMemstoreTS, 
        HFileBlock.MINOR_VERSION_NO_CHECKSUM, 0, ChecksumType.NULL.getCode(),
        0);
    long byteBufferExpectedSize =
        ClassSize.align(ClassSize.estimateBase(buf.getClass(), true)
            + HFileBlock.HEADER_SIZE_WITH_CHECKSUMS + size);
    long hfileBlockExpectedSize =
        ClassSize.align(ClassSize.estimateBase(HFileBlock.class, true));
    long expected = hfileBlockExpectedSize + byteBufferExpectedSize;
    assertEquals("Block data size: " + size + ", byte buffer expected " +
        "size: " + byteBufferExpectedSize + ", HFileBlock class expected " +
        "size: " + hfileBlockExpectedSize + ";", expected,
        block.heapSize());
  }
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines of code: 28, Source file: TestHFileBlock.java

Example 4: testBlockHeapSizeInternals

import org.apache.hadoop.hbase.util.ClassSize; // import the package/class that the method depends on
protected void testBlockHeapSizeInternals() {
  if (ClassSize.is32BitJVM()) {
    assertEquals(64, HFileBlock.MULTI_BYTE_BUFFER_HEAP_SIZE);
  } else {
    assertEquals(72, HFileBlock.MULTI_BYTE_BUFFER_HEAP_SIZE);
  }

  for (int size : new int[] { 100, 256, 12345 }) {
    byte[] byteArr = new byte[HConstants.HFILEBLOCK_HEADER_SIZE + size];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileContext meta = new HFileContextBuilder()
                        .withIncludesMvcc(includesMemstoreTS)
                        .withIncludesTags(includesTag)
                        .withHBaseCheckSum(false)
                        .withCompression(Algorithm.NONE)
                        .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                        .withChecksumType(ChecksumType.NULL).build();
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, -1, 0, -1, meta);
    long byteBufferExpectedSize = ClassSize.align(ClassSize.estimateBase(
        new MultiByteBuff(buf).getClass(), true)
        + HConstants.HFILEBLOCK_HEADER_SIZE + size);
    long hfileMetaSize =  ClassSize.align(ClassSize.estimateBase(HFileContext.class, true));
    long hfileBlockExpectedSize =
        ClassSize.align(ClassSize.estimateBase(HFileBlock.class, true));
    long expected = hfileBlockExpectedSize + byteBufferExpectedSize + hfileMetaSize;
    assertEquals("Block data size: " + size + ", byte buffer expected " +
        "size: " + byteBufferExpectedSize + ", HFileBlock class expected " +
        "size: " + hfileBlockExpectedSize + ";", expected,
        block.heapSize());
  }
}
 
Developer: apache, Project: hbase, Lines of code: 33, Source file: TestHFileBlock.java

Example 5: testBlockHeapSize

import org.apache.hadoop.hbase.util.ClassSize; // import the package/class that the method depends on
@Test
public void testBlockHeapSize() {
  if (ClassSize.is32BitJVM()) {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 64);
  } else {
    assertTrue(HFileBlock.BYTE_BUFFER_HEAP_SIZE == 80);
  }

  for (int size : new int[] { 100, 256, 12345 }) {
    byte[] byteArr = new byte[HConstants.HFILEBLOCK_HEADER_SIZE + size];
    ByteBuffer buf = ByteBuffer.wrap(byteArr, 0, size);
    HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
        HFileBlock.FILL_HEADER, -1, includesMemstoreTS,
        HFileBlock.MINOR_VERSION_NO_CHECKSUM, 0, ChecksumType.NULL.getCode(),
        0);
    long byteBufferExpectedSize =
        ClassSize.align(ClassSize.estimateBase(buf.getClass(), true)
            + HConstants.HFILEBLOCK_HEADER_SIZE + size);
    long hfileBlockExpectedSize =
        ClassSize.align(ClassSize.estimateBase(HFileBlock.class, true));
    long expected = hfileBlockExpectedSize + byteBufferExpectedSize;
    assertEquals("Block data size: " + size + ", byte buffer expected " +
        "size: " + byteBufferExpectedSize + ", HFileBlock class expected " +
        "size: " + hfileBlockExpectedSize + ";", expected,
        block.heapSize());
  }
}
 
Developer: cloud-software-foundation, Project: c5, Lines of code: 28, Source file: TestHFileBlock.java


Note: The org.apache.hadoop.hbase.util.ClassSize.is32BitJVM examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors; consult each project's license before distributing or using the code. Do not reproduce this article without permission.