This article collects typical usage examples of the Java method org.apache.hadoop.hbase.util.ClassSize.estimateBase. If you are wondering what ClassSize.estimateBase does, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore the enclosing class, org.apache.hadoop.hbase.util.ClassSize, for further context.
Eight code examples of the ClassSize.estimateBase method are shown below, sorted by popularity by default.
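To set the stage: ClassSize.estimateBase(cl, debug) returns an aligned estimate of the shallow heap footprint of an instance of cl, derived from its declared fields; passing true for the second argument additionally logs the per-field breakdown, which is why the examples below call it a second time in debug mode when a check fails. A minimal standalone sketch follows; the class MyCacheEntry is invented for illustration and is not part of HBase.

import org.apache.hadoop.hbase.util.ClassSize;

public class EstimateBaseDemo {
  // Invented class for illustration: one primitive long and two references.
  static class MyCacheEntry {
    long timestamp;
    byte[] payload;      // counted as one reference; the array itself is a separate object
    MyCacheEntry next;
  }

  public static void main(String[] args) {
    // false = just return the estimate; true = also log the field-by-field breakdown.
    long size = ClassSize.estimateBase(MyCacheEntry.class, false);
    System.out.println("Estimated shallow size of MyCacheEntry: " + size + " bytes");
  }
}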
Example 1: testHeapSizeForBlockIndex
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  expected -= ClassSize.align(3 * ClassSize.ARRAY);

  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
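Every testHeapSizeForBlockIndex variant in this article follows the same contract: the class under test implements HBase's HeapSize interface with a hand-maintained size constant, and the test cross-checks that constant against estimateBase. A hedged sketch of that contract on a hypothetical class; FixedSizeThing and its fields are invented for illustration and are not from HBase.

import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

// Hypothetical class: reports its own shallow heap footprint.
public class FixedSizeThing implements HeapSize {
  private long counter;      // one primitive long
  private Object reference;  // one reference

  // Hand-computed: object header + long + reference, rounded up to alignment.
  public static final long FIXED_OVERHEAD = ClassSize.align(
      ClassSize.OBJECT + Bytes.SIZEOF_LONG + ClassSize.REFERENCE);

  @Override
  public long heapSize() {
    return FIXED_OVERHEAD;
  }
}

A test would then assert that ClassSize.estimateBase(FixedSizeThing.class, false) equals new FixedSizeThing().heapSize(), subtracting any members that are null at construction time, exactly as the block-index tests above subtract the array costs.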
Example 2: Slab
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
Slab(int blockSize, int numBlocks) {
  buffers = new LinkedBlockingQueue<ByteBuffer>();
  slabs = new ConcurrentLinkedQueue<ByteBuffer>();

  this.blockSize = blockSize;
  this.numBlocks = numBlocks;
  this.heapSize = ClassSize.estimateBase(this.getClass(), false);

  int maxBlocksPerSlab = Integer.MAX_VALUE / blockSize;
  int maxSlabSize = maxBlocksPerSlab * blockSize;

  int numFullSlabs = numBlocks / maxBlocksPerSlab;
  int partialSlabSize = (numBlocks % maxBlocksPerSlab) * blockSize;
  for (int i = 0; i < numFullSlabs; i++) {
    allocateAndSlice(maxSlabSize, blockSize);
  }

  if (partialSlabSize > 0) {
    allocateAndSlice(partialSlabSize, blockSize);
  }
}
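The constructor's arithmetic caps each slab at Integer.MAX_VALUE bytes because a ByteBuffer's capacity is an int; the requested blocks are then split into full slabs plus one partial slab for the remainder. A standalone sketch of that split with illustrative numbers (not taken from the original source):

public class SlabMath {
  public static void main(String[] args) {
    int blockSize = 64 * 1024; // 64 KiB per block
    int numBlocks = 40_000;    // total blocks requested

    // One ByteBuffer can hold at most Integer.MAX_VALUE bytes:
    int maxBlocksPerSlab = Integer.MAX_VALUE / blockSize;             // 32767
    int numFullSlabs = numBlocks / maxBlocksPerSlab;                  // 1 full slab
    int partialSlabSize = (numBlocks % maxBlocksPerSlab) * blockSize; // 7233 blocks' worth

    System.out.printf("%d full slab(s) plus a %d-byte partial slab%n",
        numFullSlabs, partialSlabSize);
  }
}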
Example 3: testHeapSizeForBlockIndex
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.BlockIndexReader(Bytes.BYTES_RAWCOMPARATOR, 1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  expected -= ClassSize.align(3 * ClassSize.ARRAY);

  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
Example 4: testHeapSizeForBlockIndex
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  // Already the block keys are not there in this case
  expected -= ClassSize.align(2 * ClassSize.ARRAY);

  if (expected != actual) {
    expected = ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
Example 5: testHeapSizeForBlockIndex
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
/** Checks if the HeapSize calculator is within reason */
//@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  expected -= ClassSize.align(3 * ClassSize.ARRAY);

  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
Example 6: testMutations
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
@Test
public void testMutations() {
  Class<?> cl;
  long expected;
  long actual;

  cl = TimeRange.class;
  actual = ClassSize.TIMERANGE;
  expected = ClassSize.estimateBase(cl, false);
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }

  byte[] row = new byte[] { 0 };
  cl = Put.class;
  actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
  expected = ClassSize.estimateBase(cl, false);
  // The actual TreeMap is not included in the above calculation
  expected += ClassSize.align(ClassSize.TREEMAP);
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }

  cl = Delete.class;
  actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
  expected = ClassSize.estimateBase(cl, false);
  // The actual TreeMap is not included in the above calculation
  expected += ClassSize.align(ClassSize.TREEMAP);
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
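Note the accounting in this test: estimateBase sees only the declared fields of Put and Delete, so the TreeMap object that backs the family map has to be added to expected by hand. A minimal sketch of this adjust-for-hidden-members pattern on an invented class; Holder is hypothetical and not from HBase.

import java.util.TreeMap;
import org.apache.hadoop.hbase.util.ClassSize;

public class OverheadDemo {
  // Hypothetical class: a single reference field, backed by a real TreeMap object.
  static class Holder {
    private final TreeMap<Long, Long> map = new TreeMap<>();
  }

  public static void main(String[] args) {
    // The shallow estimate counts the header and the one reference only.
    long expected = ClassSize.estimateBase(Holder.class, false);
    // The TreeMap instance is a separate object, so add it explicitly,
    // mirroring `expected += ClassSize.align(ClassSize.TREEMAP)` above.
    expected += ClassSize.align(ClassSize.TREEMAP);
    System.out.println("Adjusted estimate: " + expected + " bytes");
  }
}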
Example 7: allocateAndSlice
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
private void allocateAndSlice(int size, int sliceSize) {
  ByteBuffer newSlab = ByteBuffer.allocateDirect(size);
  slabs.add(newSlab);
  for (int j = 0; j < newSlab.capacity(); j += sliceSize) {
    newSlab.limit(j + sliceSize).position(j);
    ByteBuffer aSlice = newSlab.slice();
    buffers.add(aSlice);
    heapSize += ClassSize.estimateBase(aSlice.getClass(), false);
  }
}
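The slicing idiom here is pure java.nio: bounding a window with limit and position and then calling slice() yields a view that shares the slab's direct memory but has its own independent position and limit. A self-contained sketch with small illustrative sizes:

import java.nio.ByteBuffer;

public class SliceDemo {
  public static void main(String[] args) {
    ByteBuffer slab = ByteBuffer.allocateDirect(16); // tiny slab for illustration
    int sliceSize = 4;
    for (int j = 0; j < slab.capacity(); j += sliceSize) {
      // Bound the window [j, j + sliceSize), then carve out a view of it.
      slab.limit(j + sliceSize).position(j);
      ByteBuffer slice = slab.slice(); // shares memory, independent cursor
      System.out.println("slice capacity = " + slice.capacity()); // prints 4
    }
  }
}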
Example 8: testMutations
import org.apache.hadoop.hbase.util.ClassSize; // import the package/class this method depends on
@Test
public void testMutations() {
  Class<?> cl;
  long expected;
  long actual;

  cl = TimeRange.class;
  actual = ClassSize.TIMERANGE;
  expected = ClassSize.estimateBase(cl, false);
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }

  byte[] row = new byte[] { 0 };
  cl = Put.class;
  actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
  expected = ClassSize.estimateBase(cl, false);
  // The actual TreeMap is not included in the above calculation
  expected += ClassSize.align(ClassSize.TREEMAP);
  expected += ClassSize.align(ClassSize.INTEGER); // priority
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }

  cl = Delete.class;
  actual = Mutation.MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
  expected = ClassSize.estimateBase(cl, false);
  // The actual TreeMap is not included in the above calculation
  expected += ClassSize.align(ClassSize.TREEMAP);
  expected += ClassSize.align(ClassSize.INTEGER); // priority
  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}