

Java HFilePrettyPrinter.run Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter.run. If you are wondering what exactly HFilePrettyPrinter.run does, how to call it, or what its usage looks like in practice, the curated code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter.


The sections below present 4 code examples of the HFilePrettyPrinter.run method, sorted by popularity by default.
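Before the project-specific examples, here is a minimal standalone sketch of calling HFilePrettyPrinter.run directly with the same "-m" / "-f" flags used in every example below. The class name HFilePrettyPrinterRunDemo and the file path are placeholder assumptions rather than code taken from the examples, and the sketch assumes an HBase/Hadoop configuration on the classpath that can resolve the given path.

import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;

public class HFilePrettyPrinterRunDemo {
  public static void main(String[] args) throws Exception {
    // "-m" requests the HFile meta data and "-f" names the HFile to inspect;
    // "/path/to/hfile" is a placeholder for a real store file path.
    HFilePrettyPrinter printer = new HFilePrettyPrinter();
    int exitCode = printer.run(new String[] { "-m", "-f", "/path/to/hfile" });
    System.exit(exitCode);
  }
}

In recent HBase releases the same flags are also accepted by the command-line store file analyzer (hbase hfile), which is a thin wrapper around this class.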

Example 1: runMergeWorkload

import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter; // import of the package/class the method depends on
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 55, Source: HFileReadWriteTest.java

Example 2: runMergeWorkload

import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter; // import of the package/class the method depends on
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
  HStore store = new HStore(region, columnDescriptor, conf);

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false, region.getReadpoint(IsolationLevel.READ_COMMITTED));

  StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false, true, false);

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
 
Developer: tenggyut, Project: HIndex, Lines: 45, Source: HFileReadWriteTest.java

Example 3: runMergeWorkload

import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter; // import of the package/class the method depends on
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
  HStore store = new HStore(region, columnDescriptor, conf);

  StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false, true);

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
 
Developer: cloud-software-foundation, Project: c5, Lines: 45, Source: HFileReadWriteTest.java

Example 4: runMergeWorkload

import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter; // import of the package/class the method depends on
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  HStore store = new HStore(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
 
Developer: daidong, Project: DominoHBase, Lines: 55, Source: HFileReadWriteTest.java


Note: The org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter.run examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors; please consult the corresponding project's License before distributing or using the code. Do not reproduce this article without permission.