This article collects typical usage examples of the Java method org.apache.hadoop.hbase.regionserver.StoreFileScanner.getScannersForStoreFiles. If you are unsure how to use StoreFileScanner.getScannersForStoreFiles, or are looking for concrete examples of it, the curated snippets below may help. You can also explore other usages of the enclosing class, org.apache.hadoop.hbase.regionserver.StoreFileScanner.
The following shows 9 code examples of StoreFileScanner.getScannersForStoreFiles, sorted by popularity by default.
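
Before the individual examples, here is a minimal hedged sketch of the common call pattern, assuming the six-argument HBase 2.x signature used in Examples 1, 2, 5 and 6 below (the older examples further down pass different parameters). The helper method, its parameter choices, and the way the HStoreFile collection is obtained are illustrative assumptions, not taken from any example on this page.

import java.io.IOException;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;

// Hedged sketch of the common call pattern: open one scanner per store file,
// seek, read, and always close the scanners.
public static void scanStoreFiles(Collection<HStoreFile> files, Cell start, long readPt)
    throws IOException {
  List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(files,
    /* cacheBlocks = */ false, /* usePread = */ true, /* isCompaction = */ false,
    /* useDropBehind = */ false, readPt);
  try {
    for (StoreFileScanner scanner : scanners) {
      if (scanner.seek(start)) {
        Cell current = scanner.peek(); // first cell at or after 'start' in this file
        // ... process 'current', or keep calling scanner.next() ...
      }
    }
  } finally {
    for (StoreFileScanner scanner : scanners) {
      scanner.close();
    }
  }
}
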
Example 1: readCell
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Reads a cell from the mob file.
 * @param search The cell to search for in the mob file.
 * @param cacheMobBlocks Whether this scanner should cache blocks.
 * @param readPt the read point.
 * @return The cell in the mob file.
 * @throws IOException
 */
public Cell readCell(Cell search, boolean cacheMobBlocks, long readPt) throws IOException {
  Cell result = null;
  StoreFileScanner scanner = null;
  List<HStoreFile> sfs = new ArrayList<>();
  sfs.add(sf);
  try {
    List<StoreFileScanner> sfScanners = StoreFileScanner.getScannersForStoreFiles(sfs,
      cacheMobBlocks, true, false, false, readPt);
    if (!sfScanners.isEmpty()) {
      scanner = sfScanners.get(0);
      if (scanner.seek(search)) {
        result = scanner.peek();
      }
    }
  } finally {
    if (scanner != null) {
      scanner.close();
    }
  }
  return result;
}
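
As a follow-up to Example 1, a hedged usage sketch. The examples on this page show bare methods without their enclosing class, so `mobFile` below simply stands for whatever object declares readCell(Cell, boolean, long); how that object is obtained (for instance from a mob file cache) is an assumption of the sketch, as is the helper method itself. Note that readCell, as written, returns whatever cell the seek lands on, so a caller may still need to check that the result really belongs to the requested row.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValueUtil;

// Hypothetical caller of Example 1's readCell(); 'mobFile' is assumed to be
// the object that declares it.
public byte[] lookupMobValue(byte[] row, byte[] family, byte[] qualifier) throws IOException {
  // Build a key that sorts before any real cell of this column, so the seek
  // lands on its newest version if one exists.
  Cell search = KeyValueUtil.createFirstOnRow(row, family, qualifier);
  // Do not cache mob blocks for a one-off lookup; LATEST_TIMESTAMP as the
  // read point mirrors Example 2 and makes every committed cell visible.
  Cell found = mobFile.readCell(search, false, HConstants.LATEST_TIMESTAMP);
  return found == null ? null : CellUtil.cloneValue(found);
}
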
Example 2: countDelCellsInDelFiles
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Gets the number of del cells in the del files.
 * @param paths the del file paths
 * @return the number of del cells
 * @throws IOException
 */
private int countDelCellsInDelFiles(List<Path> paths) throws IOException {
  List<HStoreFile> sfs = new ArrayList<>();
  int size = 0;
  for (Path path : paths) {
    HStoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
    sfs.add(sf);
  }
  List<KeyValueScanner> scanners = new ArrayList<>(StoreFileScanner.getScannersForStoreFiles(sfs,
    false, true, false, false, HConstants.LATEST_TIMESTAMP));
  long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
  long ttl = HStore.determineTTLFromFamily(hcd);
  ScanInfo scanInfo = new ScanInfo(conf, hcd, ttl, timeToPurgeDeletes, CellComparatorImpl.COMPARATOR);
  StoreScanner scanner = new StoreScanner(scanInfo, ScanType.COMPACT_RETAIN_DELETES, scanners);
  List<Cell> results = new ArrayList<>();
  boolean hasMore = true;
  while (hasMore) {
    hasMore = scanner.next(results);
    size += results.size();
    results.clear();
  }
  scanner.close();
  return size;
}
Example 3: readCell
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Reads a cell from the mob file.
 * @param search The KeyValue to search for in the mob file.
 * @param cacheMobBlocks Whether this scanner should cache blocks.
 * @return The KeyValue in the mob file.
 * @throws IOException
 */
public KeyValue readCell(KeyValue search, boolean cacheMobBlocks) throws IOException {
  KeyValue result = null;
  StoreFileScanner scanner = null;
  List<StoreFile> sfs = new ArrayList<StoreFile>();
  sfs.add(sf);
  try {
    List<StoreFileScanner> sfScanners = StoreFileScanner.getScannersForStoreFiles(sfs,
      cacheMobBlocks, true, false, null, sf.getMaxMemstoreTS());
    if (!sfScanners.isEmpty()) {
      scanner = sfScanners.get(0);
      if (scanner.seek(search)) {
        result = scanner.peek();
      }
    }
  } finally {
    if (scanner != null) {
      scanner.close();
    }
  }
  return result;
}
Example 4: createFileScanners
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Creates file scanners for compaction.
 * @param filesToCompact Files.
 * @return Scanners.
 */
protected List<StoreFileScanner> createFileScanners(final Collection<StoreFile> filesToCompact,
    long smallestReadPoint, boolean useDropBehind) throws IOException {
  return StoreFileScanner.getScannersForStoreFiles(filesToCompact,
    /* cache blocks = */ false,
    /* use pread = */ false,
    /* is compaction */ true,
    /* use Drop Behind */ useDropBehind, smallestReadPoint);
}
Example 5: createScanner
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Creates a store scanner.
 * @param filesToCompact The files to be compacted.
 * @param scanType The scan type.
 * @return The store scanner.
 * @throws IOException if an IO failure is encountered
 */
private StoreScanner createScanner(List<HStoreFile> filesToCompact, ScanType scanType)
    throws IOException {
  List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact,
    false, true, false, false, HConstants.LATEST_TIMESTAMP);
  long ttl = HStore.determineTTLFromFamily(column);
  ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparator.getInstance());
  return new StoreScanner(scanInfo, scanType, scanners);
}
Example 6: getScanner
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Internal use only. This is used by the sweeper.
 * @return The store file scanner.
 * @throws IOException
 */
public StoreFileScanner getScanner() throws IOException {
  List<HStoreFile> sfs = new ArrayList<>();
  sfs.add(sf);
  List<StoreFileScanner> sfScanners = StoreFileScanner.getScannersForStoreFiles(sfs, false, true,
    false, false, sf.getMaxMemStoreTS());
  return sfScanners.get(0);
}
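
A hedged usage sketch for Example 6: drain every cell of the underlying file starting from a given row. As in the sketch after Example 1, `mobFile` stands for the object that declares getScanner() and is an assumption here; seek(), next() and close() are standard KeyValueScanner operations on the returned StoreFileScanner.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;

// Hypothetical caller of Example 6's getScanner(); counts the cells from
// 'startRow' to the end of the file.
public long countCellsFrom(byte[] startRow) throws IOException {
  long count = 0;
  StoreFileScanner scanner = mobFile.getScanner();
  try {
    if (scanner.seek(KeyValueUtil.createFirstOnRow(startRow))) {
      // next() returns the cell the scanner is positioned on and then
      // advances; it returns null once the file is exhausted.
      for (Cell cell = scanner.next(); cell != null; cell = scanner.next()) {
        count++;
      }
    }
  } finally {
    scanner.close();
  }
  return count;
}
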
Example 7: getScanner
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Internal use only. This is used by the sweeper.
 * @return The store file scanner.
 * @throws IOException
 */
public StoreFileScanner getScanner() throws IOException {
  List<StoreFile> sfs = new ArrayList<StoreFile>();
  sfs.add(sf);
  List<StoreFileScanner> sfScanners = StoreFileScanner.getScannersForStoreFiles(sfs, false, true,
    false, null, sf.getMaxMemstoreTS());
  return sfScanners.get(0);
}
Example 8: createFileScanners
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
/**
 * Creates file scanners for compaction.
 * @param filesToCompact Files.
 * @return Scanners.
 */
protected List<StoreFileScanner> createFileScanners(
    final Collection<StoreFile> filesToCompact, long smallestReadPoint) throws IOException {
  return StoreFileScanner.getScannersForStoreFiles(filesToCompact, false, false, true,
    smallestReadPoint);
}
Example 9: createFileScanners
import org.apache.hadoop.hbase.regionserver.StoreFileScanner; // import the package/class the method depends on
protected List<StoreFileScanner> createFileScanners(
    final Collection<StoreFile> filesToCompact) throws IOException {
  return StoreFileScanner.getScannersForStoreFiles(filesToCompact, false, false, true);
}