本文整理汇总了Java中org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl.next方法的典型用法代码示例。如果您正苦于以下问题:Java RegionScannerImpl.next方法的具体用法?Java RegionScannerImpl.next怎么用?Java RegionScannerImpl.next使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl
的用法示例。
在下文中一共展示了RegionScannerImpl.next方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: scanColSet
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; //导入方法依赖的package包/类
/**
 * Scans the test row with the given set of column ids and verifies that
 * exactly the expected columns come back, even after the Bloom filter of the
 * most recently written store file is disabled (a disabled Bloom filter must
 * act as "always pass" and therefore never hide columns).
 *
 * @param colSet column ids to request in the scan
 * @param expectedResultCols column ids the scan is expected to return
 * @throws IOException if the scan or file-system access fails
 */
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();
  // Sort scanners by their HFile's modification time, oldest first, so the
  // last element corresponds to the most recently written store file.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReader().getHFileReader().getPath();
      Path p2 = s2.getReader().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        // Comparator.compare cannot throw a checked exception.
        throw new RuntimeException(ex);
      }
      // BUGFIX: the original ternary (t1 < t2 ? -1 : t1 == t2 ? 1 : 0)
      // returned 1 for equal timestamps and 0 for t1 > t2, which violates
      // the Comparator contract (sgn(compare(x, y)) == -sgn(compare(y, x)))
      // and leaves the sort order undefined. Long.compare is correct.
      return Long.compare(t1, t2);
    }
  });
  // The most recently written store file's reader is the last one after
  // sorting.
  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners) {
    lastStoreFileReader = sfScanner.getReader();
  }
  new HFilePrettyPrinter(conf).run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});
  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();
  List<Cell> allResults = new ArrayList<Cell>();
  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    // Drain the scanner: next() returns false on the final batch, so also
    // collect whatever is left in that last partial batch.
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }
  // Recover the numeric column ids from the returned qualifiers.
  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols) {
    expectedIds.add(expectedId);
  }
  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
示例2: scanColSet
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; //导入方法依赖的package包/类
/**
 * Scans the test row with the given set of column ids and verifies that
 * exactly the expected columns come back, even after the Bloom filter of the
 * most recently written store file is disabled (a disabled Bloom filter must
 * act as "always pass" and therefore never hide columns).
 *
 * @param colSet column ids to request in the scan
 * @param expectedResultCols column ids the scan is expected to return
 * @throws IOException if the scan or file-system access fails
 */
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();
  // Sort scanners by their HFile's modification time, oldest first, so the
  // last element corresponds to the most recently written store file.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        // Comparator.compare cannot throw a checked exception.
        throw new RuntimeException(ex);
      }
      // BUGFIX: the original ternary (t1 < t2 ? -1 : t1 == t2 ? 1 : 0)
      // returned 1 for equal timestamps and 0 for t1 > t2, which violates
      // the Comparator contract (sgn(compare(x, y)) == -sgn(compare(y, x)))
      // and leaves the sort order undefined. Long.compare is correct.
      return Long.compare(t1, t2);
    }
  });
  // The most recently written store file's reader is the last one after
  // sorting.
  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners) {
    lastStoreFileReader = sfScanner.getReaderForTesting();
  }
  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});
  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();
  List<KeyValue> allResults = new ArrayList<KeyValue>();
  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    // Drain the scanner: next() returns false on the final batch, so also
    // collect whatever is left in that last partial batch.
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }
  // Recover the numeric column ids from the returned qualifiers.
  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols) {
    expectedIds.add(expectedId);
  }
  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
示例3: scanColSet
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; //导入方法依赖的package包/类
/**
 * Scans the test row with the given set of column ids and verifies that
 * exactly the expected columns come back, even after the Bloom filter of the
 * most recently written store file is disabled (a disabled Bloom filter must
 * act as "always pass" and therefore never hide columns).
 *
 * @param colSet column ids to request in the scan
 * @param expectedResultCols column ids the scan is expected to return
 * @throws IOException if the scan or file-system access fails
 */
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();
  // Sort scanners by their HFile's modification time, oldest first, so the
  // last element corresponds to the most recently written store file.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        // Comparator.compare cannot throw a checked exception.
        throw new RuntimeException(ex);
      }
      // BUGFIX: the original ternary (t1 < t2 ? -1 : t1 == t2 ? 1 : 0)
      // returned 1 for equal timestamps and 0 for t1 > t2, which violates
      // the Comparator contract (sgn(compare(x, y)) == -sgn(compare(y, x)))
      // and leaves the sort order undefined. Long.compare is correct.
      return Long.compare(t1, t2);
    }
  });
  // The most recently written store file's reader is the last one after
  // sorting.
  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners) {
    lastStoreFileReader = sfScanner.getReaderForTesting();
  }
  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});
  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();
  List<Cell> allResults = new ArrayList<Cell>();
  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    // Drain the scanner: next() returns false on the final batch, so also
    // collect whatever is left in that last partial batch.
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }
  // Recover the numeric column ids from the returned qualifiers.
  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols) {
    expectedIds.add(expectedId);
  }
  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
示例4: testReverseScanShouldNotScanMemstoreIfReadPtLesser
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; //导入方法依赖的package包/类
/**
 * Verifies the HBASE-15871 behavior: when a flush resets the store scanner
 * underneath an open reversed region scanner, the memstore scanner must not
 * be re-added, so rows written after the scanner pinned its read point stay
 * invisible and seekToPreviousRow never has to skip them.
 */
@Test
public void testReverseScanShouldNotScanMemstoreIfReadPtLesser() throws Exception {
  byte[] family = Bytes.toBytes("CF1");
  byte[][] families = { family };
  byte[] qualifier = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  try {
    // Seed two rows so there is one store file and one memstore entry, then
    // open the scanner so it captures an earlier read point.
    Put newerRowPut = new Put(Bytes.toBytes("19996"));
    newerRowPut.addColumn(family, qualifier, Bytes.toBytes("val"));
    region.put(newerRowPut);
    Put olderRowPut = new Put(Bytes.toBytes("19995"));
    olderRowPut.addColumn(family, qualifier, Bytes.toBytes("val"));
    region.put(olderRowPut);
    // Open a reversed scan starting from the newer row.
    Scan scan = new Scan(Bytes.toBytes("19996"));
    scan.setReversed(true);
    RegionScannerImpl scanner = region.getScanner(scan);
    // Flushing the cache resets the store scanner under the open scanner.
    region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
    // Load the memstore with many rows that would all have to be skipped
    // via MemStoreScanner.seekToPreviousRow if it were scanned.
    for (int rowId = 10000; rowId < 20000; rowId++) {
      Put fillerPut = new Put(Bytes.toBytes("" + rowId));
      fillerPut.addColumn(family, qualifier, Bytes.toBytes("" + rowId));
      region.put(fillerPut);
    }
    List<Cell> rowCells = new ArrayList<>();
    boolean moreRows = true;
    boolean heapChecked = false;
    while (moreRows) {
      moreRows = scanner.next(rowCells);
      if (!heapChecked) {
        // With HBASE-15871, after the scanner is reset the memstore scanner
        // should not be added here.
        StoreScanner current =
            (StoreScanner) (scanner.storeHeap).getCurrentForTesting();
        List<KeyValueScanner> scanners = current.getAllScannersForTesting();
        assertEquals("There should be only one scanner the store file scanner", 1,
            scanners.size());
        heapChecked = true;
      }
    }
    // Only the two rows visible at the pinned read point come back, newest
    // first because the scan is reversed.
    assertEquals(2, rowCells.size());
    assertEquals("19996", Bytes.toString(rowCells.get(0).getRowArray(),
        rowCells.get(0).getRowOffset(), rowCells.get(0).getRowLength()));
    assertEquals("19995", Bytes.toString(rowCells.get(1).getRowArray(),
        rowCells.get(1).getRowOffset(), rowCells.get(1).getRowLength()));
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
  }
}
示例5: scanColSet
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; //导入方法依赖的package包/类
/**
 * Scans the test row with the given set of column ids and verifies that
 * exactly the expected columns come back, even after the Bloom filter of the
 * most recently written store file is disabled (a disabled Bloom filter must
 * act as "always pass" and therefore never hide columns).
 *
 * @param colSet column ids to request in the scan
 * @param expectedResultCols column ids the scan is expected to return
 * @throws IOException if the scan or file-system access fails
 */
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();
  // Sort scanners by their HFile's modification time, oldest first, so the
  // last element corresponds to the most recently written store file.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReader().getHFileReader().getPath();
      Path p2 = s2.getReader().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        // Comparator.compare cannot throw a checked exception.
        throw new RuntimeException(ex);
      }
      // BUGFIX: the original ternary (t1 < t2 ? -1 : t1 == t2 ? 1 : 0)
      // returned 1 for equal timestamps and 0 for t1 > t2, which violates
      // the Comparator contract (sgn(compare(x, y)) == -sgn(compare(y, x)))
      // and leaves the sort order undefined. Long.compare is correct.
      return Long.compare(t1, t2);
    }
  });
  // The most recently written store file's reader is the last one after
  // sorting.
  StoreFileReader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners) {
    lastStoreFileReader = sfScanner.getReader();
  }
  new HFilePrettyPrinter(conf).run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});
  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();
  List<Cell> allResults = new ArrayList<>();
  { // Limit the scope of results.
    List<Cell> results = new ArrayList<>();
    // Drain the scanner: next() returns false on the final batch, so also
    // collect whatever is left in that last partial batch.
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }
  // Recover the numeric column ids from the returned qualifiers.
  List<Integer> actualIds = new ArrayList<>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<>();
  for (int expectedId : expectedResultCols) {
    expectedIds.add(expectedId);
  }
  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}