本文整理汇总了Java中org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange方法的典型用法代码示例。如果您正苦于以下问题:Java MultiRowRangeFilter.RowRange方法的具体用法?Java MultiRowRangeFilter.RowRange怎么用?Java MultiRowRangeFilter.RowRange使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hbase.filter.MultiRowRangeFilter
的用法示例。
在下文中一共展示了MultiRowRangeFilter.RowRange方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: parseRowRangeParameter
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; //导入方法依赖的package包/类
/**
 * Parses the row-range command-line switch into a list of scan row ranges.
 * Ranges are separated by ';'; each range is "start,end" where either
 * boundary may be empty. Start keys are inclusive, end keys exclusive.
 *
 * @param arg         the full argument, including the switch prefix
 * @param rangeSwitch the switch prefix to strip (e.g. "--range=")
 * @return the parsed row ranges
 * @throws IllegalArgumentException if a range is not of the form "a,b"
 */
private static List<MultiRowRangeFilter.RowRange> parseRowRangeParameter(
    String arg, String rangeSwitch) {
  final List<MultiRowRangeFilter.RowRange> result = new ArrayList<>();
  final String rawRanges = arg.substring(rangeSwitch.length());
  for (String spec : rawRanges.split(";")) {
    // Split on the first comma only, then reject any extra commas in the rest.
    final String[] bounds = spec.split(",", 2);
    if (bounds.length != 2 || bounds[1].contains(",")) {
      printUsage("Please specify range in such format as \"--range=a,b\" " +
          "or, with only one boundary, \"--range=,b\" or \"--range=a,\"");
      throw new IllegalArgumentException("Wrong range specification: " + spec);
    }
    result.add(new MultiRowRangeFilter.RowRange(
        Bytes.toBytesBinary(bounds[0]), true,    // start key, inclusive
        Bytes.toBytesBinary(bounds[1]), false)); // stop key, exclusive
  }
  return result;
}
示例2: setScanFilter
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; //导入方法依赖的package包/类
/**
 * Configures the filter on the given {@link Scan}.
 * With more than one range, a {@link MultiRowRangeFilter} is installed.
 * With zero or one range, a {@link FirstKeyOnlyFilter} is installed (cheap
 * row counting); for exactly one range, the scan's startRow (inclusive) and
 * stopRow (exclusive) are additionally set from that range.
 * @param scan
 * @param rowRangeList
 */
private static void setScanFilter(Scan scan, List<MultiRowRangeFilter.RowRange> rowRangeList) {
  final int rangeCount = (rowRangeList == null) ? 0 : rowRangeList.size();
  if (rangeCount > 1) {
    scan.setFilter(new MultiRowRangeFilter(rowRangeList));
    return;
  }
  // Zero or one range: only the first key of each row is needed to count it.
  scan.setFilter(new FirstKeyOnlyFilter());
  if (rangeCount == 1) {
    final MultiRowRangeFilter.RowRange only = rowRangeList.get(0);
    scan.setStartRow(only.getStartRow()); // inclusive
    scan.setStopRow(only.getStopRow());   // exclusive
  }
}
示例3: createScanner
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; //导入方法依赖的package包/类
// Builds an HBase scanner over the rows selected by the current batch of ids
// (if any), with view/schema/processing hints attached as scan attributes.
// Returns null once the id iterator is exhausted, and an empty iterable when
// the current batch of ids maps to no row ranges.
private CloseableIterable<Result> createScanner() {
// End of input ids
if (null != idsIterator && !idsIterator.hasNext()) {
return null;
}
Table table = null;
try {
final Scan scan = new Scan();
if (null != idsIterator) {
// Consume up to maxEntriesForBatchScanner ids from the iterator and
// collect their row ranges; remaining ids are left for the next batch.
final List<MultiRowRangeFilter.RowRange> rowRanges = new ArrayList<>();
final int maxEntriesForBatchScanner = store.getProperties().getMaxEntriesForBatchScanner();
int count = 0;
while (idsIterator.hasNext() && count < maxEntriesForBatchScanner) {
count++;
rowRanges.addAll(rowRangeFactory.getRowRange(idsIterator.next(), operation));
}
if (rowRanges.isEmpty()) {
// Batch matched nothing; avoid an unfiltered full-table scan.
return new WrappedCloseableIterable<>(Collections.emptyList());
}
scan.setFilter(new MultiRowRangeFilter(rowRanges));
}
scan.setAuthorizations(authorisations);
// Attach serialised schema/view and flags so server-side processing
// (coprocessors/filters) can interpret the scanned cells.
scan.setAttribute(HBaseStoreConstants.SCHEMA, store.getSchema().toCompactJson());
scan.setAttribute(HBaseStoreConstants.INCLUDE_MATCHED_VERTEX, Bytes.toBytes(Boolean.toString(includeMatchedVertex)));
scan.setAttribute(HBaseStoreConstants.VIEW, operation.getView().toCompactJson());
if (null != operation.getDirectedType()) {
scan.setAttribute(HBaseStoreConstants.DIRECTED_TYPE, Bytes.toBytes(operation.getDirectedType().name()));
}
if (null != extraProcessors) {
scan.setAttribute(HBaseStoreConstants.EXTRA_PROCESSORS, extraProcessors);
}
// All versions are requested; presumably version filtering happens
// downstream — TODO confirm against the store's processing pipeline.
scan.setMaxVersions();
table = store.getTable();
return new WrappedCloseableIterable<>(table.getScanner(scan));
} catch (final IOException | StoreException e) {
// Close the table only on failure; on success the caller owns the
// returned iterable (and, transitively, the scanner's resources).
if (null != table) {
CloseableUtil.close(table);
}
throw new RuntimeException(e);
}
}
示例4: createSubmittableJob
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter; //导入方法依赖的package包/类
/**
 * Sets up the actual job.
 *
 * @param conf The current configuration.
 * @param args The command line parameters.
 * @return The newly created job, or {@code null} if the arguments are invalid.
 * @throws IOException When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args)
    throws IOException {
  String tableName = args[0];
  List<MultiRowRangeFilter.RowRange> rowRangeList = null;
  long startTime = 0;
  long endTime = 0;
  StringBuilder sb = new StringBuilder();
  final String rangeSwitch = "--range=";
  final String startTimeArgKey = "--starttime=";
  final String endTimeArgKey = "--endtime=";
  final String expectedCountArg = "--expected-count=";
  // First argument is table name, starting from second
  for (int i = 1; i < args.length; i++) {
    if (args[i].startsWith(rangeSwitch)) {
      try {
        rowRangeList = parseRowRangeParameter(args[i], rangeSwitch);
      } catch (IllegalArgumentException e) {
        // parseRowRangeParameter already printed usage; signal invalid args.
        return null;
      }
      continue;
    }
    if (args[i].startsWith(startTimeArgKey)) {
      startTime = Long.parseLong(args[i].substring(startTimeArgKey.length()));
      continue;
    }
    if (args[i].startsWith(endTimeArgKey)) {
      endTime = Long.parseLong(args[i].substring(endTimeArgKey.length()));
      continue;
    }
    if (args[i].startsWith(expectedCountArg)) {
      conf.setLong(EXPECTED_COUNT_KEY,
          Long.parseLong(args[i].substring(expectedCountArg.length())));
      continue;
    }
    // if no switch, assume column names
    sb.append(args[i]);
    sb.append(" ");
  }
  // endTime == 0 means "not specified" and is mapped to LATEST_TIMESTAMP
  // below, so only validate ordering when an explicit end time was given.
  // (Previously "--starttime=X" without "--endtime" was always rejected.)
  if (endTime != 0 && endTime < startTime) {
    printUsage("--endtime=" + endTime + " needs to be greater than --starttime=" + startTime);
    return null;
  }
  Job job = Job.getInstance(conf, conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
  job.setJarByClass(RowCounter.class);
  Scan scan = new Scan();
  // Counting never re-reads rows, so block caching only pollutes the cache.
  scan.setCacheBlocks(false);
  setScanFilter(scan, rowRangeList);
  if (sb.length() > 0) {
    for (String columnName : sb.toString().trim().split(" ")) {
      String family = StringUtils.substringBefore(columnName, ":");
      String qualifier = StringUtils.substringAfter(columnName, ":");
      if (StringUtils.isBlank(qualifier)) {
        scan.addFamily(Bytes.toBytes(family));
      } else {
        scan.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier));
      }
    }
  }
  scan.setTimeRange(startTime, endTime == 0 ? HConstants.LATEST_TIMESTAMP : endTime);
  // Map-only job: counting is done via counters, nothing is written out.
  job.setOutputFormatClass(NullOutputFormat.class);
  TableMapReduceUtil.initTableMapperJob(tableName, scan,
      RowCounterMapper.class, ImmutableBytesWritable.class, Result.class, job);
  job.setNumReduceTasks(0);
  return job;
}