This article collects typical usage examples of the Java method org.apache.hadoop.hbase.filter.FilterList.getFilters. If you are wondering what FilterList.getFilters does, how to use it, or want concrete examples, the selected code samples below may help. You can also read more about the enclosing class org.apache.hadoop.hbase.filter.FilterList.
The following presents 7 code examples of the FilterList.getFilters method, ordered roughly by popularity.
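Before the individual examples, here is a minimal sketch of what getFilters exposes: it returns the immediate child filters of a FilterList, which can be inspected together with getOperator. The column family "cf", qualifier "q", and values are placeholders, and the SingleColumnValueFilter constructor shown assumes an HBase 1.x-style API (CompareFilter.CompareOp); adjust for your HBase version.

import java.util.List;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class GetFiltersBasics {
  public static void main(String[] args) {
    // Two leaf filters on a placeholder column cf:q
    Filter f1 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q"), CompareOp.EQUAL, Bytes.toBytes("a"));
    Filter f2 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q"), CompareOp.GREATER, Bytes.toBytes("b"));

    // An OR list wrapping the two leaves
    FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE, f1, f2);

    // getFilters() returns the immediate children in order
    List<Filter> children = orList.getFilters();
    System.out.println(orList.getOperator() + " with " + children.size() + " children");
    for (Filter child : children) {
      System.out.println("  " + child.getClass().getSimpleName());
    }
  }
}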
Example 1: handleFilterWithinAND
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
private Filter handleFilterWithinAND(Filter filter) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    if (fList.getOperator() == Operator.MUST_PASS_ONE) {
      return new FilterGroupingWorker().group(fList);
    } else {
      List<Filter> filters = fList.getFilters();
      for (Filter subFilter : filters) {
        handleFilterWithinAND(subFilter);
      }
    }
  } else if (filter instanceof SingleColumnValueFilter) {
    handleScvf((SingleColumnValueFilter) filter);
  } // TODO when we expose SingleColumnRangeFilter to handle that also here.
  return null;
}
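The traversal pattern above, descending a MUST_PASS_ALL list via getFilters() while treating MUST_PASS_ONE sublists specially, can be tried in isolation. The sketch below is a simplified, hypothetical stand-in (there is no FilterGroupingWorker or handleScvf here); it only reports what kind of node each child of an AND branch is.

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;

public class AndTraversalSketch {
  // Recursively walk an AND branch, stopping the descent at OR sublists,
  // which handleFilterWithinAND hands off to a grouping step instead.
  static void walkWithinAnd(Filter filter, String indent) {
    if (filter instanceof FilterList) {
      FilterList fList = (FilterList) filter;
      if (fList.getOperator() == Operator.MUST_PASS_ONE) {
        System.out.println(indent + "OR sublist -> handled/grouped separately");
      } else {
        System.out.println(indent + "AND sublist, " + fList.getFilters().size() + " children");
        for (Filter sub : fList.getFilters()) {
          walkWithinAnd(sub, indent + "  ");
        }
      }
    } else if (filter instanceof SingleColumnValueFilter) {
      System.out.println(indent + "SCVF leaf -> usable for index seek points");
    } else {
      System.out.println(indent + filter.getClass().getSimpleName() + " -> not handled here");
    }
  }
}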
Example 2: evalFilterForIndexSelection
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
FilterNode evalFilterForIndexSelection(Filter filter, List<IndexSpecification> indices) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    GroupingCondition condition =
        (fList.getOperator() == Operator.MUST_PASS_ALL) ? GroupingCondition.AND
            : GroupingCondition.OR;
    NonLeafFilterNode nonLeafFilterNode = new NonLeafFilterNode(condition);
    List<Filter> filters = fList.getFilters();
    for (Filter fltr : filters) {
      FilterNode node = evalFilterForIndexSelection(fltr, indices);
      nonLeafFilterNode.addFilterNode(node);
    }
    return handleNonLeafFilterNode(nonLeafFilterNode);
  } else if (filter instanceof SingleColumnValueFilter) {
    // Check for the availability of index
    return selectBestFitAndPossibleIndicesForSCVF(indices, (SingleColumnValueFilter) filter);
  } else if (filter instanceof SingleColumnRangeFilter) {
    return selectBestFitAndPossibleIndicesForSCRF(indices, (SingleColumnRangeFilter) filter);
  }
  return new NoIndexFilterNode();
}
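Example 2 maps getOperator() to an AND/OR grouping condition and recurses over getFilters() to build a node tree. FilterNode, NonLeafFilterNode, and IndexSpecification belong to the surrounding (hindex-style) codebase; the hypothetical sketch below keeps only the operator-mapping idea and renders the filter hierarchy as a string instead.

import java.util.List;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;

public class FilterTreePrinter {
  // Render a filter hierarchy as e.g. "AND(SingleColumnValueFilter, OR(RowFilter, SingleColumnValueFilter))".
  static String describe(Filter filter) {
    if (filter instanceof FilterList) {
      FilterList fList = (FilterList) filter;
      String condition = (fList.getOperator() == Operator.MUST_PASS_ALL) ? "AND" : "OR";
      StringBuilder sb = new StringBuilder(condition).append("(");
      List<Filter> children = fList.getFilters();
      for (int i = 0; i < children.size(); i++) {
        if (i > 0) {
          sb.append(", ");
        }
        sb.append(describe(children.get(i)));
      }
      return sb.append(")").toString();
    }
    return filter.getClass().getSimpleName();
  }
}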
Example 3: filterMustPassOne
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
private RowFilter filterMustPassOne(FilterAdapterContext context, FilterList filter)
    throws IOException {
  Interleave.Builder interLeaveBuilder = Interleave.newBuilder();
  for (Filter subFilter : filter.getFilters()) {
    interLeaveBuilder.addFilters(subFilterAdapter.adaptFilter(context, subFilter));
  }
  return RowFilter.newBuilder()
      .setInterleave(interLeaveBuilder)
      .build();
}
Example 4: filterMustPassAll
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
private RowFilter filterMustPassAll(FilterAdapterContext context, FilterList filter)
    throws IOException {
  Chain.Builder chainBuilder = Chain.newBuilder();
  for (Filter subFilter : filter.getFilters()) {
    chainBuilder.addFilters(subFilterAdapter.adaptFilter(context, subFilter));
  }
  return RowFilter.newBuilder()
      .setChain(chainBuilder)
      .build();
}
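Examples 3 and 4 come from an HBase-to-Cloud-Bigtable filter adapter: a MUST_PASS_ONE list becomes an Interleave of adapted sub-filters, a MUST_PASS_ALL list a Chain. The shared shape, dispatch on getOperator() and adapt each element of getFilters(), can be sketched without the Bigtable protobuf types; AdaptedNode below is a hypothetical placeholder for whatever the target representation is.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;

public class FilterListAdapterSketch {
  // Hypothetical target representation standing in for RowFilter/Chain/Interleave.
  static class AdaptedNode {
    final String kind; // "CHAIN", "INTERLEAVE", or "LEAF"
    final List<AdaptedNode> children = new ArrayList<>();
    AdaptedNode(String kind) { this.kind = kind; }
  }

  static AdaptedNode adapt(Filter filter) {
    if (filter instanceof FilterList) {
      FilterList fList = (FilterList) filter;
      // MUST_PASS_ALL -> chain (logical AND), MUST_PASS_ONE -> interleave (logical OR)
      AdaptedNode node = new AdaptedNode(
          fList.getOperator() == Operator.MUST_PASS_ALL ? "CHAIN" : "INTERLEAVE");
      for (Filter sub : fList.getFilters()) {
        node.children.add(adapt(sub));
      }
      return node;
    }
    return new AdaptedNode("LEAF"); // a real adapter would translate the leaf filter here
  }
}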
Example 5: collectUnsupportedStatuses
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
@Override
public void collectUnsupportedStatuses(
    FilterAdapterContext context,
    FilterList filter,
    List<FilterSupportStatus> unsupportedStatuses) {
  for (Filter subFilter : filter.getFilters()) {
    subFilterAdapter.collectUnsupportedStatuses(context, subFilter, unsupportedStatuses);
  }
}
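Example 5 simply fans out over getFilters() so that each child can report whether the adapter supports it. A self-contained, hypothetical variant of the same idea: walk the list recursively and collect the class names of children that are not in a known-supported set (the set itself is a placeholder).

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;

public class UnsupportedFilterCollector {
  // Placeholder set of filter types this hypothetical adapter can handle.
  private static final Set<String> SUPPORTED = new HashSet<>(
      Arrays.asList("SingleColumnValueFilter", "PrefixFilter", "FilterList"));

  static void collectUnsupported(Filter filter, List<String> unsupported) {
    if (filter instanceof FilterList) {
      for (Filter sub : ((FilterList) filter).getFilters()) {
        collectUnsupported(sub, unsupported); // recurse into nested lists
      }
    } else if (!SUPPORTED.contains(filter.getClass().getSimpleName())) {
      unsupported.add(filter.getClass().getSimpleName());
    }
  }
}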
Example 6: handleFilterWithinOR
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
/**
 * Since Filter Lists can be used as children of other Filter Lists, a hierarchy of filters can
 * be built for evaluation. Within that hierarchy, if an OR branch has any child of a type other
 * than SCVF, the branch should not be considered for index scanning, because seek points cannot
 * be fetched from other filter types that lack column and value details.
 * Ex: in AND(SCVF, OR(ROWFILTER, SCVF), SCVF), the OR branch must be skipped for index scanning.
 * If the OR is the root, e.g. OR(ROWFILTER, SCVF), index table scanning is skipped entirely for
 * this filter (null is returned). If the OR is a child of another OR branch, the parent OR
 * branch is excluded from scanning. Ex: AND(SCVF, OR(OR(ROWFILTER, SCVF), SCVF), SCVF).
 * @param filter
 * @return if the filter is a filter list with AND condition, the AND branch after grouping; if
 *         the filter is a filter list with OR condition, null when no child is of a type other
 *         than SCVF or filter list, otherwise a different filter; if the filter is an SCVF,
 *         null. Returning null means the filter(s) are combined with the children of the parent
 *         OR filter to perform optimizations.
 */
private Filter handleFilterWithinOR(Filter filter) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    if (fList.getOperator() == Operator.MUST_PASS_ONE) {
      List<Filter> filters = fList.getFilters();
      Filter resultFilter = null;
      for (Filter subFilter : filters) {
        // If this OR branch in the filter list have filter type other than SCVF we should report
        // it to parent by returning the other type of filter in such a way that the branch will
        // be skipped from index scan.
        resultFilter = handleFilterWithinOR(subFilter);
        if (resultFilter == null || (resultFilter instanceof FilterList)) {
          continue;
        } else {
          return resultFilter;
        }
      }
      return null;
    } else {
      return new FilterGroupingWorker().group(fList);
    }
  } else if (filter instanceof SingleColumnValueFilter) {
    handleScvfOfOR((SingleColumnValueFilter) filter);
    return null;
  } // TODO when we expose SingleColumnRangeFilter to handle that also here.
  // filter other than SingleColumnValueFilter.
  return filter;
}
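The core rule Example 6 enforces is: inside a MUST_PASS_ONE branch, any child that is neither a SingleColumnValueFilter nor another FilterList makes the branch unusable for index scanning. A stripped-down, hypothetical check for that rule (without the grouping and seek-point bookkeeping of the original) might look like this:

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;

public class OrBranchCheck {
  // Returns the first filter that disqualifies an OR branch from index scanning,
  // or null if every reachable leaf under the branch is a SingleColumnValueFilter.
  static Filter findDisqualifyingFilter(Filter filter) {
    if (filter instanceof FilterList) {
      for (Filter sub : ((FilterList) filter).getFilters()) {
        Filter offending = findDisqualifyingFilter(sub);
        if (offending != null) {
          return offending;
        }
      }
      return null;
    }
    return (filter instanceof SingleColumnValueFilter) ? null : filter;
  }
}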
Example 7: wrap
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
/**
 * Wraps an existing {@link FilterList} filter into a {@link ModelAwareFilterList}.
 */
public static ModelAwareFilterList wrap(FilterList filter) {
  return new ModelAwareFilterList(filter.getOperator(), filter.getFilters());
}