This article collects typical usage examples of the Java method org.apache.hadoop.hbase.filter.FilterList.getOperator. If you are wondering what FilterList.getOperator does, how it is used in practice, or are looking for real examples of it, the curated method examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.filter.FilterList.
The following shows 5 code examples of the FilterList.getOperator method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
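Before the project examples, here is a minimal, self-contained sketch of the method itself. It is not taken from any of the projects below, uses the older HBase 1.x-style CompareOp API that matches the snippets on this page, and the column family, qualifiers, and values are made up for illustration. getOperator() simply reports whether a FilterList was built to AND (MUST_PASS_ALL) or OR (MUST_PASS_ONE) its children.

import java.util.Arrays;

import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class GetOperatorDemo {
  public static void main(String[] args) {
    // Two simple column-value filters; family/qualifier/values are illustrative only.
    Filter f1 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q1"), CompareOp.EQUAL, Bytes.toBytes("a"));
    Filter f2 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q2"), CompareOp.EQUAL, Bytes.toBytes("b"));

    // MUST_PASS_ONE means the list behaves like a logical OR of its children.
    FilterList orList = new FilterList(Operator.MUST_PASS_ONE, Arrays.asList(f1, f2));

    // getOperator() reports which combination mode the list was built with.
    if (orList.getOperator() == Operator.MUST_PASS_ONE) {
      System.out.println("This FilterList ORs its children");
    } else {
      System.out.println("This FilterList ANDs its children");
    }
  }
}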
Example 1: handleFilterWithinAND
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
private Filter handleFilterWithinAND(Filter filter) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    if (fList.getOperator() == Operator.MUST_PASS_ONE) {
      return new FilterGroupingWorker().group(fList);
    } else {
      List<Filter> filters = fList.getFilters();
      for (Filter subFilter : filters) {
        handleFilterWithinAND(subFilter);
      }
    }
  } else if (filter instanceof SingleColumnValueFilter) {
    handleScvf((SingleColumnValueFilter) filter);
  } // TODO: handle SingleColumnRangeFilter here as well once it is exposed.
  return null;
}
Example 2: evalFilterForIndexSelection
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
FilterNode evalFilterForIndexSelection(Filter filter, List<IndexSpecification> indices) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    GroupingCondition condition =
        (fList.getOperator() == Operator.MUST_PASS_ALL) ? GroupingCondition.AND
            : GroupingCondition.OR;
    NonLeafFilterNode nonLeafFilterNode = new NonLeafFilterNode(condition);
    List<Filter> filters = fList.getFilters();
    for (Filter fltr : filters) {
      FilterNode node = evalFilterForIndexSelection(fltr, indices);
      nonLeafFilterNode.addFilterNode(node);
    }
    return handleNonLeafFilterNode(nonLeafFilterNode);
  } else if (filter instanceof SingleColumnValueFilter) {
    // Check for the availability of an index
    return selectBestFitAndPossibleIndicesForSCVF(indices, (SingleColumnValueFilter) filter);
  } else if (filter instanceof SingleColumnRangeFilter) {
    return selectBestFitAndPossibleIndicesForSCRF(indices, (SingleColumnRangeFilter) filter);
  }
  return new NoIndexFilterNode();
}
Example 3: adapt
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
@Override
public RowFilter adapt(FilterAdapterContext context, FilterList filter) throws IOException {
  if (filter.getOperator() == Operator.MUST_PASS_ALL) {
    return filterMustPassAll(context, filter);
  } else {
    return filterMustPassOne(context, filter);
  }
}
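The adapter above only branches on getOperator() and delegates to filterMustPassAll / filterMustPassOne, which (together with FilterAdapterContext) belong to the surrounding adapter and are not shown here. As a hedged, stand-alone sketch of the same dispatch pattern, the helper below renders any filter hierarchy as a boolean expression string:

import java.util.List;

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;

public class FilterExpressionPrinter {

  // Renders a filter hierarchy as a boolean expression, dispatching on
  // FilterList.getOperator() the same way the adapter above does.
  public static String toExpression(Filter filter) {
    if (!(filter instanceof FilterList)) {
      // Leaf filter: just print its class name.
      return filter.getClass().getSimpleName();
    }
    FilterList fList = (FilterList) filter;
    String op = (fList.getOperator() == Operator.MUST_PASS_ALL) ? " AND " : " OR ";
    List<Filter> children = fList.getFilters();
    StringBuilder sb = new StringBuilder("(");
    for (int i = 0; i < children.size(); i++) {
      if (i > 0) {
        sb.append(op);
      }
      sb.append(toExpression(children.get(i)));
    }
    return sb.append(")").toString();
  }
}

For example, a MUST_PASS_ALL list containing two SingleColumnValueFilters would print as (SingleColumnValueFilter AND SingleColumnValueFilter).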
Example 4: handleFilterWithinOR
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
/**
 * Since FilterLists can be used as children of other FilterLists, a whole hierarchy of filters
 * can be evaluated. Within that hierarchy, if an OR branch has any child that is not an SCVF,
 * the branch must not be considered for scanning, because seek points cannot be fetched from
 * other filter types that carry no column and value details.
 * Example: AND(OR(ROWFILTER, SCVF), SCVF) reduces to AND(SCVF); the OR branch is dropped.
 * If the OR is the root, index table scanning is skipped for the whole filter:
 * OR(ROWFILTER, SCVF) becomes null.
 * If the OR is the child of another OR branch, the parent OR branch is excluded from scanning
 * as well: AND(OR(OR(ROWFILTER, SCVF), SCVF), SCVF) also reduces to AND(SCVF).
 * @param filter the filter (possibly a FilterList) to examine
 * @return if the filter is a FilterList with an AND condition, the AND branch after grouping;
 *         if the filter is a FilterList with an OR condition, null when every child is an SCVF
 *         or a FilterList, else the offending filter; if the filter is an SCVF, null.
 *         Returning null means the filter(s) are combined with the children of the parent OR
 *         filter to perform optimizations.
 */
private Filter handleFilterWithinOR(Filter filter) {
  if (filter instanceof FilterList) {
    FilterList fList = (FilterList) filter;
    if (fList.getOperator() == Operator.MUST_PASS_ONE) {
      List<Filter> filters = fList.getFilters();
      Filter resultFilter = null;
      for (Filter subFilter : filters) {
        // If this OR branch in the filter list has a filter type other than SCVF, report it
        // to the parent by returning that other filter, so that the branch will be skipped
        // from the index scan.
        resultFilter = handleFilterWithinOR(subFilter);
        if (resultFilter == null || (resultFilter instanceof FilterList)) {
          continue;
        } else {
          return resultFilter;
        }
      }
      return null;
    } else {
      return new FilterGroupingWorker().group(fList);
    }
  } else if (filter instanceof SingleColumnValueFilter) {
    handleScvfOfOR((SingleColumnValueFilter) filter);
    return null;
  } // TODO: handle SingleColumnRangeFilter here as well once it is exposed.
  // Filter other than SingleColumnValueFilter.
  return filter;
}
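To make the javadoc of handleFilterWithinOR concrete, here is a hedged sketch (family, qualifiers, and values are invented for illustration, using the 1.x-style CompareOp API) that builds the first tree it describes, AND(OR(ROWFILTER, SCVF), SCVF). Called on the OR branch, handleFilterWithinOR would return the RowFilter, signalling the caller to drop that branch from index scanning:

import java.util.Arrays;

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class OrBranchExample {
  public static void main(String[] args) {
    // SCVF children; family/qualifier/values are illustrative only.
    Filter scvf1 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q1"), CompareOp.EQUAL, Bytes.toBytes("a"));
    Filter scvf2 = new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("q2"), CompareOp.EQUAL, Bytes.toBytes("b"));

    // A RowFilter carries no column/value details, so no seek points can be derived from it.
    Filter rowFilter =
        new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row1")));

    // OR(ROWFILTER, SCVF): handleFilterWithinOR would return the RowFilter for this branch,
    // telling the caller to exclude the whole branch from index scanning.
    FilterList orBranch = new FilterList(Operator.MUST_PASS_ONE, Arrays.asList(rowFilter, scvf1));

    // AND(OR(...), SCVF): after grouping, only the plain SCVF side remains usable for the index.
    FilterList root =
        new FilterList(Operator.MUST_PASS_ALL, Arrays.asList((Filter) orBranch, scvf2));

    System.out.println(root.getOperator()); // MUST_PASS_ALL
  }
}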
Example 5: wrap
import org.apache.hadoop.hbase.filter.FilterList; // import the package/class the method depends on
/**
 * Wraps an existing {@link FilterList} filter into a {@link ModelAwareFilterList}.
 */
public static ModelAwareFilterList wrap(FilterList filter) {
  return new ModelAwareFilterList(filter.getOperator(), filter.getFilters());
}