

Java FilterList Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.filter.FilterList. If you have been wondering what exactly the FilterList class does, how to use it, or where to find working examples, the hand-picked class code examples below may help.


The FilterList class belongs to the org.apache.hadoop.hbase.filter package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
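Before turning to the examples, here is a minimal, self-contained sketch of the workflow they all share: pick a combining operator, add filters, and attach the FilterList to a Scan (or Get). This sketch is not taken from any of the projects below; the table name my_table and the row-key prefix row- are placeholder assumptions.

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListSketch {
  public static void main(String[] args) throws IOException {
    // MUST_PASS_ALL combines the filters with logical AND;
    // MUST_PASS_ONE would combine them with logical OR.
    FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.asList(
        new PrefixFilter(Bytes.toBytes("row-")), // row key must start with "row-"
        new PageFilter(10)));                    // return at most 10 rows per region server

    Scan scan = new Scan();
    scan.setFilter(filters);

    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("my_table"));
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result result : scanner) {
        System.out.println(Bytes.toString(result.getRow()));
      }
    }
  }
}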

Example 1: andFilterAtIndex

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
public static Filter andFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ALL) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ALL, allFilters);
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 17, Source file: HBaseUtils.java

Example 2: orFilterAtIndex

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
public static Filter orFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ONE) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ONE, allFilters);
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 17, Source file: HBaseUtils.java
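For illustration, the hedged snippet below shows one way these two helpers might be chained. It assumes HBaseUtils.LAST_FILTER is the accessible append-at-the-end sentinel index referenced above; the call site itself is invented and does not come from the QDrill sources.

// Hypothetical call site; HBaseUtils.LAST_FILTER is assumed to be visible here.
Filter combined = HBaseUtils.andFilterAtIndex(null, HBaseUtils.LAST_FILTER,
    new PrefixFilter(Bytes.toBytes("row-"))); // null current filter: the prefix filter is returned as-is
combined = HBaseUtils.andFilterAtIndex(combined, HBaseUtils.LAST_FILTER,
    new PageFilter(100));                     // now both filters are wrapped in a MUST_PASS_ALL FilterList
combined = HBaseUtils.orFilterAtIndex(combined, HBaseUtils.LAST_FILTER,
    new KeyOnlyFilter());                     // the AND list becomes one branch of a new MUST_PASS_ONE list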

Example 3: createFilter

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
static Optional<Filter> createFilter(Object[] args) {
  if (args.length == 0) {
    return Optional.empty();
  }
  FilterList filters = new FilterList();
  for (int i = 0; i < args.length; i++) {
    Object filter = args[i];
    try {
      checkArgument(filter instanceof Filter,
          "Filter " + i + " must be of type " + Filter.class.getName()
              + " but is of type " + filter.getClass().getName());
    } catch (IllegalArgumentException e) {
      throw new CacheLoaderException(e);
    }
    filters.addFilter((Filter) filter);
  }
  return Optional.of(filters);
}
 
Developer ID: bakdata, Project: ignite-hbase, Lines of code: 19, Source file: FilterParser.java
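A hypothetical call site for createFilter, shown purely for illustration; it assumes the caller can see this package-private method and that scan is an existing Scan instance.

// Hypothetical usage; 'scan' is assumed to be an existing Scan.
Optional<Filter> maybeFilter = FilterParser.createFilter(
    new Object[] { new KeyOnlyFilter(), new PageFilter(10) });
// An empty Optional means no filter arguments were supplied.
maybeFilter.ifPresent(scan::setFilter);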

Example 4: testTwoFilterWithMustAllPassFailed

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
@Test
public void testTwoFilterWithMustAllPassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        assertTrue(result.getRow() == null);
    }
}
 
Developer ID: aliyun, Project: aliyun-tablestore-hbase-client, Lines of code: 26, Source file: TestFilterList.java

Example 5: testTwoFilterWithMustOnePassFailed

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
@Test
public void testTwoFilterWithMustOnePassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        // MUST_PASS_ONE matches the test name; both filters fail, so the get returns nothing.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        assertTrue(result.getRow() == null);
    }
}
 
Developer ID: aliyun, Project: aliyun-tablestore-hbase-client, Lines of code: 26, Source file: TestFilterList.java

Example 6: doRawScan

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private void doRawScan() throws IOException {
  FilterList filterList = new FilterList();
  CompareFilter.CompareOp startOp = CompareFilter.CompareOp.GREATER_OR_EQUAL;
  CompareFilter.CompareOp stopOp = CompareFilter.CompareOp.LESS_OR_EQUAL;
  for (int i = 0; i < indexColumnNames.length && i < scanValues.length; i++) {
    filterList.addFilter(
        new SingleColumnValueFilter(familyName, Bytes.toBytes(indexColumnNames[i]), startOp,
            Bytes.toBytes(scanValues[i][0])));
    filterList.addFilter(
        new SingleColumnValueFilter(familyName, Bytes.toBytes(indexColumnNames[i]), stopOp,
            Bytes.toBytes(scanValues[i][1])));
  }
  Scan scan = new Scan();
  scan.setFilter(filterList);
  scan.setId("raw-scan");
  Table table = conn.getTable(tableName);
  ResultScanner scanner = table.getScanner(scan);
  Result result;
  int count = 0;
  while ((result = scanner.next()) != null) {
    ++count;
    if (PRINT_RESULT) printResult(result);
  }
  scanner.close();
  System.out.println("raw scan has " + count + " records");
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 27, Source file: LMDTester.java

Example 7: preprocess

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
public static ConditionTree preprocess(HRegion region, Filter filter, float maxScale) {
  if (filter == null) return null;
  ConditionTree tree = null;
  if (isIndexFilter(region, filter)) {
    System.out.println("preprocess A");
    tree = new ConditionTreeNoneLeafNode(region, (SingleColumnValueFilter) filter, maxScale);
  } else if (filter instanceof FilterList) {
    System.out.println("preprocess B");
    tree = new ConditionTreeNoneLeafNode(region, (FilterList) filter, maxScale);
  }
  if (tree == null) {
    // Neither an index filter nor a FilterList: nothing to build,
    // so bail out instead of dereferencing a null tree.
    return null;
  }
  if (tree.isPrune()) {
    System.out.println("return null for prune");
    return null;
  } else {
    return tree;
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 18, Source file: ScanPreprocess.java

Example 8: testRow

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
@Override
void testRow(final int i) throws IOException {
  Scan scan = new Scan(getRandomRow(this.rand, opts.totalRows));
  scan.setCaching(opts.caching);
  FilterList list = new FilterList();
  if (opts.addColumns) {
    scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
  } else {
    scan.addFamily(FAMILY_NAME);
  }
  if (opts.filterAll) {
    list.addFilter(new FilterAllFilter());
  }
  list.addFilter(new WhileMatchFilter(new PageFilter(120)));
  scan.setFilter(list);
  ResultScanner s = this.table.getScanner(scan);
  for (Result rr; (rr = s.next()) != null;) {
    updateValueSize(rr);
  }
  s.close();
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 22, Source file: PerformanceEvaluation.java

Example 9: constructScan

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
protected Scan constructScan(byte[] valuePrefix) throws IOException {
  FilterList list = new FilterList();
  Filter filter = new SingleColumnValueFilter(
      FAMILY_NAME, COLUMN_ZERO, CompareFilter.CompareOp.EQUAL,
      new BinaryComparator(valuePrefix)
  );
  list.addFilter(filter);
  if(opts.filterAll) {
    list.addFilter(new FilterAllFilter());
  }
  Scan scan = new Scan();
  scan.setCaching(opts.caching);
  if (opts.addColumns) {
    scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
  } else {
    scan.addFamily(FAMILY_NAME);
  }
  scan.setFilter(list);
  return scan;
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 21, Source file: PerformanceEvaluation.java

Example 10: buildScanner

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private ResultScanner buildScanner(String keyPrefix, String value, Table ht)
    throws IOException {
  // OurFilterList allFilters = new OurFilterList();
  FilterList allFilters = new FilterList(/* FilterList.Operator.MUST_PASS_ALL */);
  allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
  SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes
      .toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes
      .toBytes(value));
  filter.setFilterIfMissing(true);
  allFilters.addFilter(filter);

  // allFilters.addFilter(new
  // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
  // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));

  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("trans-blob"));
  scan.addFamily(Bytes.toBytes("trans-type"));
  scan.addFamily(Bytes.toBytes("trans-date"));
  scan.addFamily(Bytes.toBytes("trans-tags"));
  scan.addFamily(Bytes.toBytes("trans-group"));
  scan.setFilter(allFilters);

  return ht.getScanner(scan);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 26, Source file: TestFromClientSide.java

Example 11: buildScanner

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
    throws IOException {
  // Defaults FilterList.Operator.MUST_PASS_ALL.
  FilterList allFilters = new FilterList();
  allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
  // Only return rows where this column value exists in the row.
  SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
      Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
  filter.setFilterIfMissing(true);
  allFilters.addFilter(filter);
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("trans-blob"));
  scan.addFamily(Bytes.toBytes("trans-type"));
  scan.addFamily(Bytes.toBytes("trans-date"));
  scan.addFamily(Bytes.toBytes("trans-tags"));
  scan.addFamily(Bytes.toBytes("trans-group"));
  scan.setFilter(allFilters);
  return r.getScanner(scan);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 20, Source file: TestHRegion.java

Example 12: getVertexIndexScanWithLimit

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private Scan getVertexIndexScanWithLimit(String label, boolean isUnique, String key, Object from, int limit, boolean reversed) {
    byte[] prefix = serializeForRead(label, isUnique, key, null);
    byte[] startRow = from != null
            ? serializeForRead(label, isUnique, key, from)
            : prefix;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    if (graph.configuration().getInstanceType() == HBaseGraphConfiguration.InstanceType.BIGTABLE) {
        if (reversed) {
            throw new UnsupportedOperationException("Reverse scans not supported by Bigtable");
        } else {
            // PrefixFilter in Bigtable does not automatically stop
            // See https://github.com/GoogleCloudPlatform/cloud-bigtable-client/issues/1087
            stopRow = HBaseGraphUtils.incrementBytes(prefix);
        }
    }
    if (reversed) startRow = HBaseGraphUtils.incrementBytes(startRow);
    Scan scan = new Scan(startRow, stopRow);
    FilterList filterList = new FilterList();
    filterList.addFilter(new PrefixFilter(prefix));
    filterList.addFilter(new PageFilter(limit));
    scan.setFilter(filterList);
    scan.setReversed(reversed);
    return scan;
}
 
Developer ID: rayokota, Project: hgraphdb, Lines of code: 25, Source file: VertexIndexModel.java

Example 13: getColumnValueFilters

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private FilterList getColumnValueFilters(Row row) {
  FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
  Set<String> filterColumnNames = Sets.newHashSet(row.schema().fieldNames());
  
  for (Map.Entry<String, ColumnDef> column : columns.entrySet()) {
    if (!column.getValue().cf.equals("rowkey")) {
      if (filterColumnNames.contains(column.getKey())) {
        byte[] value = getColumnValueAsBytes(column.getValue().name, column.getValue().type, row);
        if (value != null) {
          SingleColumnValueFilter columnValueFilter = new SingleColumnValueFilter(
              Bytes.toBytes(column.getValue().cf),
              Bytes.toBytes(column.getValue().name),
              CompareFilter.CompareOp.EQUAL,
              value
          );
          filterList.addFilter(columnValueFilter);
        }
      }
    }
  }
  
  return filterList;
}
 
Developer ID: cloudera-labs, Project: envelope, Lines of code: 24, Source file: DefaultHBaseSerde.java

Example 14: run

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
@Override
public void run() {
    try (HTableInterface table = connection.getTable(tableName.getBytes())) {
        // Use Scan instead of Get so the read-request-count metric is not incremented.
        Scan scan = new Scan("".getBytes(), "".getBytes());
        FilterList filterList = new FilterList();
        filterList.addFilter(new KeyOnlyFilter());
        filterList.addFilter(new FirstKeyOnlyFilter());
        scan.setFilter(filterList);
        //noinspection EmptyTryBlock
        try(ResultScanner ignored = table.getScanner(scan)) {
        }
        return;
    } catch (IOException ignore) {
    }

    clean(tableName);
}
 
Developer ID: kakao, Project: hbase-tools, Lines of code: 20, Source file: RegionLocationCleaner.java

Example 15: constructFilterForContain

import org.apache.hadoop.hbase.filter.FilterList; // import the required package/class
private static Filter constructFilterForContain(
        HBaseColumnSchema hbaseColumnSchema, CompareOp compareOp,
        List<Object> list, Operator operator) {
    Util.checkNull(hbaseColumnSchema);
    Util.checkNull(compareOp);
    Util.checkNull(list);
    Util.checkNull(operator);

    List<Filter> filters = new ArrayList<Filter>();
    for (Object obj : list) {
        filters.add(constructFilter(hbaseColumnSchema, compareOp, obj));
    }

    FilterList filterList = new FilterList(operator, filters);
    return filterList;
}
 
Developer ID: xushaomin, Project: apple-data, Lines of code: 17, Source file: FilterVisitor.java


Note: The org.apache.hadoop.hbase.filter.FilterList class examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers, and the copyright of the source code remains with its original authors. Consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.