

Java Filter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.filter.Filter. If you are wondering what the Filter class does, how to use it, or what real-world code using it looks like, the curated examples below should help.


The Filter class belongs to the org.apache.hadoop.hbase.filter package. The sections below present 15 code examples of the class, ordered by popularity.
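Before the numbered examples, here is a minimal, self-contained sketch of the workflow they all build on: construct a Filter, attach it to a Scan, and iterate the filtered rows. The connection setup, the table name "exampleTable", and the row-key pattern are illustrative placeholders, not code from any of the projects cited below.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterQuickStart {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("exampleTable"))) {
      // Keep only rows whose key starts with "aa", mirroring the RowFilter
      // used in several of the examples below.
      Filter filter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
      Scan scan = new Scan();
      scan.setFilter(filter);
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println("row: " + Bytes.toString(result.getRow()));
        }
      }
    }
  }
}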

Example 1: configure

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
@Override
public void configure(JobConf job) {
  try {
    Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
    TableName tableName = TableName.valueOf("exampleJobConfigurableTable");
    // mandatory
    initializeTable(connection, tableName);
    byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
      Bytes.toBytes("columnB") };
    // optional
    Scan scan = new Scan();
    for (byte[] family : inputColumns) {
      scan.addFamily(family);
    }
    Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
    scan.setFilter(exampleFilter);
    setScan(scan);
  } catch (IOException exception) {
    throw new RuntimeException("Failed to initialize.", exception);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 22, Source: TestTableInputFormat.java

Example 2: QueryByCondition2

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
public static void QueryByCondition2(String tableName) {
  try {
    HTablePool pool = new HTablePool(configuration, 1000);
    HTable table = (HTable) pool.getTable(tableName);
    // match rows where the value of column1 equals "aaa"
    Filter filter = new SingleColumnValueFilter(Bytes.toBytes("column1"), null,
        CompareOp.EQUAL, Bytes.toBytes("aaa"));
    Scan s = new Scan();
    s.setFilter(filter);
    ResultScanner rs = table.getScanner(s);
    for (Result r : rs) {
      System.out.println("rowkey: " + new String(r.getRow()));
      for (KeyValue keyValue : r.raw()) {
        System.out.println("column: " + new String(keyValue.getFamily())
            + " ==== value: " + new String(keyValue.getValue()));
      }
    }
    rs.close(); // release the scanner once iteration is done
  } catch (Exception e) {
    e.printStackTrace();
  }
}
 
Developer ID: yjp123456, Project: SparkDemo, Lines of code: 24, Source: MyClass.java

Example 3: mergeScanSpecs

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
  Filter newFilter = null;
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;

  switch (functionName) {
  case "booleanAnd":
    newFilter = HBaseUtils.andFilterAtIndex(leftScanSpec.filter, HBaseUtils.LAST_FILTER, rightScanSpec.filter);
    startRow = HBaseUtils.maxOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.minOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  case "booleanOr":
    newFilter = HBaseUtils.orFilterAtIndex(leftScanSpec.filter, HBaseUtils.LAST_FILTER, rightScanSpec.filter);
    startRow = HBaseUtils.minOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
  }
  return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 19, Source: HBaseFilterBuilder.java

Example 4: andFilterAtIndex

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
public static Filter andFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ALL) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ALL, allFilters);
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 17, Source: HBaseUtils.java

Example 5: orFilterAtIndex

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
public static Filter orFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ONE) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ONE, allFilters);
}
 
Developer ID: skhalifa, Project: QDrill, Lines of code: 17, Source: HBaseUtils.java
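Examples 4 and 5 fold a new predicate into an existing conjunction or disjunction, flattening a FilterList that already uses the same operator instead of nesting a second list. A brief usage sketch, assuming QDrill's HBaseUtils (with its LAST_FILTER sentinel) is on the classpath; the family, qualifiers, and values are hypothetical:

// Two hypothetical column predicates.
Filter ageFilter = new SingleColumnValueFilter(Bytes.toBytes("f"), Bytes.toBytes("age"),
    CompareFilter.CompareOp.GREATER, Bytes.toBytes(18L));
Filter nameFilter = new SingleColumnValueFilter(Bytes.toBytes("f"), Bytes.toBytes("name"),
    CompareFilter.CompareOp.EQUAL, Bytes.toBytes("alice"));

// LAST_FILTER appends at the end of the list (index == allFilters.size()).
Filter conjunction = HBaseUtils.andFilterAtIndex(ageFilter, HBaseUtils.LAST_FILTER, nameFilter);
// => FilterList(MUST_PASS_ALL, [ageFilter, nameFilter])

Filter disjunction = HBaseUtils.orFilterAtIndex(ageFilter, HBaseUtils.LAST_FILTER, nameFilter);
// => FilterList(MUST_PASS_ONE, [ageFilter, nameFilter])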

Example 6: parseTree

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
public HBaseScanSpec parseTree() {
  HBaseScanSpec parsedSpec = le.accept(this, null);
  if (parsedSpec != null) {
    parsedSpec = mergeScanSpecs("booleanAnd", this.groupScan.getHBaseScanSpec(), parsedSpec);
    /*
     * If RowFilter is THE filter attached to the scan specification,
     * remove it since its effect is also achieved through startRow and stopRow.
     */
    Filter parsedFilter = HBaseUtils.deserializeFilter(parsedSpec.filter);
    if (parsedFilter instanceof RowFilter &&
        ((RowFilter)parsedFilter).getComparator() instanceof BinaryComparator) {
      parsedSpec.filter = null;
    }
  }
  return parsedSpec;
}
 
Developer ID: dremio, Project: dremio-oss, Lines of code: 17, Source: HBaseFilterBuilder.java

Example 7: mergeScanSpecs

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
  Filter newFilter = null;
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;

  switch (functionName) {
  case "booleanAnd":
    newFilter = HBaseUtils.andFilterAtIndex(
        HBaseUtils.deserializeFilter(leftScanSpec.filter),
        HBaseUtils.LAST_FILTER,
        HBaseUtils.deserializeFilter(rightScanSpec.filter));
    startRow = HBaseUtils.maxOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.minOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  case "booleanOr":
    newFilter = HBaseUtils.orFilterAtIndex(
        HBaseUtils.deserializeFilter(leftScanSpec.filter),
        HBaseUtils.LAST_FILTER,
        HBaseUtils.deserializeFilter(rightScanSpec.filter));
    startRow = HBaseUtils.minOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
  }
  return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
 
Developer ID: dremio, Project: dremio-oss, Lines of code: 25, Source: HBaseFilterBuilder.java

Example 8: testTwoFilterWithMustAllPassFailed

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
@Test
public void testTwoFilterWithMustAllPassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        // col_2 does equal "col_2_var", so filter2 (NOT_EQUAL) fails and the
        // MUST_PASS_ALL list rejects the row: the Get returns an empty Result.
        assertTrue(result.getRow() == null);
    }
}
 
Developer ID: aliyun, Project: aliyun-tablestore-hbase-client, Lines of code: 26, Source: TestFilterList.java

Example 9: testTwoFilterWithMustOnePassFailed

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
@Test
public void testTwoFilterWithMustOnePassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        // MUST_PASS_ONE, as the test name indicates: both NOT_EQUAL predicates
        // fail against the stored values, so even the disjunction rejects the row.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        assertTrue(result.getRow() == null);
    }
}
 
Developer ID: aliyun, Project: aliyun-tablestore-hbase-client, Lines of code: 26, Source: TestFilterList.java

Example 10: getConfiguredScanForJob

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
  Scan s = new Scan();
  // Set Scan Versions
  s.setMaxVersions(Integer.MAX_VALUE);
  s.setCacheBlocks(false);
  // Set Scan Column Family
  if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
    s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
  }
  // Set RowFilter or Prefix Filter if applicable.
  Filter rowFilter = getRowFilter(args);
  if (rowFilter != null) {
    LOG.info("Setting Row Filter for counter.");
    s.setFilter(rowFilter);
  }
  // Set TimeRange if defined
  long[] timeRange = getTimeRange(args);
  if (timeRange != null) {
    LOG.info("Setting TimeRange for counter.");
    s.setTimeRange(timeRange[0], timeRange[1]);
  }
  return s;
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 24, Source: CellCounter.java

Example 11: preprocess

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
public static ConditionTree preprocess(HRegion region, Filter filter, float maxScale) {
  if (filter == null) return null;
  ConditionTree tree = null;
  if (isIndexFilter(region, filter)) {
    System.out.println("preprocess A");
    tree = new ConditionTreeNoneLeafNode(region, (SingleColumnValueFilter) filter, maxScale);
  } else if (filter instanceof FilterList) {
    System.out.println("preprocess B");
    tree = new ConditionTreeNoneLeafNode(region, (FilterList) filter, maxScale);
  }
  // A filter that is neither an index filter nor a FilterList leaves tree
  // null; guard against that before calling isPrune().
  if (tree == null) {
    return null;
  }
  if (tree.isPrune()) {
    System.out.println("return null for prune");
    return null;
  } else {
    return tree;
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 18, Source: ScanPreprocess.java

Example 12: constructScan

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
protected Scan constructScan(byte[] valuePrefix) throws IOException {
  FilterList list = new FilterList();
  Filter filter = new SingleColumnValueFilter(
      FAMILY_NAME, COLUMN_ZERO, CompareFilter.CompareOp.EQUAL,
      new BinaryComparator(valuePrefix)
  );
  list.addFilter(filter);
  if (opts.filterAll) {
    list.addFilter(new FilterAllFilter());
  }
  Scan scan = new Scan();
  scan.setCaching(opts.caching);
  if (opts.addColumns) {
    scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
  } else {
    scan.addFamily(FAMILY_NAME);
  }
  scan.setFilter(list);
  return scan;
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 21, Source: PerformanceEvaluation.java

Example 13: configure

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
@Override
public void configure(JobConf job) {
  try {
    HTable exampleTable = new HTable(HBaseConfiguration.create(job),
      Bytes.toBytes("exampleDeprecatedTable"));
    // mandatory
    setHTable(exampleTable);
    byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
      Bytes.toBytes("columnB") };
    // mandatory
    setInputColumns(inputColumns);
    Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
    // optional
    setRowFilter(exampleFilter);
  } catch (IOException exception) {
    throw new RuntimeException("Failed to configure for job.", exception);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 19, Source: TestTableInputFormat.java

Example 14: initialize

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
@Override
protected void initialize(JobContext job) throws IOException {
  Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(
      job.getConfiguration()));
  TableName tableName = TableName.valueOf("exampleTable");
  // mandatory
  initializeTable(connection, tableName);
  byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
    Bytes.toBytes("columnB") };
  // optional
  Scan scan = new Scan();
  for (byte[] family : inputColumns) {
    scan.addFamily(family);
  }
  Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
  scan.setFilter(exampleFilter);
  setScan(scan);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 19, Source: TestTableInputFormat.java

Example 15: toFilter

import org.apache.hadoop.hbase.filter.Filter; // import the required package/class
/**
 * Convert a protocol buffer Filter to a client Filter
 *
 * @param proto the protocol buffer Filter to convert
 * @return the converted Filter
 */
@SuppressWarnings("unchecked")
public static Filter toFilter(FilterProtos.Filter proto) throws IOException {
  String type = proto.getName();
  final byte [] value = proto.getSerializedFilter().toByteArray();
  String funcName = "parseFrom";
  try {
    Class<? extends Filter> c =
      (Class<? extends Filter>)Class.forName(type, true, CLASS_LOADER);
    Method parseFrom = c.getMethod(funcName, byte[].class);
    if (parseFrom == null) {
      throw new IOException("Unable to locate function: " + funcName + " in type: " + type);
    }
    return (Filter)parseFrom.invoke(c, value);
  } catch (Exception e) {
    // Either we couldn't instantiate the method object, or "parseFrom" failed.
    // In either case, let's not retry.
    throw new DoNotRetryIOException(e);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 26, Source: ProtobufUtil.java
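As a usage note, toFilter is one half of a round trip. A hedged sketch, assuming the companion serializer in the same ProtobufUtil class (FilterProtos.Filter toFilter(Filter), which records the filter's class name and its toByteArray() bytes):

// Round trip: client Filter -> protobuf message -> client Filter.
Filter original = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
FilterProtos.Filter proto = ProtobufUtil.toFilter(original); // serialize (companion method)
Filter restored = ProtobufUtil.toFilter(proto);              // example 15: reflective parseFrom
// restored is functionally equivalent to original, rebuilt from the
// class name and serialized bytes carried inside the protobuf message.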


Note: The org.apache.hadoop.hbase.filter.Filter class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by the community, and copyright in the source code remains with the original authors. Consult the corresponding project's license before distributing or using the code; do not reproduce this page without permission.