

Java Filter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.filter.Filter. If you are wondering what the Filter class does, how to use it, or what real code that uses it looks like, the curated examples below should help.


The Filter class lives in the org.apache.hadoop.hbase.filter package. Fifteen code examples for the class are shown below, sorted by popularity.

Example 1: configure

import org.apache.hadoop.hbase.filter.Filter; // required import
@Override
public void configure(JobConf job) {
  try {
    Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
    TableName tableName = TableName.valueOf("exampleJobConfigurableTable");
    // mandatory
    initializeTable(connection, tableName);
    byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
      Bytes.toBytes("columnB") };
    // optional
    Scan scan = new Scan();
    for (byte[] family : inputColumns) {
      scan.addFamily(family);
    }
    Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
    scan.setFilter(exampleFilter);
    setScan(scan);
  } catch (IOException exception) {
    throw new RuntimeException("Failed to initialize.", exception);
  }
}
 
Developer: fengchen8086 | Project: ditb | Lines: 22 | Source: TestTableInputFormat.java

Example 2: QueryByCondition2

import org.apache.hadoop.hbase.filter.Filter; // required import
public static void QueryByCondition2(String tableName) {
    try {
        // HTablePool is deprecated in later HBase releases; see the sketch below.
        HTablePool pool = new HTablePool(configuration, 1000);
        HTable table = (HTable) pool.getTable(tableName);
        // Match rows where the value of column1 is "aaa".
        Filter filter = new SingleColumnValueFilter(Bytes.toBytes("column1"), null,
                CompareOp.EQUAL, Bytes.toBytes("aaa"));
        Scan s = new Scan();
        s.setFilter(filter);
        ResultScanner rs = table.getScanner(s);
        for (Result r : rs) {
            System.out.println("row key: " + new String(r.getRow()));
            for (KeyValue keyValue : r.raw()) {
                System.out.println("column: " + new String(keyValue.getFamily())
                        + " ==== value: " + new String(keyValue.getValue()));
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Developer: yjp123456 | Project: SparkDemo | Lines: 24 | Source: MyClass.java
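HTablePool and HTable were deprecated in HBase 1.0 and removed in 2.0 in favor of Connection/Table. A sketch of the same scan against the HBase 2.x client API, reusing the example's configuration field and table/column names (CompareOperator replaces CompareOp there):

public static void queryByCondition2(String tableName) throws IOException {
    try (Connection connection = ConnectionFactory.createConnection(configuration);
         Table table = connection.getTable(TableName.valueOf(tableName))) {
        // Match rows where the value of column1 is "aaa", as in the original example.
        Filter filter = new SingleColumnValueFilter(Bytes.toBytes("column1"), null,
                CompareOperator.EQUAL, Bytes.toBytes("aaa"));
        Scan scan = new Scan().setFilter(filter);
        try (ResultScanner rs = table.getScanner(scan)) {
            for (Result r : rs) {
                System.out.println("row key: " + Bytes.toString(r.getRow()));
                for (Cell cell : r.rawCells()) {
                    System.out.println("column: " + Bytes.toString(CellUtil.cloneFamily(cell))
                            + " ==== value: " + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
        }
    }
}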

Example 3: mergeScanSpecs

import org.apache.hadoop.hbase.filter.Filter; // required import
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
  Filter newFilter = null;
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;

  switch (functionName) {
  case "booleanAnd":
    newFilter = HBaseUtils.andFilterAtIndex(leftScanSpec.filter, HBaseUtils.LAST_FILTER, rightScanSpec.filter);
    startRow = HBaseUtils.maxOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.minOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  case "booleanOr":
    newFilter = HBaseUtils.orFilterAtIndex(leftScanSpec.filter, HBaseUtils.LAST_FILTER, rightScanSpec.filter);
    startRow = HBaseUtils.minOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  }
  return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
 
Developer: skhalifa | Project: QDrill | Lines: 19 | Source: HBaseFilterBuilder.java
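The row-range arithmetic is the key detail here: booleanAnd intersects the two key ranges (later start, earlier stop), while booleanOr unions them. An illustration with string keys, assuming maxOfStartRows/minOfStopRows and their duals compare keys lexicographically as in the QDrill helpers:

// left scan spec covers rows [b, f), right covers [d, h)
byte[] leftStart  = Bytes.toBytes("b"), leftStop  = Bytes.toBytes("f");
byte[] rightStart = Bytes.toBytes("d"), rightStop = Bytes.toBytes("h");

// booleanAnd -> intersection [d, f)
byte[] andStart = HBaseUtils.maxOfStartRows(leftStart, rightStart); // "d"
byte[] andStop  = HBaseUtils.minOfStopRows(leftStop, rightStop);    // "f"

// booleanOr -> union [b, h)
byte[] orStart = HBaseUtils.minOfStartRows(leftStart, rightStart);  // "b"
byte[] orStop  = HBaseUtils.maxOfStopRows(leftStop, rightStop);     // "h"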

Example 4: andFilterAtIndex

import org.apache.hadoop.hbase.filter.Filter; // required import
public static Filter andFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ALL) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ALL, allFilters);
}
 
Developer: skhalifa | Project: QDrill | Lines: 17 | Source: HBaseUtils.java

Example 5: orFilterAtIndex

import org.apache.hadoop.hbase.filter.Filter; // required import
public static Filter orFilterAtIndex(Filter currentFilter, int index, Filter newFilter) {
  if (currentFilter == null) {
    return newFilter;
  } else if (newFilter == null) {
    return currentFilter;
  }

  List<Filter> allFilters = Lists.newArrayList();
  if (currentFilter instanceof FilterList && ((FilterList)currentFilter).getOperator() == FilterList.Operator.MUST_PASS_ONE) {
    allFilters.addAll(((FilterList)currentFilter).getFilters());
  } else {
    allFilters.add(currentFilter);
  }
  allFilters.add((index == LAST_FILTER ? allFilters.size() : index), newFilter);
  return new FilterList(FilterList.Operator.MUST_PASS_ONE, allFilters);
}
 
Developer: skhalifa | Project: QDrill | Lines: 17 | Source: HBaseUtils.java
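A short usage sketch for the two helpers above (LAST_FILTER is the append sentinel used throughout this file; the filter values are illustrative):

Filter byRow = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator("aa.*"));
Filter byValue = new SingleColumnValueFilter(Bytes.toBytes("f"), Bytes.toBytes("q"),
        CompareFilter.CompareOp.EQUAL, Bytes.toBytes("aaa"));

// AND the two filters; LAST_FILTER appends, so the result is
// FilterList(MUST_PASS_ALL, [byRow, byValue])
Filter both = HBaseUtils.andFilterAtIndex(byRow, HBaseUtils.LAST_FILTER, byValue);

// OR another filter in at index 0; since 'both' is a MUST_PASS_ALL list,
// it is kept whole: FilterList(MUST_PASS_ONE, [FirstKeyOnlyFilter, both])
Filter either = HBaseUtils.orFilterAtIndex(both, 0, new FirstKeyOnlyFilter());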

Example 6: parseTree

import org.apache.hadoop.hbase.filter.Filter; // required import
public HBaseScanSpec parseTree() {
  HBaseScanSpec parsedSpec = le.accept(this, null);
  if (parsedSpec != null) {
    parsedSpec = mergeScanSpecs("booleanAnd", this.groupScan.getHBaseScanSpec(), parsedSpec);
    /*
     * If RowFilter is THE filter attached to the scan specification,
     * remove it since its effect is also achieved through startRow and stopRow.
     */
    Filter parsedFilter = HBaseUtils.deserializeFilter(parsedSpec.filter);
    if (parsedFilter instanceof RowFilter &&
        ((RowFilter)parsedFilter).getComparator() instanceof BinaryComparator) {
      parsedSpec.filter = null;
    }
  }
  return parsedSpec;
}
 
Developer: dremio | Project: dremio-oss | Lines: 17 | Source: HBaseFilterBuilder.java

Example 7: mergeScanSpecs

import org.apache.hadoop.hbase.filter.Filter; // required import
private HBaseScanSpec mergeScanSpecs(String functionName, HBaseScanSpec leftScanSpec, HBaseScanSpec rightScanSpec) {
  Filter newFilter = null;
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;

  switch (functionName) {
  case "booleanAnd":
    newFilter = HBaseUtils.andFilterAtIndex(
        HBaseUtils.deserializeFilter(leftScanSpec.filter),
        HBaseUtils.LAST_FILTER,
        HBaseUtils.deserializeFilter(rightScanSpec.filter));
    startRow = HBaseUtils.maxOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.minOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  case "booleanOr":
    newFilter = HBaseUtils.orFilterAtIndex(
        HBaseUtils.deserializeFilter(leftScanSpec.filter),
        HBaseUtils.LAST_FILTER,
        HBaseUtils.deserializeFilter(rightScanSpec.filter));
    startRow = HBaseUtils.minOfStartRows(leftScanSpec.startRow, rightScanSpec.startRow);
    stopRow = HBaseUtils.maxOfStopRows(leftScanSpec.stopRow, rightScanSpec.stopRow);
    break;
  }
  return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, newFilter);
}
 
Developer: dremio | Project: dremio-oss | Lines: 25 | Source: HBaseFilterBuilder.java

Example 8: testTwoFilterWithMustAllPassFailed

import org.apache.hadoop.hbase.filter.Filter; // required import
@Test
public void testTwoFilterWithMustAllPassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        assertTrue(result.getRow() == null);
    }
}
 
Developer: aliyun | Project: aliyun-tablestore-hbase-client | Lines: 26 | Source: TestFilterList.java

Example 9: testTwoFilterWithMustOnePassFailed

import org.apache.hadoop.hbase.filter.Filter; // required import
@Test
public void testTwoFilterWithMustOnePassFailed() throws IOException {
    clean();
    {
        Put put = new Put(Bytes.toBytes(rowPrefix));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_1"), Bytes.toBytes("col_1_var"));
        put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes("col_2"), Bytes.toBytes("col_2_var"));
        table.put(put);
    }

    {
        Get get = new Get(Bytes.toBytes(rowPrefix));
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_1_var"));
        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
                CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var"));
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);

        get.setFilter(filterList);
        Result result = table.get(get);
        assertTrue(result.getRow() == null);
    }
}
 
Developer: aliyun | Project: aliyun-tablestore-hbase-client | Lines: 26 | Source: TestFilterList.java
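Note that, despite its name, this second test also builds its list with MUST_PASS_ALL; since both NOT_EQUAL filters fail against the stored values, the row is filtered under either operator. For contrast, a minimal sketch of a MUST_PASS_ONE list, which admits the row as soon as one member filter passes (fixture names reused from the tests above):

Get get = new Get(Bytes.toBytes(rowPrefix));
Filter passes = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_1"),
        CompareFilter.CompareOp.EQUAL, Bytes.toBytes("col_1_var"));     // matches the stored value
Filter fails = new SingleColumnValueFilter(Bytes.toBytes(familyName), Bytes.toBytes("col_2"),
        CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes("col_2_var")); // rejects the stored value
FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
filterList.addFilter(passes);
filterList.addFilter(fails);
get.setFilter(filterList);
// One branch passes, so MUST_PASS_ONE returns the row.
Result result = table.get(get);
assertTrue(result.getRow() != null);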

Example 10: getConfiguredScanForJob

import org.apache.hadoop.hbase.filter.Filter; // required import
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
  Scan s = new Scan();
  // Set Scan Versions
  s.setMaxVersions(Integer.MAX_VALUE);
  s.setCacheBlocks(false);
  // Set Scan Column Family
  if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
    s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
  }
  // Set RowFilter or Prefix Filter if applicable.
  Filter rowFilter = getRowFilter(args);
  if (rowFilter != null) {
    LOG.info("Setting Row Filter for counter.");
    s.setFilter(rowFilter);
  }
  // Set TimeRange if defined
  long[] timeRange = getTimeRange(args);
  if (timeRange != null) {
    LOG.info("Setting TimeRange for counter.");
    s.setTimeRange(timeRange[0], timeRange[1]);
  }
  return s;
}
 
Developer: fengchen8086 | Project: ditb | Lines: 24 | Source: CellCounter.java

Example 11: preprocess

import org.apache.hadoop.hbase.filter.Filter; // required import
public static ConditionTree preprocess(HRegion region, Filter filter, float maxScale) {
  if (filter == null) return null;
  ConditionTree tree = null;
  if (isIndexFilter(region, filter)) {
    System.out.println("preprocess A");
    tree = new ConditionTreeNoneLeafNode(region, (SingleColumnValueFilter) filter, maxScale);
  } else if (filter instanceof FilterList) {
    System.out.println("preprocess B");
    tree = new ConditionTreeNoneLeafNode(region, (FilterList) filter, maxScale);
  }
  // tree stays null when the filter is neither an index filter nor a FilterList;
  // guard before dereferencing to avoid a NullPointerException.
  if (tree == null) {
    return null;
  }
  if (tree.isPrune()) {
    System.out.println("return null for prune");
    return null;
  }
  return tree;
}
 
Developer: fengchen8086 | Project: ditb | Lines: 18 | Source: ScanPreprocess.java

Example 12: constructScan

import org.apache.hadoop.hbase.filter.Filter; // required import
protected Scan constructScan(byte[] valuePrefix) throws IOException {
  FilterList list = new FilterList();
  Filter filter = new SingleColumnValueFilter(
      FAMILY_NAME, COLUMN_ZERO, CompareFilter.CompareOp.EQUAL,
      new BinaryComparator(valuePrefix)
  );
  list.addFilter(filter);
  if (opts.filterAll) {
    list.addFilter(new FilterAllFilter());
  }
  Scan scan = new Scan();
  scan.setCaching(opts.caching);
  if (opts.addColumns) {
    scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
  } else {
    scan.addFamily(FAMILY_NAME);
  }
  scan.setFilter(list);
  return scan;
}
 
Developer: fengchen8086 | Project: ditb | Lines: 21 | Source: PerformanceEvaluation.java

Example 13: configure

import org.apache.hadoop.hbase.filter.Filter; // required import
@Override
public void configure(JobConf job) {
  try {
    HTable exampleTable = new HTable(HBaseConfiguration.create(job),
      Bytes.toBytes("exampleDeprecatedTable"));
    // mandatory
    setHTable(exampleTable);
    byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
      Bytes.toBytes("columnB") };
    // mandatory
    setInputColumns(inputColumns);
    Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
    // optional
    setRowFilter(exampleFilter);
  } catch (IOException exception) {
    throw new RuntimeException("Failed to configure for job.", exception);
  }
}
 
Developer: fengchen8086 | Project: ditb | Lines: 19 | Source: TestTableInputFormat.java

Example 14: initialize

import org.apache.hadoop.hbase.filter.Filter; // required import
@Override
protected void initialize(JobContext job) throws IOException {
  Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(
      job.getConfiguration()));
  TableName tableName = TableName.valueOf("exampleTable");
  // mandatory
  initializeTable(connection, tableName);
  byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
    Bytes.toBytes("columnB") };
  // optional
  Scan scan = new Scan();
  for (byte[] family : inputColumns) {
    scan.addFamily(family);
  }
  Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
  scan.setFilter(exampleFilter);
  setScan(scan);
}
 
Developer: fengchen8086 | Project: ditb | Lines: 19 | Source: TestTableInputFormat.java

Example 15: toFilter

import org.apache.hadoop.hbase.filter.Filter; // required import
/**
 * Convert a protocol buffer Filter to a client Filter
 *
 * @param proto the protocol buffer Filter to convert
 * @return the converted Filter
 */
@SuppressWarnings("unchecked")
public static Filter toFilter(FilterProtos.Filter proto) throws IOException {
  String type = proto.getName();
  final byte [] value = proto.getSerializedFilter().toByteArray();
  String funcName = "parseFrom";
  try {
    Class<? extends Filter> c =
      (Class<? extends Filter>)Class.forName(type, true, CLASS_LOADER);
    Method parseFrom = c.getMethod(funcName, byte[].class);
    if (parseFrom == null) {
      throw new IOException("Unable to locate function: " + funcName + " in type: " + type);
    }
    return (Filter)parseFrom.invoke(c, value);
  } catch (Exception e) {
    // Either we couldn't instantiate the method object, or "parseFrom" failed.
    // In either case, let's not retry.
    throw new DoNotRetryIOException(e);
  }
}
 
Developer: fengchen8086 | Project: ditb | Lines: 26 | Source: ProtobufUtil.java
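The reflective lookup above relies on an HBase convention rather than an interface: every concrete Filter is expected to expose a public static parseFrom(byte[]) factory that inverts its toByteArray(). A minimal sketch of that contract for a hypothetical custom filter (class name and body are illustrative only):

public class ExampleStatelessFilter extends FilterBase {

  // Serialize this filter's state; an empty array suffices for a stateless filter.
  @Override
  public byte[] toByteArray() {
    return new byte[0];
  }

  // The static factory that ProtobufUtil.toFilter locates via reflection.
  public static ExampleStatelessFilter parseFrom(byte[] pbBytes)
      throws DeserializationException {
    return new ExampleStatelessFilter();
  }
}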


Note: the org.apache.hadoop.hbase.filter.Filter examples above were compiled from open-source projects hosted on GitHub and similar platforms. Copyright in each snippet remains with its original authors; consult the corresponding project's license before reusing or redistributing the code.