

Java QualifierFilter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.filter.QualifierFilter. If you are wondering what the QualifierFilter class does, how to use it, or are looking for concrete usage examples, the curated code samples below may help.


The QualifierFilter class belongs to the org.apache.hadoop.hbase.filter package. Fifteen code examples of the class are shown below, sorted by popularity by default.
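
Most of the examples below follow the same basic pattern: build a QualifierFilter from a compare operator and a comparator, then attach it to a Scan (or Get). The following is a minimal, self-contained sketch of that pattern, not code from any of the projects listed; the table name and qualifier bytes are hypothetical, and it uses the older CompareOp-based constructor that most of the examples rely on (HBase 2.x also offers a CompareOperator-based constructor, as in Example 2).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierFilterSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("mytable"))) { // hypothetical table
      // Keep only cells whose column qualifier equals "col-1" (hypothetical qualifier).
      Filter filter = new QualifierFilter(CompareOp.EQUAL,
          new BinaryComparator(Bytes.toBytes("col-1")));
      Scan scan = new Scan();
      scan.setFilter(filter);
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(result);
        }
      }
    }
  }
}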

Example 1: isFilterSupported

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Override
public FilterSupportStatus isFilterSupported(
    FilterAdapterContext context, QualifierFilter filter) {
  // For range filters, we only support a single family:
  if (!CompareOp.EQUAL.equals(filter.getOperator())
      && filter.getComparator() instanceof BinaryComparator
      && context.getScan().numFamilies() != 1) {
    return SINGLE_FAMILY_REQUIRED;
  }
  // Support binary comparators and regex comparators with equal compare op:
  if ( !(filter.getComparator() instanceof BinaryComparator)
      && !isRegexAndSupported(filter)) {
    return UNSUPPORTED_COMPARABLE;
  }
  return FilterSupportStatus.SUPPORTED;
}
 
Developer: dmmcerlean, Project: cloud-bigtable-client, Lines: 17, Source: QualifierFilterAdapter.java

Example 2: testScanWithColumnsAndFilterAndVersion

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testScanWithColumnsAndFilterAndVersion() throws IOException {
  TableName tableName = TableName.valueOf(name.getMethodName());
  try (Table table = TEST_UTIL.createTable(tableName, FAMILY, 4)) {
    for (int i = 0; i < 4; i++) {
      Put put = new Put(ROW);
      put.addColumn(FAMILY, QUALIFIER, VALUE);
      table.put(put);
    }

    Scan scan = new Scan();
    scan.addColumn(FAMILY, QUALIFIER);
    scan.setFilter(new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(QUALIFIER)));
    scan.readVersions(3);

    try (ResultScanner scanner = table.getScanner(scan)) {
      Result result = scanner.next();
      assertEquals(3, result.size());
    }
  }
}
 
Developer: apache, Project: hbase, Lines: 22, Source: TestScannersFromClientSide.java

Example 3: makeResponseTimeFilter

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
/**
 * Builds the HBase filter that selects values on the y-axis (response time) when picking transactions for the scatter chart.
 * The 4-byte elapsed time must be attached as the prefix of the column qualifier for this filter to apply.
 *
 * @param area selected scatter area, providing the response-time range
 * @param offsetTransactionId transaction id used as a paging offset; may be null
 * @param offsetTransactionElapsed elapsed time of the offset transaction
 * @return filter restricting column qualifiers to the selected response-time range
 */
private Filter makeResponseTimeFilter(final SelectedScatterArea area, final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
    // filter by response time
    ResponseTimeRange responseTimeRange = area.getResponseTimeRange();
    byte[] responseFrom = Bytes.toBytes(responseTimeRange.getFrom());
    byte[] responseTo = Bytes.toBytes(responseTimeRange.getTo());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
    filterList.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(responseFrom)));
    filterList.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryPrefixComparator(responseTo)));

    // add offset
    if (offsetTransactionId != null) {
        final Buffer buffer = new AutomaticBuffer(32);
        buffer.putInt(offsetTransactionElapsed);
        buffer.putPrefixedString(offsetTransactionId.getAgentId());
        buffer.putSVLong(offsetTransactionId.getAgentStartTime());
        buffer.putVLong(offsetTransactionId.getTransactionSequence());
        byte[] qualifierOffset = buffer.getBuffer();

        filterList.addFilter(new QualifierFilter(CompareOp.GREATER, new BinaryPrefixComparator(qualifierOffset)));
    }
    return filterList;
}
 
Developer: naver, Project: pinpoint, Lines: 32, Source: HbaseApplicationTraceIndexDao.java
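
The BinaryPrefixComparator range in Example 3 only works because the column qualifier begins with the 4-byte big-endian elapsed time, as the Javadoc notes. Below is a minimal sketch of that qualifier layout; the suffix string is a hypothetical placeholder for Pinpoint's transaction-id encoding, not the project's actual format.

import org.apache.hadoop.hbase.util.Bytes;

public class ResponseTimeQualifierSketch {
  public static void main(String[] args) {
    // Hypothetical layout: [4-byte big-endian elapsed time][opaque transaction bytes].
    int elapsedMillis = 1234;
    byte[] elapsedPrefix = Bytes.toBytes(elapsedMillis);              // 4 bytes, big-endian
    byte[] transactionPart = Bytes.toBytes("agentId^startTime^seq");  // placeholder suffix
    byte[] qualifier = Bytes.add(elapsedPrefix, transactionPart);

    // A BinaryPrefixComparator built from Bytes.toBytes(from) or Bytes.toBytes(to) compares
    // only the first 4 bytes of this qualifier, so GREATER_OR_EQUAL / LESS_OR_EQUAL
    // effectively bound the elapsed time alone.
    System.out.println(Bytes.toStringBinary(qualifier));
  }
}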

Example 4: testFilters

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFilters() throws Exception {
  byte [] TABLE = Bytes.toBytes("testFilters");
  Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
  byte [][] ROWS = makeN(ROW, 10);
  byte [][] QUALIFIERS = {
      Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
      Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
      Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
      Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
      Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>")
  };
  for(int i=0;i<10;i++) {
    Put put = new Put(ROWS[i]);
    put.setDurability(Durability.SKIP_WAL);
    put.add(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL,
    new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 1;
  for(Result result : scanner) {
    assertEquals(result.size(), 1);
    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(CellUtil.cloneQualifier(result.rawCells()[0]),
        QUALIFIERS[expectedIndex]));
    expectedIndex++;
  }
  assertEquals(expectedIndex, 6);
  scanner.close();
}
 
Developer: fengchen8086, Project: ditb, Lines: 36, Source: TestFromClientSide.java

Example 5: testFiltersWithReverseScan

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFiltersWithReverseScan() throws Exception {
  TableName TABLE = TableName.valueOf("testFiltersWithReverseScan");
  Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS = { Bytes.toBytes("col0-<d2v1>-<d3v2>"),
      Bytes.toBytes("col1-<d2v1>-<d3v2>"),
      Bytes.toBytes("col2-<d2v1>-<d3v2>"),
      Bytes.toBytes("col3-<d2v1>-<d3v2>"),
      Bytes.toBytes("col4-<d2v1>-<d3v2>"),
      Bytes.toBytes("col5-<d2v1>-<d3v2>"),
      Bytes.toBytes("col6-<d2v1>-<d3v2>"),
      Bytes.toBytes("col7-<d2v1>-<d3v2>"),
      Bytes.toBytes("col8-<d2v1>-<d3v2>"),
      Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.add(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 5;
  for (Result result : scanner) {
    assertEquals(result.size(), 1);
    assertTrue(Bytes.equals(result.raw()[0].getRow(), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(result.raw()[0].getQualifier(),
        QUALIFIERS[expectedIndex]));
    expectedIndex--;
  }
  assertEquals(expectedIndex, 0);
  scanner.close();
  ht.close();
}
 
Developer: fengchen8086, Project: ditb, Lines: 40, Source: TestFromClientSide.java

Example 6: testSkipFilter

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testSkipFilter() throws Exception {
  // Test for qualifier regex: "testQualifierOne-2"
  // Should only get rows from second group, and all keys
  Filter f = new SkipFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
  Scan s = new Scan();
  s.setFilter(f);
  
  KeyValue [] kvs = {
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
  };
  verifyScanFull(s, kvs);
}
 
Developer: fengchen8086, Project: ditb, Lines: 35, Source: TestScannersWithFilters.java

Example 7: testFilters

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFilters() throws Exception {
  byte [] TABLE = Bytes.toBytes("testFilters");
  HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
  byte [][] ROWS = makeN(ROW, 10);
  byte [][] QUALIFIERS = {
      Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
      Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
      Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
      Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
      Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>")
  };
  for(int i=0;i<10;i++) {
    Put put = new Put(ROWS[i]);
    put.setWriteToWAL(false);
    put.add(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL,
    new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 1;
  for(Result result : scanner) {
    assertEquals(result.size(), 1);
    assertTrue(Bytes.equals(result.raw()[0].getRow(), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(result.raw()[0].getQualifier(),
        QUALIFIERS[expectedIndex]));
    expectedIndex++;
  }
  assertEquals(expectedIndex, 6);
  scanner.close();
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines: 36, Source: TestFromClientSide.java

Example 8: adapt

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Override
public RowFilter adapt(FilterAdapterContext context, QualifierFilter filter)
    throws IOException {
  if (filter.getComparator() instanceof RegexStringComparator) {
    return adaptRegexStringComparator(
        filter.getOperator(), (RegexStringComparator) filter.getComparator());
  } else if (filter.getComparator() instanceof BinaryComparator) {
    return adaptBinaryComparator(
        context, filter.getOperator(), (BinaryComparator) filter.getComparator());
  }
  throw new IllegalStateException(
      String.format(
          "Cannot adapt comparator %s",
          filter.getComparator().getClass().getCanonicalName()));
}
 
Developer: dmmcerlean, Project: cloud-bigtable-client, Lines: 16, Source: QualifierFilterAdapter.java

Example 9: assertAdaptedForm

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
private void assertAdaptedForm(
    ByteArrayComparable comparable, CompareFilter.CompareOp op, RowFilter expectedFilter)
    throws IOException {
  QualifierFilter filter = new QualifierFilter(op, comparable);
  RowFilter actualFilter = adapter.adapt(scanWithOnFamilyScanContext, filter);
  Assert.assertEquals(expectedFilter, actualFilter);
}
 
Developer: dmmcerlean, Project: cloud-bigtable-client, Lines: 8, Source: TestQualifierFilterAdapter.java

Example 10: testFilters

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFilters() throws Exception {
  byte [] TABLE = Bytes.toBytes("testFilters");
  HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
  byte [][] ROWS = makeN(ROW, 10);
  byte [][] QUALIFIERS = {
      Bytes.toBytes("col0-<d2v1>-<d3v2>"), Bytes.toBytes("col1-<d2v1>-<d3v2>"),
      Bytes.toBytes("col2-<d2v1>-<d3v2>"), Bytes.toBytes("col3-<d2v1>-<d3v2>"),
      Bytes.toBytes("col4-<d2v1>-<d3v2>"), Bytes.toBytes("col5-<d2v1>-<d3v2>"),
      Bytes.toBytes("col6-<d2v1>-<d3v2>"), Bytes.toBytes("col7-<d2v1>-<d3v2>"),
      Bytes.toBytes("col8-<d2v1>-<d3v2>"), Bytes.toBytes("col9-<d2v1>-<d3v2>")
  };
  for(int i=0;i<10;i++) {
    Put put = new Put(ROWS[i]);
    put.setDurability(Durability.SKIP_WAL);
    put.add(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL,
    new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 1;
  for(Result result : scanner) {
    assertEquals(result.size(), 1);
    assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(CellUtil.cloneQualifier(result.rawCells()[0]),
        QUALIFIERS[expectedIndex]));
    expectedIndex++;
  }
  assertEquals(expectedIndex, 6);
  scanner.close();
}
 
Developer: tenggyut, Project: HIndex, Lines: 36, Source: TestFromClientSide.java

Example 11: testFiltersWithReverseScan

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFiltersWithReverseScan() throws Exception {
  byte[] TABLE = Bytes.toBytes("testFiltersWithReverseScan");
  HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
  byte[][] ROWS = makeN(ROW, 10);
  byte[][] QUALIFIERS = { Bytes.toBytes("col0-<d2v1>-<d3v2>"),
      Bytes.toBytes("col1-<d2v1>-<d3v2>"),
      Bytes.toBytes("col2-<d2v1>-<d3v2>"),
      Bytes.toBytes("col3-<d2v1>-<d3v2>"),
      Bytes.toBytes("col4-<d2v1>-<d3v2>"),
      Bytes.toBytes("col5-<d2v1>-<d3v2>"),
      Bytes.toBytes("col6-<d2v1>-<d3v2>"),
      Bytes.toBytes("col7-<d2v1>-<d3v2>"),
      Bytes.toBytes("col8-<d2v1>-<d3v2>"),
      Bytes.toBytes("col9-<d2v1>-<d3v2>") };
  for (int i = 0; i < 10; i++) {
    Put put = new Put(ROWS[i]);
    put.add(FAMILY, QUALIFIERS[i], VALUE);
    ht.put(put);
  }
  Scan scan = new Scan();
  scan.setReversed(true);
  scan.addFamily(FAMILY);
  Filter filter = new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator("col[1-5]"));
  scan.setFilter(filter);
  ResultScanner scanner = ht.getScanner(scan);
  int expectedIndex = 5;
  for (Result result : scanner) {
    assertEquals(result.size(), 1);
    assertTrue(Bytes.equals(result.raw()[0].getRow(), ROWS[expectedIndex]));
    assertTrue(Bytes.equals(result.raw()[0].getQualifier(),
        QUALIFIERS[expectedIndex]));
    expectedIndex--;
  }
  assertEquals(expectedIndex, 0);
  scanner.close();
  ht.close();
}
 
Developer: tenggyut, Project: HIndex, Lines: 40, Source: TestFromClientSide.java

Example 12: createFilterFrom

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
private QualifierFilter createFilterFrom() {
	if (BigInteger.class.equals(columnNameType)) {
		int flippedFrom = StandardConverters.convertFromBytes(Integer.class, from);
		flippedFrom ^= (1 << 31);
		from = Bytes.toBytes(flippedFrom);
	}
	BinaryComparator startColumn = new BinaryComparator(from);
	CompareFilter.CompareOp  fromInclusive = CompareOp.GREATER_OR_EQUAL;
	return new QualifierFilter(fromInclusive, startColumn);
}
 
Developer: guci314, Project: playorm, Lines: 11, Source: CursorColumnSliceHbase.java

Example 13: createFilterTo

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
private QualifierFilter createFilterTo() {
	if (BigInteger.class.equals(columnNameType)) {
		int flippedTo = StandardConverters.convertFromBytes(Integer.class, to);
		flippedTo ^= (1 << 31);
		to = Bytes.toBytes(flippedTo);
	}
	BinaryComparator endColumn = new BinaryComparator(to);
	CompareFilter.CompareOp toInclusive = CompareOp.LESS_OR_EQUAL;
	return new QualifierFilter(toInclusive, endColumn);
}
 
Developer: guci314, Project: playorm, Lines: 11, Source: CursorColumnSliceHbase.java
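
Examples 12 and 13 each produce one bound of a column-qualifier range. A plausible way to combine the two bounds into an inclusive slice is a FilterList that requires both to pass; the sketch below illustrates that pattern with hypothetical qualifier bytes and is not taken from the playorm source.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierRangeSketch {
  public static Scan qualifierSlice(byte[] from, byte[] to) {
    // Both bounds must pass, giving an inclusive [from, to] slice over column qualifiers.
    FilterList range = new FilterList(Operator.MUST_PASS_ALL);
    range.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(from)));
    range.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(to)));
    Scan scan = new Scan();
    scan.setFilter(range);
    return scan;
  }

  public static void main(String[] args) {
    // Hypothetical qualifier bounds, only to show the call sequence.
    Scan scan = qualifierSlice(Bytes.toBytes("col-2"), Bytes.toBytes("col-7"));
    System.out.println(scan.getFilter());
  }
}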

Example 14: main

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
public static void main(String[] args) throws IOException {
	Configuration configuration = getConfiguration();
	HBaseHelper helper = HBaseHelper.getHelper(configuration);
	Connection connection = helper.getConnection();
	
	/*helper.dropTable(TEST_TABLE_NAME);
    helper.createTable(TEST_TABLE_NAME, "colfam1", "colfam2");
    log.info("Adding rows to table...");
    helper.fillTable("testtable", 1, 10, 10, "colfam1", "colfam2");*/
	
    Table table = connection.getTable(TableName.valueOf(TEST_TABLE_NAME));
	
    Filter filter = new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("col-2")));
    
    Scan scan = new Scan();
    scan.setFilter(filter);
    
    ResultScanner scanner = table.getScanner(scan);
    log.info("Scanning table... ");
    for (Result result : scanner) {
      log.info(result.toString());
    }
    scanner.close();

    Get get1 = new Get(Bytes.toBytes("row-5"));
    get1.setFilter(filter);
    Result result1 = table.get(get1);
    log.info("Result of get(): {}", result1);

}
 
Developer: lhfei, Project: hbase-in-action, Lines: 31, Source: QualifierFilterApp.java

Example 15: testFilterList

import org.apache.hadoop.hbase.filter.QualifierFilter; // import the required package/class
@Test
public void testFilterList() throws Exception {
  // Test getting a single row, single key using Row, Qualifier, and Value 
  // regular expression and substring filters
  // Use must pass all
  List<Filter> filters = new ArrayList<Filter>();
  filters.add(new RowFilter(CompareOp.EQUAL,
    new RegexStringComparator(".+-2")));
  filters.add(new QualifierFilter(CompareOp.EQUAL,
    new RegexStringComparator(".+-2")));
  filters.add(new ValueFilter(CompareOp.EQUAL,
    new SubstringComparator("One")));
  Filter f = new FilterList(Operator.MUST_PASS_ALL, filters);
  Scan s = new Scan();
  s.addFamily(FAMILIES[0]);
  s.setFilter(f);
  KeyValue [] kvs = {
      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0])
  };
  verifyScanFull(s, kvs);

  // Test getting everything with a MUST_PASS_ONE filter including row, qf,
  // val, regular expression and substring filters
  filters.clear();
  filters.add(new RowFilter(CompareOp.EQUAL,
    new RegexStringComparator(".+Two.+")));
  filters.add(new QualifierFilter(CompareOp.EQUAL,
    new RegexStringComparator(".+-2")));
  filters.add(new ValueFilter(CompareOp.EQUAL,
    new SubstringComparator("One")));
  f = new FilterList(Operator.MUST_PASS_ONE, filters);
  s = new Scan();
  s.setFilter(f);
  verifyScanNoEarlyOut(s, numRows, colsPerRow);
}
 
Developer: wanhao, Project: IRIndex, Lines: 36, Source: TestScannersWithFilters.java


Note: The org.apache.hadoop.hbase.filter.QualifierFilter class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors, and distribution and use should follow the corresponding project's license. Do not republish without permission.