

Java Scan.setStopRow Method Code Examples

This article collects typical usage examples of the org.apache.hadoop.hbase.client.Scan.setStopRow method in Java. If you are wondering what Scan.setStopRow does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.client.Scan.


The sections below present 15 code examples of Scan.setStopRow, ordered by popularity by default.
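
Before the individual examples, here is a minimal, self-contained sketch of how setStartRow and setStopRow bound a scan. The table name "my_table", column family "cf", and row keys are hypothetical placeholders chosen only to illustrate the API; note that the start row is inclusive while the stop row is exclusive.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class SetStopRowSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("my_table"))) {
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("cf"));
      scan.setStartRow(Bytes.toBytes("row-0010")); // first row returned (inclusive)
      scan.setStopRow(Bytes.toBytes("row-0020"));  // scan stops before this row (exclusive)
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    }
  }
}

(In HBase 2.x, setStartRow/setStopRow are deprecated in favor of withStartRow/withStopRow; the examples below target the older client API.)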

Example 1: testMinWithInvalidRange2

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test(timeout = 300000)
public void testMinWithInvalidRange2() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[6]);
  final ColumnInterpreter<Double, Double, EmptyMsg, DoubleMsg, DoubleMsg> ci = 
      new DoubleColumnInterpreter();
  Double min = null;
  try {
    min = aClient.min(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, min);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestDoubleColumnInterpreter.java

Example 2: testAvgWithInvalidRange

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testAvgWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[1]);
  final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
    new BigDecimalColumnInterpreter();
  Double avg = null;
  try {
    avg = aClient.avg(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, avg);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestBigDecimalColumnInterpreter.java

Example 3: testMinWithInvalidRange

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testMinWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  BigDecimal min = null;
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[4]);
  scan.setStopRow(ROWS[2]);
  final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
    new BigDecimalColumnInterpreter();
  try {
    min = aClient.min(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, min);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestBigDecimalColumnInterpreter.java

Example 4: testAvgWithInvalidRange

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test(timeout = 300000)
public void testAvgWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[1]);
  final ColumnInterpreter<Double, Double, EmptyMsg, DoubleMsg, DoubleMsg> ci = 
      new DoubleColumnInterpreter();
  Double avg = null;
  try {
    avg = aClient.avg(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, avg);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestDoubleColumnInterpreter.java

Example 5: testMaxWithInvalidRange

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test(timeout = 300000)
public void testMaxWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  final ColumnInterpreter<Double, Double, EmptyMsg, DoubleMsg, DoubleMsg> ci = 
      new DoubleColumnInterpreter();
  Scan scan = new Scan();
  scan.setStartRow(ROWS[4]);
  scan.setStopRow(ROWS[2]);
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  double max = Double.MIN_VALUE;
  try {
    max = aClient.max(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
    max = 0.00;
  }
  assertEquals(0.00, max, 0.00);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 19, Source file: TestDoubleColumnInterpreter.java

Example 6: testMaxWithInvalidRange

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testMaxWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
    new BigDecimalColumnInterpreter();
  Scan scan = new Scan();
  scan.setStartRow(ROWS[4]);
  scan.setStopRow(ROWS[2]);
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  BigDecimal max = new BigDecimal(Long.MIN_VALUE);
  try {
    max = aClient.max(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
    max = BigDecimal.ZERO;
  }
  assertEquals(BigDecimal.ZERO, max);// control should go to the catch block
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 19, Source file: TestBigDecimalColumnInterpreter.java

Example 7: testStdWithValidRangeWithNullCF

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test(timeout = 300000)
public void testStdWithValidRangeWithNullCF() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[17]);
  final ColumnInterpreter<Double, Double, EmptyMsg, DoubleMsg, DoubleMsg> ci = 
      new DoubleColumnInterpreter();
  Double std = null;
  try {
    std = aClient.std(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, std);// CP will throw an IOException about the
  // null column family, and max will be set to 0
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestDoubleColumnInterpreter.java

Example 8: testStdWithValidRangeWithNullCF

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testStdWithValidRangeWithNullCF() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[17]);
  final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
    new BigDecimalColumnInterpreter();
  Double std = null;
  try {
    std = aClient.std(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, std);// CP will throw an IOException about the
  // null column family, and max will be set to 0
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 17, Source file: TestBigDecimalColumnInterpreter.java

Example 9: innerAddScanner

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
private void innerAddScanner(HRegionLocation regionLocation) throws IOException {
  Scan newScan = new Scan(rawScan);
  if (regionLocation.getRegionInfo().getStartKey() != null)
    newScan.setStartRow(regionLocation.getRegionInfo().getStartKey());
  if (regionLocation.getRegionInfo().getEndKey() != null)
    newScan.setStopRow(regionLocation.getRegionInfo().getEndKey());
  newScan.setAttribute(IndexConstants.SCAN_WITH_INDEX, Bytes.toBytes("Hi"));
  ResultScanner scanner = table.getScanner(newScan);
  synchronized (scannerList) {
    scannerList.add(scanner);
  }
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 13, Source file: LocalScannerInParallel.java

Example 10: initScanner

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
/**
 * init selected range and scanner
 *
 * @throws IOException
 */
private void initScanner() throws IOException {
  ScanRange selectedRange = null;
  int selectedRegionNumber = Integer.MAX_VALUE;
  for (ScanRange range : rangeList.getRanges()) {
    int cover = countCoveringRegions(conn,
        relation.getIndexTableName(range.getFamily(), range.getQualifier()), range.getStart(),
        range.getStop());
    LOG.info("LCDBG, " + cover + " regions are covered by range " + range);
    if (selectedRegionNumber > cover) {
      selectedRegionNumber = cover;
      selectedRange = range;
    }
  }
  LOG.info("LCDBG, GC Scanner using range " + selectedRange + " with " + selectedRegionNumber
      + " regions for scan id= " + rawScan.getId());
  indexFamily = selectedRange.getFamily();
  indexQualifier = selectedRange.getQualifier();
  List<ScanRange> list = new ArrayList<>(rangeList.getRanges());
  list.remove(selectedRange);
  Scan scan = new Scan();
  scan.setStartRow(selectedRange.getStart());
  scan.setStopRow(selectedRange.getStop());
  scan.setFamilyMap(rawScan.getFamilyMap());
  scan.setCaching(rawScan.getCaching());
  scan.setCacheBlocks(rawScan.getCacheBlocks());
  scan.setId(rawScan.getId());
  scan.setFilter(new ScanRange.ScanRangeList(list).toFilterList());
  Table table = conn.getTable(
      relation.getIndexTableName(selectedRange.getFamily(), selectedRange.getQualifier()));
  scanner = table.getScanner(scan);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 37, Source file: GCScanner.java

Example 11: testMinWithValidRangeWithNoCQ

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testMinWithValidRangeWithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(HConstants.EMPTY_START_ROW);
  scan.setStopRow(HConstants.EMPTY_END_ROW);
  final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
      new LongColumnInterpreter();
  long min = aClient.min(TEST_TABLE, ci,
      scan);
  assertEquals(0, min);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 14, Source file: TestAggregateProtocol.java

Example 12: testMinWithValidRangeWithNoCQ

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testMinWithValidRangeWithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(HConstants.EMPTY_START_ROW);
  scan.setStopRow(HConstants.EMPTY_END_ROW);
  final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci =
    new BigDecimalColumnInterpreter();
  BigDecimal min = aClient.min(TEST_TABLE, ci, scan);
  assertEquals(new BigDecimal("0.00"), min);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 13, Source file: TestBigDecimalColumnInterpreter.java

Example 13: testSumWithValidRange2WithNoCQ

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
@Test (timeout=300000)
public void testSumWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
      new LongColumnInterpreter();
  long sum = aClient.sum(TEST_TABLE, ci, scan);
  assertEquals(6 + 60, sum);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 13, Source file: TestAggregateProtocol.java

Example 14: testScan

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
/**
 * Tests a MR scan using specific start and stop rows.
 *
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws InterruptedException
 */
private void testScan(String start, String stop, String last)
    throws IOException, InterruptedException, ClassNotFoundException {
  String jobName =
      "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
          (stop != null ? stop.toUpperCase() : "Empty");
  LOG.info("Before map/reduce startup - job " + jobName);
  Configuration c = new Configuration(TEST_UTIL.getConfiguration());

  c.set(KEY_STARTROW, start != null ? start : "");
  c.set(KEY_LASTROW, last != null ? last : "");

  List<Scan> scans = new ArrayList<Scan>();

  for (String tableName : TABLES) {
    Scan scan = new Scan();

    scan.addFamily(INPUT_FAMILY);
    scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(tableName));

    if (start != null) {
      scan.setStartRow(Bytes.toBytes(start));
    }
    if (stop != null) {
      scan.setStopRow(Bytes.toBytes(stop));
    }

    scans.add(scan);

    LOG.info("scan before: " + scan);
  }

  runJob(jobName, c, scans);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 41, Source file: MultiTableInputFormatTestBase.java

Example 15: testScan

import org.apache.hadoop.hbase.client.Scan; // import the package/class the method depends on
/**
 * Tests a MR scan using specific start and stop rows.
 *
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws InterruptedException
 */
protected void testScan(String start, String stop, String last)
throws IOException, InterruptedException, ClassNotFoundException {
  String jobName = "Scan" + (start != null ? start.toUpperCase() : "Empty") +
    "To" + (stop != null ? stop.toUpperCase() : "Empty");
  LOG.info("Before map/reduce startup - job " + jobName);
  Configuration c = new Configuration(TEST_UTIL.getConfiguration());
  Scan scan = new Scan();
  scan.addFamily(INPUT_FAMILY);
  if (start != null) {
    scan.setStartRow(Bytes.toBytes(start));
  }
  c.set(KEY_STARTROW, start != null ? start : "");
  if (stop != null) {
    scan.setStopRow(Bytes.toBytes(stop));
  }
  c.set(KEY_LASTROW, last != null ? last : "");
  LOG.info("scan before: " + scan);
  Job job = new Job(c, jobName);
  TableMapReduceUtil.initTableMapperJob(
    Bytes.toString(TABLE_NAME), scan, ScanMapper.class,
    ImmutableBytesWritable.class, ImmutableBytesWritable.class, job);
  job.setReducerClass(ScanReducer.class);
  job.setNumReduceTasks(1); // one to get final "first" and "last" key
  FileOutputFormat.setOutputPath(job,
      new Path(TEST_UTIL.getDataTestDir(), job.getJobName()));
  LOG.info("Started " + job.getJobName());
  assertTrue(job.waitForCompletion(true));
  LOG.info("After map/reduce completion - job " + jobName);
}
 
Developer ID: fengchen8086, Project: ditb, Lines of code: 37, Source file: TestTableInputFormatScanBase.java


Note: The org.apache.hadoop.hbase.client.Scan.setStopRow method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's License before distributing or using the code; do not reproduce without permission.