

Java IncompatibleFilterException Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hbase.filter.IncompatibleFilterException. If you are wondering what IncompatibleFilterException is for or how to use it, the curated class examples below should help.


The IncompatibleFilterException class belongs to the org.apache.hadoop.hbase.filter package. Seven code examples of the class are shown below, ordered by popularity.
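Before the examples, here is a minimal sketch (my own illustration, not taken from any of the projects below) of the situation in which IncompatibleFilterException is typically thrown: calling Scan.setBatch() while a row-level filter such as DependentColumnFilter is set. It assumes the HBase client library is on the classpath.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.DependentColumnFilter;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.util.Bytes;

public class IncompatibleFilterExceptionDemo {
  public static void main(String[] args) {
    Scan scan = new Scan();
    // DependentColumnFilter works on whole rows, so its hasFilterRow() returns true.
    scan.setFilter(new DependentColumnFilter(Bytes.toBytes("cf"), Bytes.toBytes("q")));
    try {
      scan.setBatch(100); // batching is incompatible with row-level filters
    } catch (IncompatibleFilterException e) {
      // IncompatibleFilterException is unchecked; catch it when the filter is chosen at runtime.
      System.err.println("Cannot combine setBatch with this filter: " + e.getMessage());
    }
  }
}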

Example 1: setBatch

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
/**
 * Set the maximum number of values to return for each call to next()
 * @param batch the maximum number of values
 */
public Scan setBatch(int batch) {
  if (this.hasFilter() && this.filter.hasFilterRow()) {
    throw new IncompatibleFilterException(
      "Cannot set batch on a scan using a filter" +
      " that returns true for filter.hasFilterRow");
  }
  this.batch = batch;
  return this;
}
 
Developer: fengchen8086, Project: ditb, Lines of code: 14, Source file: Scan.java

Example 2: setBatch

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
/**
 * Set the maximum number of values to return for each call to next()
 * @param batch the maximum number of values
 */
public void setBatch(int batch) {
  if (this.hasFilter() && this.filter.hasFilterRow()) {
    throw new IncompatibleFilterException("Cannot set batch on a scan using a filter"
        + " that returns true for filter.hasFilterRow");
  }
  this.batch = batch;
}
 
Developer: fengchen8086, Project: LCIndex-HBase-0.94.16, Lines of code: 12, Source file: Scan.java

Example 3: getConfiguredScanForJob

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
  Scan s = new Scan();
  // Optional arguments.
  // Set Scan Versions
  int versions = args.length > 2 ? Integer.parseInt(args[2]) : 1;
  s.setMaxVersions(versions);
  // Set Scan Range
  long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L;
  long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE;
  s.setTimeRange(startTime, endTime);
  // Set cache blocks
  s.setCacheBlocks(false);
  // Set raw scan (whether to include deleted cells)
  boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
  if (raw) {
    s.setRaw(raw);
  }
  
  if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
    s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
  }
  // Set RowFilter or Prefix Filter if applicable.
  Filter exportFilter = getExportFilter(args);
  if (exportFilter != null) {
    LOG.info("Setting Scan Filter for Export.");
    s.setFilter(exportFilter);
  }

  int batching = conf.getInt(EXPORT_BATCHING, -1);
  if (batching != -1) {
    try {
      s.setBatch(batching);
    } catch (IncompatibleFilterException e) {
      LOG.error("Batching could not be set", e);
    }
  }
  LOG.info("versions=" + versions + ", starttime=" + startTime +
    ", endtime=" + endTime + ", keepDeletedCells=" + raw);
  return s;
}
 
Developer: tenggyut, Project: HIndex, Lines of code: 41, Source file: Export.java
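For context on Example 3: the caller drives this method entirely through the job Configuration. The sketch below shows one plausible way to populate it before submitting the Export job. The literal keys used for EXPORT_BATCHING and RAW_SCAN are my assumptions (commonly "hbase.export.scanner.batch" and "hbase.mapreduce.include.deleted.rows"); verify them against the Export source in your HBase version.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

public class ExportConfSketch {
  public static Configuration buildExportConf() {
    Configuration conf = HBaseConfiguration.create();
    // Restrict the export to one column family (read by getConfiguredScanForJob above).
    conf.set(TableInputFormat.SCAN_COLUMN_FAMILY, "cf");
    // Assumed key behind EXPORT_BATCHING: cap cells returned per Result at 500.
    conf.setInt("hbase.export.scanner.batch", 500);
    // Assumed key behind RAW_SCAN: include delete markers and deleted cells.
    conf.setBoolean("hbase.mapreduce.include.deleted.rows", true);
    return conf;
  }
}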

Example 4: setBatch

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
/**
 * Set the maximum number of values to return for each call to next()
 * @param batch the maximum number of values
 */
public void setBatch(int batch) {
  if (this.hasFilter() && this.filter.hasFilterRow()) {
    throw new IncompatibleFilterException(
      "Cannot set batch on a scan using a filter" +
      " that returns true for filter.hasFilterRow");
  }
  this.batch = batch;
}
 
Developer: tenggyut, Project: HIndex, Lines of code: 13, Source file: Scan.java

Example 5: setBatch

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
/**
 * Set the maximum number of cells to return for each call to next(). Callers should be aware
 * that this is not equivalent to calling {@link #setAllowPartialResults(boolean)}.
 * If partial results are not allowed, the number of cells in each Result must equal your
 * batch setting unless it is the last Result for the current row, so this method is useful for
 * paging queries. If you only want to prevent OOM on the client, setAllowPartialResults(true) is the better choice.
 * @param batch the maximum number of values
 * @see Result#mayHaveMoreCellsInRow()
 */
public Scan setBatch(int batch) {
  if (this.hasFilter() && this.filter.hasFilterRow()) {
    throw new IncompatibleFilterException(
      "Cannot set batch on a scan using a filter" +
      " that returns true for filter.hasFilterRow");
  }
  this.batch = batch;
  return this;
}
 
Developer: apache, Project: hbase, Lines of code: 19, Source file: Scan.java
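The javadoc in Example 5 distinguishes batching from setAllowPartialResults(true). A minimal sketch of the batching side, assuming an HBase 2.x client, an open Connection conn, and a hypothetical table "demo": each Result carries at most 100 cells, and Result.mayHaveMoreCellsInRow() tells you whether the current row continues in the next Result.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchedScanSketch {
  static void scanInBatches(Connection conn) throws IOException {
    Scan scan = new Scan();
    scan.setBatch(100); // no row-level filter is set, so this does not throw
    try (Table table = conn.getTable(TableName.valueOf("demo"));
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        // With batching, a wide row may be split across several Results.
        boolean rowContinues = r.mayHaveMoreCellsInRow();
        System.out.println(Bytes.toString(r.getRow()) + ": " + r.size() + " cells"
            + (rowContinues ? " (row continues in the next Result)" : ""));
      }
    }
  }
}

If the goal is only to avoid client-side OOM rather than fixed-size pages, the same javadoc recommends scan.setAllowPartialResults(true) instead.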

Example 6: setBatch

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
/**
 * Set the maximum number of values to return for each call to next()
 * 
 * @param batch
 *            the maximum number of values
 */
public void setBatch(int batch) {
	if (this.hasFilter() && this.filter.hasFilterRow()) {
		throw new IncompatibleFilterException(
				"Cannot set batch on a scan using a filter"
						+ " that returns true for filter.hasFilterRow");
	}
	this.batch = batch;
}
 
Developer: lifeng5042, Project: RStore, Lines of code: 15, Source file: Scan.java

Example 7: getConfiguredScanForJob

import org.apache.hadoop.hbase.filter.IncompatibleFilterException; // import the required package/class
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
  Scan s = new Scan();
  // Optional arguments.
  // Set Scan Versions
  int versions = args.length > 2 ? Integer.parseInt(args[2]) : 1;
  s.setMaxVersions(versions);
  // Set Scan Range
  long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L;
  long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE;
  s.setTimeRange(startTime, endTime);
  // Set cache blocks
  s.setCacheBlocks(false);
  // set Start and Stop row
  if (conf.get(TableInputFormat.SCAN_ROW_START) != null) {
    s.setStartRow(Bytes.toBytes(conf.get(TableInputFormat.SCAN_ROW_START)));
  }
  if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
    s.setStopRow(Bytes.toBytes(conf.get(TableInputFormat.SCAN_ROW_STOP)));
  }
  // Set raw scan (whether to include deleted cells)
  boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
  if (raw) {
    s.setRaw(raw);
  }
  
  if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
    s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
  }
  // Set RowFilter or Prefix Filter if applicable.
  Filter exportFilter = getExportFilter(args);
  if (exportFilter != null) {
    LOG.info("Setting Scan Filter for Export.");
    s.setFilter(exportFilter);
  }

  int batching = conf.getInt(EXPORT_BATCHING, -1);
  if (batching != -1) {
    try {
      s.setBatch(batching);
    } catch (IncompatibleFilterException e) {
      LOG.error("Batching could not be set", e);
    }
  }
  LOG.info("versions=" + versions + ", starttime=" + startTime +
    ", endtime=" + endTime + ", keepDeletedCells=" + raw);
  return s;
}
 
Developer: fengchen8086, Project: ditb, Lines of code: 48, Source file: Export.java
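Example 7 additionally honours TableInputFormat.SCAN_ROW_START and TableInputFormat.SCAN_ROW_STOP. A minimal sketch of setting that row range before the job runs (my own illustration of typical usage, not taken from the project above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

public class ExportRowRangeSketch {
  public static Configuration withRowRange(String startRow, String stopRow) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.SCAN_ROW_START, startRow); // inclusive start row
    conf.set(TableInputFormat.SCAN_ROW_STOP, stopRow);   // exclusive stop row
    return conf;
  }
}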


Note: The org.apache.hadoop.hbase.filter.IncompatibleFilterException class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and distribution and use are governed by each project's license. Do not reproduce without permission.