This article collects typical usage examples of the Java method org.apache.hadoop.hbase.thrift.generated.TScan.isSetCaching. If you have been asking yourself what TScan.isSetCaching does, how to call it, or where to find working examples, the curated snippets below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.thrift.generated.TScan.
The following shows 7 code examples of the TScan.isSetCaching method, sorted by popularity by default.
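Before the server-side examples, it helps to see where isSetCaching comes from. TScan is a Thrift-generated struct, so every optional field such as caching carries a setter, a getter, and an isSet probe that reports whether the field was ever assigned. Below is a minimal client-side sketch, not taken from the examples themselves: the host, port, and table name are placeholders, and depending on how the Thrift gateway was started a framed transport may be needed instead of the raw socket shown here.
import java.nio.ByteBuffer;
import java.util.HashMap;

import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.TScan;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class TScanCachingExample {
  public static void main(String[] args) throws Exception {
    // TScan is a Thrift-generated struct: optional fields start out unset.
    TScan tScan = new TScan();
    System.out.println(tScan.isSetCaching()); // false -- nothing assigned yet
    tScan.setCaching(100);                    // request 100 rows per scanner fetch
    System.out.println(tScan.isSetCaching()); // true

    // Placeholder host/port/table; a framed transport may be required
    // depending on how the Thrift server was launched.
    TTransport transport = new TSocket("localhost", 9090);
    transport.open();
    Hbase.Client client = new Hbase.Client(new TBinaryProtocol(transport));
    int scannerId = client.scannerOpenWithScan(
        ByteBuffer.wrap("test_table".getBytes("UTF-8")), tScan,
        new HashMap<ByteBuffer, ByteBuffer>());
    // ... read rows with client.scannerGet(scannerId) ...
    client.scannerClose(scannerId);
    transport.close();
  }
}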
Example 1: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
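A note on the design choice here: in the Thrift-generated Java code, caching is an optional i32 field, and an unset optional i32 simply reads as 0. Copying it unconditionally with scan.setCaching(tScan.getCaching()) would therefore push a spurious 0 into the Scan whenever the client left the field blank; the isSetCaching() guard forwards the value only when it was explicitly assigned. The same pattern protects every other optional field in these handlers.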
Example 2: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan) throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan));
  } catch (IOException e) {
    throw new IOError(e.getMessage());
  }
}
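Compared with Example 1, this older variant takes no attributes map (so there is no addAttributes call), registers the scanner without sort columns, and rethrows the IOException as an IOError without logging it first.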
Example 3: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan));
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
Example 4: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
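This variant comes from a newer handler: it works against the Table interface rather than HTable, additionally honors the optional batchSize and reversed fields, reports the full stack trace in the IOError via Guava's Throwables.getStackTraceAsString, and releases the table in a finally block.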
Example 5: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
Example 6: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = CellUtil.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    if (tScan.isSetCacheBlocks()) {
      scan.setCacheBlocks(tScan.isCacheBlocks());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(table);
  }
}
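The most recent of these variants: it parses column specifiers with CellUtil.parseColumn (the replacement for the deprecated KeyValue.parseColumn), additionally forwards the optional cacheBlocks flag to the Scan, and converts exceptions through a getIOError helper instead of constructing the IOError inline.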
Example 7: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the package/class this method depends on
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}