This article collects typical usage examples of the Java class org.apache.hadoop.hbase.thrift.generated.TScan. If you are unsure what the TScan class does or how to use it, the curated examples below should help.
The TScan class belongs to the org.apache.hadoop.hbase.thrift.generated package; 14 code examples of the class are shown below.
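Before the examples, here is a minimal client-side sketch of how a TScan is typically built and sent to the HBase Thrift1 gateway. It is an illustration, not a drop-in program: the host, port, table name, row keys, and filter string are placeholders, and it assumes a running Thrift gateway plus the generated Hbase.Client service class from the same package as TScan.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.thrift.generated.TScan;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TBufferedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class TScanQuickStart {
  public static void main(String[] args) throws Exception {
    // Connect to the HBase Thrift gateway (host/port are placeholders; 9090 is the usual default).
    TTransport transport = new TBufferedTransport(new TSocket("localhost", 9090));
    Hbase.Client client = new Hbase.Client(new TBinaryProtocol(transport));
    transport.open();

    // Describe the scan declaratively; only the TScan fields that are set are applied server-side.
    TScan scan = new TScan();
    scan.setStartRow(ByteBuffer.wrap("row-000".getBytes(StandardCharsets.UTF_8)));
    scan.setStopRow(ByteBuffer.wrap("row-999".getBytes(StandardCharsets.UTF_8)));
    scan.setCaching(100);
    scan.setFilterString(ByteBuffer.wrap("PrefixFilter('row-')".getBytes(StandardCharsets.UTF_8)));

    int scannerId = client.scannerOpenWithScan(
        ByteBuffer.wrap("myTable".getBytes(StandardCharsets.UTF_8)), // table name is a placeholder
        scan, new HashMap<ByteBuffer, ByteBuffer>());
    try {
      List<TRowResult> rows = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
      for (TRowResult row : rows) {
        System.out.println(new String(row.getRow(), StandardCharsets.UTF_8));
      }
    } finally {
      client.scannerClose(scannerId); // scanners hold server-side state; always close them
      transport.close();
    }
  }
}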
Example 1: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      // Note: the later handler variants (Examples 8, 9, 13, 14) use 0 as the lower bound here.
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
Example 2: rowFilter
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Test
public void rowFilter() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  //
  String filterString = "RowFilter(=, 'regexstring:00[1-3]00')";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  printlnResult(results);
}
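The test examples in this article rely on a ByteBufferHelper utility and a pre-built client (presumably an Hbase.Client instance) that are not shown. The helper is most likely just a String-to-ByteBuffer adapter for the Thrift API; a hypothetical reconstruction:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Hypothetical reconstruction of the ByteBufferHelper used in these tests:
// it wraps a String's UTF-8 bytes in a ByteBuffer for the Thrift API.
public final class ByteBufferHelper {

  private ByteBufferHelper() {
  }

  public static ByteBuffer toByteBuffer(String value) {
    return ByteBuffer.wrap(value.getBytes(StandardCharsets.UTF_8));
  }
}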
Example 3: prefixFilter
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Test
public void prefixFilter() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  //
  String filterString = "PrefixFilter('1000|B101|A17P')";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  printlnResult(results);
}
Example 4: keyOnlyFilter
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
/**
 * Returns only the keys; cell values are masked out.
 *
 * @throws Exception
 */
@Test
public void keyOnlyFilter() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  // KeyOnlyFilter()
  String filterString = "KeyOnlyFilter()";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  //
  printlnResult(results);
}
Example 5: firstKeyOnlyFilter
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
/**
 * Returns only the first KeyValue of each row.
 *
 * @throws Exception
 */
@Test
public void firstKeyOnlyFilter() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  // FirstKeyOnlyFilter()
  String filterString = "FirstKeyOnlyFilter()";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  //
  printlnResult(results);
}
Example 6: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan) throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan));
  } catch (IOException e) {
    throw new IOError(e.getMessage());
  }
}
Example 7: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan));
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
Example 8: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
Example 9: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
Example 10: scanOneColumn
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
/**
 * Scan that reads one specific column.
 *
 * @throws Exception
 */
@Test
// 1053 ms.
// 897 ms.
// 888 ms.
public void scanOneColumn() throws Exception {
  // String TABLE_NAME = "ItemCreation_Buffer_Item";
  // String rowKey =
  // "0016630555\\x0501\\x0501\\x05A1FS\\x05CreateItem\\x0516630555\\x050000116\\x05670744-086876166705";
  // String column = "ItemInfo:SellerId";
  String TABLE_NAME = "UIH_OverallItemInfo";
  String rowKey = "1000|I200|A17P|AUDI15028071";
  String column = "CommonInfo:SellerID";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  columns.add(ByteBufferHelper.toByteBuffer(column));
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  scan.setStartRow(ByteBufferHelper.toByteBuffer(rowKey));
  // scan.setStopRow(ByteBufferHelper.toByteBuffer(rowKey));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  printlnResult(results);
}
Example 11: scanNotClose
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Test
public void scanNotClose() throws Exception {
  String TABLE_NAME = "ItemCreation_Buffer_Item_B";
  String column = "ItemInfo:ProcessResult";
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  columns.add(ByteBufferHelper.toByteBuffer(column));
  columns.add(ByteBufferHelper.toByteBuffer("ItemInfo:ProcessStatus"));
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  //
  String filterString = "SingleColumnValueFilter('ItemInfo','ProcessResult',=,'substring:Test')"; // or e.g. substring:A17P
  filterString += " AND SingleColumnValueFilter('ItemInfo','ProcessStatus',=,'substring:Test')";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  int rowCount = 0;
  long beg = System.currentTimeMillis();
  for (int i = 0; i < 1000; i++) {
    try {
      scannerId = client.scannerOpenWithScan(
          ByteBufferHelper.toByteBuffer(TABLE_NAME), scan,
          attributes); // attributes may be null
      // System.out.println(scannerId);
      while (true) {
        List<TRowResult> results = client.scannerGet(scannerId);
        if (results.isEmpty()) {
          break;
        }
        System.out.println(i + ", " + scannerId);
        // printlnResult(results);
        rowCount++;
        if (rowCount > 0) {
          break; // stop after the first row; each loop iteration leaves one scanner open
        }
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    } finally {
      if (scannerId >= 0) {
        // intentionally not closed, to observe leaked scanners on the server
        // client.scannerClose(scannerId); // always close the scanner in real code
      }
    }
  }
  //
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
}
Example 12: singleColumnValueFilter
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Test
public void singleColumnValueFilter() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  String column = "CommonInfo:SellerID";
  List<TRowResult> results = null;
  //
  List<ByteBuffer> columns = new LinkedList<ByteBuffer>();
  columns.add(ByteBufferHelper.toByteBuffer(column));
  columns.add(ByteBufferHelper.toByteBuffer("CommonInfo:SellerItemNumber"));
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<ByteBuffer, ByteBuffer>();
  //
  TScan scan = new TScan();
  scan.setCaching(200);
  // Syntax: SingleColumnValueFilter('<family>', '<qualifier>', <compare operator>,
  // '<comparator>', <filterIfColumnMissing_boolean>, <latest_version_boolean>)
  // Note: a SingleColumnValueFilter only takes effect when the column it references
  // is also included in the scan's COLUMNS list.
  String filterString = "SingleColumnValueFilter('CommonInfo','SellerID',=,'binary:A17P')"; // or e.g. substring:A17P
  filterString += " AND SingleColumnValueFilter('CommonInfo','SellerItemNumber',=,'binary:AUDI15028005')";
  scan.setFilterString(ByteBufferHelper.toByteBuffer(filterString));
  scan.setColumns(columns);
  //
  int scannerId = 0;
  long beg = System.currentTimeMillis();
  try {
    scannerId = client.scannerOpenWithScan(
        ByteBufferHelper.toByteBuffer(TABLE_NAME), scan, attributes); // attributes may be null
    results = client.scannerGetList(scannerId, 10); // fetch up to 10 rows
  } catch (Exception ex) {
    ex.printStackTrace();
  } finally {
    if (scannerId >= 0) {
      client.scannerClose(scannerId); // always close the scanner
    }
  }
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  printlnResult(results);
}
Example 13: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        // note: this variant parses family:qualifier via CellUtil rather than KeyValue
        byte[][] famQf = CellUtil.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    if (tScan.isSetCacheBlocks()) {
      scan.setCacheBlocks(tScan.isCacheBlocks());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw getIOError(e);
  } finally {
    closeTable(table);
  }
}
Example 14: scannerOpenWithScan
import org.apache.hadoop.hbase.thrift.generated.TScan; // import the required package/class
@Override
public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Scan scan = new Scan();
    addAttributes(scan, attributes);
    if (tScan.isSetStartRow()) {
      scan.setStartRow(tScan.getStartRow());
    }
    if (tScan.isSetStopRow()) {
      scan.setStopRow(tScan.getStopRow());
    }
    if (tScan.isSetTimestamp()) {
      scan.setTimeRange(0, tScan.getTimestamp());
    }
    if (tScan.isSetCaching()) {
      scan.setCaching(tScan.getCaching());
    }
    if (tScan.isSetBatchSize()) {
      scan.setBatch(tScan.getBatchSize());
    }
    if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
      for (ByteBuffer column : tScan.getColumns()) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    if (tScan.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      scan.setFilter(
          parseFilter.parseFilterString(tScan.getFilterString()));
    }
    if (tScan.isSetReversed()) {
      scan.setReversed(tScan.isReversed());
    }
    return addScanner(table.getScanner(scan), tScan.sortColumns);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}