This article collects typical usage examples of the Java method org.apache.hadoop.hbase.KeyValue.parseColumn. If you are unsure what KeyValue.parseColumn does, how to call it, or where to find real-world uses, the curated examples below should help; for further details, see the containing class, org.apache.hadoop.hbase.KeyValue.
The 15 code examples below show KeyValue.parseColumn in use, ordered by popularity.
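For orientation: KeyValue.parseColumn splits a combined column specifier on the colon separator. A bare family name comes back as a one-element array, while "family:qualifier" comes back as family plus qualifier. A minimal, self-contained sketch (the "info" family and "name" qualifier are made-up values for illustration):

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseColumnDemo {
  public static void main(String[] args) {
    // No colon: only the family is returned, so the array has length 1.
    byte[][] familyOnly = KeyValue.parseColumn(Bytes.toBytes("info"));
    System.out.println(familyOnly.length); // 1

    // "family:qualifier": the array has length 2.
    byte[][] famAndQual = KeyValue.parseColumn(Bytes.toBytes("info:name"));
    System.out.println(Bytes.toString(famAndQual[0])); // info
    System.out.println(Bytes.toString(famAndQual[1])); // name
  }
}

This length-1 versus length-2 distinction is exactly what the examples below branch on.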
Example 1: deleteAllTs
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public void deleteAllTs(ByteBuffer tableName,
                        ByteBuffer row,
                        ByteBuffer column,
                        long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table table = null;
  try {
    table = getTable(tableName);
    Delete delete = new Delete(getBytes(row));
    addAttributes(delete, attributes);
    byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
    if (famAndQf.length == 1) {
      delete.deleteFamily(famAndQf[0], timestamp);
    } else {
      delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
    }
    table.delete(delete);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
Example 2: appendFromThrift
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * From a {@link TAppend} create an {@link Append}.
 * @param tappend the Thrift version of an append.
 * @return the {@link Append} that the {@link TAppend} represented.
 */
public static Append appendFromThrift(TAppend tappend) {
  Append append = new Append(tappend.getRow());
  List<ByteBuffer> columns = tappend.getColumns();
  List<ByteBuffer> values = tappend.getValues();
  if (columns.size() != values.size()) {
    throw new IllegalArgumentException(
        "Sizes of columns and values in tappend object are not matching");
  }
  int length = columns.size();
  for (int i = 0; i < length; i++) {
    byte[][] famAndQf = KeyValue.parseColumn(getBytes(columns.get(i)));
    append.add(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
  }
  return append;
}
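As a quick usage illustration, here is a hedged sketch of how a caller might build a TAppend and convert it with this method (the row, column, and value bytes are invented for the example; addToColumns and addToValues are the list setters Thrift generates for TAppend):

TAppend tappend = new TAppend();
tappend.setRow(Bytes.toBytes("row1"));
tappend.addToColumns(ByteBuffer.wrap(Bytes.toBytes("cf:counter")));
tappend.addToValues(ByteBuffer.wrap(Bytes.toBytes("-suffix")));
Append append = appendFromThrift(tappend); // one cell appended to cf:counter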
Example 3: insertData
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
private static int insertData(TableName tableName, String column, double prob) throws IOException {
  byte[] k = new byte[3];
  byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
  List<Put> puts = new ArrayList<>();
  for (int i = 0; i < 9; i++) {
    Put put = new Put(Bytes.toBytes("row" + i));
    put.setDurability(Durability.SKIP_WAL);
    put.add(famAndQf[0], famAndQf[1], k);
    put.setCellVisibility(new CellVisibility("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!"
        + TOPSECRET));
    puts.add(put);
  }
  try (Table table = new HTable(TEST_UTIL.getConfiguration(), tableName)) {
    table.put(puts);
  }
  return puts.size();
}
Example 4: buildResultFromModel
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
protected Result[] buildResultFromModel(final CellSetModel model) {
  List<Result> results = new ArrayList<Result>();
  for (RowModel row: model.getRows()) {
    List<Cell> kvs = new ArrayList<Cell>();
    for (CellModel cell: row.getCells()) {
      byte[][] split = KeyValue.parseColumn(cell.getColumn());
      byte[] column = split[0];
      byte[] qualifier = null;
      if (split.length == 1) {
        qualifier = HConstants.EMPTY_BYTE_ARRAY;
      } else if (split.length == 2) {
        qualifier = split[1];
      } else {
        throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
      }
      kvs.add(new KeyValue(row.getKey(), column, qualifier,
          cell.getTimestamp(), cell.getValue()));
    }
    results.add(Result.create(kvs));
  }
  return results.toArray(new Result[results.size()]);
}
Example 5: run
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
public void run() {
  try {
    Get get = new Get(rowkey);
    get.setFilter(ftlist);
    get.setCacheBlocks(false);
    if (resultColumns != null && resultColumns.length != 0) {
      for (byte[] column : resultColumns) {
        byte[][] tmp = KeyValue.parseColumn(column);
        if (tmp.length == 1) {
          get.addFamily(tmp[0]);
        } else {
          get.addColumn(tmp[0], tmp[1]);
        }
      }
    }
    rsnew = table.get(get);
    table.close();
  } catch (Exception e) {
    rsnew = null;
    exception = e;
  }
}
Example 6: insertData
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
static int insertData(Configuration conf, TableName tableName, String column, double prob)
    throws IOException {
  Random rng = new Random();
  byte[] k = new byte[3];
  byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
  List<Put> puts = new ArrayList<>();
  for (byte b1 = 'a'; b1 < 'z'; b1++) {
    for (byte b2 = 'a'; b2 < 'z'; b2++) {
      for (byte b3 = 'a'; b3 < 'z'; b3++) {
        if (rng.nextDouble() < prob) {
          k[0] = b1;
          k[1] = b2;
          k[2] = b3;
          Put put = new Put(k);
          put.setDurability(Durability.SKIP_WAL);
          put.add(famAndQf[0], famAndQf[1], k);
          puts.add(put);
        }
      }
    }
  }
  try (Connection conn = ConnectionFactory.createConnection(conf);
      Table table = conn.getTable(tableName)) {
    table.put(puts);
  }
  return puts.size();
}
Example 7: get
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> get(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return get(tableName, row, famAndQf[0], null, attributes);
  }
  if (famAndQf.length == 2) {
    return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
Example 8: getVer
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
  }
  if (famAndQf.length == 2) {
    return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
Example 9: getVerTs
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
  }
  if (famAndQf.length == 2) {
    return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
        attributes);
  }
  throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
Example 10: getRowsWithColumnsTs
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
    List<ByteBuffer> rows,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  Table table = null;
  try {
    List<Get> gets = new ArrayList<Get>(rows.size());
    table = getTable(tableName);
    if (metrics != null) {
      metrics.incNumRowKeysInBatchGet(rows.size());
    }
    for (ByteBuffer row : rows) {
      Get get = new Get(getBytes(row));
      addAttributes(get, attributes);
      if (columns != null) {
        for (ByteBuffer column : columns) {
          byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
          if (famAndQf.length == 1) {
            get.addFamily(famAndQf[0]);
          } else {
            get.addColumn(famAndQf[0], famAndQf[1]);
          }
        }
      }
      get.setTimeRange(0, timestamp);
      gets.add(get);
    }
    Result[] result = table.get(gets);
    return ThriftUtilities.rowResultFromHBase(result);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
Example 11: atomicIncrement
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public long atomicIncrement(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
    throws IOError, IllegalArgument, TException {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  if (famAndQf.length == 1) {
    return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
  }
  return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
}
Example 12: scannerOpenWithStop
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
    ByteBuffer stopRow, List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, TException {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
    addAttributes(scan, attributes);
    if (columns != null && columns.size() != 0) {
      for (ByteBuffer column : columns) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
Example 13: scannerOpenWithPrefix
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
@Override
public int scannerOpenWithPrefix(ByteBuffer tableName,
    ByteBuffer startAndPrefix,
    List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, TException {
  Table table = null;
  try {
    table = getTable(tableName);
    Scan scan = new Scan(getBytes(startAndPrefix));
    addAttributes(scan, attributes);
    Filter f = new WhileMatchFilter(
        new PrefixFilter(getBytes(startAndPrefix)));
    scan.setFilter(f);
    if (columns != null && columns.size() != 0) {
      for (ByteBuffer column : columns) {
        byte[][] famQf = KeyValue.parseColumn(getBytes(column));
        if (famQf.length == 1) {
          scan.addFamily(famQf[0]);
        } else {
          scan.addColumn(famQf[0], famQf[1]);
        }
      }
    }
    return addScanner(table.getScanner(scan), false);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
Example 14: RowResultGenerator
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
public RowResultGenerator(final String tableName, final RowSpec rowspec,
    final Filter filter, final boolean cacheBlocks)
    throws IllegalArgumentException, IOException {
  Table table = RESTServlet.getInstance().getTable(tableName);
  try {
    Get get = new Get(rowspec.getRow());
    if (rowspec.hasColumns()) {
      for (byte[] col: rowspec.getColumns()) {
        byte[][] split = KeyValue.parseColumn(col);
        if (split.length == 1) {
          get.addFamily(split[0]);
        } else if (split.length == 2) {
          get.addColumn(split[0], split[1]);
        } else {
          throw new IllegalArgumentException("Invalid column specifier.");
        }
      }
    }
    get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
    get.setMaxVersions(rowspec.getMaxVersions());
    if (filter != null) {
      get.setFilter(filter);
    }
    get.setCacheBlocks(cacheBlocks);
    Result result = table.get(get);
    if (result != null && !result.isEmpty()) {
      valuesI = result.listCells().iterator();
    }
  } catch (DoNotRetryIOException | NeedUnmanagedConnectionException e) {
    // Warn here because Stargate will return 404 in the case where multiple
    // column families were specified but one did not exist -- currently
    // HBase will fail the whole Get.
    // Specifying multiple columns in a URI should be uncommon usage, but
    // this helps avoid confusion by leaving a record in the log of what
    // happened here.
    LOG.warn(StringUtils.stringifyException(e));
  } finally {
    table.close();
  }
}
Example 15: addColumn
import org.apache.hadoop.hbase.KeyValue; // import the package/class this method depends on
/**
 * Parses a combined family and qualifier and adds either both or just the
 * family in case there is no qualifier. This assumes the older colon-divided
 * notation, e.g. "family:qualifier".
 *
 * @param scan The Scan to update.
 * @param familyAndQualifier family and qualifier
 * @throws IllegalArgumentException When familyAndQualifier is invalid.
 */
private static void addColumn(Scan scan, byte[] familyAndQualifier) {
  byte[][] fq = KeyValue.parseColumn(familyAndQualifier);
  if (fq.length == 1) {
    scan.addFamily(fq[0]);
  } else if (fq.length == 2) {
    scan.addColumn(fq[0], fq[1]);
  } else {
    throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
  }
}
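For context, a brief sketch of how this private helper might be driven from inside the same class (the column specifiers are illustrative):

Scan scan = new Scan();
addColumn(scan, Bytes.toBytes("info"));      // family only: scans the whole family
addColumn(scan, Bytes.toBytes("info:name")); // family plus qualifier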