This article collects typical usage examples of the Java method org.apache.hadoop.hbase.Cell.toString. If you have been wondering what Cell.toString does, how to call it, or what real-world usage looks like, the selected code examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.Cell.
The following shows 3 code examples of the Cell.toString method, sorted by popularity by default.
Example 1: addDeleteMarker
import org.apache.hadoop.hbase.Cell; // import the package/class this method depends on
/**
 * Advanced use only.
 * Add an existing delete marker to this Delete object.
 * @param kv An existing KeyValue of type "delete".
 * @return this for invocation chaining
 * @throws IOException if the Cell is not a delete marker or its row does not match this Delete's row
 */
@SuppressWarnings("unchecked")
public Delete addDeleteMarker(Cell kv) throws IOException {
  // TODO: Deprecate and rename 'add' so it matches how we add KVs to Puts.
  if (!CellUtil.isDelete(kv)) {
    throw new IOException("The recently added KeyValue is not of type "
        + "delete. Rowkey: " + Bytes.toStringBinary(this.row));
  }
  if (Bytes.compareTo(this.row, 0, row.length, kv.getRowArray(),
      kv.getRowOffset(), kv.getRowLength()) != 0) {
    throw new WrongRowIOException("The row in " + kv.toString() +
        " doesn't match the original one " + Bytes.toStringBinary(this.row));
  }
  byte[] family = CellUtil.cloneFamily(kv);
  List<Cell> list = familyMap.get(family);
  if (list == null) {
    list = new ArrayList<Cell>();
  }
  list.add(kv);
  familyMap.put(family, list);
  return this;
}
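For context, a minimal usage sketch (not part of the original example; the row key, column family, and qualifier are invented placeholders) showing how a delete-type KeyValue could be attached to a Delete:

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteMarkerExample {
  // Hypothetical usage: "row-1", "cf", and "q" are placeholder values.
  public static Delete buildDelete() throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    Delete delete = new Delete(row);
    // Build a delete-type KeyValue for the same row and attach it as an existing marker.
    KeyValue marker = new KeyValue(row, Bytes.toBytes("cf"), Bytes.toBytes("q"),
        System.currentTimeMillis(), KeyValue.Type.Delete);
    delete.addDeleteMarker(marker);
    return delete;
  }
}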
Example 2: doSmokeTest
import org.apache.hadoop.hbase.Cell; // import the package/class this method depends on
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
      .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  // Write any-old Cell...
  final byte[] rowKey = Bytes.toBytes("compressiontestkey");
  Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
  writer.append(c);
  writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
  writer.close();
  Cell cc = null;
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
  try {
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, true);
    scanner.seekTo(); // position to the start of file
    // Scanner does not do Cells yet. Do below for now till fixed.
    cc = scanner.getKeyValue();
    if (CellComparator.compareRows(c, cc) != 0) {
      throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
    }
  } finally {
    reader.close();
  }
}
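A minimal sketch of how this smoke test might be invoked (not from the original example; the local path and the "gz" codec name are placeholder assumptions, and doSmokeTest is assumed to be the method shown above, defined in the same class):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class CompressionSmokeTestExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    // Placeholder path and codec; adjust both for your environment.
    Path path = new Path("/tmp/hbase-compression-smoketest.hfile");
    doSmokeTest(fs, path, "gz"); // assumes doSmokeTest (shown above) is on the classpath
    fs.delete(path, false);
  }
}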
Example 3: add
import org.apache.hadoop.hbase.Cell; // import the package/class this method depends on
/**
 * Add the specified KeyValue to this Put operation. Operation assumes that
 * the passed KeyValue is immutable and its backing array will not be modified
 * for the duration of this Put.
 * @param kv individual KeyValue
 * @return this
 * @throws java.io.IOException if the Cell's row does not match this Put's row
 */
public Put add(Cell kv) throws IOException {
  byte[] family = CellUtil.cloneFamily(kv);
  List<Cell> list = getCellList(family);
  // Checking that the row of the kv is the same as the put
  int res = Bytes.compareTo(this.row, 0, row.length,
      kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
  if (res != 0) {
    throw new WrongRowIOException("The row in " + kv.toString() +
        " doesn't match the original one " + Bytes.toStringBinary(this.row));
  }
  list.add(kv);
  familyMap.put(family, list);
  return this;
}
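As a rough usage sketch (not part of the original example; the row, family, qualifier, and value are invented placeholders), a Cell built for the same row can be added to a Put like this:

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutAddCellExample {
  // Hypothetical usage: "row-1", "cf", "q", and "value" are placeholder values.
  public static Put buildPut() throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    Put put = new Put(row);
    // The Cell must carry the same row as the Put, otherwise add() throws WrongRowIOException.
    KeyValue kv = new KeyValue(row, Bytes.toBytes("cf"), Bytes.toBytes("q"),
        Bytes.toBytes("value"));
    put.add(kv);
    return put;
  }
}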