This article summarizes typical usages of the Java method org.apache.hadoop.hbase.CellUtil.createCell. If you are wondering what CellUtil.createCell does, how to call it, or what real-world usage looks like, the curated code samples below should help. You can also explore the enclosing class, org.apache.hadoop.hbase.CellUtil, for more context.
Six code examples of CellUtil.createCell are shown below, ordered by popularity by default.
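Before the examples, here is a minimal, self-contained sketch of calling CellUtil.createCell directly against the HBase 1.x API (the row, family, qualifier, and value literals are made up for illustration):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateCellSketch {
  public static void main(String[] args) {
    // Build a Put-type cell from raw byte arrays via the six-argument overload.
    Cell cell = CellUtil.createCell(
        Bytes.toBytes("row1"),        // row key
        Bytes.toBytes("cf"),          // column family
        Bytes.toBytes("q"),          // qualifier
        System.currentTimeMillis(),   // timestamp
        KeyValue.Type.Put.getCode(),  // cell type, encoded as a byte
        Bytes.toBytes("value"));      // value
    System.out.println(cell);         // the backing KeyValue prints a readable key
  }
}

Note that CellUtil.createCell is deprecated in HBase 2.x in favor of the CellBuilder obtained from CellBuilderFactory; the examples below reflect the 1.x API.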
Example 1: parseCell
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
@Override
protected Cell parseCell() throws IOException {
byte [] row = readByteArray(this.in);
byte [] family = readByteArray(in);
byte [] qualifier = readByteArray(in);
byte [] longArray = new byte[Bytes.SIZEOF_LONG];
IOUtils.readFully(this.in, longArray);
long timestamp = Bytes.toLong(longArray);
byte type = (byte) this.in.read();
byte[] value = readByteArray(in);
// Read memstore version
byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG];
IOUtils.readFully(this.in, memstoreTSArray);
long memstoreTS = Bytes.toLong(memstoreTSArray);
return CellUtil.createCell(row, family, qualifier, timestamp, type, value, memstoreTS);
}
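This parseCell has the shape of the decoder in HBase's CellCodec: row, family, qualifier, and value are length-prefixed byte arrays (read via the codec's readByteArray helper), followed by a fixed 8-byte timestamp, a 1-byte type, and the 8-byte memstore timestamp. A hedged sketch of driving such a decoder through the generic Codec.Decoder interface, assuming the stream was written by the matching CellCodec encoder:

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.codec.CellCodec;
import org.apache.hadoop.hbase.codec.Codec;

public class DecodeSketch {
  static void decodeAll(InputStream in) throws IOException {
    Codec.Decoder decoder = new CellCodec().getDecoder(in);
    while (decoder.advance()) {       // false once the stream is exhausted
      Cell cell = decoder.current();  // one decoded cell per advance()
      // ... process cell ...
    }
  }
}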
Example 2: parseCell
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
protected Cell parseCell() throws IOException {
byte[] row = readByteArray(this.in);
byte[] family = readByteArray(in);
byte[] qualifier = readByteArray(in);
byte[] longArray = new byte[Bytes.SIZEOF_LONG];
IOUtils.readFully(this.in, longArray);
long timestamp = Bytes.toLong(longArray);
byte type = (byte) this.in.read();
byte[] value = readByteArray(in);
byte[] tags = readByteArray(in);
// Read memstore version
byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG];
IOUtils.readFully(this.in, memstoreTSArray);
long memstoreTS = Bytes.toLong(memstoreTSArray);
return CellUtil.createCell(row, family, qualifier, timestamp, type, value, tags, memstoreTS);
}
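The only difference from Example 1 is the extra length-prefixed tags array, passed to the tags-aware createCell overload. A hedged sketch of building such a cell by hand, assuming the HBase 1.x Tag API (the tag type byte and payload are illustrative):

import java.util.Collections;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class TaggedCellSketch {
  static Cell cellWithTags() {
    // Serialize one tag into the flat byte[] layout the overload expects.
    Tag demoTag = new Tag((byte) 1, Bytes.toBytes("demo-tag")); // type 1 is arbitrary
    byte[] tags = Tag.fromList(Collections.singletonList(demoTag));
    return CellUtil.createCell(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), System.currentTimeMillis(),
        KeyValue.Type.Put.getCode(), Bytes.toBytes("value"), tags,
        0L); // memstoreTS / sequence id; 0 is typical for a freshly built cell
  }
}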
Example 3: parseCell
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
protected Cell parseCell() throws IOException {
  // Read one length-delimited CellProtos.Cell message and convert it to a Cell.
  CellProtos.Cell pbcell = CellProtos.Cell.parseDelimitedFrom(this.in);
  return CellUtil.createCell(pbcell.getRow().toByteArray(),
      pbcell.getFamily().toByteArray(), pbcell.getQualifier().toByteArray(),
      pbcell.getTimestamp(), (byte) pbcell.getCellType().getNumber(),
      pbcell.getValue().toByteArray());
}
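Example 3 consumes length-delimited protobuf Cell messages. For context, a hedged sketch of the matching write side using the generated CellProtos builders (the writeCell method name is made up; the delimited framing pairs with parseDelimitedFrom above):

import java.io.IOException;
import java.io.OutputStream;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;

public class EncodeSketch {
  static void writeCell(OutputStream out, byte[] row, byte[] family,
      byte[] qualifier, long timestamp, byte type, byte[] value) throws IOException {
    CellProtos.Cell pbcell = CellProtos.Cell.newBuilder()
        .setRow(ByteString.copyFrom(row))
        .setFamily(ByteString.copyFrom(family))
        .setQualifier(ByteString.copyFrom(qualifier))
        .setTimestamp(timestamp)
        .setCellType(CellProtos.CellType.valueOf(type)) // byte maps to the enum's wire number
        .setValue(ByteString.copyFrom(value))
        .build();
    pbcell.writeDelimitedTo(out); // length-delimited, as the decoder expects
  }
}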
Example 4: doSmokeTest
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
public static void doSmokeTest(FileSystem fs, Path path, String codec)
throws Exception {
Configuration conf = HBaseConfiguration.create();
HFileContext context = new HFileContextBuilder()
.withCompression(AbstractHFileWriter.compressionByName(codec)).build();
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
.withPath(fs, path)
.withFileContext(context)
.create();
// Write any-old Cell...
final byte [] rowKey = Bytes.toBytes("compressiontestkey");
Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
writer.append(c);
writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
writer.close();
Cell cc = null;
HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
try {
reader.loadFileInfo();
HFileScanner scanner = reader.getScanner(false, true);
scanner.seekTo(); // position to the start of file
// Scanner does not do Cells yet. Do below for now till fixed.
cc = scanner.getKeyValue();
if (CellComparator.compareRows(c, cc) != 0) {
throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
}
} finally {
reader.close();
}
}
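This smoke test mirrors HBase's org.apache.hadoop.hbase.util.CompressionTest utility. A hedged usage sketch against the local filesystem with the built-in "gz" codec (the path is illustrative, and doSmokeTest is the method shown above, assumed to be in scope):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class SmokeTestDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.getLocal(conf); // write to the local filesystem
    doSmokeTest(fs, new Path("/tmp/compression-smoke.hfile"), "gz");
  }
}

The equivalent check is also available from the command line as hbase org.apache.hadoop.hbase.util.CompressionTest <path> <codec>, which is handy for verifying codecs such as snappy on a live cluster.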
Example 5: toCell
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
public static Cell toCell(final CellProtos.Cell cell) {
// Doing this is going to kill us if we do it for all data passed.
// St.Ack 20121205
return CellUtil.createCell(cell.getRow().toByteArray(),
cell.getFamily().toByteArray(),
cell.getQualifier().toByteArray(),
cell.getTimestamp(),
(byte)cell.getCellType().getNumber(),
cell.getValue().toByteArray());
}
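This is the same conversion HBase performs in ProtobufUtil.toCell. A hedged round-trip sketch, assuming ProtobufUtil also exposes the inverse Cell-to-protobuf overload:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;

public class RoundTripSketch {
  static Cell roundTrip(Cell cell) {
    CellProtos.Cell pb = ProtobufUtil.toCell(cell); // Cell -> protobuf message
    return ProtobufUtil.toCell(pb);                 // protobuf message -> Cell
  }
}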
Example 6: map
import org.apache.hadoop.hbase.CellUtil; // import the dependent package/class
public Cell map() {
  // rowKey, family, qualifier, timestamp, type, and value are instance fields
  // supplied by the enclosing class; map() simply materializes them as a Cell.
  return CellUtil.createCell(rowKey, family, qualifier,
      timestamp, type, value);
}