This page collects typical usage examples of the Java class org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter. If you are wondering what AbstractHFileWriter is for, or how to use it in practice, the curated class examples below may help.
AbstractHFileWriter belongs to the org.apache.hadoop.hbase.io.hfile package. Six code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
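Every example on this page uses the same entry point: the static helper AbstractHFileWriter.compressionByName(String), which resolves a codec name to a Compression.Algorithm. As a minimal, self-contained sketch (an editorial illustration, not one of the collected examples; it assumes an HBase 0.98/1.x client on the classpath, where AbstractHFileWriter still exists):

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;

public class CompressionByNameDemo {
  public static void main(String[] args) {
    // Resolves a codec name such as "none", "gz", "lz4" or "snappy" to an Algorithm.
    Compression.Algorithm algo = AbstractHFileWriter.compressionByName("gz");
    System.out.println(algo);
  }
}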
Example 1: setUp
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
@Override
void setUp() throws Exception {
  HFileContextBuilder builder = new HFileContextBuilder()
      .withCompression(AbstractHFileWriter.compressionByName(codec))
      .withBlockSize(RFILE_BLOCKSIZE);
  if ("aes".equals(cipher)) { // compare strings with equals(), not ==
    byte[] cipherKey = new byte[AES.KEY_LENGTH];
    new SecureRandom().nextBytes(cipherKey);
    builder.withEncryptionContext(Encryption.newContext(conf)
        .setCipher(Encryption.getCipher(conf, cipher))
        .setKey(cipherKey));
  } else if (!"none".equals(cipher)) {
    throw new IOException("Cipher " + cipher + " not supported.");
  }
  HFileContext hFileContext = builder.build();
  writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, mf)
      .withFileContext(hFileContext)
      .withComparator(new KeyValue.RawBytesComparator())
      .create();
}
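Note that the AES key above is generated freshly with SecureRandom on every run, which suits a benchmark or test setup; a production table would normally have its keys resolved through HBase's configured key provider. Any cipher name other than "aes" or "none" is rejected explicitly rather than silently ignored.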
Example 2: doSmokeTest
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
      .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
  writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
  writer.close();
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
  reader.loadFileInfo();
  byte[] key = reader.getFirstKey();
  boolean rc = Bytes.toString(key).equals("testkey");
  reader.close();
  if (!rc) {
    throw new Exception("Read back incorrect result: " +
        Bytes.toStringBinary(key));
  }
}
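This method is a CompressionTest-style command-line smoke check. A hypothetical driver for it might look like the following (the argument handling is an illustration for this page, not the actual tool's main; it assumes the method sits in the same class):

public static void main(String[] args) throws Exception {
  if (args.length != 2) {
    System.err.println("Usage: CompressionTest <path> <codec>"); // codec: none, gz, lz4, snappy, ...
    System.exit(1);
  }
  Configuration conf = HBaseConfiguration.create();
  Path path = new Path(args[0]);
  FileSystem fs = path.getFileSystem(conf);
  doSmokeTest(fs, path, args[1]); // throws if the codec cannot round-trip a key/value
  System.out.println("SUCCESS");
}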
Example 3: createFamilyCompressionMap
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize the column-family-to-compression-algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      COMPRESSION_FAMILIES_CONF_KEY);
  Map<byte[], Algorithm> compressionMap =
      new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}
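The createFamilyConfValueMap helper referenced above is not shown in this example; in HFileOutputFormat2 it parses a configuration value made of URL-encoded family=value pairs joined by '&'. Assuming that format, the writing side could look roughly like this (the helper name is hypothetical):

// Hypothetical counterpart (format is an assumption): serialize a family -> codec-name
// map into the configuration key that createFamilyCompressionMap later reads back.
static void serializeFamilyCompression(Configuration conf,
    Map<String, String> familyToCodec) throws UnsupportedEncodingException {
  StringBuilder sb = new StringBuilder();
  for (Map.Entry<String, String> e : familyToCodec.entrySet()) {
    if (sb.length() > 0) {
      sb.append('&');
    }
    sb.append(URLEncoder.encode(e.getKey(), "UTF-8"));
    sb.append('=');
    sb.append(URLEncoder.encode(e.getValue(), "UTF-8"));
  }
  conf.set(COMPRESSION_FAMILIES_CONF_KEY, sb.toString());
}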
Example 4: doSmokeTest
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
      .withCompression(AbstractHFileWriter.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  // Write any old Cell...
  final byte[] rowKey = Bytes.toBytes("compressiontestkey");
  Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
  writer.append(c);
  writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
  writer.close();
  Cell cc = null;
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
  try {
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, true);
    scanner.seekTo(); // position at the start of the file
    // The scanner does not return Cells yet; use getKeyValue() until that is fixed.
    cc = scanner.getKeyValue();
    if (CellComparator.compareRows(c, cc) != 0) {
      throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
    }
  } finally {
    reader.close();
  }
}
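Compared with Example 2, this variant writes through the Cell-based append(Cell) API rather than raw key/value byte arrays, and verifies the result with an HFileScanner instead of reader.getFirstKey(). The getKeyValue() call and the in-code comment reflect a transition period in which the scanner API still returned KeyValue rather than Cell.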
Example 5: createFamilyCompressionMap
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize the column-family-to-compression-algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Compression.Algorithm> createFamilyCompressionMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      COMPRESSION_FAMILIES_CONF_KEY);
  Map<byte[], Compression.Algorithm> compressionMap =
      new TreeMap<byte[], Compression.Algorithm>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Compression.Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}
Example 6: createFamilyCompressionMap
import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter; // import the required package/class
/**
 * Runs inside the task to deserialize the column-family-to-compression-algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      COMPRESSION_FAMILIES_CONF_KEY);
  Map<byte[], Algorithm> compressionMap =
      new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}