This article collects typical usage examples of the Java class org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext: what the class does, how it is used, and where it appears in real code.
HFileBlockDefaultDecodingContext belongs to the org.apache.hadoop.hbase.io.encoding package. Twelve code examples of the class are shown below, ordered by popularity.
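Before the examples, here is a minimal, self-contained sketch (not taken from the examples below) of how the class is typically constructed. It assumes an HBase 1.x-style API in which HFileBlockDefaultDecodingContext takes an HFileContext; the HFileContextBuilder settings shown are illustrative only.
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class DecodingContextSketch {
  public static void main(String[] args) {
    // Describe the on-disk block format: no compression, HBase checksums enabled.
    HFileContext fileContext = new HFileContextBuilder()
        .withCompression(Compression.Algorithm.NONE)
        .withHBaseCheckSum(true)
        .build();
    // The default context handles blocks that carry no data-block encoding;
    // encoders may supply their own decoding context instead (see Example 2).
    HFileBlockDecodingContext ctx = new HFileBlockDefaultDecodingContext(fileContext);
    System.out.println("HBase checksums enabled: " + fileContext.isUseHBaseChecksum());
  }
}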
Example 1: FSReaderImpl
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
FSReaderImpl(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
    HFileContext fileContext) throws IOException {
  this.fileSize = fileSize;
  this.hfs = hfs;
  if (path != null) {
    this.pathName = path.toString();
  }
  this.fileContext = fileContext;
  this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
  this.streamWrapper = stream;
  // Older versions of HBase didn't support checksum.
  this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
  defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
  encodedBlockDecodingCtx = defaultDecodingCtx;
}
Example 2: newDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
Example 3: FSReaderImpl
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
public FSReaderImpl(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
    HFileContext fileContext) throws IOException {
  super(fileSize, hfs, path, fileContext);
  this.streamWrapper = stream;
  // Older versions of HBase didn't support checksum.
  this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
  defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
  encodedBlockDecodingCtx = defaultDecodingCtx;
}
Example 4: FSReaderV2
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
public FSReaderV2(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
    HFileContext fileContext) throws IOException {
  super(fileSize, hfs, path, fileContext);
  this.streamWrapper = stream;
  // Older versions of HBase didn't support checksum.
  this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
  defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
  encodedBlockDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
}
Example 5: newDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(Algorithm compressionAlgorithm) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(compressionAlgorithm);
  }
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
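Examples 5 through 8 come from an older HBase codebase in which the decoding context is built from a Compression.Algorithm rather than an HFileContext. The sketch below mirrors that older constructor call; the import path for Compression.Algorithm is an assumption (the class moved packages between releases), so treat it purely as an illustration.
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;

public class LegacyDecodingContextSketch {
  public static void main(String[] args) {
    // In the older API the context only needs to know the block compression codec.
    HFileBlockDecodingContext ctx = new HFileBlockDefaultDecodingContext(Algorithm.NONE);
    System.out.println("Created decoding context for codec: " + Algorithm.NONE);
  }
}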
Example 6: FSReaderV2
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
public FSReaderV2(FSDataInputStreamWrapper stream, Algorithm compressAlgo, long fileSize,
    int minorVersion, HFileSystem hfs, Path path) throws IOException {
  super(compressAlgo, fileSize, minorVersion, hfs, path);
  this.streamWrapper = stream;
  // Older versions of HBase didn't support checksum.
  boolean forceNoHBaseChecksum = (this.getMinorVersion() < MINOR_VERSION_WITH_CHECKSUM);
  this.streamWrapper.prepareForBlockReader(forceNoHBaseChecksum);
  defaultDecodingCtx = new HFileBlockDefaultDecodingContext(compressAlgo);
  encodedBlockDecodingCtx = new HFileBlockDefaultDecodingContext(compressAlgo);
}
Example 7: newOnDiskDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(Algorithm compressionAlgorithm) {
  if (onDisk != null) {
    DataBlockEncoder encoder = onDisk.getEncoder();
    if (encoder != null) {
      return encoder.newDataBlockDecodingContext(compressionAlgorithm);
    }
  }
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
Example 8: FSReaderV2
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
public FSReaderV2(FSDataInputStream istream, FSDataInputStream istreamNoFsChecksum,
    Algorithm compressAlgo, long fileSize, int minorVersion, HFileSystem hfs, Path path)
    throws IOException {
  super(istream, istreamNoFsChecksum, compressAlgo, fileSize, minorVersion, hfs, path);
  if (hfs != null) {
    // Check the configuration to determine whether hbase-level
    // checksum verification is needed or not.
    useHBaseChecksum = hfs.useHBaseChecksum();
  } else {
    // The configuration does not specify anything about hbase checksum
    // validations. Set it to true here assuming that we will verify
    // hbase checksums for all reads. For older files that do not have
    // stored checksums, this flag will be reset later.
    useHBaseChecksum = true;
  }
  // for older versions, hbase did not store checksums.
  if (getMinorVersion() < MINOR_VERSION_WITH_CHECKSUM) {
    useHBaseChecksum = false;
  }
  this.useHBaseChecksumConfigured = useHBaseChecksum;
  defaultDecodingCtx = new HFileBlockDefaultDecodingContext(compressAlgo);
  encodedBlockDecodingCtx = new HFileBlockDefaultDecodingContext(compressAlgo);
}
Example 9: newDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Example 10: newDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(Algorithm compressionAlgorithm) {
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
Example 11: newDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(Algorithm compressionAlgorithm) {
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
Example 12: newOnDiskDataBlockDecodingContext
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; // import of the featured package/class
@Override
public HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(Algorithm compressionAlgorithm) {
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}