This article collects typical usage examples of the Java method org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecByClassName. If you have been wondering what CompressionCodecFactory.getCodecByClassName does, how to call it, or what real-world uses of it look like, the curated code samples below may help. You can also explore further usage examples of its containing class, org.apache.hadoop.io.compress.CompressionCodecFactory.
The following presents 4 code examples of the CompressionCodecFactory.getCodecByClassName method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
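Before the examples, here is a minimal self-contained sketch of the method's contract, assuming only a default Hadoop configuration: getCodecByClassName looks up a codec that the factory registered at construction time by its fully qualified class name, and returns null (rather than throwing) when no codec with that name is known. The GzipCodec lookup and the class name CodecLookupDemo are illustrative choices, not taken from the examples below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class CodecLookupDemo {
  public static void main(String[] args) {
    // The factory registers the codecs listed in the configuration
    // (io.compression.codecs), plus the built-in defaults.
    CompressionCodecFactory factory =
        new CompressionCodecFactory(new Configuration());

    // Look up a codec by fully qualified class name; null means "not registered"
    CompressionCodec codec = factory.getCodecByClassName(
        "org.apache.hadoop.io.compress.GzipCodec");

    if (codec == null) {
      System.out.println("Codec not registered");
    } else {
      System.out.println("Found codec, default extension: "
          + codec.getDefaultExtension());
    }
  }
}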
Example 1: createCompression
import org.apache.hadoop.io.compress.CompressionCodecFactory; // import the package/class this method depends on
/**
 * Create a compression instance using the codec specified by
 * <code>codecClassName</code>
 */
static FSImageCompression createCompression(Configuration conf,
                                            String codecClassName)
    throws IOException {
  CompressionCodecFactory factory = new CompressionCodecFactory(conf);
  CompressionCodec codec = factory.getCodecByClassName(codecClassName);
  if (codec == null) {
    throw new IOException("Not a supported codec: " + codecClassName);
  }
  return new FSImageCompression(codec);
}
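For context, a hedged sketch of how a caller might drive createCompression from configuration. The dfs.image.compression.codec key and its DefaultCodec fallback mirror the HDFS settings this helper is normally fed from, but treat the exact key name here as an assumption:

// Sketch (assumed config key): pick the codec class name from configuration
// and let createCompression validate it via getCodecByClassName.
Configuration conf = new Configuration();
String codecClassName = conf.get("dfs.image.compression.codec",
    "org.apache.hadoop.io.compress.DefaultCodec");
FSImageCompression compression = createCompression(conf, codecClassName);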
Example 2: createCompression
import org.apache.hadoop.io.compress.CompressionCodecFactory; // import the package/class this method depends on
/**
 * Create a compression instance using the codec specified by
 * <code>codecClassName</code>
 */
private static FSImageCompression createCompression(Configuration conf,
                                                    String codecClassName)
    throws IOException {
  CompressionCodecFactory factory = new CompressionCodecFactory(conf);
  CompressionCodec codec = factory.getCodecByClassName(codecClassName);
  if (codec == null) {
    throw new IOException("Not a supported codec: " + codecClassName);
  }
  return new FSImageCompression(codec);
}
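Example 2 is identical to Example 1 apart from the private access modifier, reflecting the same helper in two code bases. Since the codec returned by getCodecByClassName is symmetric, the write path is the mirror image of the read path used in the next two examples. A brief round-trip sketch under assumed names (RoundTripDemo and data.bin are hypothetical):

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class RoundTripDemo {
  public static void main(String[] args) throws Exception {
    CompressionCodec codec = new CompressionCodecFactory(new Configuration())
        .getCodecByClassName("org.apache.hadoop.io.compress.DefaultCodec");

    // Compress: wrap a raw output stream with the codec
    try (OutputStream out =
        codec.createOutputStream(new FileOutputStream("data.bin"))) {
      out.write("hello".getBytes("UTF-8"));
    }

    // Decompress: wrap the raw input stream the same way
    try (InputStream in =
        codec.createInputStream(new FileInputStream("data.bin"))) {
      IOUtils.copyBytes(in, System.out, 4096, false);
    }
  }
}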
Example 3: loadImage
import org.apache.hadoop.io.compress.CompressionCodecFactory; // import the package/class this method depends on
@Override
public void loadImage(DataInputStream in, ImageVisitor v,
    boolean skipBlocks) throws IOException {
  try {
    InjectionHandler.processEvent(InjectionEvent.IMAGE_LOADER_CURRENT_START);
    v.start();
    v.visitEnclosingElement(ImageElement.FS_IMAGE);

    imageVersion = in.readInt();
    if (!canLoadVersion(imageVersion))
      throw new IOException("Cannot process fslayout version " + imageVersion);
    v.visit(ImageElement.IMAGE_VERSION, imageVersion);
    v.visit(ImageElement.NAMESPACE_ID, in.readInt());

    long numInodes = in.readLong();
    v.setNumberOfFiles(numInodes);
    v.visit(ImageElement.GENERATION_STAMP, in.readLong());

    if (imageVersion <= FSConstants.STORED_TXIDS) {
      v.visit(ImageElement.LAST_TXID, in.readLong());
    }
    if (LayoutVersion.supports(Feature.ADD_INODE_ID, imageVersion)) {
      v.visit(ImageElement.LAST_INODE_ID, in.readLong());
    }

    if (LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION, imageVersion)) {
      boolean isCompressed = in.readBoolean();
      v.visit(ImageElement.IS_COMPRESSED, String.valueOf(isCompressed));
      if (isCompressed) {
        String codecClassName = Text.readString(in);
        v.visit(ImageElement.COMPRESS_CODEC, codecClassName);
        CompressionCodecFactory codecFac = new CompressionCodecFactory(
            new Configuration());
        CompressionCodec codec = codecFac.getCodecByClassName(codecClassName);
        if (codec == null) {
          throw new IOException("Image compression codec not supported: "
              + codecClassName);
        }
        // Everything after this header is compressed; wrap for decompression
        in = new DataInputStream(codec.createInputStream(in));
      }
    }

    in = BufferedByteInputStream.wrapInputStream(in,
        8 * BASE_BUFFER_SIZE, BASE_BUFFER_SIZE);
    processINodes(in, v, numInodes, skipBlocks);
    processINodesUC(in, v, skipBlocks);

    v.leaveEnclosingElement(); // FSImage
    v.finish();
  } catch (IOException e) {
    // Tell the visitor to clean up, then re-throw the exception
    v.finishAbnormally();
    throw e;
  }
}
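The detail worth isolating from loadImage: the compression flag and codec class name sit uncompressed at the head of the stream, and only after reading them is the remainder of the stream wrapped for decompression. A minimal sketch of that pattern on its own (maybeDecompress is a hypothetical helper name, not part of the loaders above):

// Hypothetical helper mirroring the loader's stream-swapping pattern
static DataInputStream maybeDecompress(DataInputStream in, Configuration conf)
    throws IOException {
  boolean isCompressed = in.readBoolean();      // uncompressed header: flag
  if (!isCompressed) {
    return in;                                  // payload is plain
  }
  String codecClassName = Text.readString(in);  // uncompressed header: codec name
  CompressionCodec codec =
      new CompressionCodecFactory(conf).getCodecByClassName(codecClassName);
  if (codec == null) {
    throw new IOException("Image compression codec not supported: "
        + codecClassName);
  }
  // Everything after the header is compressed; wrap for transparent decompression
  return new DataInputStream(codec.createInputStream(in));
}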
Example 4: loadImage
import org.apache.hadoop.io.compress.CompressionCodecFactory; // import the package/class this method depends on
@Override
public void loadImage(DataInputStream in, ImageVisitor v,
    boolean skipBlocks) throws IOException {
  boolean done = false;
  try {
    v.start();
    v.visitEnclosingElement(ImageElement.FS_IMAGE);

    imageVersion = in.readInt();
    if (!canLoadVersion(imageVersion))
      throw new IOException("Cannot process fslayout version " + imageVersion);
    v.visit(ImageElement.IMAGE_VERSION, imageVersion);
    v.visit(ImageElement.NAMESPACE_ID, in.readInt());

    long numInodes = in.readLong();
    v.visit(ImageElement.GENERATION_STAMP, in.readLong());

    if (LayoutVersion.supports(Feature.SEQUENTIAL_BLOCK_ID, imageVersion)) {
      v.visit(ImageElement.GENERATION_STAMP_V2, in.readLong());
      v.visit(ImageElement.GENERATION_STAMP_V1_LIMIT, in.readLong());
      v.visit(ImageElement.LAST_ALLOCATED_BLOCK_ID, in.readLong());
    }
    if (LayoutVersion.supports(Feature.STORED_TXIDS, imageVersion)) {
      v.visit(ImageElement.TRANSACTION_ID, in.readLong());
    }
    if (LayoutVersion.supports(Feature.ADD_INODE_ID, imageVersion)) {
      v.visit(ImageElement.LAST_INODE_ID, in.readLong());
    }

    boolean supportSnapshot = LayoutVersion.supports(Feature.SNAPSHOT,
        imageVersion);
    if (supportSnapshot) {
      v.visit(ImageElement.SNAPSHOT_COUNTER, in.readInt());
      int numSnapshots = in.readInt();
      v.visit(ImageElement.NUM_SNAPSHOTS_TOTAL, numSnapshots);
      for (int i = 0; i < numSnapshots; i++) {
        processSnapshot(in, v);
      }
    }

    if (LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION, imageVersion)) {
      boolean isCompressed = in.readBoolean();
      v.visit(ImageElement.IS_COMPRESSED, String.valueOf(isCompressed));
      if (isCompressed) {
        String codecClassName = Text.readString(in);
        v.visit(ImageElement.COMPRESS_CODEC, codecClassName);
        CompressionCodecFactory codecFac = new CompressionCodecFactory(
            new Configuration());
        CompressionCodec codec = codecFac.getCodecByClassName(codecClassName);
        if (codec == null) {
          throw new IOException("Image compression codec not supported: "
              + codecClassName);
        }
        in = new DataInputStream(codec.createInputStream(in));
      }
    }

    processINodes(in, v, numInodes, skipBlocks, supportSnapshot);
    subtreeMap.clear();
    dirNodeMap.clear();

    processINodesUC(in, v, skipBlocks);

    if (LayoutVersion.supports(Feature.DELEGATION_TOKEN, imageVersion)) {
      processDelegationTokens(in, v);
    }

    v.leaveEnclosingElement(); // FSImage
    done = true;
  } finally {
    if (done) {
      v.finish();
    } else {
      v.finishAbnormally();
    }
  }
}
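Note the two cleanup strategies across Examples 3 and 4: the former catches IOException, calls v.finishAbnormally(), and rethrows, which misses unchecked exceptions; the latter's done flag in a finally block covers every exit path. The flag pattern in isolation (doWork, onSuccess, onFailure are hypothetical placeholders):

boolean done = false;
try {
  doWork();        // hypothetical body that may throw anything
  done = true;     // reached only on normal completion
} finally {
  if (done) {
    onSuccess();   // e.g. v.finish()
  } else {
    onFailure();   // e.g. v.finishAbnormally(), runs for any exception
  }
}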