This article collects typical usage examples of the Java method org.apache.hadoop.io.compress.Decompressor.reset. If you are wondering what Decompressor.reset does, how to call it, or what real-world code that uses it looks like, the hand-picked examples below should help. You can also look further into other usage examples of the enclosing class org.apache.hadoop.io.compress.Decompressor.
A total of 7 code examples of Decompressor.reset are shown below, sorted by popularity by default.
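Before the collected examples, here is a minimal, self-contained sketch of the usual pattern: borrow a Decompressor from CodecPool, call reset() before using it, and return it to the pool afterwards. The class name, method name, and the choice of GzipCodec are illustrative assumptions, not taken from the examples below.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class DecompressorResetSketch {

  /** Decompresses a gzip-compressed byte[] using a pooled, reset Decompressor. */
  public static byte[] decompress(byte[] compressed) throws IOException {
    Configuration conf = new Configuration();
    // GzipCodec is an arbitrary choice for illustration; any CompressionCodec works.
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try {
      if (decompressor != null) {
        // Clear any state left behind by the previous borrower of this pooled instance.
        decompressor.reset();
      }
      InputStream in = codec.createInputStream(new ByteArrayInputStream(compressed), decompressor);
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      IOUtils.copyBytes(in, out, 4096, true); // 'true' closes both streams when done
      return out.toByteArray();
    } finally {
      // Always hand the decompressor back so other callers can reuse it.
      CodecPool.returnDecompressor(decompressor);
    }
  }
}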
Example 1: getDecompressor
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Got a decompressor: " + decompressor.hashCode());
        }
      }
      /**
       * The following statement is necessary to get around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }
  return null;
}
Example 2: getDecompressor
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
    }
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }
  return null;
}
Example 3: getDecompressor
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool is already finished()");
        // throw new AssertionError(
        //     "Decompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }
  return null;
}
Example 4: getDecompressor
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool already finished()");
      } else {
        LOG.debug("Got a decompressor: " + decompressor.hashCode());
      }
      /**
       * The following statement is necessary to get around bugs in 0.18 where a
       * decompressor is referenced after being returned to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }
  return null;
}
Example 5: getDecompressor
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) {
      LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
    }
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returned the decompressor to CodecPool but is still using it.
        LOG.warn("Decompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }
  return null;
}
Example 6: closeAndRelease
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
@Override
public void closeAndRelease(CompressionInputStream cin) {
  IOUtils.closeQuietly(cin);
  if (hasDecompressors) {
    Decompressor dec = usedDecompressors.remove(cin);
    dec.reset();
    decompressorQueue.offer(dec);
    status.setCounter(DECOMPRESSOR_STR,
        decompressorsUsedCount.decrementAndGet());
  }
}
Example 7: DataSegmentReader
import org.apache.hadoop.io.compress.Decompressor; // import the package/class the method depends on
/**
 * May throw EOFException if the InputStream does not contain a
 * complete data segment.
 *
 * NOTE: This class holds a reference to the Decompressor in
 * decompressorCache until the return value of getInputStream()
 * is closed.
 *
 * @param decompressorCache cache of reusable Decompressors, keyed by codec name
 * @throws EmptyDataSegmentException if there is nothing to read.
 * @throws EOFException if the data segment is not complete.
 */
DataSegmentReader(DataInputStream in, Configuration conf,
    HashMap<Text, Decompressor> decompressorCache)
    throws EmptyDataSegmentException, EOFException,
    ClassNotFoundException, IOException {
  // Read from DataInputStream
  // 1. Read length
  int length = 0;
  try {
    length = in.readInt();
  } catch (EOFException e) {
    throw new EmptyDataSegmentException();
  }

  // 2. Read codec
  int codecNameUTF8Length = in.readShort();
  byte[] codecNameUTF8 = new byte[codecNameUTF8Length];
  in.readFully(codecNameUTF8);
  Text codecNameText = new Text(codecNameUTF8);

  // 3. Read CRC32 (only present when uncompressed)
  boolean hasCrc32 = (codecNameUTF8Length == 0);
  long crc32Value = 0;
  if (hasCrc32) {
    crc32Value = in.readLong();
  }

  // 4. Read data
  byte[] storedData
      = new byte[length - (hasCrc32 ? 8 : 0)/*crc32*/
                 - 2/*codec length*/ - codecNameUTF8Length];
  in.readFully(storedData);

  // Verify the checksum
  if (hasCrc32) {
    CRC32 crc32 = new CRC32();
    crc32.update(storedData);
    if (crc32.getValue() != crc32Value) {
      throw new CorruptedDataException("Corrupted data segment with length " + length
          + " crc32 expected " + crc32Value + " but got " + crc32.getValue());
    }
  }

  // Uncompress the data if needed
  if (codecNameUTF8Length == 0) {
    // No compression
    uncompressedData = new ByteArrayInputStream(storedData);
  } else {
    CompressionCodec codec = getCodecFromName(codecNameText, conf);
    Decompressor decompressor = null;
    if (decompressorCache != null) {
      // Reuse a cached decompressor for this codec if one exists; otherwise create a new one.
      decompressor = decompressorCache.get(codecNameText);
      if (decompressor == null) {
        decompressor = codec.createDecompressor();
      } else {
        decompressor.reset();
      }
    }
    if (decompressor == null) {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData));
    } else {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData),
          decompressor);
    }
  }
}
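Example 7 reuses one Decompressor per codec by looking it up in a HashMap keyed by codec name and calling reset() before reusing it. The helper below isolates that pattern as a standalone sketch: the class and method names are made up for illustration, and unlike the snippet above it also puts a newly created decompressor into the cache so that later segments can reuse it.
import java.util.HashMap;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

final class DecompressorCacheHelper {
  /**
   * Returns a Decompressor for the given codec, reusing (and resetting) a cached
   * instance when one exists. Returns null when no cache is supplied, so the
   * caller can fall back to codec.createInputStream(in) without a decompressor.
   */
  static Decompressor getOrCreate(Text codecName, CompressionCodec codec,
      HashMap<Text, Decompressor> cache) {
    if (cache == null) {
      return null;
    }
    Decompressor decompressor = cache.get(codecName);
    if (decompressor == null) {
      decompressor = codec.createDecompressor();
      cache.put(codecName, decompressor); // keep it around for later segments
    } else {
      decompressor.reset(); // clear state left over from the previous segment
    }
    return decompressor;
  }
}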