

Java Decompressor.reset Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.compress.Decompressor.reset, gathered from open-source projects. If you are wondering what Decompressor.reset does, how to call it, or how it is used in practice, the hand-picked code examples below should help. You can also explore further usage examples of the containing class, org.apache.hadoop.io.compress.Decompressor.


The sections below show 7 code examples of the Decompressor.reset method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.

Example 1: getDecompressor

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Deompressor obtained from CodecPool already finished()");
      } else {
        if(LOG.isDebugEnabled()) {
          LOG.debug("Got a decompressor: " + decompressor.hashCode());
        }
      }
      /**
       * Following statement is necessary to get around bugs in 0.18 where a
       * decompressor is referenced after returned back to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Developer ID: nucypher; Project: hadoop-oss; Lines of code: 26; Source file: Compression.java
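
For context, here is a minimal usage sketch (not taken from any of the projects above) showing how a decompressor obtained this way is typically consumed and then handed back to the pool. It relies only on standard Hadoop APIs already used in the examples (CodecPool, CompressionCodec.createInputStream, CodecPool.returnDecompressor); the helper name and the byte-counting logic are illustrative.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.Decompressor;

public class DecompressorUsageSketch {
  // Illustrative helper: decompress a raw stream with a pooled decompressor and
  // count the uncompressed bytes. The decompressor is always returned to the
  // CodecPool; the next borrower resets it again, as in the examples above.
  static long countDecompressedBytes(CompressionCodec codec, InputStream raw) throws IOException {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    CompressionInputStream in = (decompressor != null)
        ? codec.createInputStream(raw, decompressor)
        : codec.createInputStream(raw);
    try {
      long total = 0;
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        total += n;
      }
      return total;
    } finally {
      in.close();
      CodecPool.returnDecompressor(decompressor); // safe to call with null
    }
  }
}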

Example 2: getDecompressor

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor
        + " from pool.");
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using it.
        LOG.warn("Deompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Developer ID: fengchen8086; Project: ditb; Lines of code: 19; Source file: Compression.java

Example 3: getDecompressor

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Decompressor obtained from CodecPool is already finished()");
        // throw new AssertionError(
        // "Decompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Developer ID: fengchen8086; Project: LCIndex-HBase-0.94.16; Lines of code: 21; Source file: Compression.java

Example 4: getDecompressor

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
public Decompressor getDecompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using
        // it.
        LOG.warn("Deompressor obtained from CodecPool already finished()");
      } else {
        LOG.debug("Got a decompressor: " + decompressor.hashCode());
      }
      /**
       * Following statement is necessary to get around bugs in 0.18 where a
       * decompressor is referenced after returned back to the codec pool.
       */
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Developer ID: rhli; Project: hadoop-EAR; Lines of code: 24; Source file: Compression.java

Example 5: getDecompressor

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
public Decompressor getDecompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec != null) {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
    if (decompressor != null) {
      if (decompressor.finished()) {
        // Somebody returns the decompressor to CodecPool but is still using it.
        LOG.warn("Deompressor obtained from CodecPool is already finished()");
      }
      decompressor.reset();
    }
    return decompressor;
  }

  return null;
}
 
Developer ID: grokcoder; Project: pbase; Lines of code: 18; Source file: Compression.java

Example 6: closeAndRelease

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
@Override
public void closeAndRelease(CompressionInputStream cin) {

	IOUtils.closeQuietly(cin);

	if (hasDecompressors) {
		Decompressor dec = usedDecompressors.remove(cin);
		dec.reset();
		decompressorQueue.offer(dec);
		status.setCounter(DECOMPRESSOR_STR,
				decompressorsUsedCount.decrementAndGet());
	}

}
 
Developer ID: gerritjvv; Project: bigstreams; Lines of code: 15; Source file: CompressionPoolImpl.java
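
The release logic above only makes sense next to a matching acquire path that checks decompressors out of the same queue. Below is a simplified, self-contained sketch of that pooling pattern; it is not the actual CompressionPoolImpl code, and the class name, method names, and field layout are illustrative (it also assumes the codec actually supplies decompressors, i.e. createDecompressor() returns non-null).

import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.Decompressor;

// Illustrative pool: a bounded queue of reusable decompressors, tracked per
// borrowed stream so closeAndRelease() knows which one to put back.
public class SimpleDecompressorPool {
  private final CompressionCodec codec;
  private final BlockingQueue<Decompressor> decompressorQueue;
  private final Map<CompressionInputStream, Decompressor> usedDecompressors =
      new ConcurrentHashMap<CompressionInputStream, Decompressor>();

  public SimpleDecompressorPool(CompressionCodec codec, int size) {
    this.codec = codec;
    this.decompressorQueue = new ArrayBlockingQueue<Decompressor>(size);
    for (int i = 0; i < size; i++) {
      // Assumes the codec provides decompressors (createDecompressor() != null).
      decompressorQueue.offer(codec.createDecompressor());
    }
  }

  // Borrow a decompressor (blocking until one is free) and wrap the raw stream.
  public CompressionInputStream open(InputStream raw) throws IOException {
    Decompressor dec;
    try {
      dec = decompressorQueue.take();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted while waiting for a decompressor", e);
    }
    try {
      CompressionInputStream cin = codec.createInputStream(raw, dec);
      usedDecompressors.put(cin, dec);
      return cin;
    } catch (IOException e) {
      decompressorQueue.offer(dec); // do not leak the decompressor on failure
      throw e;
    }
  }

  // Close the stream, reset the decompressor, and return it to the queue,
  // mirroring closeAndRelease() above.
  public void closeAndRelease(CompressionInputStream cin) {
    IOUtils.closeQuietly(cin);
    Decompressor dec = usedDecompressors.remove(cin);
    if (dec != null) {
      dec.reset();
      decompressorQueue.offer(dec);
    }
  }
}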

Example 7: DataSegmentReader

import org.apache.hadoop.io.compress.Decompressor; // import the package/class this method depends on
/**
 * May throw EOFException if InputStream does not have a
 * complete data segment.
 *
 * NOTE: This class holds reference to the Decompressor in
 * the decompressorCache, until the return value of
 * getInputStream() is closed.
 *
 * @param in  the stream to read the data segment from
 * @param conf  configuration used to resolve the compression codec
 * @param decompressorCache  cache of Decompressors keyed by codec name; a cached
 *        decompressor is reset() and reused instead of creating a new one
 * @throws EmptyDataSegmentException  if there is nothing to read.
 * @throws EOFException  if the data segment is not complete.
 */
DataSegmentReader(DataInputStream in, Configuration conf,
    HashMap<Text, Decompressor> decompressorCache)
    throws EmptyDataSegmentException, EOFException,
    ClassNotFoundException, IOException {

  // Read from DataInputStream
  // 1. Read length
  int length = 0;
  try {
    length = in.readInt();
  } catch (EOFException e) {
    throw new EmptyDataSegmentException();
  }

  // 2. Read codec
  int codecNameUTF8Length = in.readShort();
  byte[] codecNameUTF8 = new byte[codecNameUTF8Length];
  in.readFully(codecNameUTF8);
  Text codecNameText = new Text(codecNameUTF8);
  // 3. read CRC32 (only present when uncompressed)
  boolean hasCrc32 = (codecNameUTF8Length == 0);
  long crc32Value = 0;
  if (hasCrc32) {
    crc32Value = in.readLong();
  }
  // 4. read data
  byte[] storedData
      = new byte[length - (hasCrc32 ? 8 : 0)/*crc32*/
                 - 2/*codec length*/ - codecNameUTF8Length];
  in.readFully(storedData);

  // Verify the checksum
  if (hasCrc32) {
    CRC32 crc32 = new CRC32();
    crc32.update(storedData);
    if (crc32.getValue() != crc32Value) {
      throw new CorruptedDataException("Corrupted data segment with length " + length
          + " crc32 expected " + crc32Value + " but got " + crc32.getValue());
    }
  }

  // Uncompress the data if needed
  if (codecNameUTF8Length == 0) {
    // no compression
    uncompressedData = new ByteArrayInputStream(storedData);
  } else {
    CompressionCodec codec = getCodecFromName(codecNameText, conf);
    Decompressor decompressor = null;
    if (decompressorCache != null) {
      // Create decompressor and add to cache if needed.
      decompressor = decompressorCache.get(codecNameText);
      if (decompressor == null) {
        decompressor = codec.createDecompressor();
      } else {
        decompressor.reset();
      }
    }
    if (decompressor == null) {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData));
    } else {
      uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData),
          decompressor);
    }
  }
}
 
Developer ID: rhli; Project: hadoop-EAR; Lines of code: 78; Source file: DataSegmentReader.java
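
A rough driver for the constructor above might look like the following. This is a hypothetical sketch, not code from hadoop-EAR: it assumes getInputStream() returns an InputStream (the doc comment only says its return value must be closed to release the cached Decompressor), that EmptyDataSegmentException signals the end of input, and that the usual Hadoop imports (FileSystem, Path, Configuration, Text) are in scope.

// Hypothetical driver: iterate over the data segments in a file, sharing one
// Decompressor per codec through the cache so reset() is used instead of
// creating a new decompressor for every segment.
static void readAllSegments(FileSystem fs, Path path, Configuration conf)
    throws IOException, ClassNotFoundException {
  HashMap<Text, Decompressor> decompressorCache = new HashMap<Text, Decompressor>();
  DataInputStream in = new DataInputStream(fs.open(path));
  try {
    while (true) {
      DataSegmentReader segment;
      try {
        segment = new DataSegmentReader(in, conf, decompressorCache);
      } catch (EmptyDataSegmentException e) {
        break; // no more segments in this file
      }
      InputStream data = segment.getInputStream(); // assumed to return InputStream
      try {
        // ... consume the uncompressed bytes of this segment ...
      } finally {
        data.close(); // releases the cached Decompressor, per the doc comment above
      }
    }
  } finally {
    in.close();
  }
}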


Note: The org.apache.hadoop.io.compress.Decompressor.reset examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by many developers, and copyright in the source code remains with the original authors; please consult each project's License before redistributing or reusing the code. Do not reproduce this article without permission.