当前位置: 首页>>代码示例>>Java>>正文


Java CodecPool.getCompressor方法代码示例

本文整理汇总了Java中org.apache.hadoop.io.compress.CodecPool.getCompressor方法的典型用法代码示例。如果您正苦于以下问题:Java CodecPool.getCompressor方法的具体用法?Java CodecPool.getCompressor怎么用?Java CodecPool.getCompressor使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.io.compress.CodecPool的用法示例。


在下文中一共展示了CodecPool.getCompressor方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: getCompressor

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Obtains a compressor for this compression algorithm from the shared
 * {@code CodecPool}, or returns {@code null} when no codec is configured
 * or the pool cannot supply one.
 *
 * @return a reset compressor ready for use, or null
 * @throws IOException if the codec cannot be resolved
 */
public Compressor getCompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec == null) {
    return null;
  }
  Compressor compressor = CodecPool.getCompressor(codec);
  if (compressor == null) {
    return null;
  }
  if (compressor.finished()) {
    // A finished() compressor means some caller returned it to the
    // CodecPool while still using it.
    LOG.warn("Compressor obtained from CodecPool already finished()");
  } else if (LOG.isDebugEnabled()) {
    LOG.debug("Got a compressor: " + compressor.hashCode());
  }
  /**
   * Following statement is necessary to get around bugs in 0.18 where a
   * compressor is referenced after returned back to the codec pool.
   */
  compressor.reset();
  return compressor;
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:25,代码来源:Compression.java

示例2: createJsonGenerator

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Builds a pretty-printing UTF-8 {@code JsonGenerator} over the given
 * path, compressing the output when the path's extension maps to a
 * known compression codec.
 *
 * @param conf configuration used to resolve the file system and codec
 * @param path destination path
 * @return a configured JSON generator writing to {@code path}
 * @throws IOException if the stream or generator cannot be created
 */
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem fileSystem = path.getFileSystem(conf);
  CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
  Compressor compressor = null;
  OutputStream output;
  if (codec == null) {
    output = fileSystem.create(path);
  } else {
    // NOTE(review): this pooled compressor is never passed to
    // CodecPool.returnCompressor() in this method — verify it is
    // released elsewhere when the generator is closed.
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(fileSystem.create(path), compressor);
  }

  JsonGenerator generator =
      outFactory.createJsonGenerator(output, JsonEncoding.UTF8);
  generator.useDefaultPrettyPrinter();
  return generator;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:21,代码来源:Anonymizer.java

示例3: getCompressor

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Fetches a compressor for this algorithm from the shared
 * {@code CodecPool}, resetting it before handing it out.
 *
 * @return a reset compressor, or null when no codec is configured or the
 *         pool returned none
 */
public Compressor getCompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec == null) {
    return null;
  }
  Compressor compressor = CodecPool.getCompressor(codec);
  if (LOG.isTraceEnabled()) {
    LOG.trace("Retrieved compressor " + compressor + " from pool.");
  }
  if (compressor != null) {
    if (compressor.finished()) {
      // A finished() compressor indicates someone returned it to the
      // pool while still holding a reference to it.
      LOG.warn("Compressor obtained from CodecPool is already finished()");
    }
    // Pooled compressors are always reset before reuse.
    compressor.reset();
  }
  return compressor;
}
 
开发者ID:fengchen8086,项目名称:ditb,代码行数:17,代码来源:Compression.java

示例4: getCompressor

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Fetches a compressor for this algorithm from the shared
 * {@code CodecPool}, resetting it before handing it out.
 *
 * @return a reset compressor, or null when no codec is configured or the
 *         pool returned none
 */
public Compressor getCompressor() {
  CompressionCodec codec = getCodec(conf);
  if (codec == null) {
    return null;
  }
  Compressor compressor = CodecPool.getCompressor(codec);
  if (compressor != null) {
    if (compressor.finished()) {
      // The previous user returned this compressor to the CodecPool
      // while still using it; warn rather than fail.
      LOG.warn("Compressor obtained from CodecPool is already finished()");
    }
    // Pooled compressors are always reset before reuse.
    compressor.reset();
  }
  return compressor;
}
 
开发者ID:fengchen8086,项目名称:LCIndex-HBase-0.94.16,代码行数:20,代码来源:Compression.java

示例5: getCompressor

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Obtains a compressor for this compression algorithm from the shared
 * {@code CodecPool}, or returns {@code null} when no codec is configured
 * or the pool cannot supply one.
 *
 * @return a reset compressor ready for use, or null
 * @throws IOException if the codec cannot be resolved
 */
public Compressor getCompressor() throws IOException {
  CompressionCodec codec = getCodec();
  if (codec != null) {
    Compressor compressor = CodecPool.getCompressor(codec);
    if (compressor != null) {
      if (compressor.finished()) {
        // Somebody returns the compressor to CodecPool but is still using
        // it.
        LOG.warn("Compressor obtained from CodecPool already finished()");
      } else if (LOG.isDebugEnabled()) {
        // Guard the debug line so the string concatenation is only paid
        // when debug logging is actually enabled.
        LOG.debug("Got a compressor: " + compressor.hashCode());
      }
      /**
       * Following statement is necessary to get around bugs in 0.18 where a
       * compressor is referenced after returned back to the codec pool.
       */
      compressor.reset();
    }
    return compressor;
  }
  return null;
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:23,代码来源:Compression.java

示例6: Writer

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Creates an IFile writer that layers a checksumming stream — and, when a
 * codec is supplied and a compressor is available, a compressing stream —
 * over the given output.
 *
 * @param conf configuration used to build the SerializationFactory
 * @param out raw destination stream
 * @param keyClass key type; when null, no key/value serializers are set up
 * @param valueClass value type
 * @param codec compression codec, or null for uncompressed output
 * @param writesCounter counter stored as writtenRecordsCounter
 * @param ownOutputStream whether this writer owns the underlying stream
 * @throws IOException declared for stream setup failures
 */
public Writer(Configuration conf, FSDataOutputStream out, 
    Class<K> keyClass, Class<V> valueClass,
    CompressionCodec codec, Counters.Counter writesCounter,
    boolean ownOutputStream)
    throws IOException {
  this.writtenRecordsCounter = writesCounter;
  // All bytes flow through the checksumming stream before the raw stream.
  this.checksumOut = new IFileOutputStream(out);
  this.rawOut = out;
  // Remember the starting position of this writer in the raw stream.
  this.start = this.rawOut.getPos();
  if (codec != null) {
    this.compressor = CodecPool.getCompressor(codec);
    if (this.compressor != null) {
      // Reset the pooled compressor before reuse.
      this.compressor.reset();
      this.compressedOut = codec.createOutputStream(checksumOut, compressor);
      this.out = new FSDataOutputStream(this.compressedOut,  null);
      this.compressOutput = true;
    } else {
      // The pool may return null; warn and fall back to uncompressed.
      LOG.warn("Could not obtain compressor from CodecPool");
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  } else {
    this.out = new FSDataOutputStream(checksumOut,null);
  }
  
  this.keyClass = keyClass;
  this.valueClass = valueClass;

  // Serializers are only created when a key class is provided.
  if (keyClass != null) {
    SerializationFactory serializationFactory = 
      new SerializationFactory(conf);
    this.keySerializer = serializationFactory.getSerializer(keyClass);
    this.keySerializer.open(buffer);
    this.valueSerializer = serializationFactory.getSerializer(valueClass);
    this.valueSerializer.open(buffer);
  }
  this.ownOutputStream = ownOutputStream;
}
 
开发者ID:naver,项目名称:hadoop,代码行数:38,代码来源:IFile.java

示例7: init

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Initializes the outputter: opens the destination path (compressed when
 * its extension selects a codec) and builds the JSON writer over it.
 *
 * @param path destination path
 * @param conf configuration used to resolve the file system, codec and
 *        the "rumen.output.pretty.print" flag (default true)
 * @throws IOException if the stream cannot be created
 */
@Override
public void init(Path path, Configuration conf) throws IOException {
  FileSystem fs = path.getFileSystem(conf);
  CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  if (codec == null) {
    output = fs.create(path);
  } else {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(fs.create(path), compressor);
  }
  writer = new JsonObjectMapperWriter<T>(output, 
      conf.getBoolean("rumen.output.pretty.print", true));
}
 
开发者ID:naver,项目名称:hadoop,代码行数:15,代码来源:DefaultOutputter.java

示例8: Writer

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Creates an IFile writer: checksums all output and compresses it when a
 * codec is supplied and a compressor can be obtained from the pool.
 *
 * @param conf configuration used to build the SerializationFactory
 * @param out raw destination stream
 * @param keyClass key type for serialization
 * @param valueClass value type for serialization
 * @param codec compression codec, or null for uncompressed output
 * @param writesCounter counter stored as writtenRecordsCounter
 * @throws IOException declared for stream setup failures
 */
public Writer(Configuration conf, FSDataOutputStream out, 
    Class<K> keyClass, Class<V> valueClass,
    CompressionCodec codec, Counters.Counter writesCounter)
    throws IOException {
  this.writtenRecordsCounter = writesCounter;
  // All bytes flow through the checksumming stream before the raw stream.
  this.checksumOut = new IFileOutputStream(out);
  this.rawOut = out;
  // Remember the starting position of this writer in the raw stream.
  this.start = this.rawOut.getPos();
  if (codec != null) {
    this.compressor = CodecPool.getCompressor(codec);
    if (this.compressor != null) {
      // Reset the pooled compressor before reuse.
      this.compressor.reset();
      this.compressedOut = codec.createOutputStream(checksumOut, compressor);
      this.out = new FSDataOutputStream(this.compressedOut,  null);
      this.compressOutput = true;
    } else {
      // The pool may return null; warn and fall back to uncompressed.
      LOG.warn("Could not obtain compressor from CodecPool");
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  } else {
    this.out = new FSDataOutputStream(checksumOut,null);
  }
  
  this.keyClass = keyClass;
  this.valueClass = valueClass;
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  this.keySerializer = serializationFactory.getSerializer(keyClass);
  this.keySerializer.open(buffer);
  this.valueSerializer = serializationFactory.getSerializer(valueClass);
  this.valueSerializer.open(buffer);
}
 
开发者ID:Nextzero,项目名称:hadoop-2.6.0-cdh5.4.3,代码行数:32,代码来源:IFile.java

示例9: Writer

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Creates an IFile writer: checksums all output and compresses it when a
 * codec is supplied and a compressor can be obtained from the pool.
 *
 * @param conf configuration used to build the SerializationFactory
 * @param out raw destination stream
 * @param keyClass key type for serialization
 * @param valueClass value type for serialization
 * @param codec compression codec, or null for uncompressed output
 * @param writesCounter counter stored as writtenRecordsCounter
 * @throws IOException declared for stream setup failures
 */
public Writer(Configuration conf, FSDataOutputStream out, 
    Class<K> keyClass, Class<V> valueClass,
    CompressionCodec codec, Counters.Counter writesCounter)
    throws IOException {
  this.writtenRecordsCounter = writesCounter;
  this.checksumOut = new IFileOutputStream(out);
  this.rawOut = out;
  // Remember the starting position of this writer in the raw stream.
  this.start = this.rawOut.getPos();
  
  if (codec != null) {
    this.compressor = CodecPool.getCompressor(codec);
    if (this.compressor != null) {
      // Reset the pooled compressor before reuse.
      this.compressor.reset();
      this.compressedOut = codec.createOutputStream(checksumOut, compressor);
      this.out = new FSDataOutputStream(this.compressedOut,  null);
      this.compressOutput = true;
    } else {
      // CodecPool.getCompressor can return null when no compressor is
      // available; fall back to uncompressed output instead of hitting a
      // NullPointerException on reset().
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  } else {
    this.out = new FSDataOutputStream(checksumOut,null);
  }
  
  this.keyClass = keyClass;
  this.valueClass = valueClass;
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  this.keySerializer = serializationFactory.getSerializer(keyClass);
  this.keySerializer.open(buffer);
  this.valueSerializer = serializationFactory.getSerializer(valueClass);
  this.valueSerializer.open(buffer);
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:28,代码来源:IFile.java

示例10: init

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Initializes this writer: records stream, key/value classes, compression
 * flag, codec and metadata; opens key/value serializers over the internal
 * buffer; and, when a codec is present, wires up the compressed-value
 * path (compressor, deflate stream, compressed-value serializer).
 *
 * NOTE(review): the {@code name} parameter is not used in this body.
 */
@SuppressWarnings("unchecked")
void init(Path name, Configuration conf, FSDataOutputStream out,
          Class keyClass, Class valClass,
          boolean compress, CompressionCodec codec, Metadata metadata) 
  throws IOException {
  this.conf = conf;
  this.out = out;
  this.keyClass = keyClass;
  this.valClass = valClass;
  this.compress = compress;
  this.codec = codec;
  this.metadata = metadata;
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  this.keySerializer = serializationFactory.getSerializer(keyClass);
  this.keySerializer.open(buffer);
  this.uncompressedValSerializer = serializationFactory.getSerializer(valClass);
  this.uncompressedValSerializer.open(buffer);
  if (this.codec != null) {
    // Apply the configuration to the codec before pulling a compressor
    // from the shared pool.
    ReflectionUtils.setConf(this.codec, this.conf);
    this.compressor = CodecPool.getCompressor(this.codec);
    // Compressed values are written through deflateOut into buffer.
    this.deflateFilter = this.codec.createOutputStream(buffer, compressor);
    this.deflateOut = 
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    this.compressedValSerializer = serializationFactory.getSerializer(valClass);
    this.compressedValSerializer.open(deflateOut);
  }
}
 
开发者ID:rhli,项目名称:hadoop-EAR,代码行数:29,代码来源:SequenceFile.java

示例11: Writer

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Creates an IFile writer that layers a checksumming stream — and, when a
 * codec is supplied and a compressor is available, a compressing stream —
 * over the given output.
 *
 * @param conf configuration used to build the SerializationFactory
 * @param out raw destination stream
 * @param keyClass key type; when null, no key/value serializers are set up
 * @param valueClass value type
 * @param codec compression codec, or null for uncompressed output
 * @param writesCounter counter stored as writtenRecordsCounter
 * @throws IOException declared for stream setup failures
 */
public Writer(Configuration conf, FSDataOutputStream out, 
    Class<K> keyClass, Class<V> valueClass,
    CompressionCodec codec, Counters.Counter writesCounter)
    throws IOException {
  this.writtenRecordsCounter = writesCounter;
  // All bytes flow through the checksumming stream before the raw stream.
  this.checksumOut = new IFileOutputStream(out);
  this.rawOut = out;
  // Remember the starting position of this writer in the raw stream.
  this.start = this.rawOut.getPos();
  if (codec != null) {
    this.compressor = CodecPool.getCompressor(codec);
    if (this.compressor != null) {
      // Reset the pooled compressor before reuse.
      this.compressor.reset();
      this.compressedOut = codec.createOutputStream(checksumOut, compressor);
      this.out = new FSDataOutputStream(this.compressedOut,  null);
      this.compressOutput = true;
    } else {
      // The pool may return null; warn and fall back to uncompressed.
      LOG.warn("Could not obtain compressor from CodecPool");
      this.out = new FSDataOutputStream(checksumOut,null);
    }
  } else {
    this.out = new FSDataOutputStream(checksumOut,null);
  }
  
  this.keyClass = keyClass;
  this.valueClass = valueClass;

  // Serializers are only created when a key class is provided.
  if (keyClass != null) {
    SerializationFactory serializationFactory = 
      new SerializationFactory(conf);
    this.keySerializer = serializationFactory.getSerializer(keyClass);
    this.keySerializer.open(buffer);
    this.valueSerializer = serializationFactory.getSerializer(valueClass);
    this.valueSerializer.open(buffer);
  }
}
 
开发者ID:ict-carch,项目名称:hadoop-plus,代码行数:36,代码来源:IFile.java

示例12: BytesCompressor

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Creates a page compressor for the given codec. When no codec is
 * supplied, the compressor and output buffer stay null.
 *
 * @param codecName parquet-level name of the codec
 * @param codec Hadoop codec backing the compression, or null
 * @param pageSize initial capacity of the compressed-output buffer
 */
public BytesCompressor(CompressionCodecName codecName, CompressionCodec codec, int pageSize)
{
    this.codecName = codecName;
    this.codec = codec;
    if (codec == null) {
        this.compressor = null;
        this.compressedOutBuffer = null;
    }
    else {
        // Only allocate pool/buffer resources when compression is in use.
        this.compressor = CodecPool.getCompressor(codec);
        this.compressedOutBuffer = new ByteArrayOutputStream(pageSize);
    }
}
 
开发者ID:y-lan,项目名称:presto,代码行数:14,代码来源:ParquetCodecFactory.java

示例13: init

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Initializes this writer: records stream/class/codec/metadata state,
 * looks up the key and value serializers (throwing a descriptive
 * IOException when one cannot be found), wires up optional compression,
 * and finally either syncs to an existing file (append mode) or writes a
 * fresh file header.
 */
@SuppressWarnings("unchecked")
void init(Configuration conf, FSDataOutputStream out, boolean ownStream,
          Class keyClass, Class valClass,
          CompressionCodec codec, Metadata metadata) 
  throws IOException {
  this.conf = conf;
  this.out = out;
  this.ownOutputStream = ownStream;
  this.keyClass = keyClass;
  this.valClass = valClass;
  this.codec = codec;
  this.metadata = metadata;
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  this.keySerializer = serializationFactory.getSerializer(keyClass);
  // A null serializer means IO_SERIALIZATIONS_KEY does not cover this
  // class; fail fast with an actionable message.
  if (this.keySerializer == null) {
    throw new IOException(
        "Could not find a serializer for the Key class: '"
            + keyClass.getCanonicalName() + "'. "
            + "Please ensure that the configuration '" +
            CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
            + "properly configured, if you're using"
            + "custom serialization.");
  }
  this.keySerializer.open(buffer);
  this.uncompressedValSerializer = serializationFactory.getSerializer(valClass);
  if (this.uncompressedValSerializer == null) {
    throw new IOException(
        "Could not find a serializer for the Value class: '"
            + valClass.getCanonicalName() + "'. "
            + "Please ensure that the configuration '" +
            CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
            + "properly configured, if you're using"
            + "custom serialization.");
  }
  this.uncompressedValSerializer.open(buffer);
  if (this.codec != null) {
    // Apply the configuration to the codec before pulling a compressor
    // from the shared pool.
    ReflectionUtils.setConf(this.codec, this.conf);
    this.compressor = CodecPool.getCompressor(this.codec);
    // Compressed values are written through deflateOut into buffer.
    this.deflateFilter = this.codec.createOutputStream(buffer, compressor);
    this.deflateOut = 
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    this.compressedValSerializer = serializationFactory.getSerializer(valClass);
    if (this.compressedValSerializer == null) {
      throw new IOException(
          "Could not find a serializer for the Value class: '"
              + valClass.getCanonicalName() + "'. "
              + "Please ensure that the configuration '" +
              CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
              + "properly configured, if you're using"
              + "custom serialization.");
    }
    this.compressedValSerializer.open(deflateOut);
  }

  // appendMode is a field set elsewhere in the class; when appending we
  // sync to the existing file rather than emitting a new header.
  if (appendMode) {
    sync();
  } else {
    writeFileHeader();
  }
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:62,代码来源:SequenceFile.java

示例14: open

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Opens a compressed output stream at {@code filePath} and builds the
 * event serializer on top of it. Appends to an existing file only when
 * the "hdfs.append.support" configuration flag is true and the path
 * already exists as a file; otherwise a new file is created.
 *
 * @param filePath destination path on the (possibly local) file system
 * @param codec codec used to wrap the raw stream; must be non-null
 * @param cType compression type; not referenced in this body
 * @throws IOException if the stream cannot be opened, or when appending
 *         to a serializer type that does not support reopen
 */
@Override
public void open(String filePath, CompressionCodec codec,
    CompressionType cType) throws IOException {
  Configuration conf = new Configuration();
  Path dstPath = new Path(filePath);
  FileSystem hdfs = dstPath.getFileSystem(conf);
  // Optionally bypass checksumming by unwrapping the raw local FS.
  if (useRawLocalFileSystem) {
    if (hdfs instanceof LocalFileSystem) {
      hdfs = ((LocalFileSystem)hdfs).getRaw();
    } else {
      logger.warn("useRawLocalFileSystem is set to true but file system " +
          "is not of type LocalFileSystem: " + hdfs.getClass().getName());
    }
  }
  boolean appending = false;
  if (conf.getBoolean("hdfs.append.support", false) == true && hdfs.isFile(dstPath)) {
    fsOut = hdfs.append(dstPath);
    appending = true;
  } else {
    fsOut = hdfs.create(dstPath);
  }
  // The pooled compressor is fetched once and cached in the field, so it
  // is reused across re-opens of this stream.
  if (compressor == null) {
    compressor = CodecPool.getCompressor(codec, conf);
  }
  cmpOut = codec.createOutputStream(fsOut, compressor);
  serializer = EventSerializerFactory.getInstance(serializerType,
      serializerContext, cmpOut);
  // Appending only works for serializers that can resume mid-file; close
  // the stream and fail loudly otherwise.
  if (appending && !serializer.supportsReopen()) {
    cmpOut.close();
    serializer = null;
    throw new IOException("serializer (" + serializerType
        + ") does not support append");
  }

  registerCurrentStream(fsOut, hdfs, dstPath);

  if (appending) {
    serializer.afterReopen();
  } else {
    serializer.afterCreate();
  }
  isFinished = false;
}
 
开发者ID:moueimei,项目名称:flume-release-1.7.0,代码行数:44,代码来源:HDFSCompressedDataStream.java

示例15: init

import org.apache.hadoop.io.compress.CodecPool; //导入方法依赖的package包/类
/**
 * Initializes this writer: records stream/class/codec/metadata state,
 * looks up the key and value serializers (throwing a descriptive
 * IOException when one cannot be found), wires up optional compression,
 * and finally writes the file header.
 */
@SuppressWarnings("unchecked")
void init(Configuration conf, FSDataOutputStream out, boolean ownStream,
          Class keyClass, Class valClass,
          CompressionCodec codec, Metadata metadata) 
  throws IOException {
  this.conf = conf;
  this.out = out;
  this.ownOutputStream = ownStream;
  this.keyClass = keyClass;
  this.valClass = valClass;
  this.codec = codec;
  this.metadata = metadata;
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  this.keySerializer = serializationFactory.getSerializer(keyClass);
  // A null serializer means IO_SERIALIZATIONS_KEY does not cover this
  // class; fail fast with an actionable message.
  if (this.keySerializer == null) {
    throw new IOException(
        "Could not find a serializer for the Key class: '"
            + keyClass.getCanonicalName() + "'. "
            + "Please ensure that the configuration '" +
            CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
            + "properly configured, if you're using"
            + "custom serialization.");
  }
  this.keySerializer.open(buffer);
  this.uncompressedValSerializer = serializationFactory.getSerializer(valClass);
  if (this.uncompressedValSerializer == null) {
    throw new IOException(
        "Could not find a serializer for the Value class: '"
            + valClass.getCanonicalName() + "'. "
            + "Please ensure that the configuration '" +
            CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
            + "properly configured, if you're using"
            + "custom serialization.");
  }
  this.uncompressedValSerializer.open(buffer);
  if (this.codec != null) {
    // Apply the configuration to the codec before pulling a compressor
    // from the shared pool.
    ReflectionUtils.setConf(this.codec, this.conf);
    this.compressor = CodecPool.getCompressor(this.codec);
    // Compressed values are written through deflateOut into buffer.
    this.deflateFilter = this.codec.createOutputStream(buffer, compressor);
    this.deflateOut = 
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    this.compressedValSerializer = serializationFactory.getSerializer(valClass);
    if (this.compressedValSerializer == null) {
      throw new IOException(
          "Could not find a serializer for the Value class: '"
              + valClass.getCanonicalName() + "'. "
              + "Please ensure that the configuration '" +
              CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
              + "properly configured, if you're using"
              + "custom serialization.");
    }
    this.compressedValSerializer.open(deflateOut);
  }
  writeFileHeader();
}
 
开发者ID:naver,项目名称:hadoop,代码行数:57,代码来源:SequenceFile.java


注:本文中的org.apache.hadoop.io.compress.CodecPool.getCompressor方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。