当前位置: 首页>>代码示例>>Java>>正文


Java CompressionInputStream类代码示例

本文整理汇总了Java中org.apache.hadoop.io.compress.CompressionInputStream的典型用法代码示例。如果您正苦于以下问题:Java CompressionInputStream类的具体用法?Java CompressionInputStream怎么用?Java CompressionInputStream使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


CompressionInputStream类属于org.apache.hadoop.io.compress包,在下文中一共展示了CompressionInputStream类的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: readBuffer

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
/** Read a compressed buffer */
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer, 
                                     CompressionInputStream filter) throws IOException {
  // Stage the compressed bytes in a scratch buffer before exposing them.
  DataOutputBuffer scratch = new DataOutputBuffer();

  try {
    // The stream stores a vint byte count followed by that many data bytes.
    int length = WritableUtils.readVInt(in);
    scratch.write(in, length);

    // Point 'buffer' at the bytes just staged.
    buffer.reset(scratch.getData(), 0, scratch.getLength());
  } finally {
    scratch.close();
  }

  // Drop any codec state carried over from the previous buffer.
  filter.resetState();
}
 
开发者ID:nucypher,项目名称:hadoop-oss,代码行数:20,代码来源:SequenceFile.java

示例2: createDecompressionStream

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
@Override
public synchronized InputStream createDecompressionStream(
    InputStream downStream, Decompressor decompressor,
    int downStreamBufferSize) throws IOException {
  if (!isSupported()) {
    throw new IOException(
        "LZO codec class not specified. Did you forget to set property "
            + CONF_LZO_CLASS + "?");
  }
  InputStream bis1 = null;
  if (downStreamBufferSize > 0) {
    bis1 = new BufferedInputStream(downStream, downStreamBufferSize);
  } else {
    bis1 = downStream;
  }
  conf.setInt("io.compression.codec.lzo.buffersize", 64 * 1024);
  CompressionInputStream cis =
      codec.createInputStream(bis1, decompressor);
  BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE);
  return bis2;
}
 
开发者ID:aliyun-beta,项目名称:aliyun-oss-hadoop-fs,代码行数:22,代码来源:Compression.java

示例3: copy

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
/**
 * 
 * @param source
 * @param dest
 * @param codec
 * @param compressor
 *            may be null
 * @param decomp
 *            may be null
 * @param mark
 * @return
 * @throws IOException
 */
/**
 * Copies up to {@code mark} bytes from {@code source} into {@code dest},
 * decompressing with {@code codec} on the way in and compressing on the
 * way out.
 *
 * @param source     compressed input file
 * @param dest       destination file for the recompressed output
 * @param codec      codec used for both the input and output streams
 * @param compressor may be null; when set, reused for the output stream
 * @param decomp     may be null; when set, reused for the input stream
 * @param mark       byte count handed to the stream-level copy
 * @return the still-open output stream; on success the caller is
 *         responsible for closing it
 * @throws IOException if reading or writing fails; the output streams are
 *         closed before the exception propagates
 */
public static final CompressionOutputStream copy(File source, File dest,
		CompressionCodec codec, Compressor compressor, Decompressor decomp,
		long mark) throws IOException {

	FileInputStream fileInput = new FileInputStream(source);
	CompressionInputStream in = (decomp == null) ? codec
			.createInputStream(fileInput) : codec.createInputStream(
			fileInput, decomp);

	FileOutputStream fileOut = new FileOutputStream(dest);
	CompressionOutputStream out = (compressor == null) ? codec
			.createOutputStream(fileOut) : codec.createOutputStream(
			fileOut, compressor);

	boolean copied = false;
	try {
		copy(in, out, mark);
		copied = true;
		return out;
	} finally {
		IOUtils.closeQuietly(in);
		IOUtils.closeQuietly(fileInput);
		if (!copied) {
			// BUGFIX: when the copy fails the caller never receives 'out',
			// so the original code leaked the destination file handle.
			// Close the output side here before the exception propagates.
			IOUtils.closeQuietly(out);
			IOUtils.closeQuietly(fileOut);
		}
	}
}
 
开发者ID:gerritjvv,项目名称:bigstreams,代码行数:36,代码来源:CompressionRollBackHelper.java

示例4: getBufferedReader

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
/**
 * Opens {@code file} for reading, transparently decompressing it when a
 * Hadoop codec matches the file's extension.
 *
 * @param file    local file to read
 * @param context used to obtain the job configuration for codec lookup
 * @return a reader over the (possibly decompressed) file contents
 * @throws IOException if the file cannot be opened or the codec fails
 */
public static BufferedReader getBufferedReader(File file, MapredContext context)
        throws IOException {
    URI fileuri = file.toURI();
    Path path = new Path(fileuri);

    Configuration conf = context.getJobConf();
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(path);

    if (codec == null) {
        // No codec matches the extension: read as plain text.
        return new BufferedReader(new FileReader(file));
    }

    Decompressor decompressor = CodecPool.getDecompressor(codec);
    FileInputStream fis = new FileInputStream(file);
    try {
        CompressionInputStream cis = codec.createInputStream(fis, decompressor);
        // NOTE(review): BufferedReaderExt presumably returns the decompressor
        // to the pool on close() — confirm against its implementation.
        return new BufferedReaderExt(new InputStreamReader(cis), decompressor);
    } catch (IOException | RuntimeException e) {
        // BUGFIX: the original leaked the file handle and never returned the
        // pooled decompressor when createInputStream threw.
        fis.close();
        CodecPool.returnDecompressor(decompressor);
        throw e;
    }
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:20,代码来源:HadoopUtils.java

示例5: createDecompressionStream

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
@Override
public synchronized InputStream createDecompressionStream(
    InputStream downStream, Decompressor decompressor,
    int downStreamBufferSize) throws IOException {
  if (!isSupported()) {
    throw new IOException(
        "LZO codec class not specified. Did you forget to set property "
            + CONF_LZO_CLASS + "?");
  }
  InputStream bis1 = null;
  if (downStreamBufferSize > 0) {
    bis1 = new BufferedInputStream(downStream, downStreamBufferSize);
  } else {
    bis1 = downStream;
  }
  conf.setInt(IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY,
      IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT);
  CompressionInputStream cis =
      codec.createInputStream(bis1, decompressor);
  BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE);
  return bis2;
}
 
开发者ID:hopshadoop,项目名称:hops,代码行数:23,代码来源:Compression.java

示例6: createDecompressionStream

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
@Override
public synchronized InputStream createDecompressionStream(
    InputStream downStream, Decompressor decompressor,
    int downStreamBufferSize) throws IOException {
  if (!isSupported()) {
    throw new IOException(
        "LZO codec class not specified. Did you forget to set property "
            + CONF_LZO_CLASS + "?");
  }
  InputStream bis1 = null;
  if (downStreamBufferSize > 0) {
    bis1 = new BufferedInputStream(downStream, downStreamBufferSize);
  } else {
    bis1 = downStream;
  }
  CompressionInputStream cis =
      codec.createInputStream(bis1, decompressor);
  BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE);
  return bis2;
}
 
开发者ID:sigmoidanalytics,项目名称:spork-streaming,代码行数:21,代码来源:Compression.java

示例7: testCompressAndDecompressConsistent

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
@Test
public void testCompressAndDecompressConsistent() throws Exception {
    final String testString = "Test String";
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final OutputStreamWriter writer = new OutputStreamWriter(subject.createOutputStream(baos));
    writer.write(testString);
    writer.flush();
    writer.close();

    final CompressionInputStream inputStream = subject.createInputStream(new ByteArrayInputStream(baos
            .toByteArray()));
    final StringWriter contentsTester = new StringWriter();
    IOUtils.copy(inputStream, contentsTester);
    inputStream.close();
    contentsTester.flush();
    contentsTester.close();

    Assert.assertEquals(testString, contentsTester.toString());
}
 
开发者ID:Conductor,项目名称:kangaroo,代码行数:20,代码来源:SnappyFramedCodecTest.java

示例8: readBuffer

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer,
    CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();

  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);

    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
 
开发者ID:azkaban,项目名称:azkaban-plugins,代码行数:20,代码来源:AzkabanSequenceFileReader.java

示例9: testInternalErrorTranslation

import org.apache.hadoop.io.compress.CompressionInputStream; //导入依赖的package包/类
/**
 * Verifies that an InternalError thrown by the codec's read path is
 * translated into an IOException whose cause and message are preserved.
 */
@Test
public void testInternalErrorTranslation() throws Exception {
  String codecErrorMsg = "codec failure";

  // Stub a codec whose input stream throws InternalError on every read.
  CompressionInputStream failingStream = mock(CompressionInputStream.class);
  when(failingStream.read(any(byte[].class), anyInt(), anyInt()))
      .thenThrow(new InternalError(codecErrorMsg));
  Decompressor stubDecompressor = mock(Decompressor.class);
  CompressionCodec stubCodec = mock(CompressionCodec.class);
  when(stubCodec.createDecompressor()).thenReturn(stubDecompressor);
  when(stubCodec.createInputStream(any(InputStream.class), any(Decompressor.class)))
      .thenReturn(failingStream);

  byte[] header = new byte[] { (byte) 'T', (byte) 'I', (byte) 'F', (byte) 1};
  try {
    ShuffleUtils.shuffleToMemory(new byte[1024], new ByteArrayInputStream(header),
        1024, 128, stubCodec, false, 0, mock(Logger.class), null);
    Assert.fail("shuffle was supposed to throw!");
  } catch (IOException e) {
    // The InternalError must surface as the cause, and its message must be
    // carried in the wrapping IOException.
    Assert.assertTrue(e.getCause() instanceof InternalError);
    Assert.assertTrue(e.getMessage().contains(codecErrorMsg));
  }
}
 
开发者ID:apache,项目名称:tez,代码行数:22,代码来源:TestShuffleUtils.java


注:本文中的org.apache.hadoop.io.compress.CompressionInputStream类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。