

Java ZlibFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.compress.zlib.ZlibFactory. If you are wondering what ZlibFactory does, or how to use it, the curated code examples below may help.


The ZlibFactory class belongs to the org.apache.hadoop.io.compress.zlib package. Nine code examples of the class are shown below, sorted by popularity by default.
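
Before the examples, here is a minimal, self-contained sketch of the usage pattern most of them exercise: probing whether the native zlib bindings in libhadoop are loaded and obtaining a matching compressor/decompressor pair from ZlibFactory. This is an illustration, not code from any of the projects below; the class name ZlibFactoryQuickStart is made up for this sketch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;

public class ZlibFactoryQuickStart {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // True when the native zlib bindings in libhadoop are usable;
    // otherwise ZlibFactory falls back to the pure-Java implementations.
    boolean nativeZlib = ZlibFactory.isNativeZlibLoaded(conf);
    System.out.println("native zlib loaded: " + nativeZlib);
    if (nativeZlib) {
      // Only meaningful when the native library is actually loaded.
      System.out.println("zlib library: " + ZlibFactory.getLibraryName());
    }

    // ZlibCompressor/ZlibDecompressor when native code is available,
    // BuiltInZlibDeflater/BuiltInZlibInflater otherwise.
    Compressor compressor = ZlibFactory.getZlibCompressor(conf);
    Decompressor decompressor = ZlibFactory.getZlibDecompressor(conf);

    // ... hand these to a CompressionCodec, or return them to the CodecPool
    // when done, as the examples below demonstrate ...
  }
}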

Example 1: testNativeCodeLoaded

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testNativeCodeLoaded() {
  if (requireTestJni() == false) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names depend on the platform and build environment,
  // so just check that the names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  if (NativeCodeLoader.buildSupportsOpenssl()) {
    assertFalse(OpensslCipher.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 24, Source: TestNativeCodeLoader.java

Example 2: isAvailable

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
/**
 * Checks whether the given compressor implementation is available in the current environment.
 */
private static <T extends Compressor, E extends Decompressor> boolean isAvailable(TesterPair<T, E> pair) {
  Compressor compressor = pair.compressor;

  if (compressor.getClass().isAssignableFrom(Lz4Compressor.class)
      && NativeCodeLoader.isNativeCodeLoaded()) {
    return true;
  } else if (compressor.getClass().isAssignableFrom(BuiltInZlibDeflater.class)
      && NativeCodeLoader.isNativeCodeLoaded()) {
    return true;
  } else if (compressor.getClass().isAssignableFrom(ZlibCompressor.class)) {
    return ZlibFactory.isNativeZlibLoaded(new Configuration());
  } else if (compressor.getClass().isAssignableFrom(SnappyCompressor.class)
      && isNativeSnappyLoadable()) {
    return true;
  }

  return false;
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 24, Source: CompressDecompressTester.java

Example 3: testCodecPoolGzipReuse

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testCodecPoolGzipReuse() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecPoolGzipReuse skipped: native libs not loaded");
    return;
  }
  GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
  DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
  Compressor c1 = CodecPool.getCompressor(gzc);
  Compressor c2 = CodecPool.getCompressor(dfc);
  CodecPool.returnCompressor(c1);
  CodecPool.returnCompressor(c2);
  assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
}
 
Developer: naver, Project: hadoop, Lines: 17, Source: TestCodec.java

Example 4: testGzipLongOverflow

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");

  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));

  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);

  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
    new GZIPOutputStream(new FileOutputStream(f.toString()))));

  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();

  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
 
Developer: naver, Project: hadoop, Lines: 49, Source: TestCodec.java

Example 5: testCodecInitWithCompressionLevel

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
  Configuration conf = new Configuration();
  if (ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
                          "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
  } else {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
             + ": native libs not loaded");
  }
  conf = new Configuration();
  // don't use native libs
  ZlibFactory.setNativeZlibLoaded(false);
  codecTestWithNOCompression( conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 20, Source: TestCodec.java

Example 6: testCodecInitWithCompressionLevel

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
  if (ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
                          "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
  } else {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
             + ": native libs not loaded");
  }
  conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  codecTestWithNOCompression( conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
}
 
Developer: yncxcw, Project: big-c, Lines: 20, Source: TestCodec.java

Example 7: createWriter

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
/**
 * Construct the preferred type of 'raw' SequenceFile Writer.
 * @param out The stream on top of which the writer is to be constructed.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param compress Compress data?
 * @param blockCompress Compress blocks?
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException
 */
private static Writer
  createWriter(Configuration conf, FSDataOutputStream out, 
               Class keyClass, Class valClass, boolean compress, boolean blockCompress,
               CompressionCodec codec, Metadata metadata)
  throws IOException {
  if (codec != null && (codec instanceof GzipCodec) && 
      !NativeCodeLoader.isNativeCodeLoaded() && 
      !ZlibFactory.isNativeZlibLoaded(conf)) {
    throw new IllegalArgumentException("SequenceFile doesn't work with " +
                                       "GzipCodec without native-hadoop code!");
  }

  Writer writer = null;

  if (!compress) {
    writer = new Writer(conf, out, keyClass, valClass, metadata);
  } else if (compress && !blockCompress) {
    writer = new RecordCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  } else {
    writer = new BlockCompressWriter(conf, out, keyClass, valClass, codec, metadata);
  }
  
  return writer;
}
 
Developer: rhli, Project: hadoop-EAR, Lines: 36, Source: SequenceFile.java
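
As a hedged illustration of how a caller might avoid the IllegalArgumentException above, the following sketch selects GzipCodec only when the same native checks pass and otherwise falls back to DefaultCodec. CodecChooser and chooseCodec are hypothetical names invented for this sketch, not part of Hadoop.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecChooser {
  /**
   * Mirrors the guard in createWriter above: GzipCodec is chosen only when
   * native-hadoop or native zlib is available; otherwise fall back to
   * DefaultCodec, which works with the built-in Java zlib.
   */
  static CompressionCodec chooseCodec(Configuration conf) {
    if (NativeCodeLoader.isNativeCodeLoaded() || ZlibFactory.isNativeZlibLoaded(conf)) {
      return ReflectionUtils.newInstance(GzipCodec.class, conf);
    }
    return ReflectionUtils.newInstance(DefaultCodec.class, conf);
  }
}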

Example 8: testNativeCodeLoaded

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
@Test
public void testNativeCodeLoaded() {
  if (requireTestJni() == false) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names depend on the platform and build environment,
  // so just check that the names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 21, Source: TestNativeCodeLoader.java

Example 9: testCodecInitWithCompressionLevel

import org.apache.hadoop.io.compress.zlib.ZlibFactory; // import the required package/class
public void testCodecInitWithCompressionLevel() throws Exception {
  Configuration conf = new Configuration();
  conf.setBoolean("io.native.lib.available", true);
  if (ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
                          "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
  } else {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
             + ": native libs not loaded");
  }
  conf = new Configuration();
  conf.setBoolean("io.native.lib.available", false);
  codecTestWithNOCompression( conf,
                       "org.apache.hadoop.io.compress.DefaultCodec");
}
 
Developer: Seagate, Project: hadoop-on-lustre, Lines: 19, Source: TestCodec.java


Note: The org.apache.hadoop.io.compress.zlib.ZlibFactory examples in this article were collected from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their original authors; copyright remains with those authors, and redistribution and use should follow the corresponding project's license. Do not republish without permission.