

Java SnappyCodec Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.io.compress.SnappyCodec. If you have been wondering what the SnappyCodec class is for, how to use it, or what working code looks like, the curated examples below should help.


The SnappyCodec class belongs to the org.apache.hadoop.io.compress package. Fifteen code examples of the class are shown below, sorted by popularity.
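
Before the collected examples, here is a minimal, self-contained sketch of the typical SnappyCodec round trip: compress an in-memory payload, then decompress it back. It assumes the Hadoop native library with Snappy support can be loaded at runtime; the class name, buffer size, and payload are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.SnappyCodec;

public class SnappyRoundTrip {
  public static void main(String[] args) throws Exception {
    SnappyCodec codec = new SnappyCodec();
    codec.setConf(new Configuration()); // the codec needs a Configuration before use

    byte[] payload = "hello snappy".getBytes(StandardCharsets.UTF_8);

    // Compress into an in-memory buffer.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (OutputStream out = codec.createOutputStream(compressed)) {
      out.write(payload);
    }

    // Decompress and print the original text.
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    try (InputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()))) {
      IOUtils.copyBytes(in, restored, 4096, false);
    }
    System.out.println(restored.toString("UTF-8"));
  }
}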

Example 1: testNativeCodeLoaded

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
@Test
public void testNativeCodeLoaded() {
  if (!requireTestJni()) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names depend on the platform and build environment,
  // so just check that the names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  if (NativeCodeLoader.buildSupportsOpenssl()) {
    assertFalse(OpensslCipher.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Developer ID: nucypher, Project: hadoop-oss, Lines: 24, Source: TestNativeCodeLoader.java

Example 2: testNativeCodeLoaded

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
@Test
public void testNativeCodeLoaded() {
  if (!requireTestJni()) {
    LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
    return;
  }
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
        "libhadoop.so was not loaded.");
  }
  assertFalse(NativeCodeLoader.getLibraryName().isEmpty());
  // library names depend on the platform and build environment,
  // so just check that the names are available
  assertFalse(ZlibFactory.getLibraryName().isEmpty());
  if (NativeCodeLoader.buildSupportsSnappy()) {
    assertFalse(SnappyCodec.getLibraryName().isEmpty());
  }
  assertFalse(Lz4Codec.getLibraryName().isEmpty());
  LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
 
Developer ID: ict-carch, Project: hadoop-plus, Lines: 21, Source: TestNativeCodeLoader.java

Example 3: setTasksClasses

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
/**
 * Sets task classes, with related info if needed, into the configuration object.
 *
 * @param job Configuration to change.
 * @param setMapper Option to set the mapper and input format classes.
 * @param setCombiner Option to set the combiner class.
 * @param setReducer Option to set the reducer and output format classes.
 * @param outputCompression Option to write the output as a block-compressed SequenceFile using Snappy.
 */
public static void setTasksClasses(Job job, boolean setMapper, boolean setCombiner, boolean setReducer,
        boolean outputCompression) {
    if (setMapper) {
        job.setMapperClass(HadoopWordCount2Mapper.class);
        job.setInputFormatClass(TextInputFormat.class);
    }

    if (setCombiner)
        job.setCombinerClass(HadoopWordCount2Combiner.class);

    if (setReducer) {
        job.setReducerClass(HadoopWordCount2Reducer.class);
        job.setOutputFormatClass(TextOutputFormat.class);
    }

    if (outputCompression) {
        job.setOutputFormatClass(SequenceFileOutputFormat.class);

        SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.BLOCK);

        SequenceFileOutputFormat.setCompressOutput(job, true);

        job.getConfiguration().set(FileOutputFormat.COMPRESS_CODEC, SnappyCodec.class.getName());
    }
}
 
Developer ID: apache, Project: ignite, Lines: 34, Source: HadoopWordCount2.java
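
As a usage illustration, a hypothetical driver wiring the helper into a word-count job might look like this; the driver class name, paths, and output key/value types are assumptions, not part of the ignite source.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount2Driver {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "word-count-2");
    job.setJarByClass(WordCount2Driver.class);

    // Mapper, combiner, reducer, and Snappy-compressed SequenceFile output all enabled.
    HadoopWordCount2.setTasksClasses(job, true, true, true, true);

    // Assumed output types for the word-count mapper/reducer pair.
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}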

Example 4: testGetCodecString

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
@Test
public void testGetCodecString() {
  Configuration configuration = dfsServer.getConf();
  assertEquals(MrUtil.CODEC_NONE, MrUtil.getCodecString(configuration));
  configuration.setBoolean(FileOutputFormat.COMPRESS, false);
  assertEquals(MrUtil.CODEC_NONE, MrUtil.getCodecString(configuration));
  configuration.setBoolean(FileOutputFormat.COMPRESS, true);
  assertEquals(new DefaultCodec().getDefaultExtension().substring(1),
    MrUtil.getCodecString(configuration));
  configuration.set(FileOutputFormat.COMPRESS_CODEC, SnappyCodec.class.getName());
  assertEquals(new SnappyCodec().getDefaultExtension().substring(1),
    MrUtil.getCodecString(configuration));
  configuration.set(FileOutputFormat.COMPRESS_TYPE, CompressionType.BLOCK.toString());
  assertEquals(new SnappyCodec().getDefaultExtension().substring(1),
    MrUtil.getCodecString(configuration));
  configuration.set(FileOutputFormat.COMPRESS_TYPE, CompressionType.NONE.toString());
  assertEquals(MrUtil.CODEC_NONE, MrUtil.getCodecString(configuration));
  configuration.set(FileOutputFormat.COMPRESS_TYPE, CompressionType.BLOCK.toString());
  configuration.setBoolean(FileOutputFormat.COMPRESS, false);
  assertEquals(MrUtil.CODEC_NONE, MrUtil.getCodecString(configuration));
}
 
Developer ID: ggear, Project: cloudera-framework, Lines: 22, Source: TestMrUtil.java
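
The test never shows MrUtil itself. Purely as an illustration, a sketch consistent with the assertions above might look like the following; the CODEC_NONE value and the exact signature are assumptions inferred from the test, not the real cloudera-framework source.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

public class MrUtilSketch {
  public static final String CODEC_NONE = "none"; // assumed constant value

  // Returns the codec's file extension (without the dot) when output
  // compression is effectively enabled, and CODEC_NONE otherwise.
  public static String getCodecString(Configuration conf) {
    boolean compress = conf.getBoolean(FileOutputFormat.COMPRESS, false);
    String type = conf.get(FileOutputFormat.COMPRESS_TYPE);
    if (!compress || CompressionType.NONE.toString().equals(type)) {
      return CODEC_NONE;
    }
    Class<? extends CompressionCodec> codecClass = conf.getClass(
        FileOutputFormat.COMPRESS_CODEC, DefaultCodec.class, CompressionCodec.class);
    CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
    return codec.getDefaultExtension().substring(1); // drop the leading '.'
  }
}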

Example 5: run

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
@Override
public int run(String[] args) throws Exception {

  if (args.length != 2) {
    System.out.printf("Usage: CreateSequenceFile <input dir> <output dir>\n");
    return -1;
  }

  Job job = new Job(getConf());
  job.setJarByClass(CreateSequenceFile.class);
  job.setJobName("Create Sequence File");
  
  job.setNumReduceTasks(0);
  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  
  FileInputFormat.setInputPaths(job, new Path(args[0]));
  SequenceFileOutputFormat.setOutputPath(job, new Path(args[1]));
  
  FileOutputFormat.setCompressOutput(job, true);
  FileOutputFormat.setOutputCompressorClass(job, SnappyCodec.class);

  SequenceFileOutputFormat.setOutputCompressionType(job,
      CompressionType.BLOCK);
  boolean success = job.waitForCompletion(true);
  return success ? 0 : 1;
}
 
Developer ID: mellowonpsx, Project: cloudera-homework, Lines: 27, Source: CreateSequenceFile.java
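
Since run overrides the Tool interface, the class presumably also has a ToolRunner-based entry point. A typical main method (an assumption, not shown in the excerpt) would be:

// Hypothetical entry point; assumes CreateSequenceFile implements Tool.
public static void main(String[] args) throws Exception {
  int exitCode = ToolRunner.run(new Configuration(), new CreateSequenceFile(), args);
  System.exit(exitCode);
}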

Example 6: configStage1

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
protected JobConf configStage1() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.set("recursive_diagmult", "" + recursive_diagmult);
    conf.setJobName("data-piqid.pegasus.ConCmptBlock_pass1");

    conf.setMapperClass(MapStage1.class);
    conf.setReducerClass(RedStage1.class);

    FileInputFormat.setInputPaths(conf, edge_path, curbm_path);
    FileOutputFormat.setOutputPath(conf, tempbm_path);
    FileOutputFormat.setCompressOutput(conf, true);
    FileOutputFormat.setOutputCompressorClass(conf, SnappyCodec.class);

    conf.setNumReduceTasks(nreducers);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}
 
Developer ID: patriziosotgiu, Project: pegasus, Lines: 22, Source: ConCmptBlock.java
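
The PEGASUS driver presumably chains these configStageN() factories from its run loop. Purely as an illustration (the real ConCmptBlock iterates the early stages until the connected components converge), the call pattern is:

// Illustrative only: run the stages back to back with the classic
// mapred JobClient; the real driver loops until convergence.
JobClient.runJob(configStage1());
JobClient.runJob(configStage2());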

Example 7: configStage2

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
protected JobConf configStage2() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.setJobName("data-piqid.pegasus.ConCmptBlock_pass2");

    conf.setMapperClass(MapStage2.class);
    conf.setReducerClass(RedStage2.class);

    FileInputFormat.setInputPaths(conf, tempbm_path);
    FileOutputFormat.setOutputPath(conf, nextbm_path);
    FileOutputFormat.setCompressOutput(conf, true);
    FileOutputFormat.setOutputCompressorClass(conf, SnappyCodec.class);

    conf.setNumReduceTasks(nreducers);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}
 
Developer ID: patriziosotgiu, Project: pegasus, Lines: 21, Source: ConCmptBlock.java

Example 8: configStage4

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
protected JobConf configStage4() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.setJobName("data-piqid.pegasus.ConCmptBlock_pass4");

    conf.setMapperClass(MapStage4.class);

    FileInputFormat.setInputPaths(conf, curbm_path);
    FileOutputFormat.setOutputPath(conf, curbm_unfold_path);
    FileOutputFormat.setCompressOutput(conf, true);
    FileOutputFormat.setOutputCompressorClass(conf, SnappyCodec.class);

    conf.setNumReduceTasks(0); // this is essential for map-only tasks

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}
 
Developer ID: patriziosotgiu, Project: pegasus, Lines: 20, Source: ConCmptBlock.java

Example 9: configStage5

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
protected JobConf configStage5() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptBlock.class);
    conf.set("block_width", "" + block_width);
    conf.setJobName("data-piqid.pegasus.ConCmptBlock_pass5");

    conf.setMapperClass(MapStage5.class);
    conf.setReducerClass(RedStage5.class);
    conf.setCombinerClass(RedStage5.class);

    FileInputFormat.setInputPaths(conf, curbm_path);
    FileOutputFormat.setOutputPath(conf, summaryout_path);
    FileOutputFormat.setCompressOutput(conf, true);
    FileOutputFormat.setOutputCompressorClass(conf, SnappyCodec.class);

    conf.setNumReduceTasks(nreducers);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(LongWritable.class);

    return conf;
}
 
Developer ID: patriziosotgiu, Project: pegasus, Lines: 22, Source: ConCmptBlock.java

Example 10: configStage1

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
protected JobConf configStage1() throws Exception {
    final JobConf conf = new JobConf(getConf(), ConCmptIVGen.class);
    conf.set("number_nodes", "" + number_nodes);
    conf.setJobName("data-piqid.pegasus.ConCmptIVGen_Stage1");

    conf.setMapperClass(MapStage1.class);
    conf.setReducerClass(RedStage1.class);

    FileInputFormat.setInputPaths(conf, input_path);
    FileOutputFormat.setOutputPath(conf, output_path);
    FileOutputFormat.setCompressOutput(conf, true);
    FileOutputFormat.setOutputCompressorClass(conf, SnappyCodec.class);

    conf.setNumReduceTasks(number_reducers);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}
 
Developer ID: patriziosotgiu, Project: pegasus, Lines: 22, Source: ConCmptIVGen.java

Example 11: testCsvBlurDriverTest3

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
@Test
public void testCsvBlurDriverTest3() throws Exception {
  Configuration configurationSetup = new Configuration();
  ControllerPool controllerPool = new CsvBlurDriver.ControllerPool() {
    @Override
    public Iface getClient(String controllerConnectionStr) {
      return getMockIface();
    }
  };
  AtomicReference<Callable<Void>> ref = new AtomicReference<Callable<Void>>();
  Job job = CsvBlurDriver.setupJob(configurationSetup, controllerPool, ref, "-c", "host:40010", "-d", "family1",
      "col1", "col2", "-d", "family2", "col3", "col4", "-t", "table1", "-i", _path1.toString(), "-i",
      _path2.toString(), "-S", "-C", "1000000", "2000000", "-p", "SNAPPY");
  assertNotNull(job);
  Configuration configuration = job.getConfiguration();
  TableDescriptor tableDescriptor = BlurOutputFormat.getTableDescriptor(configuration);
  assertEquals(tableDescriptor.getName(), "table1");
  Collection<String> inputs = configuration.getStringCollection("mapred.input.dir");
  assertEquals(2, inputs.size());
  Map<String, List<String>> familyAndColumnNameMap = CsvBlurMapper.getFamilyAndColumnNameMap(configuration);
  assertEquals(2, familyAndColumnNameMap.size());
  assertEquals("true", configuration.get(CsvBlurDriver.MAPRED_COMPRESS_MAP_OUTPUT));
  assertEquals(SnappyCodec.class.getName(), configuration.get(CsvBlurDriver.MAPRED_MAP_OUTPUT_COMPRESSION_CODEC));
}
 
Developer ID: apache, Project: incubator-blur, Lines: 25, Source: CsvBlurDriverTest.java

Example 12: getSnappyInputStream

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
/**
 * Gets an InputStream that uses the Snappy codec and wraps the supplied base input stream.
 *
 * @param bufferSize the buffer size for the codec to use (in bytes)
 * @param in         the base input stream to wrap around
 * @return an InputStream that uses the Snappy codec
 * @throws Exception if Snappy is not available or an error occurs during reflection
 */
public InputStream getSnappyInputStream( int bufferSize, InputStream in ) throws Exception {
  if ( !isHadoopSnappyAvailable() ) {
    throw new Exception( "Hadoop-snappy does not seem to be available" );
  }

  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
  try {
    SnappyCodec c = new SnappyCodec();
    Configuration newConf = new Configuration();
    newConf.set( IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY, "" + bufferSize );
    c.setConf( newConf );
    return c.createInputStream( in );
  } finally {
    Thread.currentThread().setContextClassLoader( cl );
  }
}
 
Developer ID: pentaho, Project: pentaho-hadoop-shims, Lines: 26, Source: CommonSnappyShim.java

Example 13: getSnappyOutputStream

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
/**
 * Gets an OutputStream that uses the Snappy codec and wraps the supplied base output stream.
 *
 * @param bufferSize the buffer size for the codec to use (in bytes)
 * @param out        the base output stream to wrap around
 * @return an OutputStream that uses the Snappy codec
 * @throws Exception if Snappy is not available or an error occurs during reflection
 */
public OutputStream getSnappyOutputStream( int bufferSize, OutputStream out ) throws Exception {
  if ( !isHadoopSnappyAvailable() ) {
    throw new Exception( "Hadoop-snappy does not seem to be available" );
  }

  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
  try {
    SnappyCodec c = new SnappyCodec();
    Configuration newConf = new Configuration();
    newConf.set( IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY, "" + bufferSize );
    c.setConf( newConf );
    return c.createOutputStream( out );
  } finally {
    Thread.currentThread().setContextClassLoader( cl );
  }
}
 
Developer ID: pentaho, Project: pentaho-hadoop-shims, Lines: 26, Source: CommonSnappyShim.java
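
A round trip through the two shim helpers might look like the sketch below. Whether CommonSnappyShim is directly instantiable is an assumption (it may be abstract in pentaho-hadoop-shims), and the 64 KB buffer size and payload are arbitrary.

// Hypothetical round trip through the shim's Snappy helpers.
CommonSnappyShim shim = new CommonSnappyShim();
ByteArrayOutputStream buffer = new ByteArrayOutputStream();

try ( OutputStream out = shim.getSnappyOutputStream( 64 * 1024, buffer ) ) {
  out.write( "hello snappy".getBytes( StandardCharsets.UTF_8 ) );
}

ByteArrayOutputStream restored = new ByteArrayOutputStream();
try ( InputStream in = shim.getSnappyInputStream( 64 * 1024,
    new ByteArrayInputStream( buffer.toByteArray() ) ) ) {
  IOUtils.copyBytes( in, restored, 4096, false );
}
System.out.println( restored.toString( "UTF-8" ) );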

Example 14: openInputStream

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
public InputStream openInputStream(Path inputFilePath) throws IOException {
  FileSystem inputFs = inputFilePath.getFileSystem(this.conf);
  InputStream inputStream = inputFs.open(inputFilePath);

  if (!this.outputCodec.equalsIgnoreCase("keep")) {
    String suffix = Utils.getSuffix(inputFilePath.getName());
    if (suffix.equalsIgnoreCase("gz")) {
      return new GZIPInputStream(inputStream);
    }
    if (suffix.equalsIgnoreCase("snappy")) {
      SnappyCodec codec = new SnappyCodec();
      codec.setConf(getConf());
      return codec.createInputStream(inputStream);
    }
    // LZO support was disabled in this build:
    // if (suffix.equalsIgnoreCase("lzop") || suffix.equalsIgnoreCase("lzo")) {
    //   LzopCodec codec = new LzopCodec();
    //   codec.setConf(getConf());
    //   return codec.createInputStream(inputStream);
    // }
  }
  return inputStream;
}
 
Developer ID: libin, Project: s3distcp, Lines: 23, Source: CopyFilesReducer.java

Example 15: openOutputStream

import org.apache.hadoop.io.compress.SnappyCodec; // import the required package/class
public OutputStream openOutputStream(Path outputFilePath) throws IOException {
  FileSystem outputFs = outputFilePath.getFileSystem(this.conf);
  OutputStream outputStream = outputFs.create(outputFilePath, this.reporter);

  if (this.outputCodec.equalsIgnoreCase("gzip") || this.outputCodec.equalsIgnoreCase("gz")) {
    return new GZIPOutputStream(outputStream);
  }
  // LZO support was disabled in this build:
  // if (this.outputCodec.equalsIgnoreCase("lzo")) {
  //   LzopCodec codec = new LzopCodec();
  //   codec.setConf(getConf());
  //   return codec.createOutputStream(outputStream);
  // }
  if (this.outputCodec.equalsIgnoreCase("snappy")) {
    SnappyCodec codec = new SnappyCodec();
    codec.setConf(getConf());
    return codec.createOutputStream(outputStream);
  }
  return outputStream;
}
 
Developer ID: libin, Project: s3distcp, Lines: 18, Source: CopyFilesReducer.java
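
Given the two helpers above, the reducer's copy path presumably reduces to a plain stream copy. A hypothetical transcoding snippet (the paths and the reducer instance are placeholders, and IOUtils is org.apache.hadoop.io.IOUtils):

// Hypothetical: decompress a .gz input and rewrite it Snappy-compressed,
// letting openInputStream/openOutputStream pick the codecs by suffix/config.
Path src = new Path("s3://bucket/in/part-00000.gz");     // placeholder
Path dst = new Path("hdfs:///out/part-00000.snappy");    // placeholder
try (InputStream in = reducer.openInputStream(src);
     OutputStream out = reducer.openOutputStream(dst)) {
  IOUtils.copyBytes(in, out, 8192, false);
}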


Note: The org.apache.hadoop.io.compress.SnappyCodec examples in this article were compiled by 纯净天空 from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets are drawn from community-contributed open-source projects; copyright in the source code remains with the original authors, and distribution and use are governed by each project's License. Do not reproduce without permission.