

Java CodecFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.avro.file.CodecFactory. If you are wondering what exactly the CodecFactory class does, how to use it, or what working code looks like, the curated examples below should help.


The CodecFactory class belongs to the org.apache.avro.file package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
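Before the individual examples, here is a minimal self-contained sketch of the pattern that recurs throughout this page: obtain a CodecFactory (from a dedicated factory method or from a configuration string) and register it on a DataFileWriter before creating the Avro container file. The class name, schema, and file name below are illustrative placeholders, not taken from any example on this page; deflate is used because it ships with the JDK, whereas snappy would additionally require the snappy-java library.

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class CodecFactoryQuickStart {
    public static void main(String[] args) throws IOException {
        // Illustrative schema with a single int field.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"IntRecord\","
                + "\"fields\":[{\"name\":\"value\",\"type\":\"int\"}]}");

        // Codecs can be obtained from dedicated factory methods (CodecFactory.snappyCodec(),
        // CodecFactory.deflateCodec(level), CodecFactory.nullCodec(), ...) or resolved from a
        // configuration string such as "null", "deflate" or "snappy".
        CodecFactory codec = CodecFactory.fromString("deflate");

        try (DataFileWriter<GenericRecord> writer =
                 new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema))) {
            writer.setCodec(codec);                       // must be called before create()
            writer.create(schema, new File("records.avro"));

            GenericRecord record = new GenericData.Record(schema);
            record.put("value", 42);
            writer.append(record);
        }
    }
}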

Example 1: testCountFilesInFolderWithCodec

import org.apache.avro.file.CodecFactory; // import of the required package/class
private void testCountFilesInFolderWithCodec(CodecFactory codec) throws Exception {
    AvroDataFileGenerator generator = intRecordGenerator(getClass(), codec);
    File folder = AvroTestUtil.tempDirectory(getClass(), testName.getMethodName());

    for (int i = 0; i < 10; i++) {
        generator.createAvroFile(String.format("%s_%s.avro", testName.getMethodName(), i), 1000, folder);
    }

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    int returnCode = new AvroCountTool().run(
            System.in,
            new PrintStream(outputStream, true, StandardCharsets.UTF_8.toString()),
            System.err,
            Collections.singletonList(folder.getAbsolutePath())
    );

    assertEquals(0, returnCode);
    assertEquals("10000", new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim());
}
 
Developer ID: jwoschitz, Project: avrocount, Lines of code: 20, Source: AvroCountToolTest.java

Example 2: testIgnoreNonAvroSuffixedFilesInFolder

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
public void testIgnoreNonAvroSuffixedFilesInFolder() throws Exception {
    AvroDataFileGenerator generator = intRecordGenerator(getClass(), CodecFactory.nullCodec());
    File folder = AvroTestUtil.tempDirectory(getClass(), testName.getMethodName());

    for (int i = 0; i < 10; i++) {
        FileTestUtil.createNewFile(getClass(), String.format("not_an_avro_%s.file", i), folder);
        generator.createAvroFile(String.format("%s_%s.avro", testName.getMethodName(), i), 1000, folder);
    }


    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    int returnCode = new AvroCountTool().run(
            System.in,
            new PrintStream(outputStream, true, StandardCharsets.UTF_8.toString()),
            System.err,
            Collections.singletonList(folder.getAbsolutePath())
    );

    assertEquals(0, returnCode);
    assertEquals("10000", new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim());
}
 
Developer ID: jwoschitz, Project: avrocount, Lines of code: 23, Source: AvroCountToolTest.java

Example 3: testBenchmark

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
@Ignore
public void testBenchmark() throws Exception {
    AvroDataFileGenerator generator = intRecordGenerator(getClass(), CodecFactory.snappyCodec());
    File folder = AvroTestUtil.tempDirectory(getClass(), testName.getMethodName());

    for (int i = 0; i < 100; i++) {
        generator.createAvroFile(String.format("%s_%s.avro", testName.getMethodName(), i), 10000000, folder);
    }

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    int returnCode = new AvroCountTool().run(
            System.in,
            new PrintStream(outputStream, true, StandardCharsets.UTF_8.toString()),
            System.err,
            Collections.singletonList(folder.getAbsolutePath())
    );

    assertEquals(0, returnCode);
    assertEquals("1000000000", new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim());
}
 
Developer ID: jwoschitz, Project: avrocount, Lines of code: 22, Source: AvroCountToolTest.java

Example 4: testBenchmarkWithMinimalParallelism

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
@Ignore
public void testBenchmarkWithMinimalParallelism() throws Exception {
    AvroDataFileGenerator generator = intRecordGenerator(getClass(), CodecFactory.snappyCodec());
    File folder = AvroTestUtil.tempDirectory(getClass(), testName.getMethodName());

    for (int i = 0; i < 100; i++) {
        generator.createAvroFile(String.format("%s_%s.avro", testName.getMethodName(), i), 10000000, folder);
    }

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    int returnCode = new AvroCountTool().run(
            System.in,
            new PrintStream(outputStream, true, StandardCharsets.UTF_8.toString()),
            System.err,
            Arrays.asList(folder.getAbsolutePath(), "--maxParallelism=1")
    );

    assertEquals(0, returnCode);
    assertEquals("1000000000", new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim());
}
 
Developer ID: jwoschitz, Project: avrocount, Lines of code: 22, Source: AvroCountToolTest.java

Example 5: testBenchmarkBigFile

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
@Ignore
public void testBenchmarkBigFile() throws Exception {
    AvroDataFileGenerator generator = intRecordGenerator(getClass(), CodecFactory.snappyCodec());
    File folder = AvroTestUtil.tempDirectory(getClass(), testName.getMethodName());
    generator.createAvroFile(String.format("%s_%s.avro", testName.getMethodName(), 0), 100000000, folder);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    int returnCode = new AvroCountTool().run(
            System.in,
            new PrintStream(outputStream, true, StandardCharsets.UTF_8.toString()),
            System.err,
            Collections.singletonList(folder.getAbsolutePath())
    );

    assertEquals(0, returnCode);
    assertEquals("100000000", new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim());
}
 
Developer ID: jwoschitz, Project: avrocount, Lines of code: 19, Source: AvroCountToolTest.java

Example 6: configure

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Override
public void configure(Context context) {

  int syncIntervalBytes =
      context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
  String compressionCodec =
      context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);

  writer = new ReflectDatumWriter<T>(getSchema());
  dataFileWriter = new DataFileWriter<T>(writer);

  dataFileWriter.setSyncInterval(syncIntervalBytes);

  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }
}
 
Developer ID: moueimei, Project: flume-release-1.7.0, Lines of code: 22, Source: AbstractAvroEventSerializer.java

Example 7: writeToStream

import org.apache.avro.file.CodecFactory; // import of the required package/class
/**
 * Writes the given {@link DataEvent}s from the eventStream into the
 * {@link OutputStream} using Avro's object container format (see
 * {@link DataFileWriter}). Note: since this method obtains the
 * {@link OutputStream} from the {@link Supplier}, it also closes that
 * stream before returning.
 *
 * @param eventStream
 *            a stream of valid and normalized {@link DataEvent}s
 * @param outSupplier
 *            a {@link Supplier} of the target output stream
 * @throws IOException
 *             if the container file cannot be written
 */
public static void writeToStream(Stream<DataEvent> eventStream, Supplier<OutputStream> outSupplier) throws IOException {
	final OutputStream out = outSupplier.get();
	@Cleanup
	final DataFileWriter<DataEvent> writer = new DataFileWriter<>(DataEventSerializer.getWRITER());
	writer.setSyncInterval(1024 * 1024);
	writer.setCodec(CodecFactory.deflateCodec(9));
	writer.setMeta("created_at", new Date().getTime());
	writer.create(DataEvent.SCHEMA$, out);

	eventStream.forEach(event -> {
		try {
			writer.append(event);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	});
}
 
Developer ID: adsquare, Project: data-delivery, Lines of code: 32, Source: DataEventSerializer.java
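A hypothetical call site for the writeToStream method shown above might look as follows. It assumes the project's generated DataEvent class and the enclosing DataEventSerializer are available on the classpath; the empty stream, the output file name, and the class name WriteToStreamUsage are placeholders used only to keep the sketch self-contained.

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.util.function.Supplier;
import java.util.stream.Stream;
// plus the project's own imports for DataEvent and DataEventSerializer (assumed available)

public class WriteToStreamUsage {
    public static void main(String[] args) throws IOException {
        // Placeholder: real callers would supply a stream of normalized DataEvent instances.
        Stream<DataEvent> events = Stream.empty();

        // Supplier.get() cannot throw checked exceptions, so the checked
        // FileNotFoundException from FileOutputStream is wrapped.
        Supplier<OutputStream> outSupplier = () -> {
            try {
                return new FileOutputStream("data-events.avro");
            } catch (FileNotFoundException e) {
                throw new UncheckedIOException(e);
            }
        };

        // The method creates, fills and closes the container file and the supplied stream.
        DataEventSerializer.writeToStream(events, outSupplier);
    }
}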

Example 8: testCompressedWriteAndReadASingleFile

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
@SuppressWarnings("unchecked")
@Category(NeedsRunner.class)
public void testCompressedWriteAndReadASingleFile() throws Throwable {
  List<GenericClass> values =
      ImmutableList.of(new GenericClass(3, "hi"), new GenericClass(5, "bar"));
  File outputFile = tmpFolder.newFile("output.avro");

  writePipeline.apply(Create.of(values))
      .apply(
          AvroIO.write(GenericClass.class)
              .to(outputFile.getAbsolutePath())
              .withoutSharding()
              .withCodec(CodecFactory.deflateCodec(9)));
  writePipeline.run();

  PAssert.that(
          readPipeline.apply(AvroIO.read(GenericClass.class).from(outputFile.getAbsolutePath())))
      .containsInAnyOrder(values);
  readPipeline.run();

  try (DataFileStream dataFileStream =
      new DataFileStream(new FileInputStream(outputFile), new GenericDatumReader())) {
    assertEquals("deflate", dataFileStream.getMetaString("avro.codec"));
  }
}
 
Developer ID: apache, Project: beam, Lines of code: 27, Source: AvroIOTest.java

Example 9: testWriteThenReadASingleFileWithNullCodec

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
@SuppressWarnings("unchecked")
@Category(NeedsRunner.class)
public void testWriteThenReadASingleFileWithNullCodec() throws Throwable {
  List<GenericClass> values =
      ImmutableList.of(new GenericClass(3, "hi"), new GenericClass(5, "bar"));
  File outputFile = tmpFolder.newFile("output.avro");

  writePipeline.apply(Create.of(values))
      .apply(
          AvroIO.write(GenericClass.class)
              .to(outputFile.getAbsolutePath())
              .withoutSharding()
              .withCodec(CodecFactory.nullCodec()));
  writePipeline.run();

  PAssert.that(
          readPipeline.apply(AvroIO.read(GenericClass.class).from(outputFile.getAbsolutePath())))
      .containsInAnyOrder(values);
  readPipeline.run();

  try (DataFileStream dataFileStream =
      new DataFileStream(new FileInputStream(outputFile), new GenericDatumReader())) {
    assertEquals("null", dataFileStream.getMetaString("avro.codec"));
  }
}
 
Developer ID: apache, Project: beam, Lines of code: 27, Source: AvroIOTest.java

Example 10: testWriteDisplayData

import org.apache.avro.file.CodecFactory; // import of the required package/class
@Test
public void testWriteDisplayData() {
  AvroIO.Write<GenericClass> write =
      AvroIO.write(GenericClass.class)
          .to("/foo")
          .withShardNameTemplate("-SS-of-NN-")
          .withSuffix("bar")
          .withNumShards(100)
          .withCodec(CodecFactory.snappyCodec());

  DisplayData displayData = DisplayData.from(write);

  assertThat(displayData, hasDisplayItem("filePrefix", "/foo"));
  assertThat(displayData, hasDisplayItem("shardNameTemplate", "-SS-of-NN-"));
  assertThat(displayData, hasDisplayItem("fileSuffix", "bar"));
  assertThat(
      displayData,
      hasDisplayItem(
          "schema",
          "{\"type\":\"record\",\"name\":\"GenericClass\",\"namespace\":\"org.apache.beam.sdk.io"
              + ".AvroIOTest$\",\"fields\":[{\"name\":\"intField\",\"type\":\"int\"},"
              + "{\"name\":\"stringField\",\"type\":\"string\"}]}"));
  assertThat(displayData, hasDisplayItem("numShards", 100));
  assertThat(displayData, hasDisplayItem("codec", CodecFactory.snappyCodec().toString()));
}
 
Developer ID: apache, Project: beam, Lines of code: 26, Source: AvroIOTest.java

Example 11: getCompressionCodec

import org.apache.avro.file.CodecFactory; // import of the required package/class
private CodecFactory getCompressionCodec(Map<String, String> conf) {
	if (getBoolean(conf, CONF_COMPRESS, false)) {
		int deflateLevel = getInt(conf, CONF_DEFLATE_LEVEL, CodecFactory.DEFAULT_DEFLATE_LEVEL);
		int xzLevel = getInt(conf, CONF_XZ_LEVEL, CodecFactory.DEFAULT_XZ_LEVEL);

		String outputCodec = conf.get(CONF_COMPRESS_CODEC);

		if (DataFileConstants.DEFLATE_CODEC.equals(outputCodec)) {
			return CodecFactory.deflateCodec(deflateLevel);
		} else if (DataFileConstants.XZ_CODEC.equals(outputCodec)) {
			return CodecFactory.xzCodec(xzLevel);
		} else {
			return CodecFactory.fromString(outputCodec);
		}
	}
	return CodecFactory.nullCodec();
}
 
Developer ID: axbaretto, Project: flink, Lines of code: 18, Source: AvroKeyValueSinkWriter.java
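The branching in the example above reflects the fact that deflate and xz are the codecs that take an explicit compression level, whereas CodecFactory.fromString resolves every other codec by name with default settings. Below is a minimal standalone illustration of that distinction; it is independent of the Flink writer, the class name CodecSelectionSketch is hypothetical, and the constants come from Avro itself (the same DEFAULT_* constants used in the example).

import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileConstants;

public class CodecSelectionSketch {
    public static void main(String[] args) {
        // Level-aware factories; the DEFAULT_* constants are Avro's own defaults.
        CodecFactory deflate = CodecFactory.deflateCodec(CodecFactory.DEFAULT_DEFLATE_LEVEL);
        CodecFactory xz = CodecFactory.xzCodec(CodecFactory.DEFAULT_XZ_LEVEL);

        // Name-based lookup for codecs that need no tuning parameter ("null", "snappy", ...).
        CodecFactory byName = CodecFactory.fromString(DataFileConstants.NULL_CODEC);

        System.out.println(deflate + " / " + xz + " / " + byName);
    }
}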

Example 12: AvroKeyValueWriter

import org.apache.avro.file.CodecFactory; // import of the required package/class
AvroKeyValueWriter(Schema keySchema, Schema valueSchema,
		CodecFactory compressionCodec, OutputStream outputStream,
		int syncInterval) throws IOException {
	// Create the generic record schema for the key/value pair.
	mKeyValuePairSchema = AvroKeyValue
			.getSchema(keySchema, valueSchema);

	// Create an Avro container file and a writer to it.
	DatumWriter<GenericRecord> genericDatumWriter = new GenericDatumWriter<GenericRecord>(
			mKeyValuePairSchema);
	mAvroFileWriter = new DataFileWriter<GenericRecord>(
			genericDatumWriter);
	mAvroFileWriter.setCodec(compressionCodec);
	mAvroFileWriter.setSyncInterval(syncInterval);
	mAvroFileWriter.create(mKeyValuePairSchema, outputStream);

	// Create a reusable output record.
	mOutputRecord = new AvroKeyValue<Object, Object>(
			new GenericData.Record(mKeyValuePairSchema));
}
 
Developer ID: axbaretto, Project: flink, Lines of code: 21, Source: AvroKeyValueSinkWriter.java

Example 13: getCompressionCodec

import org.apache.avro.file.CodecFactory; // import of the required package/class
private CodecFactory getCompressionCodec(Map<String,String> conf) {
	if (getBoolean(conf, CONF_COMPRESS, false)) {
		int deflateLevel = getInt(conf, CONF_DEFLATE_LEVEL, CodecFactory.DEFAULT_DEFLATE_LEVEL);
		int xzLevel = getInt(conf, CONF_XZ_LEVEL, CodecFactory.DEFAULT_XZ_LEVEL);

		String outputCodec = conf.get(CONF_COMPRESS_CODEC);

		if (DataFileConstants.DEFLATE_CODEC.equals(outputCodec)) {
			return CodecFactory.deflateCodec(deflateLevel);
		} else if (DataFileConstants.XZ_CODEC.equals(outputCodec)) {
			return CodecFactory.xzCodec(xzLevel);
		} else {
			return CodecFactory.fromString(outputCodec);
		}
	}
	return CodecFactory.nullCodec();
}
 
Developer ID: axbaretto, Project: flink, Lines of code: 18, Source: AvroKeyValueSinkWriter.java

Example 14: writeToAvro

import org.apache.avro.file.CodecFactory; // import of the required package/class
public static void writeToAvro(File inputFile, OutputStream outputStream)
    throws IOException {

  DataFileWriter<Stock> writer =
      new DataFileWriter<Stock>(
          new SpecificDatumWriter<Stock>());

  writer.setCodec(CodecFactory.snappyCodec());
  writer.create(Stock.SCHEMA$, outputStream);

  for (Stock stock : AvroStockUtils.fromCsvFile(inputFile)) {
    writer.append(stock);
  }

  IOUtils.closeStream(writer);
  IOUtils.closeStream(outputStream);
}
 
Developer ID: Hanmourang, Project: hiped2, Lines of code: 18, Source: AvroStockFileWrite.java

Example 15: writeToAvro

import org.apache.avro.file.CodecFactory; // import of the required package/class
public static void writeToAvro(File inputFile, OutputStream outputStream)
    throws IOException {

  DataFileWriter<GenericRecord> writer =
      new DataFileWriter<GenericRecord>(
          new GenericDatumWriter<GenericRecord>());

  writer.setCodec(CodecFactory.snappyCodec());
  writer.create(SCHEMA, outputStream);

  for (Stock stock : AvroStockUtils.fromCsvFile(inputFile)) {

    AvroKeyValue<CharSequence, Stock> record
        = new AvroKeyValue<CharSequence, Stock>(new GenericData.Record(SCHEMA));
    record.setKey(stock.getSymbol());
    record.setValue(stock);

    writer.append(record.get());
  }

  IOUtils.closeStream(writer);
  IOUtils.closeStream(outputStream);
}
 
Developer ID: Hanmourang, Project: hiped2, Lines of code: 24, Source: AvroKeyValueFileWrite.java


Note: The org.apache.avro.file.CodecFactory class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation hosting platforms. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.