Java ReflectDatumWriter Class Code Examples

This article collects typical usage examples of the Java class org.apache.avro.reflect.ReflectDatumWriter. If you are wondering what the ReflectDatumWriter class is for, how to use it, or are looking for concrete usage examples, the curated code samples below may help.


The ReflectDatumWriter class belongs to the org.apache.avro.reflect package. The sections below present 15 code examples of the class, sorted by popularity by default.
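Before the individual examples, here is a minimal, self-contained round-trip sketch of the pattern most of them build on: ReflectDatumWriter derives an Avro schema from a plain Java class via ReflectData and serializes instances of it, while ReflectDatumReader reads them back. This sketch is not taken from any of the projects below; the User class and all names in it are hypothetical and shown only for illustration.

import java.io.ByteArrayOutputStream;

import org.apache.avro.Schema;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;

public class ReflectDatumWriterRoundTrip {

    // Hypothetical POJO used only for this sketch; a no-arg constructor keeps
    // it friendly to Avro reflection.
    public static class User {
        String name;
        int age;

        public User() { }

        public User(String name, int age) {
            this.name = name;
            this.age = age;
        }
    }

    public static void main(String[] args) throws Exception {
        // Derive the Avro schema from the class via reflection.
        Schema schema = ReflectData.get().getSchema(User.class);

        // Serialize with ReflectDatumWriter and a binary encoder.
        ReflectDatumWriter<User> writer = new ReflectDatumWriter<>(schema);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        writer.write(new User("alice", 30), encoder);
        encoder.flush();

        // Read the bytes back with ReflectDatumReader.
        ReflectDatumReader<User> reader = new ReflectDatumReader<>(schema);
        Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        User restored = reader.read(null, decoder);
        System.out.println(restored.name + " / " + restored.age);
    }
}

The examples that follow apply the same writer to different sinks: a DataFileWriter when producing Avro container files, or a BinaryEncoder when producing raw byte arrays.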

Example 1: configure

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
@Override
public void configure(Context context) {

  int syncIntervalBytes =
      context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
  String compressionCodec =
      context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);

  writer = new ReflectDatumWriter<T>(getSchema());
  dataFileWriter = new DataFileWriter<T>(writer);

  dataFileWriter.setSyncInterval(syncIntervalBytes);

  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 22, Source: AbstractAvroEventSerializer.java

Example 2: serialize

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 17, Source: Log4jAppender.java

Example 3: main

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
public static void main(String[] args) throws Exception {
	Node node = new Node();
	node.setValue("Gaurav");
	node.setNext(node);
	
	byte[] payload;
	DatumWriter<Node> datumWriter = new ReflectDatumWriter<>(Node.class);
	try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
		Encoder out = EncoderFactory.get().binaryEncoder(baos, null);
		datumWriter.write(node, out);
		out.flush();
		payload = baos.toByteArray();
	}
	catch (Exception e) {
		System.err.println(e);
		throw e;
	}
	System.out.println(new String(payload));
	System.out.println(payload.length);
}
 
Developer: gauravrmazra, Project: gauravbytes, Lines: 21, Source: AvroReflectDataSerializationTest.java

Example 4: testReflect

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: AvroTestUtil.java

Example 5: close

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
@Override
public void close(TaskAttemptContext context) throws IOException {
    // Create an Avro container file and a writer to it.
    DataFileWriter<K> avroFileWriter;
    avroFileWriter = new DataFileWriter<K>(new ReflectDatumWriter<K>(writerSchema));
    avroFileWriter.setCodec(compressionCodec);

    // Writes the meta-data.
    avroFileWriter.setMeta(Constants.AVRO_NUMBER_OF_RECORDS, this.numberOfRecords);

    // Writes the file.
    avroFileWriter.create(this.writerSchema, this.outputStream);
    for (AvroKey<K> record : this.recordsList)
        avroFileWriter.append(record.datum());

    // Close the stream.
    avroFileWriter.close();
}
 
Developer: pasqualesalza, Project: elephant56, Lines: 19, Source: PopulationRecordWriter.java

Example 6: initializeAvro

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private void initializeAvro() {
	final ClassLoader cl = Thread.currentThread().getContextClassLoader();

	if (SpecificRecord.class.isAssignableFrom(type)) {
		this.avroData = new SpecificData(cl);
		this.schema = this.avroData.getSchema(type);
		this.reader = new SpecificDatumReader<>(schema, schema, avroData);
		this.writer = new SpecificDatumWriter<>(schema, avroData);
	}
	else {
		final ReflectData reflectData = new ReflectData(cl);
		this.avroData = reflectData;
		this.schema = this.avroData.getSchema(type);
		this.reader = new ReflectDatumReader<>(schema, schema, reflectData);
		this.writer = new ReflectDatumWriter<>(schema, reflectData);
	}

	this.encoder = new DataOutputEncoder();
	this.decoder = new DataInputDecoder();
}
 
Developer: axbaretto, Project: flink, Lines: 21, Source: AvroSerializer.java

Example 7: serialize

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
    if (schema == null || !datumSchema.equals(schema)) {
        schema = datumSchema;
        out = new ByteArrayOutputStream();
        writer = new ReflectDatumWriter<Object>(schema);
        encoder = EncoderFactory.get().binaryEncoder(out, null);
    }
    out.reset();
    try {
        writer.write(datum, encoder);
        encoder.flush();
        return out.toByteArray();
    } catch (IOException e) {
        throw new FlumeException(e);
    }
}
 
Developer: blackshadowwalker, Project: log4j-collector, Lines: 17, Source: FlumeAppender.java

Example 8: serialise

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
@Override
public byte[] serialise(final Object object) throws SerialisationException {
    Schema schema = ReflectData.get().getSchema(object.getClass());
    DatumWriter<Object> datumWriter = new ReflectDatumWriter<>(schema);
    DataFileWriter<Object> dataFileWriter = new DataFileWriter<>(datumWriter);
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    try {
        dataFileWriter.create(schema, byteOut);
        dataFileWriter.append(object);
        dataFileWriter.flush();
    } catch (final IOException e) {
        throw new SerialisationException("Unable to serialise given object of class: " + object.getClass().getName(), e);
    } finally {
        close(dataFileWriter);
    }
    return byteOut.toByteArray();
}
 
Developer: gchq, Project: Gaffer, Lines: 18, Source: AvroSerialiser.java

Example 9: getDatumWriter

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private DatumWriter<Object> getDatumWriter(Class<Object> type, Schema schema) {
	DatumWriter<Object> writer;
	this.logger.debug("Finding correct DatumWriter for type " + type.getName());
	if (SpecificRecord.class.isAssignableFrom(type)) {
		if (schema != null) {
			writer = new SpecificDatumWriter<>(schema);
		}
		else {
			writer = new SpecificDatumWriter<>(type);
		}
	}
	else if (GenericRecord.class.isAssignableFrom(type)) {
		writer = new GenericDatumWriter<>(schema);
	}
	else {
		if (schema != null) {
			writer = new ReflectDatumWriter<>(schema);
		}
		else {
			writer = new ReflectDatumWriter<>(type);
		}
	}
	return writer;
}
 
Developer: spring-cloud, Project: spring-cloud-stream, Lines: 25, Source: AbstractAvroMessageConverter.java

Example 10: AvroKeyWithMetadataRecordWriter

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
/**
 * Constructor.
 *
 * @param writerSchema The writer schema for the records in the Avro container file.
 * @param compressionCodec A compression codec factory for the Avro container file.
 * @param outputStream The output stream to write the Avro container file to.
 * @param conf the configuration
 * @throws IOException If the record writer cannot be opened.
 */
public AvroKeyWithMetadataRecordWriter(Schema writerSchema, CodecFactory compressionCodec,
    OutputStream outputStream, Configuration conf) throws IOException {
  // Create an Avro container file and a writer to it.
  mAvroFileWriter = new DataFileWriter<T>(new ReflectDatumWriter<T>(writerSchema));
  mAvroFileWriter.setCodec(compressionCodec);
  
  for (Entry<String,String> e : conf)
  {
    if (e.getKey().startsWith(TEXT_PREFIX))
      mAvroFileWriter.setMeta(e.getKey().substring(TEXT_PREFIX.length()),
                              e.getValue());
  }
  
  mAvroFileWriter.create(writerSchema, outputStream);
}
 
Developer: apache, Project: incubator-datafu, Lines: 25, Source: AvroKeyWithMetadataRecordWriter.java

Example 11: serializeAvro

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private byte[] serializeAvro(Object datum, Schema schema) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  out.reset();
  writer.write(datum, encoder);
  encoder.flush();
  return out.toByteArray();
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 10, Source: TestAvroEventSerializer.java

Example 12: serialize

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
@SuppressWarnings("unchecked")
public static byte[] serialize(Object datum, Schema schema) {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  ReflectDatumWriter writer = new ReflectDatumWriter(schema);
  try {
    writer.write(datum, encoder);
    encoder.flush();
  } catch (IOException ex) {
    Throwables.propagate(ex);
  }
  return out.toByteArray();
}
 
Developer: moueimei, Project: flume-release-1.7.0, Lines: 14, Source: TestDatasetSink.java

Example 13: getRecordWriter

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
@Override
public RecordWriter<AvroWrapper<T>, NullWritable> getRecordWriter(
  TaskAttemptContext context) throws IOException, InterruptedException {

  boolean isMapOnly = context.getNumReduceTasks() == 0;
  Schema schema =
    isMapOnly ? AvroJob.getMapOutputSchema(context.getConfiguration())
      : AvroJob.getOutputSchema(context.getConfiguration());

  final DataFileWriter<T> WRITER =
    new DataFileWriter<T>(new ReflectDatumWriter<T>());

  configureDataFileWriter(WRITER, context);

  Path path = getDefaultWorkFile(context, EXT);
  WRITER.create(schema,
    path.getFileSystem(context.getConfiguration()).create(path));

  return new RecordWriter<AvroWrapper<T>, NullWritable>() {
    @Override
    public void write(AvroWrapper<T> wrapper, NullWritable ignore)
      throws IOException {
      WRITER.append(wrapper.datum());
    }

    @Override
    public void close(TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException {
      WRITER.close();
    }
  };
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 33, Source: AvroOutputFormat.java

Example 14: writeIndividualsToFile

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
public static void writeIndividualsToFile(
        Path filePath, Configuration configuration,
        List<IndividualWrapper<Individual, FitnessValue>> individuals,
        int startIndividualIndex, int numberOfIndividualsToWrite,
        Schema individualWrapperSchema) throws IOException {

    FileSystem fileSystem = FileSystem.get(configuration);

    OutputStream fileOutput = fileSystem.create(filePath, true);
    ReflectData reflectData = new ReflectData(configuration.getClassLoader());
    DatumWriter<IndividualWrapper<Individual, FitnessValue>> datumWriter = new ReflectDatumWriter<>(individualWrapperSchema, reflectData);
    DataFileWriter<IndividualWrapper<Individual, FitnessValue>> avroFileWriter =
            new DataFileWriter<>(datumWriter);

    avroFileWriter.setCodec(CodecFactory.snappyCodec());
    avroFileWriter.setMeta(Constants.AVRO_NUMBER_OF_RECORDS, numberOfIndividualsToWrite);

    avroFileWriter.create(individualWrapperSchema, fileOutput);

    int currentIndividualIndex = startIndividualIndex;
    for (int i = 0; i < numberOfIndividualsToWrite; i++) {
        IndividualWrapper<Individual, FitnessValue> currentIndividual = individuals.get(currentIndividualIndex);
        avroFileWriter.append(currentIndividual);
        currentIndividualIndex++;
    }

    avroFileWriter.close();
}
 
Developer: pasqualesalza, Project: elephant56, Lines: 29, Source: Driver.java

Example 15: getDatumWriter

import org.apache.avro.reflect.ReflectDatumWriter; // import the required package/class
private DatumWriter getDatumWriter(Class<?> type, Schema schema){
	DatumWriter writer = null;
	logger.debug("Finding correct DatumWriter for type {}",type.getName());
	if(SpecificRecord.class.isAssignableFrom(type)){
		writer = new SpecificDatumWriter<>(schema);
	}else if(GenericRecord.class.isAssignableFrom(type)){
		writer = new GenericDatumWriter<>(schema);
	}else{
		writer = new ReflectDatumWriter<>(schema);
	}
	logger.debug("DatumWriter of type {} selected",writer.getClass().getName());
	return writer;
}
 
Developer: viniciusccarvalho, Project: schema-evolution-samples, Lines: 14, Source: AvroCodec.java


Note: The org.apache.avro.reflect.ReflectDatumWriter class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their original authors; copyright remains with those authors, and distribution and use are subject to each project's license. Do not reproduce without permission.