This page collects typical usage examples of the Java class org.apache.avro.reflect.ReflectDatumWriter. If you are wondering what ReflectDatumWriter does, or how to use it in your own code, the curated class-level examples below should help.
The ReflectDatumWriter class belongs to the org.apache.avro.reflect package. 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
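Before the project examples, here is a minimal, self-contained sketch of the typical ReflectDatumWriter/ReflectDatumReader round trip. It is illustrative only and is not taken from any of the projects below; the User POJO is hypothetical.

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;

public class ReflectRoundTrip {

  /** Hypothetical POJO; Avro reflect needs a no-argument constructor to rebuild it on read. */
  public static class User {
    String name;
    int age;
    public User() { }
    public User(String name, int age) { this.name = name; this.age = age; }
  }

  public static void main(String[] args) throws Exception {
    // Derive the Avro schema from the Java class via reflection.
    Schema schema = ReflectData.get().getSchema(User.class);

    // Serialize one instance to Avro binary with ReflectDatumWriter.
    ReflectDatumWriter<User> writer = new ReflectDatumWriter<>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(new User("alice", 42), encoder);
    encoder.flush();
    byte[] bytes = out.toByteArray();

    // Read it back into a new User instance with the matching ReflectDatumReader.
    ReflectDatumReader<User> reader = new ReflectDatumReader<>(schema);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    User copy = reader.read(null, decoder);
    System.out.println(copy.name + " / " + copy.age);
  }
}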
Example 1: configure
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
@Override
public void configure(Context context) {
  int syncIntervalBytes =
      context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
  String compressionCodec =
      context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);
  writer = new ReflectDatumWriter<T>(getSchema());
  dataFileWriter = new DataFileWriter<T>(writer);
  dataFileWriter.setSyncInterval(syncIntervalBytes);
  try {
    CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
    dataFileWriter.setCodec(codecFactory);
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" +
        compressionCodec + "). Compression disabled. Exception follows.", e);
  }
}
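As a hedged follow-up (not part of the original serializer): once configure() has run, the surrounding class would typically open the DataFileWriter on its output stream and append reflected records roughly as sketched below; outputStream and event are illustrative names, not fields shown above.

// Sketch of how the configured writer is typically used elsewhere in the serializer:
dataFileWriter.create(getSchema(), outputStream);  // outputStream: assumed to be supplied to the serializer
dataFileWriter.append(event);                      // event: a T built from the incoming data
dataFileWriter.flush();                            // force buffered records onto the stream
dataFileWriter.close();                            // finish the Avro container file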
Example 2: serialize
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
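A matching deserialize helper is not part of this example, but a hedged sketch of what it could look like with ReflectDatumReader follows; the method name and the reuse of the writer's schema are assumptions, and it needs org.apache.avro.reflect.ReflectDatumReader, org.apache.avro.io.BinaryDecoder and org.apache.avro.io.DecoderFactory in addition to the imports above.

private Object deserialize(byte[] data, Schema datumSchema) throws FlumeException {
  try {
    // Rebuild the object graph from Avro binary using the schema the bytes were written with.
    ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(datumSchema);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null);
    return reader.read(null, decoder);
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}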
Example 3: main
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
public static void main(String[] args) throws Exception {
  Node node = new Node();
  node.setValue("Gaurav");
  node.setNext(node);
  byte[] payload;
  DatumWriter<Node> datumWriter = new ReflectDatumWriter<>(Node.class);
  try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
    Encoder out = EncoderFactory.get().binaryEncoder(baos, null);
    datumWriter.write(node, out);
    out.flush();
    payload = baos.toByteArray();
  } catch (Exception e) {
    System.err.println(e);
    throw e;
  }
  System.out.println(new String(payload));
  System.out.println(payload.length);
}
Example 4: testReflect
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
public static void testReflect(Object value, Type type, String schema)
    throws Exception {
  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);
  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
      reader.read(null,
          DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
Example 5: close
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
@Override
public void close(TaskAttemptContext context) throws IOException {
  // Create an Avro container file and a writer to it.
  DataFileWriter<K> avroFileWriter;
  avroFileWriter = new DataFileWriter<K>(new ReflectDatumWriter<K>(writerSchema));
  avroFileWriter.setCodec(compressionCodec);
  // Write the metadata.
  avroFileWriter.setMeta(Constants.AVRO_NUMBER_OF_RECORDS, this.numberOfRecords);
  // Write the file.
  avroFileWriter.create(this.writerSchema, this.outputStream);
  for (AvroKey<K> record : this.recordsList) {
    avroFileWriter.append(record.datum());
  }
  // Close the stream.
  avroFileWriter.close();
}
Example 6: initializeAvro
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private void initializeAvro() {
  final ClassLoader cl = Thread.currentThread().getContextClassLoader();
  if (SpecificRecord.class.isAssignableFrom(type)) {
    this.avroData = new SpecificData(cl);
    this.schema = this.avroData.getSchema(type);
    this.reader = new SpecificDatumReader<>(schema, schema, avroData);
    this.writer = new SpecificDatumWriter<>(schema, avroData);
  } else {
    final ReflectData reflectData = new ReflectData(cl);
    this.avroData = reflectData;
    this.schema = this.avroData.getSchema(type);
    this.reader = new ReflectDatumReader<>(schema, schema, reflectData);
    this.writer = new ReflectDatumWriter<>(schema, reflectData);
  }
  this.encoder = new DataOutputEncoder();
  this.decoder = new DataInputDecoder();
}
Example 7: serialize
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<Object>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
Example 8: serialise
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
@Override
public byte[] serialise(final Object object) throws SerialisationException {
  Schema schema = ReflectData.get().getSchema(object.getClass());
  DatumWriter<Object> datumWriter = new ReflectDatumWriter<>(schema);
  DataFileWriter<Object> dataFileWriter = new DataFileWriter<>(datumWriter);
  ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
  try {
    dataFileWriter.create(schema, byteOut);
    dataFileWriter.append(object);
    dataFileWriter.flush();
  } catch (final IOException e) {
    throw new SerialisationException("Unable to serialise given object of class: " + object.getClass().getName(), e);
  } finally {
    close(dataFileWriter);
  }
  return byteOut.toByteArray();
}
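Because serialise() writes a complete Avro container file into the byte array, the bytes can be read back with DataFileStream and a ReflectDatumReader. The helper below is a hedged sketch, not code from the original class; its name and signature are made up for illustration, and it assumes java.io.ByteArrayInputStream, org.apache.avro.file.DataFileStream and org.apache.avro.reflect.ReflectDatumReader are imported.

public Object deserialise(final byte[] bytes, final Class<?> clazz) throws IOException {
  Schema schema = ReflectData.get().getSchema(clazz);
  DatumReader<Object> datumReader = new ReflectDatumReader<>(schema);
  // DataFileStream understands the container format produced by DataFileWriter.
  try (DataFileStream<Object> dataFileStream =
           new DataFileStream<>(new ByteArrayInputStream(bytes), datumReader)) {
    return dataFileStream.next();
  }
}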
Example 9: getDatumWriter
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private DatumWriter<Object> getDatumWriter(Class<Object> type, Schema schema) {
  DatumWriter<Object> writer;
  this.logger.debug("Finding correct DatumWriter for type " + type.getName());
  if (SpecificRecord.class.isAssignableFrom(type)) {
    if (schema != null) {
      writer = new SpecificDatumWriter<>(schema);
    } else {
      writer = new SpecificDatumWriter<>(type);
    }
  } else if (GenericRecord.class.isAssignableFrom(type)) {
    writer = new GenericDatumWriter<>(schema);
  } else {
    if (schema != null) {
      writer = new ReflectDatumWriter<>(schema);
    } else {
      writer = new ReflectDatumWriter<>(type);
    }
  }
  return writer;
}
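On the read side, the same precedence (specific, then generic, then reflect) is usually mirrored when choosing a DatumReader. The sketch below is a complementary illustration, not code from the original project.

private DatumReader<Object> getDatumReader(Class<Object> type, Schema schema) {
  if (SpecificRecord.class.isAssignableFrom(type)) {
    // Generated SpecificRecord classes carry their own schema, so the class alone is enough.
    return (schema != null) ? new SpecificDatumReader<Object>(schema) : new SpecificDatumReader<Object>(type);
  } else if (GenericRecord.class.isAssignableFrom(type)) {
    return new GenericDatumReader<Object>(schema);
  }
  // Plain POJOs fall back to reflection.
  return (schema != null) ? new ReflectDatumReader<Object>(schema) : new ReflectDatumReader<Object>(type);
}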
Example 10: AvroKeyWithMetadataRecordWriter
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
/**
 * Constructor.
 *
 * @param writerSchema The writer schema for the records in the Avro container file.
 * @param compressionCodec A compression codec factory for the Avro container file.
 * @param outputStream The output stream to write the Avro container file to.
 * @param conf The configuration.
 * @throws IOException If the record writer cannot be opened.
 */
public AvroKeyWithMetadataRecordWriter(Schema writerSchema, CodecFactory compressionCodec,
    OutputStream outputStream, Configuration conf) throws IOException {
  // Create an Avro container file and a writer to it.
  mAvroFileWriter = new DataFileWriter<T>(new ReflectDatumWriter<T>(writerSchema));
  mAvroFileWriter.setCodec(compressionCodec);
  for (Entry<String, String> e : conf) {
    if (e.getKey().startsWith(TEXT_PREFIX)) {
      mAvroFileWriter.setMeta(e.getKey().substring(TEXT_PREFIX.length()),
          e.getValue());
    }
  }
  mAvroFileWriter.create(writerSchema, outputStream);
}
Example 11: serializeAvro
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private byte[] serializeAvro(Object datum, Schema schema) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  out.reset();
  writer.write(datum, encoder);
  encoder.flush();
  return out.toByteArray();
}
Example 12: serialize
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
@SuppressWarnings("unchecked")
public static byte[] serialize(Object datum, Schema schema) {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  ReflectDatumWriter writer = new ReflectDatumWriter(schema);
  try {
    writer.write(datum, encoder);
    encoder.flush();
  } catch (IOException ex) {
    Throwables.propagate(ex);
  }
  return out.toByteArray();
}
Example 13: getRecordWriter
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
@Override
public RecordWriter<AvroWrapper<T>, NullWritable> getRecordWriter(
    TaskAttemptContext context) throws IOException, InterruptedException {
  boolean isMapOnly = context.getNumReduceTasks() == 0;
  Schema schema =
      isMapOnly ? AvroJob.getMapOutputSchema(context.getConfiguration())
                : AvroJob.getOutputSchema(context.getConfiguration());
  final DataFileWriter<T> WRITER =
      new DataFileWriter<T>(new ReflectDatumWriter<T>());
  configureDataFileWriter(WRITER, context);
  Path path = getDefaultWorkFile(context, EXT);
  WRITER.create(schema,
      path.getFileSystem(context.getConfiguration()).create(path));
  return new RecordWriter<AvroWrapper<T>, NullWritable>() {
    @Override
    public void write(AvroWrapper<T> wrapper, NullWritable ignore)
        throws IOException {
      WRITER.append(wrapper.datum());
    }
    @Override
    public void close(TaskAttemptContext taskAttemptContext)
        throws IOException, InterruptedException {
      WRITER.close();
    }
  };
}
Example 14: writeIndividualsToFile
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
public static void writeIndividualsToFile(
    Path filePath, Configuration configuration,
    List<IndividualWrapper<Individual, FitnessValue>> individuals,
    int startIndividualIndex, int numberOfIndividualsToWrite,
    Schema individualWrapperSchema) throws IOException {
  FileSystem fileSystem = FileSystem.get(configuration);
  OutputStream fileOutput = fileSystem.create(filePath, true);
  ReflectData reflectData = new ReflectData(configuration.getClassLoader());
  DatumWriter<IndividualWrapper<Individual, FitnessValue>> datumWriter =
      new ReflectDatumWriter<>(individualWrapperSchema, reflectData);
  DataFileWriter<IndividualWrapper<Individual, FitnessValue>> avroFileWriter =
      new DataFileWriter<>(datumWriter);
  avroFileWriter.setCodec(CodecFactory.snappyCodec());
  avroFileWriter.setMeta(Constants.AVRO_NUMBER_OF_RECORDS, numberOfIndividualsToWrite);
  avroFileWriter.create(individualWrapperSchema, fileOutput);
  int currentIndividualIndex = startIndividualIndex;
  for (int i = 0; i < numberOfIndividualsToWrite; i++) {
    IndividualWrapper<Individual, FitnessValue> currentIndividual = individuals.get(currentIndividualIndex);
    avroFileWriter.append(currentIndividual);
    currentIndividualIndex++;
  }
  avroFileWriter.close();
}
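A hedged sketch of the reverse operation, reading the individuals back from the Avro file, might look like the method below. The name readIndividualsFromFile is made up here for illustration, and it assumes org.apache.avro.mapred.FsInput, org.apache.avro.file.DataFileReader and java.util.ArrayList are available on the classpath and imported.

public static List<IndividualWrapper<Individual, FitnessValue>> readIndividualsFromFile(
    Path filePath, Configuration configuration,
    Schema individualWrapperSchema) throws IOException {
  ReflectData reflectData = new ReflectData(configuration.getClassLoader());
  DatumReader<IndividualWrapper<Individual, FitnessValue>> datumReader =
      new ReflectDatumReader<>(individualWrapperSchema, individualWrapperSchema, reflectData);
  List<IndividualWrapper<Individual, FitnessValue>> individuals = new ArrayList<>();
  // FsInput adapts an HDFS path to the seekable input that DataFileReader needs.
  try (DataFileReader<IndividualWrapper<Individual, FitnessValue>> avroFileReader =
           new DataFileReader<>(new FsInput(filePath, configuration), datumReader)) {
    for (IndividualWrapper<Individual, FitnessValue> individual : avroFileReader) {
      individuals.add(individual);
    }
  }
  return individuals;
}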
Example 15: getDatumWriter
import org.apache.avro.reflect.ReflectDatumWriter; // import the package/class this example depends on
private DatumWriter getDatumWriter(Class<?> type, Schema schema) {
  DatumWriter writer = null;
  logger.debug("Finding correct DatumWriter for type {}", type.getName());
  if (SpecificRecord.class.isAssignableFrom(type)) {
    writer = new SpecificDatumWriter<>(schema);
  } else if (GenericRecord.class.isAssignableFrom(type)) {
    writer = new GenericDatumWriter<>(schema);
  } else {
    writer = new ReflectDatumWriter<>(schema);
  }
  logger.debug("DatumWriter of type {} selected", writer.getClass().getName());
  return writer;
}