This page collects typical usage examples of the Java method org.apache.avro.generic.GenericDatumWriter.write. If you are wondering what GenericDatumWriter.write does, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore the enclosing class, org.apache.avro.generic.GenericDatumWriter, in more depth.
Below are 15 code examples of GenericDatumWriter.write, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
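Before the examples, here is a minimal, self-contained sketch of the pattern they all share: create a writer for a schema, create an encoder over an output stream, call write, then flush. The schema and field names here are illustrative, not taken from any example below.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class WriteSketch {
  public static void main(String[] args) throws IOException {
    // A throwaway schema with a single string field.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Example\","
        + "\"fields\":[{\"name\":\"message\",\"type\":\"string\"}]}");

    GenericRecord record = new GenericData.Record(schema);
    record.put("message", "hello avro");

    // write() serializes one datum through an Encoder; it does not frame or
    // tag the bytes, so the reader must know the schema to decode them.
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(record, encoder);
    encoder.flush(); // the encoder buffers; flush before reading the bytes

    System.out.println(out.toByteArray().length + " bytes written");
  }
}

Every example below is a variation of this create-writer / create-encoder / write / flush sequence.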
Example 1: writeContainerless
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
private void writeContainerless(Record src, OutputStream dst) {
  try {
    GenericDatumWriter<GenericContainer> datumWriter = new GenericDatumWriter<>();
    Encoder encoder = null;
    Schema schema = null;
    for (Object attachment : src.get(Fields.ATTACHMENT_BODY)) {
      Preconditions.checkNotNull(attachment);
      GenericContainer datum = (GenericContainer) attachment;
      schema = getSchema(datum, schema);
      datumWriter.setSchema(schema);
      if (encoder == null) { // lazily create the encoder once the first schema is known
        if (format == Format.containerlessJSON) {
          encoder = EncoderFactory.get().jsonEncoder(schema, dst);
        } else {
          encoder = EncoderFactory.get().binaryEncoder(dst, null);
        }
      }
      datumWriter.write(datum, encoder);
    }
    if (encoder != null) { // guard against an empty attachment list
      encoder.flush();
    }
  } catch (IOException e) {
    throw new MorphlineRuntimeException(e);
  }
}
Example 2: dataToByteArray
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public static byte[] dataToByteArray(Schema schema, GenericRecord datum) throws IOException {
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  try {
    Encoder e = EncoderFactory.get().binaryEncoder(os, null);
    writer.write(datum, e);
    e.flush();
    return os.toByteArray();
  } finally {
    os.close();
  }
}
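For completeness, a sketch of the matching read side: since write() emits raw binary with no embedded schema, the reader must be handed the same (or a compatible) schema. The method name fromByteArray is illustrative, not from the original project.

import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

public static GenericRecord fromByteArray(Schema schema, byte[] bytes) throws IOException {
  // binaryDecoder mirrors binaryEncoder; the bytes carry no schema,
  // so the writer's schema must be supplied here.
  GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return reader.read(null, decoder);
}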
Example 3: jsonFromGenericRecord
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public static String jsonFromGenericRecord(GenericRecord record) {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(DefaultTopicSchema.MESSAGE_V0);
  try {
    // Legacy public JsonEncoder constructor; newer Avro versions create one
    // via EncoderFactory.get().jsonEncoder(schema, out) instead.
    Encoder encoder = new JsonEncoder(DefaultTopicSchema.MESSAGE_V0, out);
    writer.write(record, encoder);
    encoder.flush();
  } catch (IOException e) {
    LOG.error("Unable to serialize avro record", e);
  }
  return out.toString();
}
Example 4: process
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
@Override
protected void process() throws Exception {
  InputStream avscInput = new FileInputStream("data/twitter.avsc");
  Schema.Parser parser = new Schema.Parser();
  Schema schema = parser.parse(avscInput);
  avscInput.close();
  final StreamingOutput<OutputTuple> out = getOutput(0);
  /* Serialize each record to a byte array */
  for (int i = 0; i < 100; i++) {
    GenericRecord producedDatum = new GenericData.Record(schema);
    producedDatum.put("username", "Frank");
    producedDatum.put("tweet", "This Avro message really rocks: " + i);
    producedDatum.put("timestamp", 1048298232L + i);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    Encoder e = EncoderFactory.get().binaryEncoder(os, null);
    writer.write(producedDatum, e);
    e.flush();
    byte[] byteData = os.toByteArray();
    os.close();
    /* Now submit the serialized record as a blob tuple */
    OutputTuple tuple = out.newTuple();
    Blob blobData = ValueFactory.newBlob(byteData);
    tuple.setBlob(0, blobData);
    out.submit(tuple);
  }
  // Mark the set of tuples as a window.
  out.punctuate(Punctuation.WINDOW_MARKER);
}
Example 5: writeAvroJsonEncoding
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
/**
 * Writes Java objects out to a JSON-encoded file.
 */
public static void writeAvroJsonEncoding(File file, Meetup[] meetups, Schema schema) throws IOException {
  GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
  FileOutputStream out = new FileOutputStream(file);
  Encoder e = EncoderFactory.get().jsonEncoder(schema, out);
  for (Meetup p : meetups) {
    writer.write(p.serialize(schema), e);
  }
  e.flush();
  out.close(); // flush the encoder before closing the underlying stream
}
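Reading such a JSON-encoded file back is symmetric: jsonDecoder accepts an InputStream and the writer's schema. A sketch, with readFirstJsonRecord as an illustrative name not taken from the original project:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

public static GenericRecord readFirstJsonRecord(File file, Schema schema) throws IOException {
  try (InputStream in = new FileInputStream(file)) {
    // jsonDecoder mirrors jsonEncoder and needs the writer's schema.
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, in);
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    return reader.read(null, decoder);
  }
}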
Example 6: toBytes
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public byte[] toBytes(Object object) {
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  try {
    Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
    GenericDatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(object, encoder);
    encoder.flush();
    output.close();
  } catch (Exception e) {
    throw new SerializationException("An exception was thrown during Avro generic serialization", e);
  }
  return output.toByteArray();
}
Example 7: writeIndexedRecord
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
private void writeIndexedRecord(GenericDatumWriter<IndexedRecord> writer, Encoder encoder, IndexedRecord indexedRecord) {
  try {
    writer.write(indexedRecord, encoder);
  } catch (IOException e) {
    log.warn("Couldn't serialize Avro record.", e);
  }
}
Example 8: toJson
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
private static String toJson(List<GenericRecord> records, Schema schema)
    throws IOException {
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  HackedJsonEncoder jsonEncoder = new HackedJsonEncoder(schema, output);
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  for (GenericRecord record : records) {
    writer.write(record, jsonEncoder);
  }
  jsonEncoder.flush();
  output.flush();
  return output.toString();
}
Example 9: generateWorksheet
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
@Override
public Worksheet generateWorksheet() throws JSONException, IOException,
    KarmaException {
  DataFileReader<Void> schemaReader = new DataFileReader<>(file, new GenericDatumReader<Void>());
  Schema schema = schemaReader.getSchema();
  schemaReader.close();
  DataFileReader<GenericRecord> reader = new DataFileReader<>(file, new GenericDatumReader<GenericRecord>(schema));
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  baos.write('[');
  baos.write('\n');
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(reader.getSchema());
  while (reader.hasNext()) {
    GenericRecord record = reader.next();
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(reader.getSchema(), new JsonFactory().createJsonGenerator(baos)).configure(baos);
    writer.write(record, encoder);
    encoder.flush();
    if (reader.hasNext()) {
      baos.write(',');
    }
  }
  reader.close();
  baos.write('\n');
  baos.write(']');
  baos.flush();
  baos.close();
  String json = new String(baos.toByteArray());
  JsonImport jsonImport = new JsonImport(json, this.getFactory(), this.getWorksheet(), workspace, maxNumLines);
  return jsonImport.generateWorksheet();
}
Example 10: datumToBytes
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
private byte[] datumToBytes(Schema myNewSchema, GenericRecord datum) throws IOException {
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  Encoder e = EncoderFactory.get().binaryEncoder(outputStream, null);
  GenericDatumWriter<GenericRecord> w = new GenericDatumWriter<>(myNewSchema);
  w.write(datum, e);
  e.flush();
  return outputStream.toByteArray();
}
Example 11: write
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public void write(Kryo kryo, Output output, GenericContainer record) {
  String fingerPrint = this.getFingerprint(record.getSchema());
  output.writeString(fingerPrint);
  GenericDatumWriter<GenericContainer> writer = new GenericDatumWriter<>(record.getSchema());
  // directBinaryEncoder writes straight to the Kryo Output stream without
  // buffering, so no flush is needed after write().
  BinaryEncoder encoder = EncoderFactory
      .get()
      .directBinaryEncoder(output, null);
  try {
    writer.write(record, encoder);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
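The serializer's read counterpart is not shown above. A sketch of what it might look like, assuming a schemaForFingerprint lookup that inverts getFingerprint (both the lookup and this method body are hypothetical); Kryo's Input extends java.io.InputStream, so Avro can decode from it directly:

public GenericContainer read(Kryo kryo, Input input, Class<GenericContainer> type) {
  String fingerPrint = input.readString();
  Schema schema = schemaForFingerprint(fingerPrint); // hypothetical reverse lookup
  GenericDatumReader<GenericContainer> reader = new GenericDatumReader<>(schema);
  // directBinaryDecoder mirrors directBinaryEncoder on the write side.
  Decoder decoder = DecoderFactory.get().directBinaryDecoder(input, null);
  try {
    return reader.read(null, decoder);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}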
Example 12: encodeObject
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
protected static <T> byte[] encodeObject(final T datum, final GenericDatumWriter<T> writer) throws IOException {
  // The encoder instantiation can be replaced with a ThreadLocal if needed.
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  BinaryEncoder encoder = ENCODER_FACTORY.binaryEncoder(os, null);
  writer.write(datum, encoder);
  encoder.flush();
  return os.toByteArray();
}
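The second argument to binaryEncoder is a reuse slot: passing a previously returned BinaryEncoder lets the factory re-bind it to the new stream instead of allocating a fresh buffer. A sketch of the ThreadLocal variant the comment above alludes to (ENCODER_CACHE and encodeObjectReusing are illustrative names, not from the original project):

private static final ThreadLocal<BinaryEncoder> ENCODER_CACHE = new ThreadLocal<>();

protected static <T> byte[] encodeObjectReusing(T datum, GenericDatumWriter<T> writer) throws IOException {
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  // Reuse this thread's encoder if one exists; binaryEncoder re-binds it to the new stream.
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, ENCODER_CACHE.get());
  ENCODER_CACHE.set(encoder);
  writer.write(datum, encoder);
  encoder.flush();
  return os.toByteArray();
}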
Example 13: toBytes
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public byte[] toBytes(Object object) {
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  // Legacy public BinaryEncoder constructor from older Avro releases;
  // newer versions obtain encoders via EncoderFactory instead.
  Encoder encoder = new BinaryEncoder(output);
  GenericDatumWriter<Object> datumWriter = null;
  try {
    datumWriter = new GenericDatumWriter<>(typeDef);
    datumWriter.write(object, encoder);
    encoder.flush();
  } catch (IOException e) {
    throw new SerializationException(e);
  } finally {
    SerializationUtils.close(output);
  }
  return output.toByteArray();
}
Example 14: pushAvroIntoKafka
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
public static void pushAvroIntoKafka(List<File> avroFiles, String kafkaBroker, String kafkaTopic) {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", kafkaBroker);
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("request.required.acks", "1");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  Producer<String, byte[]> producer = new Producer<>(producerConfig);
  for (File avroFile : avroFiles) {
    try {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536);
      DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile);
      BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
      GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(reader.getSchema());
      int recordCount = 0;
      List<KeyedMessage<String, byte[]>> messagesToWrite = new ArrayList<>(10000);
      for (GenericRecord genericRecord : reader) {
        outputStream.reset(); // reuse the buffer for each record
        datumWriter.write(genericRecord, binaryEncoder);
        binaryEncoder.flush();
        byte[] bytes = outputStream.toByteArray();
        KeyedMessage<String, byte[]> data = new KeyedMessage<>(kafkaTopic, bytes);
        if (BATCH_KAFKA_MESSAGES) {
          messagesToWrite.add(data);
        } else {
          producer.send(data);
        }
        recordCount += 1;
      }
      if (BATCH_KAFKA_MESSAGES) {
        producer.send(messagesToWrite);
      }
      outputStream.close();
      reader.close();
      LOGGER.info("Finished writing " + recordCount + " records from " + avroFile.getName() + " into Kafka topic "
          + kafkaTopic);
      int totalRecordCount = totalAvroRecordWrittenCount.addAndGet(recordCount);
      LOGGER.info("Total records written so far " + totalRecordCount);
    } catch (Exception e) {
      e.printStackTrace();
      throw new RuntimeException(e);
    }
  }
}
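Note that this example targets the legacy Kafka 0.8-era producer API (kafka.javaapi.producer.Producer, metadata.broker.list); current Kafka clients would use org.apache.kafka.clients.producer.KafkaProducer with bootstrap.servers instead, but the per-record reset / write / flush / toByteArray loop over the shared output stream stays the same.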
Example 15: generateRecordsContainerEvent
import org.apache.avro.generic.GenericDatumWriter; // import the package/class the method depends on
private Event generateRecordsContainerEvent(String appToken, LogSchema schema,
                                            List<LogEvent> logEvents, ProfileInfo clientProfile,
                                            ProfileInfo serverProfile, RecordHeader header) {
  if (clientProfile == null && includeClientProfile) {
    LOG.error("Can't generate records container event. " + CLIENT_PROFILE_NOT_SET);
    throw new RuntimeException(CLIENT_PROFILE_NOT_SET);
  }
  if (serverProfile == null && includeServerProfile) {
    LOG.error("Can't generate records container event. " + SERVER_PROFILE_NOT_SET);
    throw new RuntimeException(SERVER_PROFILE_NOT_SET);
  }
  Event event = null;
  RecordData logData = new RecordData();
  logData.setSchemaVersion(schema.getVersion());
  logData.setApplicationToken(appToken);
  logData.setRecordHeader(header);
  if (includeClientProfile && clientProfile != null) {
    logData.setClientProfileBody(clientProfile.getBody());
    logData.setClientSchemaId(clientProfile.getSchemaId());
  }
  if (includeServerProfile && serverProfile != null) {
    logData.setServerProfileBody(serverProfile.getBody());
    logData.setServerSchemaId(serverProfile.getSchemaId());
  }
  List<ByteBuffer> bytes = new ArrayList<>(logEvents.size());
  for (LogEvent logEvent : logEvents) {
    bytes.add(ByteBuffer.wrap(logEvent.getLogData()));
  }
  logData.setEventRecords(bytes);
  EncoderFactory factory = EncoderFactory.get();
  GenericDatumWriter<RecordData> writer = new GenericDatumWriter<>(logData.getSchema());
  LOG.debug("Convert log data [{}] to bytes.", logData);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  BinaryEncoder encoder = factory.binaryEncoder(baos, null);
  try {
    writer.write(logData, encoder);
    encoder.flush();
    event = EventBuilder.withBody(baos.toByteArray());
  } catch (IOException ex) {
    LOG.warn("Can't convert avro object {} to binary. Exception caught: {}", logData, ex);
  }
  LOG.trace("Build flume event with array body [{}]", baos);
  return event;
}