本文整理汇总了Java中org.apache.avro.generic.GenericDatumWriter类的典型用法代码示例。如果您正苦于以下问题:Java GenericDatumWriter类的具体用法?Java GenericDatumWriter怎么用?Java GenericDatumWriter使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
GenericDatumWriter类属于org.apache.avro.generic包,在下文中一共展示了GenericDatumWriter类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: convertAvroToJson
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Re-encodes Avro binary records from {@code inputStream} as pretty-printed
 * JSON on {@code outputStream}, using {@code schema} for both sides.
 *
 * @throws IOException if reading, decoding, or writing fails
 */
static void convertAvroToJson(InputStream inputStream, OutputStream outputStream, Schema schema)
throws IOException {
    DatumReader<Object> datumReader = new GenericDatumReader<>(schema);
    DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, outputStream, true);
    // Reuse one datum object across iterations to cut allocation churn.
    Object reuse = null;
    while (!decoder.isEnd()) {
        reuse = datumReader.read(reuse, decoder);
        datumWriter.write(reuse, encoder);
        encoder.flush();
    }
    outputStream.flush();
}
示例2: createAvroFile
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Creates an Avro data file named {@code fileName} under {@code parent},
 * fills it with {@code recordCount} generated records, and returns it.
 *
 * @throws Exception if file creation or record generation fails
 */
public File createAvroFile(String fileName, long recordCount, File parent) throws Exception {
    final File target = FileTestUtil.file(testClass, fileName, parent);
    try (DataFileWriter<Object> dataFileWriter =
            new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
        // Compression is optional; only applied when a codec was configured.
        if (codecFactory != null) {
            dataFileWriter.setCodec(codecFactory);
        }
        dataFileWriter.create(schema, target);
        for (long recordIndex = 0; recordIndex < recordCount; recordIndex++) {
            dataFileWriter.append(recordCreatorFn.apply(schema, recordIndex));
        }
    }
    return target;
}
示例3: AvroFileInputStream
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Opens the Avro container file described by {@code status} and prepares a
 * JSON re-encoding pipeline: records read via {@code fileReader} will be
 * written as JSON into the in-memory {@code output} buffer through
 * {@code encoder}, one root value per line.
 *
 * @param status the HDFS file status of the Avro file to read
 * @throws IOException if the file cannot be opened or the JSON generator
 *                     cannot be created
 */
public AvroFileInputStream(FileStatus status) throws IOException {
// Stream starts at offset 0 with an empty read-ahead buffer.
pos = 0;
buffer = new byte[0];
// Reader infers the schema from the file header (no schema passed here).
GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
FileContext fc = FileContext.getFileContext(new Configuration());
fileReader =
DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
Schema schema = fileReader.getSchema();
// Writer uses the schema embedded in the file so output matches input.
writer = new GenericDatumWriter<Object>(schema);
output = new ByteArrayOutputStream();
JsonGenerator generator =
new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
// Minimal printer with a platform line separator puts one record per line.
MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
generator.setPrettyPrinter(prettyPrinter);
encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
示例4: processSinglex
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
* Process singlex.
*
* @throws Exception the exception
*/
/**
 * Round-trips a generated {@link User} record through Avro binary encoding
 * and decoding, printing the encoded size, the decoded record, and its
 * {@code name} field.
 *
 * @throws Exception if Avro serialization or deserialization fails
 */
public void processSinglex() throws Exception {
    // Millis truncated to int — presumably just to get a varying numeric
    // suffix for test data; truncation is harmless here.
    int base = (int) System.currentTimeMillis();
    User user = User.newBuilder()
            .setName("name" + base)
            .setFavoriteColor("color" + base)
            .setFavoriteNumber(base)
            .build();
    DatumWriter<GenericRecord> datumWriterUser =
            new GenericDatumWriter<GenericRecord>(User.getClassSchema());
    byte[] byteData;
    // try-with-resources replaces the original manual try/finally close.
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
        BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
        datumWriterUser.write(user, binaryEncoder);
        // Flush pushes the encoder's buffered bytes into baos before snapshot.
        binaryEncoder.flush();
        byteData = baos.toByteArray();
    }
    System.out.println(byteData.length);
    DatumReader<GenericRecord> datumReaderUser =
            new GenericDatumReader<GenericRecord>(User.getClassSchema());
    GenericRecord genericRecord =
            datumReaderUser.read(null, DecoderFactory.get().binaryDecoder(byteData, null));
    System.out.println(genericRecord);
    System.out.println(genericRecord.get("name"));
}
示例5: getJsonStringFromRecord
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Renders the supplied form field as a pretty-printed JSON string.
 *
 * @param field the record field to convert
 * @return the record encoded as JSON text
 * @throws AvroUiSandboxServiceException if conversion or encoding fails
 */
@Override
public String getJsonStringFromRecord(RecordField field)
        throws AvroUiSandboxServiceException {
    try {
        GenericRecord record = FormAvroConverter.createGenericRecordFromRecordField(field);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        JsonGenerator generator =
                new JsonFactory().createJsonGenerator(buffer, JsonEncoding.UTF8);
        generator.useDefaultPrettyPrinter();
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(record.getSchema(), generator);
        // Write the record through a schema-bound datum writer, then flush
        // both the encoder and the buffer before converting to a string.
        new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, encoder);
        encoder.flush();
        buffer.flush();
        return new String(buffer.toByteArray(), UTF8);
    } catch (Exception e) {
        throw Utils.handleException(e);
    }
}
示例6: serialize
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Avro-serializes {@code payload} to a binary byte array for Kafka.
 * A {@code null} payload yields a {@code null} result.
 *
 * @throws SerializationException wrapping any {@link IOException}
 */
@Override
public byte[] serialize(String topic, T payload) {
    try {
        if (payload == null) {
            return null;
        }
        LOGGER.debug("data='{}'", payload);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(buffer, null);
        new GenericDatumWriter<GenericRecord>(payload.getSchema()).write(payload, encoder);
        // Flush the encoder so all bytes land in the buffer before snapshot.
        encoder.flush();
        buffer.close();
        byte[] bytes = buffer.toByteArray();
        LOGGER.debug("serialized data='{}'", DatatypeConverter.printHexBinary(bytes));
        return bytes;
    } catch (IOException ex) {
        throw new SerializationException("Can't serialize payload='" + payload + "' for topic='" + topic + "'", ex);
    }
}
示例7: createDataFile
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Builds a temporary Avro data file with {@code NUM_RECORDS} generated
 * records, recording each record's block offset in {@code OFFSETS_BY_INDEX},
 * then moves the file onto the test filesystem and returns its path.
 *
 * @return the path of the uploaded Avro file on {@code fs}
 * @throws IOException if file creation or the filesystem move fails
 */
private static Path createDataFile() throws IOException {
File avroFile = File.createTempFile("test-", "." + FILE_EXTENSION);
DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
try (DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(writer)) {
// Flush every block with a small sync interval so each record lands in
// its own addressable block, making per-record offsets meaningful.
dataFileWriter.setFlushOnEveryBlock(true);
dataFileWriter.setSyncInterval(32);
dataFileWriter.create(schema, avroFile);
IntStream.range(0, NUM_RECORDS).forEach(index -> {
GenericRecord datum = new GenericData.Record(schema);
datum.put(FIELD_INDEX, index);
datum.put(FIELD_NAME, String.format("%d_name_%s", index, UUID.randomUUID()));
datum.put(FIELD_SURNAME, String.format("%d_surname_%s", index, UUID.randomUUID()));
try {
// sync() returns the position after writing a sync marker; the -16
// presumably backs up over the 16-byte marker itself — TODO confirm
// against the reader's seek expectations.
OFFSETS_BY_INDEX.put(index, dataFileWriter.sync() - 16L);
dataFileWriter.append(datum);
} catch (IOException ioe) {
// Rewrap so the checked exception can escape the lambda.
throw new RuntimeException(ioe);
}
});
}
Path path = new Path(new Path(fsUri), avroFile.getName());
fs.moveFromLocalFile(new Path(avroFile.getAbsolutePath()), path);
return path;
}
示例8: initialize
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Opens the wrapped input as an Avro container file positioned at the
 * current offset and prepares the writer/encoder state used to re-serialize
 * records, plus the schema fingerprint used to identify the schema.
 *
 * @throws IOException if the Avro container cannot be read
 * @throws NoSuchAlgorithmException if the CRC-64-AVRO fingerprint algorithm
 *                                  is unavailable
 */
private void initialize() throws IOException, NoSuchAlgorithmException {
    SeekableResettableInputBridge in = new SeekableResettableInputBridge(ris);
    long pos = in.tell();
    // Rewind so the reader can parse the container header from the start.
    in.seek(0L);
    fileReader = new DataFileReader<GenericRecord>(in,
            new GenericDatumReader<GenericRecord>());
    // Jump forward again to the first sync point at/after the saved offset.
    fileReader.sync(pos);
    schema = fileReader.getSchema();
    // Parameterized type fixes the original raw-type usage
    // (was: new GenericDatumWriter(schema)).
    datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    out = new ByteArrayOutputStream();
    encoder = EncoderFactory.get().binaryEncoder(out, encoder);
    schemaHash = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);
    schemaHashString = Hex.encodeHexString(schemaHash);
}
示例9: configure
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Reads OAuth credentials and batching limits from the Flume context, wires
 * this source up as a Twitter stream listener, and prepares the Avro writer.
 */
@Override
public void configure(Context context) {
    twitterStream = new TwitterStreamFactory().getInstance();
    twitterStream.setOAuthConsumer(
            context.getString("consumerKey"), context.getString("consumerSecret"));
    twitterStream.setOAuthAccessToken(new AccessToken(
            context.getString("accessToken"), context.getString("accessTokenSecret")));
    twitterStream.addListener(this);
    avroSchema = createAvroSchema();
    dataFileWriter = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>(avroSchema));
    // Keep the current field values as defaults when the context omits keys.
    maxBatchSize = context.getInteger("maxBatchSize", maxBatchSize);
    maxBatchDurationMillis =
            context.getInteger("maxBatchDurationMillis", maxBatchDurationMillis);
}
示例10: testSerializer
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
* tests Avro Serializer
*/
/**
 * Tests the Avro serializer: binary-encodes a generated record with the
 * configured schema, feeds it through the serializer as a Flume event, and
 * compares the produced JSON content against the expected builder output.
 */
@Test
public void testSerializer() throws Exception {
    Context context = new Context();
    String schemaFile = getClass().getResource("/schema.avsc").getFile();
    context.put(ES_AVRO_SCHEMA_FILE, schemaFile);
    avroSerializer.configure(context);
    Schema schema = new Schema.Parser().parse(new File(schemaFile));
    GenericRecord user = generateGenericRecord(schema);
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    // Use the shared factory singleton rather than allocating a fresh
    // EncoderFactory (was: new EncoderFactory().binaryEncoder(...)).
    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
    datumWriter.write(user, encoder);
    encoder.flush();
    Event event = EventBuilder.withBody(outputStream.toByteArray());
    XContentBuilder expected = generateContentBuilder();
    XContentBuilder actual = avroSerializer.serialize(event);
    // Compare as parsed JSON so formatting differences don't fail the test.
    JsonParser parser = new JsonParser();
    assertEquals(parser.parse(expected.string()), parser.parse(actual.string()));
}
示例11: serializeRecord
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
* Serialize the record to prepare for publishing.
*
* @param record the GenericRecord
* @param schema the Avro Schema
* @param ggAvroSchema the internal representation of the Avro schema
* @return the serialized record
* @throws IOException if there is a problem
*/
/**
 * Serialize the record to prepare for publishing.
 *
 * @param record the GenericRecord
 * @param schema the Avro Schema
 * @param ggAvroSchema the internal representation of the Avro schema
 * @return the serialized record bytes
 * @throws IOException if there is a problem
 */
private byte[] serializeRecord(GenericRecord record, Schema schema,
        @SuppressWarnings("unused") AvroSchema ggAvroSchema) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    // Direct encoder writes straight through to the buffer without extra buffering.
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(buffer, null);
    datumWriter.write(record, encoder);
    encoder.flush();
    // close() is a no-op for ByteArrayOutputStream, so it is intentionally omitted.
    return buffer.toByteArray();
}
示例12: convertJsonToAvro
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Reads JSON-encoded records from {@code inputStream} and writes them as
 * Avro binary to {@code outputStream}, stopping at end of input.
 *
 * @throws IOException if reading, decoding, or writing fails
 */
static void convertJsonToAvro(InputStream inputStream, OutputStream outputStream, Schema schema)
throws IOException {
    DatumReader<Object> datumReader = new GenericDatumReader<>(schema);
    DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
    JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, inputStream);
    Object reuse = null;
    for (;;) {
        try {
            reuse = datumReader.read(reuse, decoder);
        } catch (EOFException endOfInput) {
            // JsonDecoder signals exhaustion of the input via EOFException.
            break;
        }
        datumWriter.write(reuse, encoder);
        encoder.flush();
    }
    outputStream.flush();
}
示例13: putRecords
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Serializes the given sink records into an in-memory Avro container file
 * and returns its bytes. The Avro schema is derived from the first record's
 * value schema; all records are assumed to share it (as in the original —
 * behavior with an empty collection is unchanged).
 *
 * @throws IOException if writing the Avro container fails
 */
public static byte[] putRecords(Collection<SinkRecord> records, AvroData avroData) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // try-with-resources fixes the original resource leak: the writer was
    // flushed but never closed.
    try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
        Schema schema = null;
        for (SinkRecord record : records) {
            if (schema == null) {
                schema = record.valueSchema();
                org.apache.avro.Schema avroSchema = avroData.fromConnectSchema(schema);
                writer.create(avroSchema, out);
            }
            Object value = avroData.fromConnectData(schema, record.value());
            // AvroData wraps primitive types so their schema can be included. We need to unwrap
            // NonRecordContainers to just their value to properly handle these types
            if (value instanceof NonRecordContainer) {
                value = ((NonRecordContainer) value).getValue();
            }
            writer.append(value);
        }
        writer.flush();
    }
    // ByteArrayOutputStream is still readable after the writer closes it.
    return out.toByteArray();
}
示例14: writeRowsHelper
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Writes the given table rows as an Avro data file to the destination
 * produced by substituting {@code shard} into {@code destinationPattern}.
 *
 * @param rows the rows to write, one GenericRecord per row
 * @param avroSchema the Avro schema used for the output records
 * @param destinationPattern a path pattern whose "*" is replaced by the
 *                           zero-padded shard number
 * @param shard the shard index used to derive the output filename
 * @throws IOException declared by the signature; in practice I/O failures
 *                     inside the try are rewrapped as IllegalStateException
 */
private void writeRowsHelper(List<TableRow> rows, Schema avroSchema,
String destinationPattern, int shard) throws IOException {
// Zero-pad the shard number to 12 digits in place of the "*" wildcard.
String filename = destinationPattern.replace("*", String.format("%012d", shard));
// Both the channel and the writer are managed by try-with-resources; the
// writer is created last so it closes first, flushing into the channel.
try (WritableByteChannel channel = FileSystems.create(
FileSystems.matchNewResource(filename, false /* isDirectory */), MimeTypes.BINARY);
DataFileWriter<GenericRecord> tableRowWriter =
new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(avroSchema))
.create(avroSchema, Channels.newOutputStream(channel))) {
for (Map<String, Object> record : rows) {
// Copy every field of the row map into a record built against the schema.
GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(avroSchema);
for (Map.Entry<String, Object> field : record.entrySet()) {
genericRecordBuilder.set(field.getKey(), field.getValue());
}
tableRowWriter.append(genericRecordBuilder.build());
}
} catch (IOException e) {
// Rewrap with the destination filename for a more actionable message.
throw new IllegalStateException(
String.format("Could not create destination for extract job %s", filename), e);
}
}
示例15: AvroKeyValueWriter
import org.apache.avro.generic.GenericDatumWriter; //导入依赖的package包/类
/**
 * Builds a writer that stores key/value pairs as records in an Avro
 * container file written to {@code outputStream}.
 *
 * @param keySchema the Avro schema of the keys
 * @param valueSchema the Avro schema of the values
 * @param compressionCodec codec applied to the container file's blocks
 * @param outputStream destination for the container file bytes
 * @param syncInterval approximate number of bytes between sync markers
 * @throws IOException if the container file cannot be created
 */
AvroKeyValueWriter(Schema keySchema, Schema valueSchema,
CodecFactory compressionCodec, OutputStream outputStream,
int syncInterval) throws IOException {
// Create the generic record schema for the key/value pair.
mKeyValuePairSchema = AvroKeyValue
.getSchema(keySchema, valueSchema);
// Create an Avro container file and a writer to it.
DatumWriter<GenericRecord> genericDatumWriter = new GenericDatumWriter<GenericRecord>(
mKeyValuePairSchema);
mAvroFileWriter = new DataFileWriter<GenericRecord>(
genericDatumWriter);
mAvroFileWriter.setCodec(compressionCodec);
mAvroFileWriter.setSyncInterval(syncInterval);
mAvroFileWriter.create(mKeyValuePairSchema, outputStream);
// Create a reusable output record.
mOutputRecord = new AvroKeyValue<Object, Object>(
new GenericData.Record(mKeyValuePairSchema));
}