本文整理匯總了Java中org.apache.avro.io.DatumReader類的典型用法代碼示例。如果您正苦於以下問題:Java DatumReader類的具體用法?Java DatumReader怎麽用?Java DatumReader使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。
DatumReader類屬於org.apache.avro.io包,在下文中一共展示了DatumReader類的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: convertAvroToJson
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Reads Avro binary-encoded records from {@code inputStream} and re-emits them
 * on {@code outputStream} as pretty-printed JSON, using {@code schema} for both
 * decoding and encoding.
 *
 * @throws IOException if reading or writing fails
 */
static void convertAvroToJson(InputStream inputStream, OutputStream outputStream, Schema schema)
    throws IOException {
  DatumReader<Object> datumReader = new GenericDatumReader<>(schema);
  DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
  JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, outputStream, true);
  Object record = null;
  // Convert record by record until the binary stream reports end-of-data.
  while (!decoder.isEnd()) {
    record = datumReader.read(record, decoder);  // reuse the datum to reduce allocation
    datumWriter.write(record, encoder);
    encoder.flush();
  }
  outputStream.flush();
}
示例2: jsonReadWriteExample
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Round-trips an {@link Employee} through Avro JSON encoding: serializes the
 * record, prints the JSON text, then decodes it back and prints the result.
 *
 * @throws IOException if encoding or decoding fails
 */
public void jsonReadWriteExample() throws IOException {
  Employee employee = Employee.newBuilder().setFirstName("Gaurav")
      .setLastName("Mazra").setSex(SEX.MALE).build();
  DatumWriter<Employee> writer = new SpecificDatumWriter<>(Employee.class);
  byte[] data;
  try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
    Encoder encoder = EncoderFactory.get().jsonEncoder(Employee.getClassSchema(), buffer);
    writer.write(employee, encoder);
    encoder.flush();
    data = buffer.toByteArray();
  }
  // serialized data
  System.out.println(new String(data));
  DatumReader<Employee> reader = new SpecificDatumReader<>(Employee.class);
  Decoder decoder = DecoderFactory.get().jsonDecoder(Employee.getClassSchema(), new String(data));
  employee = reader.read(null, decoder);
  // data after deserialization
  System.out.println(employee);
}
示例3: binaryReadWriteExample
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Demonstrates Avro binary serialization and deserialization of an
 * {@link Employee} record via the generated SpecificRecord API.
 *
 * @throws IOException if encoding or decoding fails
 */
public void binaryReadWriteExample() throws IOException {
  Employee employee = Employee.newBuilder().setFirstName("Gaurav")
      .setLastName("Mazra").setSex(SEX.MALE).build();
  DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<>(Employee.class);
  byte[] data;
  try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
    Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
    employeeWriter.write(employee, binaryEncoder);
    binaryEncoder.flush();
    data = baos.toByteArray();
  }
  // FIX: printing a byte[] directly only shows its identity hash (e.g. "[B@1a2b3c");
  // report the serialized payload size instead, as the other examples do.
  System.out.println(data.length);
  DatumReader<Employee> employeeReader = new SpecificDatumReader<>(Employee.class);
  Decoder binaryDecoder = DecoderFactory.get().binaryDecoder(data, null);
  employee = employeeReader.read(null, binaryDecoder);
  // data after deserialization
  System.out.println(employee);
}
示例4: processSinglex
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
* Process singlex.
*
* @throws Exception the exception
*/
/**
 * Serializes a generated {@link User} to Avro binary and reads it back as a
 * {@link GenericRecord}, printing the payload size and the decoded record.
 *
 * @throws Exception if serialization or deserialization fails
 */
public void processSinglex() throws Exception {
  int base = (int) System.currentTimeMillis();
  User user = User.newBuilder().setName("name" + base).setFavoriteColor("color" + base).setFavoriteNumber(base)
      .build();
  DatumWriter<GenericRecord> datumWriterUser = new GenericDatumWriter<GenericRecord>(User.getClassSchema());
  byte[] byteData;
  // IMPROVED: try-with-resources replaces the manual try/finally close.
  try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
    BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
    datumWriterUser.write(user, binaryEncoder);
    binaryEncoder.flush();
    byteData = baos.toByteArray();
  }
  System.out.println(byteData.length);
  DatumReader<GenericRecord> datumReaderUser = new GenericDatumReader<GenericRecord>(User.getClassSchema());
  GenericRecord genericRecord = datumReaderUser.read(null, DecoderFactory.get().binaryDecoder(byteData, null));
  System.out.println(genericRecord);
  System.out.println(genericRecord.get("name"));
}
示例5: getSchema
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Extracts the schema from an Avro container file and, when
 * {@code PadDefaultNullsToSchema} is enabled, rebuilds the record schema so
 * that nullable-union fields get an explicit {@code null} default value.
 *
 * NOTE(review): the DataFileReader created here is never closed; closing it
 * would also close the caller-supplied {@code input}, so confirm the intended
 * ownership before changing this.
 *
 * @param input seekable source positioned at an Avro container file
 * @return the (possibly null-padded) schema read from the file header
 * @throws IOException if the file header cannot be read
 */
public static Schema getSchema(SeekableInput input) throws IOException
{
DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
DataFileReader<GenericRecord> dataFileReader =
new DataFileReader<GenericRecord>(input, datumReader);
Schema schema = dataFileReader.getSchema();
if (PadDefaultNullsToSchema)
{
// a list of "cloned" fields, with optional default value set to null
ArrayList<Field> paddedFields = new ArrayList<Field>();
for (Field field: schema.getFields())
{
// should this field be padded?
// A field needs padding when it is a union whose FIRST branch is NULL —
// Avro only honours a null default when null is the first union branch.
boolean needsNullPadding = (field.schema() != null) // the field has nested schema
&& (field.schema().getType().equals(Type.UNION)) // the nested schema is UNION
&& (field.schema().getTypes().get(0).getType().equals(Type.NULL)); // the first element of union is NULL type
JsonNode defValue = needsNullPadding ? NullNode.getInstance() : field.defaultValue();
// Clone the field, substituting the (possibly null) default value.
Field f = new Field(field.name(), field.schema(), field.doc(), defValue);
paddedFields.add(f);
}
// Recreate the record shell and attach the cloned fields (Field objects
// cannot be reused across schemas, hence the clone above).
schema = Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), schema.isError());
schema.setFields(paddedFields);
}
return schema;
}
示例6: checkNumeric
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Asserts that both integral ({@code "1"}) and floating ({@code "1.0"}) JSON
 * literals decode to {@code value} for the given Avro numeric {@code type}.
 *
 * @param type  Avro primitive type name, e.g. "int", "long", "float", "double"
 * @param value expected decoded value for field "n"
 * @throws Exception if schema parsing or decoding fails
 */
private void checkNumeric(String type, Object value) throws Exception {
  String def =
      "{\"type\":\"record\",\"name\":\"X\",\"fields\":"
      +"[{\"type\":\""+type+"\",\"name\":\"n\"}]}";
  // FIX: Schema.parse(String) is deprecated — use Schema.Parser, as the
  // other examples in this file already do.
  Schema schema = new Schema.Parser().parse(def);
  DatumReader<GenericRecord> reader =
      new GenericDatumReader<GenericRecord>(schema);
  String[] records = {"{\"n\":1}", "{\"n\":1.0}"};
  for (String record : records) {
    Decoder decoder = new ExtendedJsonDecoder(schema, record);
    GenericRecord r = reader.read(null, decoder);
    Assert.assertEquals(value, r.get("n"));
  }
}
示例7: deserialize
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
try {
T result = null;
if (data != null) {
LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));
DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(
targetType.newInstance().getSchema());
Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
result = (T) datumReader.read(null, decoder);
LOGGER.debug("deserialized data='{}'", result);
}
return result;
} catch (Exception ex) {
throw new SerializationException(
"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
}
}
示例8: deserUserCompile
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Deserializes {@link User} records from an Avro container file on disk and
 * prints each one.
 */
public void deserUserCompile() {
  // Deserialize Users from disk
  DatumReader<User> userDatumReader = new SpecificDatumReader<User>(User.class);
  // FIX: try-with-resources closes the file reader on every path — the
  // original never closed it, leaking the underlying file handle.
  try (DataFileReader<User> dataFileReader = new DataFileReader<User>(
      new File("/Users/a/Desktop/tmp/users.avro"),
      userDatumReader)) {
    User user = null;
    while (dataFileReader.hasNext()) {
      // Reuse user object by passing it to next(). This saves us from
      // allocating and garbage collecting many objects for files with
      // many items.
      user = dataFileReader.next(user);
      System.out.println(user);
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
}
示例9: MemberInfoDynDeser
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
* 動態反序列:通過Schema文件進行動態反序列化操作
*
* @throws IOException
*/
public void MemberInfoDynDeser() throws IOException {
// 1.schema文件解析
Parser parser = new Parser();
Schema mSchema = parser.parse(this.getClass().getResourceAsStream("/Members.avsc"));
// 2.構建數據讀對象
DatumReader<GenericRecord> mGr = new SpecificDatumReader<GenericRecord>(mSchema);
DataFileReader<GenericRecord> mDfr = new DataFileReader<GenericRecord>(new File("/Users/a/Desktop/tmp/members.avro"), mGr);
// 3.從序列化文件中進行數據反序列化取出數據
GenericRecord gr = null;
while (mDfr.hasNext()) {
gr = mDfr.next();
System.err.println("deser data:" + gr.toString());
}
mDfr.close();
System.out.println("Dyn Builder Ser Start Complete.");
}
示例10: convertJsonToAvro
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Converts a stream of JSON-encoded records into Avro binary encoding.
 * Records are read until the JSON decoder signals end-of-input by throwing
 * {@link EOFException}.
 *
 * @throws IOException if reading or writing fails
 */
static void convertJsonToAvro(InputStream inputStream, OutputStream outputStream, Schema schema)
    throws IOException {
  DatumReader<Object> datumReader = new GenericDatumReader<>(schema);
  DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
  Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
  JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, inputStream);
  Object record = null;
  for (;;) {
    try {
      record = datumReader.read(record, decoder);  // reuse the datum across records
    } catch (EOFException endOfInput) {
      break;  // no more JSON records in the stream
    }
    datumWriter.write(record, encoder);
    encoder.flush();
  }
  outputStream.flush();
}
示例11: readIndividualsFromFile
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Reads every IndividualWrapper record from the Avro file at {@code filePath}
 * using reflect-based decoding with the configuration's class loader.
 *
 * @param filePath      path of the Avro container file to read
 * @param configuration Hadoop configuration supplying FS access and class loader
 * @return all records from the file, in file order
 * @throws IOException if the file cannot be opened or read
 */
public static List<IndividualWrapper<Individual, FitnessValue>> readIndividualsFromFile(
    Path filePath, Configuration configuration) throws IOException {
  List<IndividualWrapper<Individual, FitnessValue>> result =
      new ArrayList<IndividualWrapper<Individual, FitnessValue>>();
  SeekableInput seekableFileInput = new FsInput(filePath, configuration);
  ReflectData reflectData = new ReflectData(configuration.getClassLoader());
  DatumReader<IndividualWrapper<Individual, FitnessValue>> datumReader =
      new ReflectDatumReader<IndividualWrapper<Individual, FitnessValue>>(reflectData);
  // FIX: try-with-resources closes the reader (and its underlying input)
  // even when iteration throws; the original leaked both on exception.
  try (DataFileReader<IndividualWrapper<Individual, FitnessValue>> avroFileReader =
      new DataFileReader<IndividualWrapper<Individual, FitnessValue>>(seekableFileInput, datumReader)) {
    for (IndividualWrapper<Individual, FitnessValue> individualWrapper : avroFileReader) {
      result.add(individualWrapper);
    }
  }
  return result;
}
示例12: testCompressFile
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
@Test
public void testCompressFile() throws Exception {
String avroCodec = "snappy";
localProps.put(StorageSinkConnectorConfig.AVRO_CODEC_CONFIG, avroCodec);
setUp();
task = new S3SinkTask(connectorConfig, context, storage, partitioner, format, SYSTEM_TIME);
List<SinkRecord> sinkRecords = createRecords(7);
// Perform write
task.put(sinkRecords);
task.close(context.assignment());
task.stop();
List<S3ObjectSummary> summaries = listObjects(S3_TEST_BUCKET_NAME, "/", s3);
for(S3ObjectSummary summary: summaries){
InputStream in = s3.getObject(summary.getBucketName(), summary.getKey()).getObjectContent();
DatumReader<Object> reader = new GenericDatumReader<>();
DataFileStream<Object> streamReader = new DataFileStream<>(in, reader);
// make sure that produced Avro file has proper codec set
Assert.assertEquals(avroCodec, streamReader.getMetaString(StorageSinkConnectorConfig.AVRO_CODEC_CONFIG));
streamReader.close();
}
long[] validOffsets = {0, 3, 6};
verify(sinkRecords, validOffsets);
}
示例13: main
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Prints sighting records (sighting_date, city, shape, duration) from the
 * Avro container file named by {@code args[0]}.
 *
 * @param args args[0] is the path of the Avro file to dump
 * @throws IOException if the file cannot be opened or read
 */
public static void main(String[] args) throws IOException
{
  String filename = args[0];
  File file = new File(filename);
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  // FIX: try-with-resources closes the file reader (original leaked it).
  try (DataFileReader<GenericRecord> dataFileReader =
      new DataFileReader<GenericRecord>(file, reader)) {
    while (dataFileReader.hasNext()) {
      GenericRecord result = dataFileReader.next();
      String output = String.format("%s %s %s %f",
          result.get("sighting_date"), result.get("city"), result.get("shape"), result.get("duration"));
      System.out.println(output);
    }
  }
}
示例14: main
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Prints aggregated (shape, count) records from the Avro container file
 * named by {@code args[0]}.
 *
 * @param args args[0] is the path of the Avro file to dump
 * @throws IOException if the file cannot be opened or read
 */
public static void main(String[] args) throws IOException
{
  String filename = args[0];
  File file = new File(filename);
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  // FIX: try-with-resources closes the file reader (original leaked it).
  try (DataFileReader<GenericRecord> dataFileReader =
      new DataFileReader<GenericRecord>(file, reader)) {
    while (dataFileReader.hasNext()) {
      GenericRecord result = dataFileReader.next();
      String output = String.format("%s %d",
          result.get("shape"), result.get("count"));
      System.out.println(output);
    }
  }
}
示例15: decode
import org.apache.avro.io.DatumReader; //導入依賴的package包/類
/**
 * Decodes an Avro payload prefixed with a 4-byte schema id: the id is used to
 * fetch the writer schema from the registry, and the remaining bytes are
 * decoded into an instance of {@code type}.
 *
 * @param bytes schema-id header (4 bytes, big-endian) followed by Avro binary data
 * @param type  target class to decode into
 * @return the decoded object
 * @throws IOException if decoding fails
 */
@Override
public <T> T decode(byte[] bytes, Class<T> type) throws IOException {
  Assert.notNull(bytes, "'bytes' cannot be null");
  // FIX: the second guard re-checked 'bytes' instead of 'type' (copy-paste bug),
  // so a null Class slipped past validation and failed later with an NPE.
  Assert.notNull(type, "Class can not be null");
  ByteBuffer buf = ByteBuffer.wrap(bytes);
  byte[] payload = new byte[bytes.length - 4];
  Integer schemaId = buf.getInt();  // consume the 4-byte schema-id header
  buf.get(payload);
  Schema schema = schemaRegistryClient.fetch(schemaId);
  DatumReader reader = getDatumReader(type, schema);
  Decoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
  return (T) reader.read(null, decoder);
}