This article collects typical usage examples of the Java method org.apache.avro.io.DecoderFactory.get. If you have been wondering how the Java DecoderFactory.get method works, how to call it, or what real-world usages look like, the curated method examples here should help. You can also explore further usage examples of the enclosing class, org.apache.avro.io.DecoderFactory.
Five code examples of the DecoderFactory.get method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
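Before the project examples, here is a minimal self-contained sketch of the basic pattern (not taken from any of the projects below; the schema literal and payload are placeholders): DecoderFactory.get() returns the shared default factory, which builds a BinaryDecoder that a GenericDatumReader then consumes.

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

public class DecoderFactoryGetSketch {

    // Placeholder schema; real code would obtain the writer schema from a file or a schema registry.
    private static final Schema SCHEMA = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"Example\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");

    public static GenericRecord decode(byte[] payload) throws IOException {
        // DecoderFactory.get() returns the shared default factory.
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
        return new GenericDatumReader<GenericRecord>(SCHEMA).read(null, decoder);
    }
}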
Example 1: init
import org.apache.avro.io.DecoderFactory; // import the package/class this method depends on
@SuppressWarnings("unchecked")
@Override
public void init(Properties props, String topicName) {
    super.init(props, topicName);
    try {
        // init registry configured in properties
        SchemaRegistry<Schema> registry = (SchemaRegistry<Schema>) Class.forName(
                props.getProperty(KafkaAvroMessageEncoder.KAFKA_MESSAGE_CODER_SCHEMA_REGISTRY_CLASS)
        ).newInstance();
        registry.init(props);
        // use Cached Registry
        this.registry = registry; // TODO new CachedSchemaRegistry<Schema>(registry);
        decoderFactory = DecoderFactory.get();
    } catch (Exception e) {
        throw new MessageDecoderException(e);
    }
}
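A rough sketch of how the decoder above might be wired up. The registry implementation class and the topic name are hypothetical placeholders, and decoder stands for an instance of the (not shown) class that declares this init method; the property key is the same constant the snippet reads.

Properties props = new Properties();
// "com.example.MyAvroSchemaRegistry" is a placeholder for a real SchemaRegistry implementation.
props.setProperty(KafkaAvroMessageEncoder.KAFKA_MESSAGE_CODER_SCHEMA_REGISTRY_CLASS,
        "com.example.MyAvroSchemaRegistry");
decoder.init(props, "my_topic"); // hypothetical decoder instance and topic name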
Example 2: testGetDataBinary
import org.apache.avro.io.DecoderFactory; // import the package/class this method depends on
@Test
public void testGetDataBinary() throws java.io.IOException {
    // given
    UiSpecsPropertiesDto propertiesDto = new UiSpecsPropertiesDto();
    propertiesDto.setProperties(getFileAsObjectNode("jdbc_data_set_properties_with_schema.json"));
    propertiesDto.setDependencies(singletonList(getJdbcDataStoreProperties()));
    String dataSetDefinitionName = "JDBCDataset";
    // when
    Response schemaResponse = given().content(propertiesDto).contentType(APPLICATION_JSON_UTF8_VALUE) //
            .accept(APPLICATION_JSON_UTF8_VALUE) //
            .expect().statusCode(200).log().ifError() //
            .post(getVersionPrefix() + "/runtimes/schema");
    Schema schema = new Schema.Parser().parse(schemaResponse.asInputStream());
    Response response = given().content(propertiesDto).contentType(APPLICATION_JSON_UTF8_VALUE) //
            .accept(RuntimesController.AVRO_BINARY_MIME_TYPE_OFFICIAL_INVALID) //
            .expect().statusCode(200).log().ifError() //
            .post(getVersionPrefix() + "/runtimes/data");
    // then
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    DecoderFactory decoderFactory = DecoderFactory.get();
    Decoder decoder = decoderFactory.binaryDecoder(response.asInputStream(), null);
    assertRecordsEqualsToTestValues(reader, decoder);
}
Example 3: testGetData
import org.apache.avro.io.DecoderFactory; // import the package/class this method depends on
@Test
public void testGetData() throws java.io.IOException {
    // given
    UiSpecsPropertiesDto propertiesDto = new UiSpecsPropertiesDto();
    propertiesDto.setProperties(getFileAsObjectNode("jdbc_data_set_properties_with_schema.json"));
    propertiesDto.setDependencies(singletonList(getJdbcDataStoreProperties()));
    String dataSetDefinitionName = "JDBCDataset";
    Response schemaResponse = given().content(propertiesDto).contentType(APPLICATION_JSON_UTF8_VALUE) //
            .accept(APPLICATION_JSON_UTF8_VALUE) //
            .expect().statusCode(200).log().ifError() //
            .post(getVersionPrefix() + "/runtimes/schema");
    Schema schema = new Schema.Parser().parse(schemaResponse.asInputStream());
    // when
    Response response = given().content(propertiesDto).contentType(APPLICATION_JSON_UTF8_VALUE) //
            .accept(RuntimesController.AVRO_JSON_MIME_TYPE_OFFICIAL_INVALID) //
            .expect().statusCode(200).log().ifError() //
            .post(getVersionPrefix() + "/runtimes/data");
    // then
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    DecoderFactory decoderFactory = DecoderFactory.get();
    Decoder decoder = decoderFactory.jsonDecoder(schema, response.asInputStream());
    assertRecordsEqualsToTestValues(reader, decoder);
}
Example 4: init
import org.apache.avro.io.DecoderFactory; // import the package/class this method depends on
/**
* To remove certain fields from the Avro schema or records of a topic/table, set property
* {topic/table name}.remove.fields={comma-separated, fully qualified field names} in workUnit.
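* Example (hypothetical table and field names): my_table.remove.fields=envelope.header,envelope.checksum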
*/
@Override
public EnvelopeSchemaConverter init(WorkUnitState workUnit) {
    if (workUnit.contains(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY)) {
        String removeFieldsPropName = workUnit.getProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY) + AvroProjectionConverter.REMOVE_FIELDS;
        if (workUnit.contains(removeFieldsPropName)) {
            this.fieldRemover = Optional.of(new AvroSchemaFieldRemover(workUnit.getProp(removeFieldsPropName)));
        } else {
            this.fieldRemover = Optional.absent();
        }
    }
    String registryFactoryField = workUnit.contains(KafkaSchemaRegistryFactory.KAFKA_SCHEMA_REGISTRY_FACTORY_CLASS)
            ? workUnit.getProp(KafkaSchemaRegistryFactory.KAFKA_SCHEMA_REGISTRY_FACTORY_CLASS)
            : DEFAULT_KAFKA_SCHEMA_REGISTRY_FACTORY_CLASS;
    try {
        KafkaSchemaRegistryFactory registryFactory =
                ((Class<? extends KafkaSchemaRegistryFactory>) Class.forName(registryFactoryField)).newInstance();
        this.registry = registryFactory.create(workUnit.getProperties());
    } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
        return null;
    }
    this.decoderFactory = DecoderFactory.get();
    this.readers = CacheBuilder.newBuilder().build(new CacheLoader<Schema, GenericDatumReader<GenericRecord>>() {
        @Override
        public GenericDatumReader<GenericRecord> load(final Schema key) throws Exception {
            return new GenericDatumReader<>(key);
        }
    });
    return this;
}
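The snippet does not show how the cached readers and decoderFactory are used afterwards. As a rough standalone sketch (the method name and parameters are assumptions, not code from the project), a later step typically looks up the reader for the record's writer schema and decodes the binary payload:

// Sketch only: decode one binary payload using a per-schema reader cache like the one built above.
private GenericRecord decodePayload(LoadingCache<Schema, GenericDatumReader<GenericRecord>> readers,
        Schema writerSchema, byte[] payload) throws Exception {
    Decoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    // LoadingCache.get() builds the GenericDatumReader on first access via the CacheLoader.
    return readers.get(writerSchema).read(null, decoder);
}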
Example 5: decodeRecord
import org.apache.avro.io.DecoderFactory; // import the package/class this method depends on
public static <T> T decodeRecord(Class<T> recordClass, byte[] data) throws IOException {
    DecoderFactory decoderFactory = DecoderFactory.get();
    BinaryDecoder binaryDecoder = decoderFactory.binaryDecoder(data, null);
    SpecificDatumReader<T> datumReader = new SpecificDatumReader<>(getSchema(recordClass));
    return datumReader.read(null, binaryDecoder);
}
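A hypothetical call site for the helper above, assuming MyEvent is a generated Avro SpecificRecord class and the byte array holds a record serialized with the matching schema:

byte[] payload = readSerializedEventFromSomewhere(); // placeholder for the actual payload source
MyEvent event = decodeRecord(MyEvent.class, payload);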