本文整理汇总了Java中org.apache.avro.generic.GenericData类的典型用法代码示例。如果您正苦于以下问题:Java GenericData类的具体用法?Java GenericData怎么用?Java GenericData使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
GenericData类属于org.apache.avro.generic包,在下文中一共展示了GenericData类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: readAvroFile
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Reads binary Avro-encoded records from the given container file, using the
 * writer schema embedded in the file, and prints each record to stdout.
 *
 * @param file Avro data file to read
 * @throws IOException if the file cannot be opened or read
 */
public static void readAvroFile(File file) throws IOException {
    GenericDatumReader<GenericData.Record> datum = new GenericDatumReader<>();
    // try-with-resources guarantees the reader is closed even when next() throws;
    // the original leaked the file handle on any read error.
    try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file, datum)) {
        // Reuse one record instance across rows to avoid per-row allocation.
        GenericData.Record record = new GenericData.Record(reader.getSchema());
        while (reader.hasNext()) {
            reader.next(record);
            System.out.println("Name " + record.get("name") + " on "
                + record.get("Meetup_date") + " attending "
                + record.get("going") + " organized by "
                + record.get("organizer") + " on " + record.get("topics"));
        }
    }
}
示例2: write
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Converts a Map into an Avro {@link GenericRecord} shaped by {@code avroSchema}.
 * Each child formatter extracts one named field from the map. A non-Map input
 * yields an empty record (no fields populated).
 *
 * @param obj expected to be a {@code Map<Object,Object>} of field values
 * @return the populated (or empty) Avro record
 * @throws IOException declared for interface compatibility
 */
@Override
public Object write( final Object obj ) throws IOException{
    GenericRecord result = new GenericData.Record( avroSchema );
    if( obj instanceof Map ){
        Map<Object,Object> input = (Map<Object,Object>)obj;
        for( KeyAndFormatter field : childContainer ){
            // Reset per-field state before extracting the next value.
            field.clear();
            result.put( field.getName() , field.get( input ) );
        }
    }
    return result;
}
示例3: updateTopicProcessor
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * (Re)binds, for every configured topic, its processor instance, the
 * {@code process(GenericData.Record, String)} method handle, and the database
 * writer. On any failure the topic's existing bindings are removed so a stale
 * or half-initialized processor is never invoked.
 */
public void updateTopicProcessor() {
    for (String topic : _topics.keySet()) {
        try {
            // Load and instantiate the processor class named in the topic config.
            final Class<?> processorClass = Class.forName(_topics.get(topic).processor);
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows checked constructor
            // exceptions without wrapping them.
            _topicProcessorClass.put(topic, processorClass.getDeclaredConstructor().newInstance());
            final Method method = processorClass.getDeclaredMethod("process", GenericData.Record.class, String.class);
            _topicProcessorMethod.put(topic, method);
            // Writer that persists processed records into the topic's table.
            final DatabaseWriter dw = new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, _topics.get(topic).dbTable);
            _topicDbWriter.put(topic, dw);
        } catch (Exception e) {
            Logger.error("Fail to create Processor for topic: " + topic, e);
            // Drop any partial bindings so the topic is skipped rather than half-wired.
            _topicProcessorClass.remove(topic);
            _topicProcessorMethod.remove(topic);
            _topicDbWriter.remove(topic);
        }
    }
}
示例4: process
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Process a Gobblin tracking event audit record. Only records named
 * DaliLimitedRetentionAuditor, DaliAutoPurgeAuditor or DsIgnoreIDPCAuditor are
 * handled; for those, dataset ownership is updated unless the event is flagged
 * as an error.
 *
 * @param record audit record to process; may be null (ignored)
 * @param topic Kafka topic the record came from (unused here, kept for the
 *        reflective processor-method contract)
 * @return always null
 * @throws Exception propagated from the DAO layer
 */
public Record process(GenericData.Record record, String topic)
    throws Exception {
    if (record != null && record.get("name") != null) {
        final String name = record.get("name").toString();
        // only handle "DaliLimitedRetentionAuditor","DaliAutoPurgeAuditor" and "DsIgnoreIDPCAuditor"
        if (name.equals(DALI_LIMITED_RETENTION_AUDITOR)
            || name.equals(DALI_AUTOPURGED_AUDITOR)
            || name.equals(DS_IGNORE_IDPC_AUDITOR)) {
            Long timestamp = (Long) record.get("timestamp");
            Map<String, String> metadata = StringUtil.convertObjectMapToStringMap(record.get("metadata"));
            // Null-safe check: the original called equalsIgnoreCase on the map
            // value and threw NPE when "HasError" was absent; a missing flag is
            // treated as "no error".
            if (!"true".equalsIgnoreCase(metadata.get("HasError"))) {
                String datasetPath = metadata.get("DatasetPath");
                // Guard against a missing DatasetPath (previously an NPE on startsWith).
                if (datasetPath != null) {
                    String datasetUrn = DATASET_URN_PREFIX + (datasetPath.startsWith("/") ? "" : "/") + datasetPath;
                    String ownerUrns = metadata.get("OwnerURNs");
                    DatasetInfoDao.updateKafkaDatasetOwner(datasetUrn, ownerUrns, DATASET_OWNER_SOURCE, timestamp);
                }
            }
        }
    }
    return null;
}
示例5: createDataFile
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Writes NUM_RECORDS synthetic Avro records to a temp file, recording each
 * record's sync-marker offset in OFFSETS_BY_INDEX, then moves the file onto
 * the test filesystem and returns its path.
 *
 * @return path of the uploaded Avro data file
 * @throws IOException on any file-system or serialization error
 */
private static Path createDataFile() throws IOException {
    File avroFile = File.createTempFile("test-", "." + FILE_EXTENSION);
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (DataFileWriter<GenericRecord> fileWriter = new DataFileWriter<>(datumWriter)) {
        fileWriter.setFlushOnEveryBlock(true);
        fileWriter.setSyncInterval(32);
        fileWriter.create(schema, avroFile);
        for (int index = 0; index < NUM_RECORDS; index++) {
            GenericRecord row = new GenericData.Record(schema);
            row.put(FIELD_INDEX, index);
            row.put(FIELD_NAME, String.format("%d_name_%s", index, UUID.randomUUID()));
            row.put(FIELD_SURNAME, String.format("%d_surname_%s", index, UUID.randomUUID()));
            try {
                // Offset adjusted by the 16-byte sync marker length; wrap IO
                // failures as unchecked, matching the original lambda behavior.
                OFFSETS_BY_INDEX.put(index, fileWriter.sync() - 16L);
                fileWriter.append(row);
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
        }
    }
    Path path = new Path(new Path(fsUri), avroFile.getName());
    fs.moveFromLocalFile(new Path(avroFile.getAbsolutePath()), path);
    return path;
}
示例6: testIncompatibleSchemas
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
// Verifies that an event carrying a record with a schema incompatible with the
// sink's dataset makes process() throw EventDeliveryException and that the
// failed event is rolled back onto the channel.
@Test
public void testIncompatibleSchemas() throws EventDeliveryException {
final DatasetSink sink = sink(in, config);
GenericRecordBuilder builder = new GenericRecordBuilder(
INCOMPATIBLE_SCHEMA);
GenericData.Record rec = builder.set("username", "koala").build();
putToChannel(in, event(rec, INCOMPATIBLE_SCHEMA, null, false));
// run the sink
sink.start();
// process() must surface the schema mismatch as EventDeliveryException
assertThrows("Should fail", EventDeliveryException.class,
new Callable() {
@Override
public Object call() throws EventDeliveryException {
sink.process();
return null;
}
});
sink.stop();
// The failed event remains on the channel, so it holds one more event than
// the successfully delivered set.
Assert.assertEquals("Should have rolled back",
expected.size() + 1, remaining(in));
}
示例7: createParquetFile
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Create a Parquet data file (via a Kite dataset) that gets exported to the db.
 * The dataset is created at "dataset:file:" + getTablePath() with a schema
 * built from the extra column generators; each record gets an "id", a "msg",
 * and any extra columns.
 *
 * @param fileNum the number of the file (for multi-file export)
 * @param numRecords how many records to write to the file.
 * @param extraCols generators for additional columns beyond id/msg
 * @throws IOException on dataset or write failure
 */
protected void createParquetFile(int fileNum, int numRecords,
ColumnGenerator... extraCols) throws IOException {
String uri = "dataset:file:" + getTablePath();
Schema schema = buildSchema(extraCols);
DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
.schema(schema)
.format(Formats.PARQUET)
.build();
Dataset dataset = Datasets.create(uri, descriptor);
DatasetWriter writer = dataset.newWriter();
try {
for (int i = 0; i < numRecords; i++) {
GenericRecord record = new GenericData.Record(schema);
record.put("id", i);
record.put("msg", getMsgPrefix() + i);
addExtraColumns(record, i, extraCols);
writer.write(record);
}
} finally {
// Always close the writer so the Parquet footer is flushed.
writer.close();
}
}
示例8: testParquetRecordsNotSupported
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Exporting a Parquet file whose column holds a nested Avro record must fail:
 * the export is expected to throw, and that exception is the passing condition.
 */
public void testParquetRecordsNotSupported() throws IOException, SQLException {
    String[] argv = {};
    final int TOTAL_RECORDS = 1;
    // Build a one-field nested record to embed as a column value.
    Schema nestedSchema = Schema.createRecord("nestedrecord", null, null, false);
    nestedSchema.setFields(Lists.newArrayList(buildField("myint",
        Schema.Type.INT)));
    GenericRecord nested = new GenericData.Record(nestedSchema);
    nested.put("myint", 100);
    // DB type is not used so can be anything:
    ColumnGenerator generator = colGenerator(nested, nestedSchema, null, "VARCHAR(64)");
    createParquetFile(0, TOTAL_RECORDS, generator);
    createTable(generator);
    try {
        runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
        fail("Parquet records can not be exported.");
    } catch (Exception e) {
        // expected: nested records are unsupported for export
        assertTrue(true);
    }
}
示例9: testAvroRecordsNotSupported
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Exporting an Avro file whose column holds a nested record must fail: the
 * export is expected to throw, and that exception is the passing condition.
 */
public void testAvroRecordsNotSupported() throws IOException, SQLException {
    String[] argv = {};
    final int TOTAL_RECORDS = 1;
    // Build a one-field nested record to embed as a column value.
    Schema nestedSchema = Schema.createRecord("nestedrecord", null, null, false);
    nestedSchema.setFields(Lists.newArrayList(buildAvroField("myint",
        Schema.Type.INT)));
    GenericRecord nested = new GenericData.Record(nestedSchema);
    nested.put("myint", 100);
    // DB type is not used so can be anything:
    ColumnGenerator generator = colGenerator(nested, nestedSchema, null, "VARCHAR(64)");
    createAvroFile(0, TOTAL_RECORDS, generator);
    createTable(generator);
    try {
        runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
        fail("Avro records can not be exported.");
    } catch (Exception e) {
        // expected: nested records are unsupported for export
        assertTrue(true);
    }
}
示例10: convertToAvroRecord
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Builds an Avro record from a positional array of values, converting each
 * value with its column converter.
 *
 * @param avroRecordSchema schema of the record to build
 * @param values one value per configured column, in converter order
 * @return the populated Avro record
 * @throws IllegalArgumentException if the value count does not match the
 *         converter count (schema mismatch)
 */
private GenericRecord convertToAvroRecord(Schema avroRecordSchema, Object[] values) {
    // TODO can be improved to create the record once and reuse it
    GenericRecord avroRec = new GenericData.Record(avroRecordSchema);
    List<ColumnConverterDescriptor> columnConverters = converterDescriptor.getColumnConverters();
    if (values.length != columnConverters.size()) {
        // IllegalArgumentException (a RuntimeException subtype, so existing
        // callers are unaffected) states the contract violation precisely,
        // instead of a raw RuntimeException.
        throw new IllegalArgumentException("Expecting " + columnConverters.size() + " fields, received "
            + values.length + " values");
    }
    for (int i = 0; i < values.length; i++) {
        ColumnConverterDescriptor columnConverterDescriptor = columnConverters.get(i);
        // Convert the raw value to its Avro-writable representation.
        Object valueToWrite = columnConverterDescriptor.getWritable(values[i]);
        avroRec.put(columnConverterDescriptor.getColumnName(), valueToWrite);
    }
    return avroRec;
}
示例11: bussinessDeal
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Performs the RPC business flow: parses the Members protocol, builds a
 * "login" request record, sends it over the given transceiver, and prints the
 * "msg" field of the response.
 *
 * @param transceiver open Avro transceiver to send the request through
 * @throws IOException on protocol-parse or transport failure
 */
private void bussinessDeal(Transceiver transceiver) throws IOException {
// 2. Parse the protocol definition from the classpath resource.
Protocol protocol = Protocol.parse(this.getClass().getResourceAsStream("/Members.avpr"));
// 3. Build a requestor from the protocol and the transceiver.
GenericRequestor requestor = new GenericRequestor(protocol, transceiver);
// 4. Create the top-level request record from the "login" message schema.
GenericRecord loginGr = new GenericData.Record(protocol.getMessages().get("login").getRequest());
// 5. Create the nested "Members" record from the protocol's type definition.
GenericRecord mGr = new GenericData.Record(protocol.getType("Members"));
// 6. Populate the request payload.
mGr.put("userName", "rita");
mGr.put("userPwd", "123456");
// 7. Attach the nested record to the top-level message record.
loginGr.put("m", mGr);
// 8. Send the request and receive the response.
Object retObj = requestor.request("login", loginGr);
// 9. Decode the response and print its "msg" field.
GenericRecord upGr = (GenericRecord) retObj;
System.out.println(upGr.get("msg"));
}
示例12: MemberInfoDynSer
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Dynamic serialization: parses the Members schema at runtime, then writes 20
 * sample GenericRecords to an Avro container file.
 *
 * @throws IOException on schema-parse or file-write failure
 */
public void MemberInfoDynSer() throws IOException {
    // 1. Parse the schema from the classpath resource.
    Parser parser = new Parser();
    Schema mSchema = parser.parse(this.getClass().getResourceAsStream("/Members.avsc"));
    // 2. GenericDatumWriter (not SpecificDatumWriter) is the correct writer for
    //    GenericRecord instances built from a runtime-parsed schema;
    //    SpecificDatumWriter is meant for generated specific classes.
    DatumWriter<GenericRecord> mGr = new org.apache.avro.generic.GenericDatumWriter<GenericRecord>(mSchema);
    // 3. try-with-resources flushes and closes the container file on all paths
    //    (the original leaked the writer if create/append threw).
    try (DataFileWriter<GenericRecord> mDfw = new DataFileWriter<GenericRecord>(mGr)) {
        mDfw.create(mSchema, new File("/Users/a/Desktop/tmp/members.avro"));
        // 4. Append sample records; RNG hoisted out of the loop (one instance,
        //    not one per iteration).
        Random random = new Random();
        for (int i = 0; i < 20; i++) {
            GenericRecord gr = new GenericData.Record(mSchema);
            int r = i * random.nextInt(50);
            gr.put("userName", "light-" + r);
            gr.put("userPwd", "2016-" + r);
            gr.put("realName", "滔滔" + r + "号");
            mDfw.append(gr);
        }
    }
    System.out.println("Dyn Builder Ser Start Complete.");
}
示例13: serialize
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Serializes a 4-tuple of (fieldName, value, fieldName, value) strings into
 * Avro binary, lazily building the schema and codec on first use.
 *
 * @param t tuple holding alternating field names and values
 * @return Avro-encoded bytes of the assembled record
 */
@Override
public byte[] serialize(Tuple4<String, String, String, String> t) {
    // NOTE(review): this lazy init is not guarded — assumes single-threaded
    // use of the serializer; confirm with the caller.
    if (!initialized) {
        parser = new Schema.Parser();
        schema = parser.parse(schemaJson);
        recordInjection = GenericAvroCodecs.toBinary(schema);
        initialized = true;
    }
    GenericData.Record record = new GenericData.Record(schema);
    // Tuple fields arrive in (name, value) pairs: positions (0,1) and (2,3).
    for (int pos = 0; pos + 1 < t.getArity(); pos += 2) {
        record.put(t.getField(pos).toString(), t.getField(pos + 1).toString());
    }
    return recordInjection.apply(record);
}
示例14: get
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Looks up a field of the record by name or alias and returns its value, or
 * the supplied default when no such field exists in the record's schema.
 *
 * @param fieldName field name (or alias) to resolve
 * @param record record to read from
 * @param defaultValue value returned when the field is absent
 * @return the field's value, or {@code defaultValue} if not found
 */
public static Object get(String fieldName, GenericData.Record record, Object defaultValue)
{
    // Scan schema fields in declaration order; first name/alias match wins.
    for (Schema.Field candidate : record.getSchema().getFields())
    {
        if (candidate.name().equals(fieldName) || candidate.aliases().contains(fieldName))
        {
            return record.get(candidate.pos());
        }
    }
    return defaultValue;
}
示例15: serialize
import org.apache.avro.generic.GenericData; //导入依赖的package包/类
/**
 * Serializes an AdvancedEmployee into an Avro Record: name, age, gender, and
 * the email list converted to a GenericData.Array of Utf8 (empty when the
 * employee has no mail list).
 *
 * @param employee employee to serialize
 * @return populated Avro record
 */
public Record serialize(AdvancedEmployee employee)
{
    Record record = new Record(schema);
    AvroUtils.put("name", employee.getName(), record);
    AvroUtils.put("age", employee.getAge(), record);
    AvroUtils.put("gender", employee.getGender(), record);
    // A null mail list is serialized as an empty array.
    int mailCount = (employee.getMails() == null) ? 0 : employee.getMails().size();
    GenericData.Array<Utf8> emails = new GenericData.Array<>(mailCount, schema.getField("emails").schema());
    for (int i = 0; i < mailCount; i++)
    {
        emails.add(new Utf8(employee.getMails().get(i)));
    }
    record.put("emails", emails);
    return record;
}