本文整理汇总了Java中org.apache.avro.Schema.Parser类的典型用法代码示例。如果您正苦于以下问题:Java Parser类的具体用法?Java Parser怎么用?Java Parser使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
Parser类属于org.apache.avro.Schema包,在下文中一共展示了Parser类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: MemberInfoDynDeser
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * Dynamic deserialization: parses the Avro schema file at runtime and reads
 * every record back out of the serialized data file, printing each one.
 *
 * @throws IOException if the schema resource or the data file cannot be read
 */
public void MemberInfoDynDeser() throws IOException {
    // 1. Parse the schema definition shipped on the classpath.
    Parser parser = new Parser();
    Schema mSchema = parser.parse(this.getClass().getResourceAsStream("/Members.avsc"));
    // 2. Build the datum reader. GenericDatumReader matches the GenericRecord
    //    type parameter; the original SpecificDatumReader is intended for
    //    generated specific classes.
    DatumReader<GenericRecord> mGr = new GenericDatumReader<GenericRecord>(mSchema);
    // 3. try-with-resources closes the file reader even if a record fails to
    //    deserialize (the original leaked it on error).
    try (DataFileReader<GenericRecord> mDfr =
            new DataFileReader<GenericRecord>(new File("/Users/a/Desktop/tmp/members.avro"), mGr)) {
        while (mDfr.hasNext()) {
            GenericRecord gr = mDfr.next();
            // Normal progress output belongs on stdout, not stderr.
            System.out.println("deser data:" + gr.toString());
        }
    }
    // Original message said "Ser" in a deserialization method; corrected.
    System.out.println("Dyn Builder Deser Complete.");
}
示例2: MemberInfoDynSer
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * Dynamic serialization: parses the Avro schema file at runtime, fills in
 * record fields generically, and serializes 20 records to a data file.
 *
 * @throws IOException if the schema resource cannot be parsed or the data
 *                     file cannot be written
 */
public void MemberInfoDynSer() throws IOException {
    // 1. Parse the schema definition shipped on the classpath.
    Parser parser = new Parser();
    Schema mSchema = parser.parse(this.getClass().getResourceAsStream("/Members.avsc"));
    // 2. GenericDatumWriter matches the GenericRecord type parameter; the
    //    original SpecificDatumWriter is intended for generated specific classes.
    DatumWriter<GenericRecord> mGr = new GenericDatumWriter<GenericRecord>(mSchema);
    // Create the RNG once instead of once per loop iteration.
    Random random = new Random();
    // 3. try-with-resources closes the data file even if an append fails
    //    (the original leaked the writer on error).
    try (DataFileWriter<GenericRecord> mDfw = new DataFileWriter<GenericRecord>(mGr)) {
        // Open the output file bound to the schema.
        mDfw.create(mSchema, new File("/Users/a/Desktop/tmp/members.avro"));
        // 4. Append the serialized records.
        for (int i = 0; i < 20; i++) {
            GenericRecord gr = new GenericData.Record(mSchema);
            int r = i * random.nextInt(50);
            gr.put("userName", "light-" + r);
            gr.put("userPwd", "2016-" + r);
            gr.put("realName", "滔滔" + r + "号");
            mDfw.append(gr);
        }
    }
    System.out.println("Dyn Builder Ser Start Complete.");
}
示例3: testGenerateAvro3
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
@Test
public void testGenerateAvro3() {
    try {
        Parser parser = new Schema.Parser();
        Schema peopleSchema = parser.parse(new File(getTestResource("people.avsc").toURI()));
        GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(peopleSchema);
        File tempfile = File.createTempFile("karma-people", "avro");
        tempfile.deleteOnExit();
        // Read the source records as JSON; try-with-resources closes the
        // stream (the original FileInputStream was never closed).
        JSONArray array;
        try (FileInputStream jsonIn = new FileInputStream(new File(getTestResource("people.json").toURI()))) {
            array = new JSONArray(IOUtils.toString(jsonIn));
        }
        // try-with-resources flushes and closes the writer even when an
        // append throws mid-loop (the original leaked it on failure).
        try (DataFileWriter<GenericRecord> dfw = new DataFileWriter<GenericRecord>(datumWriter)) {
            dfw.create(peopleSchema, new FileOutputStream(tempfile));
            for (int i = 0; i < array.length(); i++) {
                dfw.append(generatePersonRecord(peopleSchema, array.getJSONObject(i)));
            }
            dfw.flush();
        }
    } catch (Exception e) {
        logger.error("testGenerateAvro3 failed:", e);
        // Fixed typo in the original failure message ("Execption").
        fail("Exception: " + e.getMessage());
    }
}
示例4: ReadAvro
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public ReadAvro(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
    super(builder, config, parent, child, context);
    // Resolve the optional writer schema: an inline schema string takes
    // precedence over an external schema file; neither leaves it null.
    String inlineSchema = getConfigs().getString(config, "writerSchemaString", null);
    if (inlineSchema != null) {
        this.writerSchema = new Parser().parse(inlineSchema);
    } else {
        String schemaPath = getConfigs().getString(config, "writerSchemaFile", null);
        Schema parsed = null;
        if (schemaPath != null) {
            try {
                parsed = new Parser().parse(new File(schemaPath));
            } catch (IOException e) {
                throw new MorphlineCompilationException("Cannot parse external Avro writer schema file: " + schemaPath, config, e);
            }
        }
        this.writerSchema = parsed;
    }
    this.isJson = getConfigs().getBoolean(config, "isJson", false);
    validateArguments();
}
示例5: ReadAvroContainer
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public ReadAvroContainer(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
    super(builder, config, parent, child, context);
    // Resolve the optional reader schema: an inline schema string takes
    // precedence over an external schema file; neither leaves it null.
    String inlineSchema = getConfigs().getString(config, "readerSchemaString", null);
    if (inlineSchema != null) {
        this.readerSchema = new Parser().parse(inlineSchema);
    } else {
        String schemaPath = getConfigs().getString(config, "readerSchemaFile", null);
        Schema fromFile = null;
        if (schemaPath != null) {
            try {
                fromFile = new Parser().parse(new File(schemaPath));
            } catch (IOException e) {
                throw new MorphlineCompilationException("Cannot parse external Avro reader schema file: " + schemaPath, config, e);
            }
        }
        this.readerSchema = fromFile;
    }
    // Only the concrete ReadAvroContainer class builds the resolver cache and
    // validates here; subclasses get a null cache — presumably they validate
    // in their own constructors (NOTE(review): confirm against subclasses).
    if (getClass() == ReadAvroContainer.class) {
        resolverCache = new BoundedLRUHashMap(getConfigs().getInt(config, "schemaCacheCapacity", 100));
        validateArguments();
    } else {
        resolverCache = null;
    }
}
示例6: MemberInfoToolsSer
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * Serialization via classes generated by the avro-tools jar, e.g.:
 * {@code java -jar E:\avro\avro-tools-1.7.7.jar compile schema E:\avro\Members.avsc E:\avro}
 *
 * Demonstrates the three ways of populating a generated {@code Members}
 * record, then writes all three to a data file.
 *
 * @throws IOException if the schema resource cannot be parsed or the data
 *                     file cannot be written
 */
public void MemberInfoToolsSer() throws IOException {
    // 1. Populate Members objects in the three supported styles.
    // 1.1 All-args constructor.
    Members m1 = new Members("xiaoming", "123456", "校名");
    // 1.2 Setters.
    Members m2 = new Members();
    m2.setUserName("xiaoyi");
    m2.setUserPwd("888888");
    m2.setRealName("小艺");
    // 1.3 Builder.
    Members m3 = Members.newBuilder().setUserName("xiaohong").setUserPwd("999999").setRealName("小红").build();
    // 2. Build the serialization writer (the original comment said
    //    "deserialization", which was wrong — this object writes data).
    DatumWriter<Members> mDw = new SpecificDatumWriter<Members>(Members.class);
    // 2.1 Parse the schema from Members.avsc on the classpath.
    Schema schema = new Parser().parse(this.getClass().getResourceAsStream("/Members.avsc"));
    // 3. try-with-resources closes the data file even if an append fails
    //    (the original leaked the writer on error).
    try (DataFileWriter<Members> mDfw = new DataFileWriter<Members>(mDw)) {
        // 3.1 Open a channel binding the schema to the output file.
        mDfw.create(schema, new File("E:/avro/members.avro"));
        // 4. Append the three records built above.
        mDfw.append(m1);
        mDfw.append(m2);
        mDfw.append(m3);
    }
    System.out.println("Tools Builder Ser Start Complete.");
}
示例7: retrieveSchema
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
@Override
public Schema retrieveSchema(TableId table, String topic) {
    try {
        String subject = getSubject(topic);
        logger.debug("Retrieving schema information for topic {} with subject {}", topic, subject);
        // Fetch the latest registered Avro schema for the subject and convert
        // it to a Connect schema.
        SchemaMetadata latest = schemaRegistryClient.getLatestSchemaMetadata(subject);
        org.apache.avro.Schema parsedAvroSchema = new Parser().parse(latest.getSchema());
        return avroData.toConnectSchema(parsedAvroSchema);
    } catch (IOException | RestClientException exception) {
        throw new ConnectException(
            "Exception encountered while trying to fetch latest schema metadata from Schema Registry",
            exception
        );
    }
}
示例8: JsonReader
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public JsonReader(String dataFilepath, String schemaFilepath) throws Exception {
    // Remember the data location, parse the schema, and wire a JSON decoder
    // over the data file.
    this.filepath = dataFilepath;
    Schema schema = new Schema.Parser().parse(new File(schemaFilepath));
    this.datumReader = new SpecificDatumReader<Object>(schema);
    FileInputStream dataStream = new FileInputStream(new File(dataFilepath));
    this.decoder = DecoderFactory.get().jsonDecoder(schema, dataStream);
}
示例9: BinaryReader
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public BinaryReader(String dataFilepath, String schemaFilepath) throws Exception {
    this.filepath = dataFilepath;
    // Parse the schema, then open an Avro container-file reader over the data.
    Schema schema = new Schema.Parser().parse(new File(schemaFilepath));
    SpecificDatumReader<Object> reader = new SpecificDatumReader<Object>(schema);
    this.dataFileReader = new DataFileReader<>(new File(dataFilepath), reader);
}
示例10: FileWriter
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public FileWriter(String schemaFile, String outputFile) throws IOException {
    // Parse the schema and open a container-file writer bound to it.
    Schema schema = new Schema.Parser().parse(new File(schemaFile));
    SpecificDatumWriter<Object> writer = new SpecificDatumWriter<Object>(schema);
    fileWriter = new DataFileWriter<>(writer);
    fileWriter.create(schema, new File(outputFile));
}
示例11: JsonFileWriter
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * Opens a JSON encoder that writes Avro records conforming to the given
 * schema into {@code outputFilePath}.
 *
 * @param schemaFile     path of the Avro schema (.avsc) file to parse
 * @param outputFilePath destination file; its parent directory must already exist
 * @throws Exception if the output path has no existing parent directory, or
 *                   the schema/output file cannot be opened
 */
public JsonFileWriter(String schemaFile, String outputFilePath) throws Exception {
    File outputFile = new File(outputFilePath);
    // new File(...) never returns null, so the original null check was dead
    // code; only the parent directory needs validating.
    // NOTE(review): a bare filename (no parent component) is rejected here
    // too — presumably intentional; confirm callers always pass a
    // directory-qualified path.
    if (outputFile.getParentFile() == null || !outputFile.getParentFile().exists()) {
        throw new Exception("Invalid file path: " + outputFilePath);
    }
    Parser parser = new Schema.Parser();
    Schema schema = parser.parse(new File(schemaFile));
    datumWriter = new SpecificDatumWriter<Object>(schema);
    OutputStream outputStream = new FileOutputStream(outputFile);
    jsonEncoder = EncoderFactory.get().jsonEncoder(schema, outputStream);
}
示例12: initialiseInput
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
private void initialiseInput(final Job job, final MapReduce operation) throws IOException {
    // The Avro schema location is mandatory for configuring the input format.
    if (avroSchemaFilePath == null) {
        throw new IllegalArgumentException("Avro schema file path has not been set");
    }
    final Schema schema = new Parser().parse(new File(avroSchemaFilePath));
    AvroJob.setInputKeySchema(job, schema);
    job.setInputFormatClass(AvroKeyInputFormat.class);
    // Register every input path whose mapper-pair value mentions the
    // configured mapper generator.
    for (final Map.Entry<String, String> pair : operation.getInputMapperPairs().entrySet()) {
        if (pair.getValue().contains(job.getConfiguration().get(MAPPER_GENERATOR))) {
            AvroKeyInputFormat.addInputPath(job, new Path(pair.getKey()));
        }
    }
}
示例13: downloadCtlSchemaTest
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * Retrieves a CTL schema by its id.
 */
@Test
public void downloadCtlSchemaTest() throws Exception {
    ApplicationDto application = createApplication(tenantAdminDto);
    this.loginTenantDeveloper(tenantDeveloperUser);
    // Create a schema with a random field-type name, download its FLAT
    // export, and verify the parsed schema carries that name.
    String expectedName = this.ctlRandomFieldType();
    CTLSchemaDto created = this.createCTLSchema(expectedName, CTL_DEFAULT_NAMESPACE, 1,
            tenantDeveloperDto.getTenantId(), application.getApplicationToken(), null, null);
    FileData exported = client.downloadCtlSchemaByAppToken(client.getCTLSchemaById(created.getId()),
            CTLSchemaExportMethod.FLAT, application.getApplicationToken());
    Assert.assertNotNull(exported);
    Schema parsedSchema = new Parser().parse(new String(exported.getFileData()));
    Assert.assertEquals(expectedName, parsedSchema.getName());
}
示例14: main
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
/**
 * End-to-end example: serializes two generic user records to users.avro,
 * then reads them back and prints each record.
 *
 * @param args unused
 * @throws IOException if the schema or the data file cannot be read/written
 */
public static void main(String[] args) throws IOException {
    String schemaRoot = System.getenv("RECORD_SERVICE_HOME") + "/testdata/avro_schemas/";
    System.out.println(schemaRoot);
    Schema schema = new Parser().parse(new File(schemaRoot + "user.avsc"));

    GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);
    // Leave favorite color null
    GenericRecord user2 = new GenericData.Record(schema);
    user2.put("name", "Ben");
    user2.put("favorite_number", 7);
    user2.put("favorite_color", "red");

    // Serialize user1 and user2 to disk. try-with-resources closes the
    // writer even if create/append throws (the original only guarded the
    // reader with try/finally and leaked the writer on error).
    File file = new File("users.avro");
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    try (DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter)) {
        dataFileWriter.create(schema, file);
        dataFileWriter.append(user1);
        dataFileWriter.append(user2);
    }

    // Deserialize users from disk.
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
    try (DataFileReader<GenericRecord> dataFileReader =
            new DataFileReader<GenericRecord>(file, datumReader)) {
        // Reuse user object by passing it to next(). This saves us from
        // allocating and garbage collecting many objects for files with
        // many items.
        GenericRecord user = null;
        while (dataFileReader.hasNext()) {
            user = dataFileReader.next(user);
            System.out.println(user);
        }
    }
}
示例15: ToAvro
import org.apache.avro.Schema.Parser; //导入依赖的package包/类
public ToAvro(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) {
    super(builder, config, parent, child, context);
    String schemaFile = getConfigs().getString(config, "schemaFile", null);
    String schemaString = getConfigs().getString(config, "schemaString", null);
    this.schemaField = getConfigs().getString(config, "schemaField", null);

    // Exactly one of the three schema sources must be configured.
    int definedSources = 0;
    for (String source : new String[] {schemaFile, schemaString, schemaField}) {
        if (source != null) {
            definedSources++;
        }
    }
    if (definedSources == 0) {
        throw new MorphlineCompilationException(
            "Either schemaFile or schemaString or schemaField must be defined", config);
    }
    if (definedSources > 1) {
        throw new MorphlineCompilationException(
            "Must define only one of schemaFile or schemaString or schemaField at the same time", config);
    }

    // Resolve the fixed schema when given inline or as a file; a schemaField
    // configuration leaves it null (resolved per-record elsewhere).
    if (schemaString != null) {
        this.fixedSchema = new Parser().parse(schemaString);
    } else if (schemaFile != null) {
        try {
            this.fixedSchema = new Parser().parse(new File(schemaFile));
        } catch (IOException e) {
            throw new MorphlineCompilationException(
                "Cannot parse external Avro schema file: " + schemaFile, config, e);
        }
    } else {
        this.fixedSchema = null;
    }

    // Copy the configured field mappings into the instance map.
    Config mappingsConfig = getConfigs().getConfig(config, "mappings", ConfigFactory.empty());
    for (Map.Entry<String, Object> mapping : new Configs().getEntrySet(mappingsConfig)) {
        mappings.put(mapping.getKey(), mapping.getValue().toString());
    }
    validateArguments();
}