本文整理汇总了Java中org.apache.avro.Schema.Parser方法的典型用法代码示例。如果您正苦于以下问题:Java Schema.Parser方法的具体用法?Java Schema.Parser怎么用?Java Schema.Parser使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.avro.Schema的用法示例。
在下文中一共展示了Schema.Parser方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: decodeFromTransport
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
* Decode the context from the transport.
*
* @param input The FinancialForce Orizuru Avro Transport message from which to decode the context.
* @throws OrizuruConsumerException Exception thrown if decoding the context fails.
*/
@Override
/**
 * Decodes the context carried by the transport message: parses the context
 * schema into an Avro {@code Schema} and captures the raw context bytes.
 *
 * @param input the FinancialForce Orizuru Avro Transport message to decode
 * @throws OrizuruConsumerException if parsing the schema or reading the buffer fails
 */
@Override
public void decodeFromTransport(Transport input) throws OrizuruConsumerException {
try {
// Parse the Avro schema shipped alongside the message.
final String schemaJson = input.getContextSchema().toString();
this.schema = new Schema.Parser().parse(schemaJson);

// Keep the raw context payload bytes for later decoding.
final ByteBuffer contextBuffer = input.getContextBuffer();
this.data = contextBuffer.array();
} catch (Exception ex) {
// Wrap any failure in the domain-specific decode exception.
throw new DecodeContextException(ex);
}
}
示例2: loadFromUrl
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Loads an Avro schema from a URL, supporting both HDFS ("hdfs://...") and
 * ordinary URLs (http, file, ...).
 *
 * <p>Improvements over the original: streams are closed via
 * try-with-resources (even when parsing throws), and the Hadoop
 * {@code Configuration} is only built when the HDFS branch is taken.
 *
 * @param schemaUrl location of the schema definition
 * @return the parsed Avro schema
 * @throws IOException if the stream cannot be opened or the schema is invalid
 */
private Schema loadFromUrl(String schemaUrl) throws IOException {
Schema.Parser parser = new Schema.Parser();
if (schemaUrl.toLowerCase(Locale.ENGLISH).startsWith("hdfs://")) {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
// try-with-resources guarantees the HDFS stream is closed on all paths.
try (FSDataInputStream input = fs.open(new Path(schemaUrl))) {
return parser.parse(input);
}
}
// Non-HDFS: open the URL directly; stream closed automatically.
try (InputStream is = new URL(schemaUrl).openStream()) {
return parser.parse(is);
}
}
示例3: load
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Loads an Avro schema from a URL; URLs starting with "hdfs:/" are read via
 * the Hadoop {@code FileSystem}, anything else via {@code URL.openStream()}.
 *
 * <p>Improvements over the original: the stream is closed with
 * try-with-resources, and the Hadoop {@code FileSystem} is only obtained when
 * the URL is actually an HDFS one (the original created it unconditionally).
 *
 * @param url location of the schema definition
 * @return the parsed Avro schema
 * @throws IOException if the stream cannot be opened or the schema is invalid
 */
@Override
public Schema load(String url) throws IOException {
Schema.Parser parser = new Schema.Parser();
// NOTE(review): this class matches "hdfs:/" (one slash) while the sibling
// loader matches "hdfs://"; kept as-is for backward compatibility.
try (InputStream is = url.toLowerCase(Locale.ENGLISH).startsWith("hdfs:/")
? FileSystem.get(URI.create(url), conf).open(new Path(url))
: new URL(url).openStream()) {
return parser.parse(is);
}
}
示例4: init
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Initializes the reader from the supplied properties: target topic, optional
 * key handling, the schema-registry client, and the Avro value/key schemas.
 *
 * @param inputStream source of the records to read
 * @param props       configuration: {@code topic}, {@code parse.key},
 *                    {@code key.separator}, {@code ignore.error}, the schema
 *                    registry URL, {@code value.schema} and (when keys are
 *                    parsed) {@code key.schema}
 * @throws ConfigException if the registry URL or a required schema is missing
 */
@Override
public void init(java.io.InputStream inputStream, java.util.Properties props) {
topic = props.getProperty("topic");
if (props.containsKey("parse.key")) {
// Boolean.parseBoolean is case-insensitive, matching the original
// trim().toLowerCase().equals("true") check without locale pitfalls.
parseKey = Boolean.parseBoolean(props.getProperty("parse.key").trim());
}
if (props.containsKey("key.separator")) {
keySeparator = props.getProperty("key.separator");
}
if (props.containsKey("ignore.error")) {
ignoreError = Boolean.parseBoolean(props.getProperty("ignore.error").trim());
}
// NOTE(review): uses the platform default charset — confirm whether UTF-8
// should be forced here.
reader = new BufferedReader(new InputStreamReader(inputStream));
String url = props.getProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG);
if (url == null) {
throw new ConfigException("Missing schema registry url!");
}
schemaRegistry = new CachedSchemaRegistryClient(
url, AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT);
if (!props.containsKey("value.schema")) {
throw new ConfigException("Must provide the Avro schema string in value.schema");
}
Schema.Parser parser = new Schema.Parser();
valueSchema = parser.parse(props.getProperty("value.schema"));
if (parseKey) {
if (!props.containsKey("key.schema")) {
throw new ConfigException("Must provide the Avro schema string in key.schema");
}
keySchema = parser.parse(props.getProperty("key.schema"));
}
// Registry subjects follow the topic-name strategy convention.
keySubject = topic + "-key";
valueSubject = topic + "-value";
}
示例5: parseSchema
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Parses an Avro schema from its JSON string form.
 *
 * @param schemaString the schema JSON text
 * @return the parsed schema, or {@code null} if the text is not a valid schema
 */
private static Schema parseSchema(String schemaString) {
try {
return new Schema.Parser().parse(schemaString);
} catch (SchemaParseException e) {
// Invalid schema text: signal failure to the caller with null.
return null;
}
}
示例6: write
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
* 将avro格式的数据写入到parquet文件中
*
* @param parquetPath
*/
/**
 * Writes one Avro record (schema loaded from the classpath resource
 * "StringPair.avsc") to a Parquet file.
 *
 * <p>Improvement over the original: the writer is opened with
 * try-with-resources, so it is closed even when {@code write(...)} throws —
 * the original leaked the writer on failure.
 *
 * @param parquetPath destination path of the Parquet file
 */
public void write(String parquetPath) {
Schema.Parser parser = new Schema.Parser();
try {
// Load the Avro schema bundled on the classpath.
Schema schema = parser.parse(
AvroParquetOperation.class.getClassLoader().getResourceAsStream("StringPair.avsc"));
GenericRecord datum = new GenericData.Record(schema);
datum.put("left", "L");
datum.put("right", "R");
Path path = new Path(parquetPath);
System.out.println(path);
try (AvroParquetWriter<GenericRecord> writer =
new AvroParquetWriter<GenericRecord>(path, schema)) {
writer.write(datum);
}
} catch (IOException e) {
// NOTE(review): consider propagating or logging instead of printStackTrace.
e.printStackTrace();
}
}
示例7: main
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
*
* @param args
* @throws InterruptedException
*/
/**
 * Continuously produces binary-Avro-encoded records to the "mytopic" topic,
 * one every 100 ms.
 *
 * <p>Improvement over the original: the producer is created with
 * try-with-resources, so it is closed when {@code Thread.sleep} is
 * interrupted — the original leaked it in that case.
 *
 * @param args unused
 * @throws InterruptedException if the inter-send sleep is interrupted
 */
public static void main(String[] args) throws InterruptedException {
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");

Schema schema = new Schema.Parser().parse(USER_SCHEMA);
Injection<GenericRecord, byte[]> recordInjection = GenericAvroCodecs.toBinary(schema);
SplittableRandom random = new SplittableRandom();

try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(props)) {
while (true) {
GenericData.Record avroRecord = new GenericData.Record(schema);
avroRecord.put("str1", "Str 1-" + random.nextInt(10));
avroRecord.put("str2", "Str 2-" + random.nextInt(1000));
avroRecord.put("int1", random.nextInt(10000));
byte[] bytes = recordInjection.apply(avroRecord);
producer.send(new ProducerRecord<>("mytopic", bytes));
Thread.sleep(100);
}
}
}
示例8: createPrimitiveSchema
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Builds a primitive Avro schema for the given type name (e.g. "string", "int").
 *
 * @param parser the schema parser to use
 * @param type   the primitive type name
 * @return the parsed primitive schema
 */
private static Schema createPrimitiveSchema(Schema.Parser parser, String type) {
// Same JSON as the original String.format("{\"type\" : \"%s\"}", type).
String schemaJson = "{\"type\" : \"" + type + "\"}";
return parser.parse(schemaJson);
}
示例9: main
import org.apache.avro.Schema; //导入方法依赖的package包/类
/**
 * Produces {@code number} Avro records to the given topic/partition through a
 * schema-registry-aware Kafka producer.
 *
 * <p>Fixes over the original: the argument check covered only
 * {@code args.length == 0} while {@code args[1]} and {@code args[2]} were read
 * unconditionally (ArrayIndexOutOfBoundsException for 1–2 args); the redundant
 * {@code args[0].toString()} is gone; the raw-typed {@code new ProducerRecord}
 * is now properly parameterized.
 *
 * @param args topic name, message count, partition number
 * @throws Exception on producer failure or malformed numeric arguments
 */
public static void main(String[] args) throws Exception {
// All three arguments are required; the original only checked for zero args
// and then crashed reading args[1]/args[2].
if (args.length < 3) {
System.out.println("Usage: <topic name> <message count> <partition>");
return;
}
String topicName = args[0];
int number = Integer.parseInt(args[1]);
int partition = Integer.parseInt(args[2]);

// Producer configuration.
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
// Wait for full acknowledgement of each request.
props.put("acks", "all");
// No automatic retries on request failure.
props.put("retries", 0);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
// Total memory available to the producer for buffering.
props.put("buffer.memory", 33554432);
props.put("key.serializer",
"io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("value.serializer",
"io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("schema.registry.url", "http://localhost:8081");

String userSchema = "{\"type\":\"record\"," +
"\"name\":\"myrecord\"," +
"\"fields\":[{\"name\":\"url\",\"type\":\"string\"}, {\"name\":\"id\",\"type\":\"int\"}]}";
Schema schema = new Schema.Parser().parse(userSchema);

// try-with-resources closes the producer on every exit path.
try (Producer<Object, Object> producer = new KafkaProducer<Object, Object>(props)) {
for (int i = 0; i < number; i++) {
GenericRecord avroRecord = new GenericData.Record(schema);
avroRecord.put("url", "google.com");
avroRecord.put("id", i);
ProducerRecord<Object, Object> record =
new ProducerRecord<>(topicName, partition, "key", avroRecord);
producer.send(record);
}
System.out.println("Messages sent successfully");
}
}