This article collects typical usage examples of the Java org.apache.avro.Schema.Parser method. If you have been wondering what Schema.Parser does, how to use it, or what it looks like in real code, the curated method examples here may help. You can also explore further usage examples of the enclosing class, org.apache.avro.Schema.
The following presents 9 code examples of the Schema.Parser method, sorted by popularity by default.
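Before the examples, here is a minimal, self-contained sketch of the core pattern they all share: feed the parser a JSON schema definition and get back a Schema object. The Pair record schema below is invented for illustration.
import org.apache.avro.Schema;

public class ParserSketch {
    public static void main(String[] args) {
        String schemaJson = "{\"type\":\"record\",\"name\":\"Pair\","
                + "\"fields\":[{\"name\":\"left\",\"type\":\"string\"},"
                + "{\"name\":\"right\",\"type\":\"string\"}]}";
        // A Parser instance remembers the named types it has already parsed,
        // so reuse one instance when parsing schemas that reference each other.
        Schema schema = new Schema.Parser().parse(schemaJson);
        System.out.println(schema.getName());   // Pair
        System.out.println(schema.getFields()); // the two declared string fields
    }
}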
Example 1: decodeFromTransport
import org.apache.avro.Schema; // import the package/class this method depends on
/**
* Decode the context from the transport.
*
* @param input The FinancialForce Orizuru Avro Transport message from which to decode the context.
* @throws OrizuruConsumerException Exception thrown if decoding the context fails.
*/
@Override
public void decodeFromTransport(Transport input) throws OrizuruConsumerException {
    try {
        String contextSchemaStr = input.getContextSchema().toString();
        Schema.Parser parser = new Schema.Parser();
        this.schema = parser.parse(contextSchemaStr);

        ByteBuffer contextBuffer = input.getContextBuffer();
        this.data = contextBuffer.array();
    } catch (Exception ex) {
        throw new DecodeContextException(ex);
    }
}
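The method above only captures the parsed schema and the raw payload bytes. As a follow-up sketch (not part of the original example), the captured context could then be decoded with Avro's generic reader:
// Assumes the schema and data fields populated by decodeFromTransport above;
// IOException handling omitted for brevity.
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
GenericRecord context = reader.read(null, decoder);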
Example 2: loadFromUrl
import org.apache.avro.Schema; // import the package/class this method depends on
private Schema loadFromUrl(String schemaUrl) throws IOException {
    Configuration conf = new Configuration();
    Schema.Parser parser = new Schema.Parser();
    if (schemaUrl.toLowerCase(Locale.ENGLISH).startsWith("hdfs://")) {
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream input = null;
        try {
            input = fs.open(new Path(schemaUrl));
            return parser.parse(input);
        } finally {
            if (input != null) {
                input.close();
            }
        }
    } else {
        InputStream is = null;
        try {
            is = new URL(schemaUrl).openStream();
            return parser.parse(is);
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }
}
Example 3: load
import org.apache.avro.Schema; // import the package/class this method depends on
@Override
public Schema load(String url) throws IOException {
    Schema.Parser parser = new Schema.Parser();
    InputStream is = null;
    try {
        FileSystem fs = FileSystem.get(URI.create(url), conf);
        if (url.toLowerCase(Locale.ENGLISH).startsWith("hdfs:/")) {
            is = fs.open(new Path(url));
        } else {
            is = new URL(url).openStream();
        }
        return parser.parse(is);
    } finally {
        if (is != null) {
            is.close();
        }
    }
}
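Both this loader and the previous one use the pre-Java-7 close-in-finally idiom. A sketch of the same logic with try-with-resources, assuming the same imports and conf field:
private Schema loadSchema(String url) throws IOException {
    Schema.Parser parser = new Schema.Parser();
    try (InputStream is = url.toLowerCase(Locale.ENGLISH).startsWith("hdfs:/")
            ? FileSystem.get(URI.create(url), conf).open(new Path(url))
            : new URL(url).openStream()) {
        return parser.parse(is);
    }
}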
Example 4: init
import org.apache.avro.Schema; // import the package/class this method depends on
@Override
public void init(java.io.InputStream inputStream, java.util.Properties props) {
    topic = props.getProperty("topic");
    if (props.containsKey("parse.key")) {
        parseKey = props.getProperty("parse.key").trim().toLowerCase().equals("true");
    }
    if (props.containsKey("key.separator")) {
        keySeparator = props.getProperty("key.separator");
    }
    if (props.containsKey("ignore.error")) {
        ignoreError = props.getProperty("ignore.error").trim().toLowerCase().equals("true");
    }
    reader = new BufferedReader(new InputStreamReader(inputStream));
    String url = props.getProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG);
    if (url == null) {
        throw new ConfigException("Missing schema registry url!");
    }
    schemaRegistry = new CachedSchemaRegistryClient(
            url, AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT);
    if (!props.containsKey("value.schema")) {
        throw new ConfigException("Must provide the Avro schema string in value.schema");
    }
    String valueSchemaString = props.getProperty("value.schema");
    Schema.Parser parser = new Schema.Parser();
    valueSchema = parser.parse(valueSchemaString);
    if (parseKey) {
        if (!props.containsKey("key.schema")) {
            throw new ConfigException("Must provide the Avro schema string in key.schema");
        }
        String keySchemaString = props.getProperty("key.schema");
        keySchema = parser.parse(keySchemaString);
    }
    keySubject = topic + "-key";
    valueSubject = topic + "-value";
}
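The <topic>-key and <topic>-value subject names computed at the end follow Confluent's default TopicNameStrategy. With the client created above, registering the parsed schemas would look roughly like the sketch below; the exact register signature varies across schema-registry client versions (newer ones wrap the schema in an AvroSchema), so treat this as an assumption:
// Hedged sketch: older CachedSchemaRegistryClient versions accept an
// org.apache.avro.Schema directly and return the registered schema id.
int keySchemaId = schemaRegistry.register(keySubject, keySchema);
int valueSchemaId = schemaRegistry.register(valueSubject, valueSchema);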
Example 5: parseSchema
import org.apache.avro.Schema; // import the package/class this method depends on
private static Schema parseSchema(String schemaString) {
    try {
        Schema.Parser parser = new Schema.Parser();
        return parser.parse(schemaString);
    } catch (SchemaParseException e) {
        return null;
    }
}
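Because parse failures are mapped to null instead of an exception, callers must check the result; for example:
// candidateJson stands in for any schema string read at runtime.
Schema schema = parseSchema(candidateJson);
if (schema == null) {
    // the string was not a valid Avro schema; reject or log it
}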
Example 6: write
import org.apache.avro.Schema; // import the package/class this method depends on
/**
 * Writes Avro-format data to a Parquet file.
 *
 * @param parquetPath the path of the Parquet file to write
 */
public void write(String parquetPath) {
    Schema.Parser parser = new Schema.Parser();
    try {
        Schema schema = parser.parse(
                AvroParquetOperation.class.getClassLoader().getResourceAsStream("StringPair.avsc"));
        GenericRecord datum = new GenericData.Record(schema);
        datum.put("left", "L");
        datum.put("right", "R");
        Path path = new Path(parquetPath);
        System.out.println(path);
        AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(path, schema);
        writer.write(datum);
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
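The single-argument AvroParquetWriter constructor used above is deprecated in recent parquet-avro releases. A sketch of the same write with the builder API plus try-with-resources, which also guarantees the writer is closed (same schema, datum, and path; requires the org.apache.parquet.hadoop.ParquetWriter import):
try (ParquetWriter<GenericRecord> writer = AvroParquetWriter
        .<GenericRecord>builder(path)
        .withSchema(schema)
        .build()) {
    writer.write(datum);
}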
Example 7: main
import org.apache.avro.Schema; // import the package/class this method depends on
/**
 * Produces binary Avro-encoded records to the "mytopic" Kafka topic in an endless loop.
 *
 * @param args unused
 * @throws InterruptedException if the sleep between sends is interrupted
 */
public static void main(String[] args) throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");

    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(USER_SCHEMA);
    Injection<GenericRecord, byte[]> recordInjection = GenericAvroCodecs.toBinary(schema);

    KafkaProducer<String, byte[]> producer = new KafkaProducer<>(props);
    SplittableRandom random = new SplittableRandom();
    while (true) {
        GenericData.Record avroRecord = new GenericData.Record(schema);
        avroRecord.put("str1", "Str 1-" + random.nextInt(10));
        avroRecord.put("str2", "Str 2-" + random.nextInt(1000));
        avroRecord.put("int1", random.nextInt(10000));

        byte[] bytes = recordInjection.apply(avroRecord);
        ProducerRecord<String, byte[]> record = new ProducerRecord<>("mytopic", bytes);
        producer.send(record);
        Thread.sleep(100);
    }
}
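On the consuming side, the same Injection runs in reverse. A sketch, assuming bytes holds a value polled from the topic and recordInjection was built from the same schema:
GenericRecord decoded = recordInjection.invert(bytes).get();
String str1 = decoded.get("str1").toString();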
Example 8: createPrimitiveSchema
import org.apache.avro.Schema; // import the package/class this method depends on
private static Schema createPrimitiveSchema(Schema.Parser parser, String type) {
    String schemaString = String.format("{\"type\" : \"%s\"}", type);
    return parser.parse(schemaString);
}
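A typical call produces one of Avro's primitive schemas; since primitive schemas are unnamed, reusing a single parser across calls is safe. For example:
Schema.Parser parser = new Schema.Parser();
Schema stringSchema = createPrimitiveSchema(parser, "string");
System.out.println(stringSchema.getType()); // STRING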
Example 9: main
import org.apache.avro.Schema; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
    // Check that all three arguments were supplied
    if (args.length < 3) {
        System.out.println("Usage: <topic> <number of messages> <partition>");
        return;
    }
    // Assign the topic name, message count and target partition
    String topicName = args[0];
    int number = Integer.parseInt(args[1]);
    int partition = Integer.parseInt(args[2]);

    // Create a Properties instance to hold the producer configs
    Properties props = new Properties();
    // Broker address
    props.put("bootstrap.servers", "localhost:9092");
    // Require acknowledgement from all in-sync replicas
    props.put("acks", "all");
    // Number of automatic retries on failure (disabled here)
    props.put("retries", 0);
    // Batch size in bytes
    props.put("batch.size", 16384);
    // Wait up to 1 ms so the producer can batch requests
    props.put("linger.ms", 1);
    // Total memory available to the producer for buffering
    props.put("buffer.memory", 33554432);
    props.put("key.serializer",
            "io.confluent.kafka.serializers.KafkaAvroSerializer");
    props.put("value.serializer",
            "io.confluent.kafka.serializers.KafkaAvroSerializer");
    props.put("schema.registry.url", "http://localhost:8081");

    Producer<Object, Object> producer = new KafkaProducer<>(props);

    String userSchema = "{\"type\":\"record\"," +
            "\"name\":\"myrecord\"," +
            "\"fields\":[{\"name\":\"url\",\"type\":\"string\"}, {\"name\":\"id\",\"type\":\"int\"}]}";
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(userSchema);

    for (int i = 0; i < number; i++) {
        GenericRecord avroRecord = new GenericData.Record(schema);
        avroRecord.put("url", "google.com");
        avroRecord.put("id", i);
        ProducerRecord<Object, Object> record =
                new ProducerRecord<>(topicName, partition, "key", avroRecord);
        producer.send(record);
    }
    System.out.println("Messages sent successfully");
    producer.close();
}
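Because KafkaAvroSerializer registers the record's schema with the configured registry on first send, a matching consumer only needs KafkaAvroDeserializer and the same schema.registry.url. A minimal consumer-config sketch:
Properties consumerProps = new Properties();
consumerProps.put("bootstrap.servers", "localhost:9092");
consumerProps.put("group.id", "avro-demo"); // group id chosen for illustration
consumerProps.put("key.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
consumerProps.put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
consumerProps.put("schema.registry.url", "http://localhost:8081");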