This article collects typical usage examples of the Java class org.apache.avro.SchemaNormalization. If you are wondering what SchemaNormalization does, how to use it, or simply want to see it in real code, the hand-picked class examples below should help.
The SchemaNormalization class belongs to the org.apache.avro package. Nine code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
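Before diving into the examples, here is a minimal, self-contained sketch of the two SchemaNormalization calls the examples below rely on. The Demo schema is invented purely for illustration and is not taken from any of the projects quoted here:

import org.apache.avro.Schema;
import org.apache.avro.SchemaNormalization;

import java.security.NoSuchAlgorithmException;

public class FingerprintSketch {
  public static void main(String[] args) throws NoSuchAlgorithmException {
    // A trivial record schema, used only for illustration.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Demo\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");

    // 64-bit CRC-64-AVRO fingerprint of the schema's Parsing Canonical Form.
    long fp64 = SchemaNormalization.parsingFingerprint64(schema);

    // Same idea, but returning raw bytes; besides "CRC-64-AVRO", any MessageDigest
    // algorithm name (e.g. "SHA-256") is accepted, hence the checked exception.
    byte[] fpBytes = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);

    System.out.println(fp64 + " / " + fpBytes.length + " bytes");
  }
}

Because both calls fingerprint the normalized ("parsing canonical") form of the schema, whitespace, attribute order, and doc strings do not affect the result.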
Example 1: initialize
import org.apache.avro.SchemaNormalization; // import the required package/class

private void initialize() throws IOException, NoSuchAlgorithmException {
  SeekableResettableInputBridge in = new SeekableResettableInputBridge(ris);
  long pos = in.tell();
  in.seek(0L);
  fileReader = new DataFileReader<GenericRecord>(in,
      new GenericDatumReader<GenericRecord>());
  fileReader.sync(pos);
  schema = fileReader.getSchema();
  datumWriter = new GenericDatumWriter(schema);
  out = new ByteArrayOutputStream();
  encoder = EncoderFactory.get().binaryEncoder(out, encoder);
  schemaHash = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);
  schemaHashString = Hex.encodeHexString(schemaHash);
}
Example 2: testSchemaHash
import org.apache.avro.SchemaNormalization; // import the required package/class

@Test
public void testSchemaHash() throws IOException, NoSuchAlgorithmException {
  File tempFile = newTestFile(true);
  String target = tempFile.getAbsolutePath();
  logger.info("Target: {}", target);
  TransientPositionTracker tracker = new TransientPositionTracker(target);
  Context context = new Context();
  context.put(AvroEventDeserializer.CONFIG_SCHEMA_TYPE_KEY,
      AvroEventDeserializer.AvroSchemaType.HASH.toString());
  ResettableInputStream in =
      new ResettableFileInputStream(tempFile, tracker);
  EventDeserializer des =
      new AvroEventDeserializer.Builder().build(context, in);
  Event event = des.readEvent();
  String eventSchemaHash =
      event.getHeaders().get(AvroEventDeserializer.AVRO_SCHEMA_HEADER_HASH);
  String expectedSchemaHash = Hex.encodeHexString(
      SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema));
  Assert.assertEquals(expectedSchemaHash, eventSchemaHash);
}
Example 3: MapDbStore
import org.apache.avro.SchemaNormalization; // import the required package/class

public MapDbStore(String path, Collection<Serializer> serializers) {
  super(serializers);
  db = DBMaker.newFileDB(new File(path, "mapdb.store"))
      .closeOnJvmShutdown()
      .asyncWriteEnable()
      .make();
  events = db.createTreeMap("events").keySerializer(BTreeKeySerializer.TUPLE3).make();
  schemas = db.createTreeMap("schemas").make();
  for (Serializer serializer : serializers) {
    Schema schema = serializer.getSchema();
    long fingerprint = SchemaNormalization.parsingFingerprint64(schema);
    byte[] data = schemas.get(fingerprint);
    if (data == null) {
      schemas.put(fingerprint, schema.toString().getBytes());
    }
  }
}
Example 4: AbstractStore
import org.apache.avro.SchemaNormalization; // import the required package/class

public AbstractStore(Collection<Serializer> serializers) {
  Map<Class, Serializer> serializerMap = new HashMap<>();
  Map<String, Serializer> serializerByRecordMap = new HashMap<>();
  Map<Class, Long> fingerprintMap = new HashMap<>();
  for (Serializer serializer : serializers) {
    serializerMap.put(serializer.getEventClass(), serializer);
    Schema schema = serializer.getSchema();
    serializerByRecordMap.put(schema.getFullName(), serializer);
    long fingerprint = SchemaNormalization.parsingFingerprint64(schema);
    fingerprintMap.put(serializer.getEventClass(), fingerprint);
  }
  serializersByClass
      = new ImmutableMap.Builder<Class, Serializer>().putAll(serializerMap).build();
  serializersByRecordName
      = new ImmutableMap.Builder<String, Serializer>().putAll(serializerByRecordMap).build();
  fingerprintByClass
      = new ImmutableMap.Builder<Class, Long>().putAll(fingerprintMap).build();
}
Example 5: BdbStore
import org.apache.avro.SchemaNormalization; // import the required package/class

public BdbStore(String path, Collection<Serializer> serializers) {
  super(serializers);
  EnvironmentConfig environmentConfig = new EnvironmentConfig();
  environmentConfig.setAllowCreate(true);
  environment = new Environment(new File(path), environmentConfig);
  DatabaseConfig databaseConfig = new DatabaseConfig();
  databaseConfig.setAllowCreate(true);
  databaseConfig.setSortedDuplicates(false);
  databaseConfig.setDeferredWrite(true);
  eventDatabase = environment.openDatabase(null, "EventDatabase", databaseConfig);
  schemaDatabase = environment.openDatabase(null, "SchemaDatabase", databaseConfig);
  Runtime.getRuntime().addShutdownHook(new Thread() {
    public void run() {
      close();
    }
  });
  for (Serializer serializer : serializers) {
    Schema schema = serializer.getSchema();
    long fingerprint = SchemaNormalization.parsingFingerprint64(schema);
    DatabaseEntry keyEntry = new DatabaseEntry(ByteBuffer.allocate(8).putLong(fingerprint).array());
    DatabaseEntry valueEntry = new DatabaseEntry();
    OperationStatus operationStatus = schemaDatabase.get(null, keyEntry, valueEntry, LockMode.DEFAULT);
    if (operationStatus == OperationStatus.NOTFOUND) {
      valueEntry.setData(schema.toString().getBytes());
      schemaDatabase.put(null, keyEntry, valueEntry);
    }
  }
}
Example 6: InMemoryStore
import org.apache.avro.SchemaNormalization; // import the required package/class

public InMemoryStore(Collection<Serializer> serializers) {
  super(serializers);
  for (Serializer serializer : serializers) {
    Schema schema = serializer.getSchema();
    long fingerprint = SchemaNormalization.parsingFingerprint64(schema);
    schemas.put(fingerprint, schema);
  }
}
Example 7: FixedAvroSerializer
import org.apache.avro.SchemaNormalization; // import the required package/class

public FixedAvroSerializer() throws IOException, NoSuchAlgorithmException {
  InputStream in = this.getClass().getClassLoader().getResourceAsStream("FixedAvroSerializer.config");
  BufferedReader reader = new BufferedReader(new InputStreamReader(in));
  String line;
  while ((line = reader.readLine()) != null) {
    Schema schema = new Schema.Parser().parse(line);
    byte[] fp = SchemaNormalization.parsingFingerprint(FP_ALGO, schema);
    String fingerPrint = new String(Base64.decodeBase64(fp));
    fingerprint2schemaMap.put(fingerPrint, schema);
    schema2fingerprintMap.put(schema, fingerPrint);
  }
}
Example 8: main
import org.apache.avro.SchemaNormalization; // import the required package/class

public static void main(String[] args) throws Exception {
  // This is how a float is encoded to be used by Avro.
  System.out.println("\n> 3.5f encoded in Avro:");
  byte[] number = AvroUtils.convetIntToByteArrayInLE(Float.floatToIntBits(MyConstants.MY_FLOAT));
  System.out.println(String.format("0x%s", new String(Hex.encodeHex(number))));

  // This is how a double is encoded to be used by Avro.
  System.out.println("\n> 20.5d encoded in Avro:");
  number = AvroUtils.convetLongToByteArrayInLE(Double.doubleToLongBits(MyConstants.MY_DOUBLE));
  System.out.println(String.format("0x%s", new String(Hex.encodeHex(number))));

  // Create a sample record to use in our examples.
  MyRecord myRecord = AvroUtils.createMyRecord();

  // We can print using the field position in the schema.
  System.out.println("\n> Getting fields using their position:");
  System.out.println(myRecord.get(0));
  System.out.println(myRecord.get(1));

  // Using the field name in the schema.
  System.out.println("\n> Getting fields using their name:");
  System.out.println(myRecord.get("MyLong"));
  System.out.println(myRecord.get("MyString"));

  // Or using the Java property.
  System.out.println("\n> Getting fields using their Java property:");
  System.out.println(myRecord.getMyLong());
  System.out.println(myRecord.getMyString());

  // This way we can extract the schema. Note that this string is not exactly the same as the one defined in the schema file.
  System.out.println("\n> Getting the field schema:");
  System.out.println(myRecord.getSchema());

  // Extract the byte representation of this single object. There is no schema in this format.
  System.out.println("\n> Dumping the bytes in the single object format:");
  ByteBuffer buffer = myRecord.toByteBuffer();
  HexDump.dump(buffer.array(), 0, System.out, 0);

  // Create a MyRecord from the previous byte buffer and dump some info.
  System.out.println("\n> Getting the object from the single object bytes:");
  MyRecord myRecord2 = MyRecord.fromByteBuffer(buffer);
  System.out.println(myRecord2.getMyLong());
  System.out.println(myRecord2.getMyString());

  // This is how the schema CRC is calculated.
  System.out.println("\n> Schema CRC in LE");
  long l = SchemaNormalization.parsingFingerprint64(myRecord.getSchema());
  HexDump.dump(AvroUtils.convetLongToByteArrayInLE(l), 0, System.out, 0);

  System.out.println("\n> 20 in ZigZag");
  int i = AvroUtils.convertToZigZag(20);
  HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);

  System.out.println("\n> 30 in ZigZag");
  i = AvroUtils.convertToZigZag(30);
  HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);

  System.out.println("\n> 23 in ZigZag");
  i = AvroUtils.convertToZigZag(23);
  HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);
}
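For reference, the ZigZag values printed at the end of Example 8 follow Avro's standard int encoding (20 → 40, 30 → 60, 23 → 46). A minimal sketch of that mapping is shown below; AvroUtils.convertToZigZag is assumed to do something equivalent, since its source is not part of this example:

// Sketch of ZigZag encoding as described in the Avro specification.
// AvroUtils.convertToZigZag (not shown above) is assumed to behave like this.
public static int zigZagEncode(int n) {
  // Interleaves positive and negative values so small magnitudes stay small:
  // 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ..., 20 -> 40
  return (n << 1) ^ (n >> 31);
}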
Example 9: fingerprint64
import org.apache.avro.SchemaNormalization; // import the required package/class

/**
 * Utility method used by Velocity templates to generate a serialVersionUID for AVRO beans.
 *
 * @param schema the data bean's AVRO schema.
 * @return serialVersionUID for Serializable AVRO data beans.
 */
public static long fingerprint64(Schema schema) {
  return SchemaNormalization.parsingFingerprint64(schema);
}