This article collects typical usage examples of the Java method org.apache.avro.Schema.createMap. If you are wondering what Schema.createMap does, how to call it, or where to find usage examples, the hand-picked method code examples here may help. You can also explore further usage examples of the enclosing class, org.apache.avro.Schema.
Three code examples of Schema.createMap are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
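Before the examples, here is a minimal sketch (not drawn from any of the projects quoted below; the class name CreateMapSketch is made up for illustration) of the basic call: Schema.createMap(valueSchema) builds an Avro map schema whose keys are always strings and whose values conform to the supplied value schema.

import org.apache.avro.Schema;

public class CreateMapSketch {
  public static void main(String[] args) {
    // Keys of an Avro map are implicitly strings; only the value schema is supplied.
    Schema longValues = Schema.create(Schema.Type.LONG);
    Schema mapOfLongs = Schema.createMap(longValues);

    // Prints the schema as JSON, e.g. {"type":"map","values":"long"}
    System.out.println(mapOfLongs.toString());
  }
}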
Example 1: testValidateArrayWriterSchema
import org.apache.avro.Schema; // import the package/class this method depends on
@Test
public void testValidateArrayWriterSchema() throws Exception {
  // An array reader can read data written with a string-array schema; a map reader cannot.
  final Schema validReader = Schema.createArray(STRING_SCHEMA);
  final Schema invalidReader = Schema.createMap(STRING_SCHEMA);
  final SchemaCompatibility.SchemaPairCompatibility validResult =
      new SchemaCompatibility.SchemaPairCompatibility(
          SchemaCompatibility.SchemaCompatibilityResult.compatible(),
          validReader,
          STRING_ARRAY_SCHEMA,
          SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
  final SchemaCompatibility.SchemaPairCompatibility invalidResult =
      new SchemaCompatibility.SchemaPairCompatibility(
          SchemaCompatibility.SchemaCompatibilityResult.incompatible(
              SchemaIncompatibilityType.TYPE_MISMATCH, invalidReader, STRING_ARRAY_SCHEMA,
              "reader type: MAP not compatible with writer type: ARRAY", Arrays.asList("")),
          invalidReader,
          STRING_ARRAY_SCHEMA,
          String.format(
              "Data encoded using writer schema:%n%s%n"
              + "will or may fail to decode using reader schema:%n%s%n",
              STRING_ARRAY_SCHEMA.toString(true),
              invalidReader.toString(true)));
  assertEquals(
      validResult,
      checkReaderWriterCompatibility(validReader, STRING_ARRAY_SCHEMA));
  assertEquals(
      invalidResult,
      checkReaderWriterCompatibility(invalidReader, STRING_ARRAY_SCHEMA));
}
Example 2: getComplexTypeDesc
import org.apache.avro.Schema; // import the package/class this method depends on
private Schema getComplexTypeDesc(DataType type) {
  Schema resultTypeInfo = null;
  switch (type.getCategory()) {
    case Basic:
      resultTypeInfo = parquetAvroSchemaMap.get(type.toString());
      break;
    case Struct:
      // create record here: build an Avro record from the struct's column names and types
      StructType structObjectType = (StructType) type;
      List<DataType> dataTypes = Arrays.asList(structObjectType.getColumnTypes());
      Schema[] schemas = dataTypes.stream().map(dataType -> getComplexTypeDesc(dataType))
          .toArray(size -> new Schema[size]);
      resultTypeInfo = ParquetUtils.createAvroRecordSchema(columnName,
          structObjectType.getColumnNames(), schemas);
      break;
    case List:
      resultTypeInfo =
          Schema.createArray(getComplexTypeDesc(((ListType) type).getTypeOfElement()));
      break;
    case Union:
      final UnionType unionObjectType = (UnionType) type;
      final DataType[] columnTypes1 = unionObjectType.getColumnTypes();
      List<Schema> colTypes = new ArrayList<>();
      for (int i = 0; i < columnTypes1.length; i++) {
        colTypes.add(getComplexTypeDesc(columnTypes1[i]));
      }
      resultTypeInfo = Schema.createUnion(colTypes);
      break;
    case Map:
      // wrap the value type's schema in an Avro map schema via Schema.createMap
      MapType mapObjectType = (MapType) type;
      resultTypeInfo = Schema.createMap(getComplexTypeDesc(mapObjectType.getTypeOfValue()));
      break;
    default:
      break;
  }
  return resultTypeInfo;
}
Example 3: AvroMapSchema
import org.apache.avro.Schema; // import the package/class this method depends on
public AvroMapSchema( final MapContainerField schema ) throws IOException {
  this.schema = schema;
  // Build the value schema for the map's entries, then wrap it in an Avro map schema.
  Schema childSchema = AvroSchemaFactory.getAvroSchema( schema.getField() );
  avroSchema = Schema.createMap( childSchema );
}
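As a closing note, the sketch below (a hedged example, not part of any project quoted above; the class name MapRoundTripSketch is invented) shows how a schema produced by Schema.createMap can be used with Avro's generic datum writer and reader to serialize and deserialize a plain java.util.Map.

import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class MapRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // A map schema with string values: {"type":"map","values":"string"}
    Schema mapSchema = Schema.createMap(Schema.create(Schema.Type.STRING));

    Map<String, String> datum = new HashMap<>();
    datum.put("greeting", "hello");

    // Serialize the map using the generic datum writer.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    GenericDatumWriter<Map<String, String>> writer = new GenericDatumWriter<>(mapSchema);
    writer.write(datum, encoder);
    encoder.flush();

    // Deserialize it back; map keys are returned as org.apache.avro.util.Utf8.
    Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
    GenericDatumReader<Object> reader = new GenericDatumReader<>(mapSchema);
    Object roundTripped = reader.read(null, decoder);
    System.out.println(roundTripped);
  }
}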