This article collects typical usage examples of the Java method org.apache.avro.Schema.createArray. If you are wondering what Schema.createArray does, how to call it, or what it looks like in real code, the curated examples here may help. You can also read further about its enclosing class, org.apache.avro.Schema.
Three code examples of the Schema.createArray method are shown below, sorted by popularity by default.
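Before the curated examples, here is a minimal sketch of the method itself (the class name CreateArrayDemo is chosen only for illustration): Schema.createArray takes an element schema and returns an ARRAY schema whose items are that element type.

import org.apache.avro.Schema;

public class CreateArrayDemo {
  public static void main(String[] args) {
    // Element schema: a plain Avro string.
    Schema elementSchema = Schema.create(Schema.Type.STRING);
    // Wrap it in an array schema, i.e. {"type":"array","items":"string"}.
    Schema arraySchema = Schema.createArray(elementSchema);
    System.out.println(arraySchema.toString(true));     // pretty-printed JSON form
    System.out.println(arraySchema.getType());          // ARRAY
    System.out.println(arraySchema.getElementType());   // the string schema
  }
}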
Example 1: testValidateArrayWriterSchema
import java.util.Arrays;

import org.apache.avro.Schema; // the class that provides the createArray method
import org.apache.avro.SchemaCompatibility;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;

import org.junit.Test;

import static org.apache.avro.SchemaCompatibility.checkReaderWriterCompatibility;
import static org.junit.Assert.assertEquals;

// STRING_SCHEMA and STRING_ARRAY_SCHEMA are fixtures defined in the enclosing
// test class: a plain string schema and an array-of-string schema built with
// Schema.createArray(STRING_SCHEMA).
@Test
public void testValidateArrayWriterSchema() throws Exception {
  // An ARRAY reader can decode ARRAY data; a MAP reader cannot.
  final Schema validReader = Schema.createArray(STRING_SCHEMA);
  final Schema invalidReader = Schema.createMap(STRING_SCHEMA);

  // Expected result for the compatible reader/writer pair.
  final SchemaCompatibility.SchemaPairCompatibility validResult =
      new SchemaCompatibility.SchemaPairCompatibility(
          SchemaCompatibility.SchemaCompatibilityResult.compatible(),
          validReader,
          STRING_ARRAY_SCHEMA,
          SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);

  // Expected result for the incompatible pair (MAP reader vs. ARRAY writer).
  final SchemaCompatibility.SchemaPairCompatibility invalidResult =
      new SchemaCompatibility.SchemaPairCompatibility(
          SchemaCompatibility.SchemaCompatibilityResult.incompatible(
              SchemaIncompatibilityType.TYPE_MISMATCH, invalidReader, STRING_ARRAY_SCHEMA,
              "reader type: MAP not compatible with writer type: ARRAY", Arrays.asList("")),
          invalidReader,
          STRING_ARRAY_SCHEMA,
          String.format(
              "Data encoded using writer schema:%n%s%n"
                  + "will or may fail to decode using reader schema:%n%s%n",
              STRING_ARRAY_SCHEMA.toString(true),
              invalidReader.toString(true)));

  assertEquals(
      validResult,
      checkReaderWriterCompatibility(validReader, STRING_ARRAY_SCHEMA));
  assertEquals(
      invalidResult,
      checkReaderWriterCompatibility(invalidReader, STRING_ARRAY_SCHEMA));
}
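If you only need the verdict rather than the full SchemaPairCompatibility object, a short sketch of a simpler call pattern (variable names chosen here for illustration, not part of the example above):

Schema reader = Schema.createArray(Schema.create(Schema.Type.STRING));
Schema writer = Schema.createArray(Schema.create(Schema.Type.STRING));
// getType() reports SchemaCompatibilityType.COMPATIBLE or INCOMPATIBLE.
SchemaCompatibility.SchemaCompatibilityType verdict =
    SchemaCompatibility.checkReaderWriterCompatibility(reader, writer).getType();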
Example 2: getComplexTypeDesc
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.avro.Schema; // the class this method depends on
// DataType, StructType, ListType, UnionType, MapType, ParquetUtils,
// parquetAvroSchemaMap and columnName come from the enclosing project.

private Schema getComplexTypeDesc(DataType type) {
  Schema resultTypeInfo = null;
  switch (type.getCategory()) {
    case Basic:
      // Basic types are mapped through a lookup table of Avro schemas.
      resultTypeInfo = parquetAvroSchemaMap.get(type.toString());
      break;
    case Struct:
      // Structs become Avro records: convert each column type recursively.
      StructType structObjectType = (StructType) type;
      List<DataType> dataTypes = Arrays.asList(structObjectType.getColumnTypes());
      Schema[] schemas = dataTypes.stream().map(dataType -> getComplexTypeDesc(dataType))
          .toArray(size -> new Schema[size]);
      resultTypeInfo = ParquetUtils.createAvroRecordSchema(columnName,
          structObjectType.getColumnNames(), schemas);
      break;
    case List:
      // Lists become Avro arrays of the (recursively converted) element type.
      resultTypeInfo =
          Schema.createArray(getComplexTypeDesc(((ListType) type).getTypeOfElement()));
      break;
    case Union:
      // Unions convert each member type and combine them into an Avro union.
      final UnionType unionObjectType = (UnionType) type;
      final DataType[] columnTypes = unionObjectType.getColumnTypes();
      List<Schema> colTypes = new ArrayList<>();
      for (int i = 0; i < columnTypes.length; i++) {
        colTypes.add(getComplexTypeDesc(columnTypes[i]));
      }
      resultTypeInfo = Schema.createUnion(colTypes);
      break;
    case Map:
      // Maps become Avro maps with a recursively converted value type.
      MapType mapObjectType = (MapType) type;
      resultTypeInfo = Schema.createMap(getComplexTypeDesc(mapObjectType.getTypeOfValue()));
      break;
    default:
      break;
  }
  return resultTypeInfo;
}
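For the List branch above, nesting follows naturally from the recursion: a list-of-list-of-int column resolves to nested Schema.createArray calls. A minimal sketch using only plain Avro types, independent of the project-specific DataType classes:

import org.apache.avro.Schema;

public class NestedArrayDemo {
  public static void main(String[] args) {
    // list<list<int>> maps to two nested array schemas.
    Schema inner = Schema.createArray(Schema.create(Schema.Type.INT));
    Schema outer = Schema.createArray(inner);
    // Prints: {"type":"array","items":{"type":"array","items":"int"}}
    System.out.println(outer.toString());
  }
}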
Example 3: AvroArraySchema
import java.io.IOException;

import org.apache.avro.Schema; // the class this constructor depends on
// ArrayContainerField and AvroSchemaFactory come from the enclosing schema library.

public AvroArraySchema( final ArrayContainerField schema ) throws IOException{
  this.schema = schema;
  // Convert the array's child field to its Avro schema, then wrap it in an Avro array.
  Schema childSchema = AvroSchemaFactory.getAvroSchema( schema.getField() );
  avroSchema = Schema.createArray( childSchema );
}
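As a follow-up, a sketch (not part of the library above, written here only for illustration) of how an array schema produced by Schema.createArray can hold values through Avro's generic API:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;

public class ArrayUsageDemo {
  public static void main(String[] args) {
    // Build an array-of-long schema and back it with a GenericData.Array.
    Schema arraySchema = Schema.createArray(Schema.create(Schema.Type.LONG));
    GenericData.Array<Long> values = new GenericData.Array<>(3, arraySchema);
    values.add(1L);
    values.add(2L);
    values.add(3L);
    System.out.println(values);                               // [1, 2, 3]
    System.out.println(values.getSchema().getElementType());  // "long"
  }
}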