This article collects typical usage examples of the Java method org.apache.avro.Schema.Field.getProp. If you have been wondering what Field.getProp does, how to call it, and what real-world code that uses it looks like, the curated samples below should help. You can also explore the enclosing class, org.apache.avro.Schema.Field, for further usage examples.
The following presents 8 code examples of Field.getProp, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java samples.
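To ground the examples, here is a minimal, self-contained sketch of the method itself: Field.getProp returns the string value of a custom (non-reserved) attribute attached to a field in the schema JSON, or null when no such attribute exists. The attribute name my.custom.prop below is purely illustrative.

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;

public class GetPropDemo {
    public static void main(String[] args) {
        // Non-reserved field attributes in the schema JSON are preserved as field properties.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                + "{\"name\":\"id\",\"type\":\"string\",\"my.custom.prop\":\"some-value\"}]}");
        Field field = schema.getField("id");
        System.out.println(field.getProp("my.custom.prop")); // prints: some-value
        System.out.println(field.getProp("missing"));        // prints: null
    }
}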
Example 1: parse
import org.apache.avro.Schema.Field; // import the package/class this method depends on
/**
 * Parses the entity from the JSON body of the given event.
 *
 * @param event
 *            the event to parse
 * @param reuse
 *            if non-null, this may be reused and returned from this method
 * @return the parsed entity as a GenericRecord
 * @throws EventDeliveryException
 *             if a recoverable error occurred, such as a failure to
 *             download the schema from its URL
 * @throws NonRecoverableEventException
 *             if a non-recoverable error occurred, such as an unparsable
 *             schema or entity
 */
@Override
public GenericRecord parse(Event event, GenericRecord reuse)
        throws EventDeliveryException, NonRecoverableEventException {
    JsonObject parser = new JsonParser().parse(new String(event.getBody())).getAsJsonObject();
    GenericRecordBuilder recordBuilder = new GenericRecordBuilder(datasetSchema);
    for (Field field : datasetSchema.getFields()) {
        String at_header = field.getProp(FIELD_AT_HEADER_PROPERTY);
        if (at_header != null && at_header.equals(Boolean.TRUE.toString())) {
            // Field flagged as header-sourced: take the value from the event headers.
            recordBuilder.set(field.name(), event.getHeaders().get(field.name()));
        } else {
            // Otherwise pull the value out of the JSON body.
            JsonElement element = parser.get(field.name());
            recordBuilder.set(field.name(), getElementAsType(field.schema(), element));
        }
    }
    return recordBuilder.build();
}
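For context, the dataset schema consumed by this parser could mark header-sourced fields with a boolean-valued property. The real attribute name behind FIELD_AT_HEADER_PROPERTY is defined elsewhere in the parser class, so flume.at.header below is only a hypothetical stand-in:

// Hypothetical attribute name standing in for FIELD_AT_HEADER_PROPERTY.
Schema datasetSchema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Event\",\"fields\":["
        + "{\"name\":\"host\",\"type\":\"string\",\"flume.at.header\":\"true\"},"
        + "{\"name\":\"message\",\"type\":\"string\"}]}");
// parse(...) would then read "host" from the event headers and
// "message" from the JSON body of the event.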
Example 2: initConverters
import org.apache.avro.Schema.Field; // import the package/class this method depends on
/**
 * Initializes a converter for each schema field
 *
 * @param schema
 *            design schema
 */
private void initConverters(Schema schema) {
    converters = new StringConverter[size];
    List<Field> fields = schema.getFields();
    for (int i = 0; i < size; i++) {
        Field field = fields.get(i);
        Schema fieldSchema = field.schema();
        fieldSchema = AvroUtils.unwrapIfNullable(fieldSchema);
        if (LogicalTypeUtils.isLogicalTimestampMillis(fieldSchema)) {
            String datePattern = field.getProp(SchemaConstants.TALEND_COLUMN_PATTERN);
            converters[i] = new StringTimestampConverter(datePattern);
        } else {
            Type type = fieldSchema.getType();
            converters[i] = converterRegistry.get(type);
        }
    }
}
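A design schema exercising the timestamp branch might look like the sketch below; talend.field.pattern is assumed here as the literal value of SchemaConstants.TALEND_COLUMN_PATTERN:

// "talend.field.pattern" is an assumed stand-in for SchemaConstants.TALEND_COLUMN_PATTERN.
Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Row\",\"fields\":["
        + "{\"name\":\"created\","
        + "\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"},"
        + "\"talend.field.pattern\":\"yyyy-MM-dd HH:mm:ss\"}]}");
String pattern = schema.getField("created").getProp("talend.field.pattern");
// pattern == "yyyy-MM-dd HH:mm:ss"; initConverters would pass it to
// StringTimestampConverter for this field.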
Example 3: initDefaultSchema
import org.apache.avro.Schema.Field; // import the package/class this method depends on
private Schema initDefaultSchema(Schema designSchema) {
    AvroRegistry avroReg = new AvroRegistry();
    FieldAssembler<Schema> record = SchemaBuilder.record("Main").fields();
    for (SplunkJSONEventField metadataField : SplunkJSONEventField.getMetadataFields()) {
        Schema base = avroReg.getConverter(metadataField.getDataType()).getSchema();
        FieldBuilder<Schema> fieldBuilder = record.name(metadataField.getName());
        if (metadataField.getName().equals(SplunkJSONEventField.TIME.getName())) {
            String datePattern;
            Field designField = designSchema.getField(metadataField.getName());
            if (designField != null) {
                datePattern = designField.getProp(SchemaConstants.TALEND_COLUMN_PATTERN);
            } else {
                datePattern = designSchema.getProp(SchemaConstants.TALEND_COLUMN_PATTERN);
            }
            if (datePattern == null || datePattern.isEmpty()) {
                datePattern = "dd-MM-yyyy";
            }
            fieldBuilder.prop(SchemaConstants.TALEND_COLUMN_PATTERN, datePattern);
        }
        fieldBuilder.type(AvroUtils.wrapAsNullable(base)).noDefault();
    }
    Schema defaultSchema = record.endRecord();
    return defaultSchema;
}
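As a usage note, the pattern written into the TIME field can be read back with Field.getProp; when neither the design field nor the design schema carries a pattern, the fallback "dd-MM-yyyy" is what comes out:

Schema defaultSchema = initDefaultSchema(designSchema);
Field timeField = defaultSchema.getField(SplunkJSONEventField.TIME.getName());
// "dd-MM-yyyy" if the design schema provided no pattern of its own
String pattern = timeField.getProp(SchemaConstants.TALEND_COLUMN_PATTERN);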
Example 4: getMarketoColumns
import org.apache.avro.Schema.Field; // import the package/class this method depends on
public List<String> getMarketoColumns(Schema schema) {
    List<String> result = new ArrayList<>();
    Map<String, String> mappings = getInputedNameMappingsForMarketo();
    String marketoCol = null;
    String schemaCol = null;
    for (Field f : schema.getFields()) {
        marketoCol = mappings.get(f.name());
        if (StringUtils.isEmpty(marketoCol)) {
            schemaCol = f.getProp(SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME);
            if (!StringUtils.isEmpty(schemaCol)) {
                marketoCol = schemaCol;
            } else {
                marketoCol = f.name();
            }
        }
        result.add(marketoCol);
    }
    return result;
}
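The resolution order is: explicit name mapping first, then the TALEND_COLUMN_DB_COLUMN_NAME property, then the plain field name. A sketch of the last two levels, assuming talend.field.dbColumnName is the literal attribute behind that constant:

// "talend.field.dbColumnName" is an assumed stand-in for
// SchemaConstants.TALEND_COLUMN_DB_COLUMN_NAME.
Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Lead\",\"fields\":["
        + "{\"name\":\"email_\",\"type\":\"string\",\"talend.field.dbColumnName\":\"email\"},"
        + "{\"name\":\"company\",\"type\":\"string\"}]}");
// With no explicit mapping, "email_" resolves to "email" via the property,
// while "company" falls back to the field name itself.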
Example 5: extractSchemaFromAvroWithoutTime
import org.apache.avro.Schema.Field; // import the package/class this method depends on
public static Schema extractSchemaFromAvroWithoutTime(File avroFile) throws FileNotFoundException, IOException {
    DataFileStream<GenericRecord> dataStream =
            new DataFileStream<GenericRecord>(new FileInputStream(avroFile), new GenericDatumReader<GenericRecord>());
    // Pinot schema being built (not the org.apache.avro.Schema read from the stream).
    Schema schema = new Schema();
    for (final Field field : dataStream.getSchema().getFields()) {
        final String columnName = field.name();
        final String pinotType = field.getProp("pinotType");
        final FieldSpec fieldSpec;
        if (pinotType != null && "METRIC".equals(pinotType)) {
            fieldSpec = new MetricFieldSpec();
            fieldSpec.setFieldType(FieldType.METRIC);
        } else {
            fieldSpec = new DimensionFieldSpec();
            fieldSpec.setFieldType(FieldType.DIMENSION); // default
        }
        fieldSpec.setName(columnName);
        fieldSpec.setDataType(getColumnType(dataStream.getSchema().getField(columnName)));
        fieldSpec.setSingleValueField(isSingleValueField(dataStream.getSchema().getField(columnName)));
        fieldSpec.setDelimiter(",");
        schema.addSchema(columnName, fieldSpec);
    }
    dataStream.close();
    return schema;
}
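The pinotType attribute this method reads is declared directly in the Avro schema of the input file; since Schema above refers to the Pinot class, the Avro one is fully qualified in this sketch:

org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Row\",\"fields\":["
        + "{\"name\":\"clicks\",\"type\":\"long\",\"pinotType\":\"METRIC\"},"
        + "{\"name\":\"country\",\"type\":\"string\"}]}");
System.out.println(avroSchema.getField("clicks").getProp("pinotType")); // METRIC
// "clicks" becomes a MetricFieldSpec; "country", with no pinotType
// attribute, falls back to a DimensionFieldSpec.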
Example 6: getFieldName
import org.apache.avro.Schema.Field; // import the package/class this method depends on
protected static String getFieldName(Field field) {
    String mongoName = field.getProp(MONGO_NAME_PROPERTY);
    if (mongoName != null) {
        return mongoName;
    } else {
        return field.name();
    }
}
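A quick check of both branches, assuming MONGO_NAME_PROPERTY resolves to an attribute named mongo.name (an illustration only):

// Assumes MONGO_NAME_PROPERTY == "mongo.name" (illustrative only).
Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Doc\",\"fields\":["
        + "{\"name\":\"id\",\"type\":\"string\",\"mongo.name\":\"_id\"},"
        + "{\"name\":\"title\",\"type\":\"string\"}]}");
System.out.println(getFieldName(schema.getField("id")));    // _id
System.out.println(getFieldName(schema.getField("title"))); // title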
Example 7: isDateTypeField
import org.apache.avro.Schema.Field; // import the package/class this method depends on
/**
 * Checks whether the given Avro field represents a date
 *
 * @param field the field to check; may be null
 * @return true if the field is of date type, false otherwise
 */
public static boolean isDateTypeField(Field field) {
    if (field == null) {
        return false;
    }
    if (!Type.LONG.equals(getFieldType(field))) {
        return false;
    }
    String clazz = field.getProp(SchemaConstants.JAVA_CLASS_FLAG);
    String pattr = field.getProp(SchemaConstants.TALEND_COLUMN_PATTERN);
    return (clazz != null && clazz.equals(Date.class.getCanonicalName())) || !StringUtils.isEmpty(pattr);
}
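Both triggers can be seen in a small sketch; java-class and talend.field.pattern are assumed stand-ins for SchemaConstants.JAVA_CLASS_FLAG and SchemaConstants.TALEND_COLUMN_PATTERN:

// Attribute names below are assumptions for illustration.
Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Row\",\"fields\":["
        + "{\"name\":\"updated\",\"type\":\"long\",\"java-class\":\"java.util.Date\"},"
        + "{\"name\":\"stamp\",\"type\":\"long\",\"talend.field.pattern\":\"yyyy-MM-dd\"},"
        + "{\"name\":\"amount\",\"type\":\"long\"}]}");
isDateTypeField(schema.getField("updated")); // true: long + java.util.Date
isDateTypeField(schema.getField("stamp"));   // true: long + date pattern
isDateTypeField(schema.getField("amount"));  // false: plain long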
Example 8: open
import org.apache.avro.Schema.Field; // import the package/class this method depends on
@Override
public void open(String uId) throws IOException {
    this.uId = uId;
    processingConnection = sink.createConnection(container);
    uploadConnection = sink.createConnection(container);
    if (null == mainSchema) {
        mainSchema = sink.getRuntimeSchema(container);
    }
    SnowflakeConnectionProperties connectionProperties = sprops.getConnectionProperties();
    Map<LoaderProperty, Object> prop = new HashMap<>();
    prop.put(LoaderProperty.tableName, sprops.getTableName());
    prop.put(LoaderProperty.schemaName, connectionProperties.schemaName.getStringValue());
    prop.put(LoaderProperty.databaseName, connectionProperties.db.getStringValue());
    switch (sprops.outputAction.getValue()) {
    case INSERT:
        prop.put(LoaderProperty.operation, Operation.INSERT);
        break;
    case UPDATE:
        prop.put(LoaderProperty.operation, Operation.MODIFY);
        break;
    case UPSERT:
        prop.put(LoaderProperty.operation, Operation.UPSERT);
        break;
    case DELETE:
        prop.put(LoaderProperty.operation, Operation.DELETE);
        break;
    }
    List<Field> columns = mainSchema.getFields();
    List<String> keyStr = new ArrayList<>();
    List<String> columnsStr = new ArrayList<>();
    for (Field f : columns) {
        columnsStr.add(f.name());
        if (null != f.getProp(SchemaConstants.TALEND_COLUMN_IS_KEY)) {
            keyStr.add(f.name());
        }
    }
    row = new Object[columnsStr.size()];
    prop.put(LoaderProperty.columns, columnsStr);
    if (sprops.outputAction.getValue() == UPSERT) {
        keyStr.clear();
        keyStr.add(sprops.upsertKeyColumn.getValue());
    }
    if (keyStr.size() > 0) {
        prop.put(LoaderProperty.keys, keyStr);
    }
    prop.put(LoaderProperty.remoteStage, "~");
    loader = (StreamLoader) LoaderFactory.createLoader(prop, uploadConnection, processingConnection);
    loader.setListener(listener);
    loader.start();
}
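Note that the key test above only checks that the property is present (non-null), so any value marks a column as a key unless the UPSERT branch overrides the list. A runtime schema produced upstream might flag a key column like this sketch, with talend.field.isKey as an assumed stand-in for SchemaConstants.TALEND_COLUMN_IS_KEY:

// "talend.field.isKey" is an assumed stand-in for SchemaConstants.TALEND_COLUMN_IS_KEY.
Schema mainSchema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Row\",\"fields\":["
        + "{\"name\":\"ID\",\"type\":\"string\",\"talend.field.isKey\":\"true\"},"
        + "{\"name\":\"NAME\",\"type\":\"string\"}]}");
// open(...) would add both columns to columnsStr and only "ID" to keyStr.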