

Java Field.name Method Code Examples

This article collects typical usage examples of the Java method org.apache.kafka.connect.data.Field.name. If you are unsure what Field.name does or how to call it, the curated examples below should help; you can also explore the other usage examples for the org.apache.kafka.connect.data.Field class.


The following are 10 code examples of the Field.name method, ordered by popularity.
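Before the examples, here is a minimal self-contained sketch of what Field.name returns (the schema and field names are illustrative, not taken from the projects below): each Field of a Connect Schema carries a name, an index, and a value Schema, and field.name() is the key under which the field's value is stored on a Struct.

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class FieldNameSketch {
  public static void main(String[] args) {
    // Hypothetical schema, for illustration only.
    Schema schema = SchemaBuilder.struct()
        .field("id", Schema.INT64_SCHEMA)
        .field("email", Schema.STRING_SCHEMA)
        .build();
    Struct struct = new Struct(schema)
        .put("id", 1L)
        .put("email", "user@example.com");
    for (Field field : schema.fields()) {
      // field.name() is the key used to look the value up on the Struct.
      System.out.printf("%s = %s%n", field.name(), struct.get(field));
    }
  }
}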

Example 1: serialize

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
@Override
public void serialize(Struct struct, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
  struct.validate();
  Storage result = new Storage();
  result.schema = struct.schema();
  result.fieldValues = new ArrayList<>();
  for (Field field : struct.schema().fields()) {
    log.trace("serialize() - Processing field '{}'", field.name());
    FieldValue fieldValue = new FieldValue();
    fieldValue.name = field.name();
    fieldValue.schema = field.schema();
    fieldValue.value(struct.get(field));
    result.fieldValues.add(fieldValue);
  }
  jsonGenerator.writeObject(result);
}
 
Author: jcustenborder, Project: connect-utils, Lines: 17, Source: StructSerializationModule.java
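The serializer above is written as a Jackson JsonSerializer, so a natural way to use it — assuming StructSerializationModule is a standard Jackson Module, which the snippet does not show — would be to register the module on an ObjectMapper:

ObjectMapper mapper = new ObjectMapper();
mapper.registerModule(new StructSerializationModule());
String json = mapper.writeValueAsString(struct); // struct is an org.apache.kafka.connect.data.Struct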

Example 2: handleStruct

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
void handleStruct(Event event) {
  final Struct input = (Struct) event.event;
  List<Field> fields = input.schema().fields();
  final Map<String, Object> result = new LinkedHashMap<>(fields.size());

  for (Field field : fields) {
    String key = field.name();
    Object value = input.get(field);

    // Null values are dropped entirely.
    if (null == value) {
      continue;
    }

    // Fields the Event does not claim for itself are collected into the map.
    if (!event.setValue(key, value)) {
      result.put(key, value);
    }
  }

  event.event = result.isEmpty() ? null : result;
}
 
Author: jcustenborder, Project: kafka-connect-splunk, Lines: 21, Source: ObjectMapperFactory.java

Example 3: convertStruct

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
public void convertStruct(JsonNode sObjectNode, Schema schema, Struct struct) {
  for (Field field : schema.fields()) {
    String fieldName = field.name();
    JsonNode valueNode = sObjectNode.findValue(fieldName);

    final Object value;
    if (ADDRESS_SCHEMA_NAME.equals(field.schema().name())) {
      // Address fields arrive as a nested JSON object; convert each sub-field
      // into a nested Struct. valueNode is assumed to be non-null for address fields.
      Struct address = new Struct(field.schema());
      for (Field addressField : field.schema().fields()) {
        JsonNode fieldValueNode = valueNode.findValue(addressField.name());
        Object fieldValue = PARSER.parseJsonNode(addressField.schema(), fieldValueNode);
        address.put(addressField, fieldValue);
      }
      value = address;
    } else {
      value = PARSER.parseJsonNode(field.schema(), valueNode);
    }
    struct.put(field, value);
  }
}
 
Author: jcustenborder, Project: kafka-connect-salesforce, Lines: 21, Source: SObjectHelper.java

Example 4: assertMatchingSchema

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
protected void assertMatchingSchema(Struct value, Schema schema) {
    assertSame(schema, value.schema());
    assertEquals(schema.name(), value.schema().name());
    for (Field field : schema.fields()) {
        String fieldName = field.name();
        assertEquals(schema.field(fieldName).name(), value.schema().field(fieldName).name());
        assertEquals(schema.field(fieldName).index(), value.schema().field(fieldName).index());
        assertSame(schema.field(fieldName).schema(), value.schema().field(fieldName).schema());
    }
}
 
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 11, Source: SetSchemaMetadataTest.java

Example 5: configure

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
@Override
protected void configure(InputStream inputStream, Map<String, String> metadata, final Long lastOffset) throws IOException {
  log.trace("configure() - creating csvParser");
  this.csvParser = this.config.createCSVParserBuilder().build();
  this.streamReader = new InputStreamReader(inputStream, this.config.charset);
  CSVReaderBuilder csvReaderBuilder = this.config.createCSVReaderBuilder(this.streamReader, csvParser);
  this.csvReader = csvReaderBuilder.build();

  String[] fieldNames;

  if (this.config.firstRowAsHeader) {
    log.trace("configure() - Reading the header row.");
    fieldNames = this.csvReader.readNext();
    log.info("configure() - field names from header row. fields = {}", Joiner.on(", ").join(fieldNames));
  } else {
    log.trace("configure() - Using fields from schema {}", this.config.valueSchema.name());
    fieldNames = new String[this.config.valueSchema.fields().size()];
    int index = 0;
    for (Field field : this.config.valueSchema.fields()) {
      fieldNames[index++] = field.name();
    }
    log.info("configure() - field names from schema order. fields = {}", Joiner.on(", ").join(fieldNames));
  }

  if (null != lastOffset) {
    log.info("Found previous offset. Skipping {} line(s).", lastOffset.intValue());
    // Re-read and discard rows until the reader reaches the previously committed offset.
    while (null != this.csvReader.readNext() && this.csvReader.getLinesRead() < lastOffset) {
      log.trace("skipped row");
    }
  }

  this.fieldNames = fieldNames;
  this.fileMetadata = metadata;
}
 
Author: jcustenborder, Project: kafka-connect-spooldir, Lines: 36, Source: SpoolDirCsvSourceTask.java

Example 6: toValue

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
/**
 * Serializes a single field of a parsed JSON event to its HBase byte[] representation.
 *
 * @param keyValues map of field name to field value parsed from the event
 * @param field     the Connect schema field to serialize
 * @return the value encoded with HBase's Bytes utility, or null for unsupported types
 */
private byte[] toValue(final Map<String, Object> keyValues, final Field field) {
    Preconditions.checkNotNull(field);
    final Schema.Type type = field.schema().type();
    final String fieldName = field.name();
    final Object fieldValue = keyValues.get(fieldName);
    switch (type) {
        case STRING:
            return Bytes.toBytes((String) fieldValue);
        case BOOLEAN:
            return Bytes.toBytes((Boolean)fieldValue);
        case BYTES:
            return Bytes.toBytes((ByteBuffer) fieldValue);
        case FLOAT32:
            return Bytes.toBytes((Float)fieldValue);
        case FLOAT64:
            return Bytes.toBytes((Double)fieldValue);
        case INT8:
            return Bytes.toBytes((Byte)fieldValue);
        case INT16:
            return Bytes.toBytes((Short)fieldValue);
        case INT32:
            return Bytes.toBytes((Integer)fieldValue);
        case INT64:
            return Bytes.toBytes((Long)fieldValue);
        default:
            // Unsupported schema types (e.g. STRUCT, ARRAY, MAP) are not serialized.
            return null;
    }
}
 
Author: mravi, Project: kafka-connect-hbase, Lines: 35, Source: JsonEventParser.java

Example 7: toValue

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
private byte[] toValue(final GenericRecord record, final Field field) {
    Preconditions.checkNotNull(field);
    final Schema.Type type = field.schema().type();
    final String fieldName = field.name();
    final Object fieldValue = record.get(fieldName);
    switch (type) {
        case STRING:
            return Bytes.toBytes((String) fieldValue);
        case BOOLEAN:
            return Bytes.toBytes((Boolean)fieldValue);
        case BYTES:
            return Bytes.toBytes((ByteBuffer) fieldValue);
        case FLOAT32:
            return Bytes.toBytes((Float)fieldValue);
        case FLOAT64:
            return Bytes.toBytes((Double)fieldValue);
        case INT8:
            return Bytes.toBytes((Byte)fieldValue);
        case INT16:
            return Bytes.toBytes((Short)fieldValue);
        case INT32:
            return Bytes.toBytes((Integer)fieldValue);
        case INT64:
            return Bytes.toBytes((Long)fieldValue);
        default:
            // Unsupported schema types (e.g. STRUCT, ARRAY, MAP) are not serialized.
            return null;
    }
}
 
Author: mravi, Project: kafka-connect-hbase, Lines: 29, Source: AvroEventParser.java

Example 8: toJsonMap

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
public static Map<String, Object> toJsonMap(Struct struct) {
    Map<String, Object> jsonMap = new HashMap<>(0);
    List<Field> fields = struct.schema().fields();
    for (Field field : fields) {
        String fieldName = field.name();
        Schema.Type fieldType = field.schema().type();
        String schemaName = field.schema().name();
        switch (fieldType) {
            case STRING:
                jsonMap.put(fieldName, struct.getString(fieldName));
                break;
            case INT32:
                // INT32 backs the Date and Time logical types; preserve those as java.util.Date.
                if (Date.LOGICAL_NAME.equals(schemaName)
                        || Time.LOGICAL_NAME.equals(schemaName)) {
                    jsonMap.put(fieldName, (java.util.Date) struct.get(fieldName));
                } else {
                    jsonMap.put(fieldName, struct.getInt32(fieldName));
                }
                break;
            case INT16:
                jsonMap.put(fieldName, struct.getInt16(fieldName));
                break;
            case INT64:
                // INT64 backs the Timestamp logical type.
                if (Timestamp.LOGICAL_NAME.equals(schemaName)) {
                    jsonMap.put(fieldName, (java.util.Date) struct.get(fieldName));
                } else {
                    jsonMap.put(fieldName, struct.getInt64(fieldName));
                }
                break;
            case FLOAT32:
                jsonMap.put(fieldName, struct.getFloat32(fieldName));
                break;
            case STRUCT:
                jsonMap.put(fieldName, toJsonMap(struct.getStruct(fieldName)));
                break;
            default:
                // Other types (FLOAT64, BOOLEAN, BYTES, ARRAY, MAP) are silently skipped.
                break;
        }
    }
    return jsonMap;
}
 
Author: DataReply, Project: kafka-connect-mongodb, Lines: 40, Source: SchemaUtils.java
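A brief usage sketch (the schema is hypothetical, not from the source project): because toJsonMap dispatches on the schema's logical name, a field declared with Timestamp.SCHEMA comes back as a java.util.Date rather than a raw Long.

Schema schema = SchemaBuilder.struct()
    .field("id", Schema.INT64_SCHEMA)
    .field("created", Timestamp.SCHEMA)
    .build();
Struct row = new Struct(schema)
    .put("id", 42L)
    .put("created", new java.util.Date());
Map<String, Object> json = SchemaUtils.toJsonMap(row);
// json.get("id") is a Long; json.get("created") is a java.util.Date.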

Example 9: removeImplicitRowTimeRowKeyFromSchema

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
public static Schema removeImplicitRowTimeRowKeyFromSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    // Strip the alias prefix (everything up to the first '.') from the field name.
    String fieldName = field.name();
    fieldName = fieldName.substring(fieldName.indexOf('.') + 1);
    // Drop KSQL's implicit ROWTIME and ROWKEY columns; keep everything else.
    if (!fieldName.equalsIgnoreCase(SchemaUtil.ROWTIME_NAME)
        && !fieldName.equalsIgnoreCase(SchemaUtil.ROWKEY_NAME)) {
      schemaBuilder.field(fieldName, field.schema());
    }
  }
  return schemaBuilder.build();
}
 
Author: confluentinc, Project: ksql, Lines: 13, Source: SchemaUtil.java
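A minimal usage sketch under assumed names (the "pageviews." alias and the VIEWTIME column are illustrative, not from the source project):

Schema withMeta = SchemaBuilder.struct()
    .field("pageviews.ROWTIME", Schema.INT64_SCHEMA)
    .field("pageviews.ROWKEY", Schema.STRING_SCHEMA)
    .field("pageviews.VIEWTIME", Schema.INT64_SCHEMA)
    .build();
Schema cleaned = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(withMeta);
// "cleaned" contains only the field "VIEWTIME", with the alias prefix stripped.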

Example 10: getSchemaWithNoAlias

import org.apache.kafka.connect.data.Field; // import the dependency used by this method
/**
 * Removes the alias prefix from field names so the schema can be read and
 * written from outside the query.
 *
 * @param schema the schema whose field names may carry an "alias." prefix
 * @return a new schema with the prefix stripped from every field name
 */
public static Schema getSchemaWithNoAlias(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field: schema.fields()) {
    String name = field.name();
    if (name.contains(".")) {
      schemaBuilder.field(name.substring(name.indexOf(".") + 1), field.schema());
    } else {
      schemaBuilder.field(name, field.schema());
    }
  }

  return schemaBuilder.build();
}
 
Author: confluentinc, Project: ksql, Lines: 20, Source: SchemaUtil.java


Note: the org.apache.kafka.connect.data.Field.name examples above were collected from open-source projects hosted on platforms such as GitHub; the snippets were contributed by their original authors, who retain copyright. Consult each project's license before reusing or redistributing the code.