This article collects typical usage examples of the Java class org.apache.kafka.connect.data.SchemaBuilder. If you have been wondering what SchemaBuilder is for and how to use it, the curated class examples below may help.
SchemaBuilder belongs to the org.apache.kafka.connect.data package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
Example 1: applyWithSchema
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
private R applyWithSchema(R record) {
    final Struct value = requireStruct(operatingValue(record), PURPOSE);

    Schema updatedSchema = schemaUpdateCache.get(value.schema());
    if (updatedSchema == null) {
        final SchemaBuilder builder = SchemaUtil.copySchemaBasics(value.schema(), SchemaBuilder.struct());
        Struct defaultValue = (Struct) value.schema().defaultValue();
        buildUpdatedSchema(value.schema(), "", builder, value.schema().isOptional(), defaultValue);
        updatedSchema = builder.build();
        schemaUpdateCache.put(value.schema(), updatedSchema);
    }

    final Struct updatedValue = new Struct(updatedSchema);
    buildWithSchema(value, "", updatedValue);
    return newRecord(record, updatedSchema, updatedValue);
}
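The helpers requireStruct, buildUpdatedSchema, and buildWithSchema are elided from this listing. To orient readers, here is a minimal, self-contained sketch (not the original helpers) of the core SchemaBuilder pattern at work: copy a source struct schema's basics into a fresh builder, carry the fields over, and build once so the result can be cached per input schema.

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Sketch: rebuild a struct schema, preserving its name/version/doc and optionality.
static Schema rebuildStructSchema(Schema original) {
    SchemaBuilder builder = SchemaBuilder.struct()
            .name(original.name())
            .version(original.version())
            .doc(original.doc());
    if (original.isOptional()) {
        builder.optional();
    }
    for (Field field : original.fields()) {
        builder.field(field.name(), field.schema());
    }
    return builder.build();
}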
Example 2: delete
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void delete() {
    final Schema keySchema = SchemaBuilder.struct()
            .field("id", Schema.STRING_SCHEMA)
            .build();
    final Struct key = new Struct(keySchema)
            .put("id", "asdf");
    final SinkRecord record = new SinkRecord(
            "testing",
            1,
            keySchema,
            key,
            null,
            null,
            123L
    );
    this.task.put(Arrays.asList(record));
}
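Both the value schema and the value of this SinkRecord are null: such tombstone records are the conventional way a sink task is told to delete the row identified by the record key, which is the behavior this test exercises.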
Example 3: alterAddColumns
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void alterAddColumns() {
    final Schema keySchema = SchemaBuilder.struct()
            .field("username", Schema.STRING_SCHEMA)
            .field("companyID", Schema.INT64_SCHEMA)
            .build();
    final Schema valueSchema = SchemaBuilder.struct()
            .field("username", Schema.STRING_SCHEMA)
            .field("companyID", Schema.INT64_SCHEMA)
            .field("firstName", Schema.STRING_SCHEMA)
            .field("lastName", Schema.STRING_SCHEMA)
            .field("created", Timestamp.SCHEMA)
            .field("updated", Timestamp.SCHEMA)
            .build();

    CassandraTableMetadata tableMetadata = mock(CassandraTableMetadata.class);
    when(tableMetadata.columnMetadata("username")).thenReturn(mock(CassandraColumnMetadata.class));
    when(tableMetadata.columnMetadata("companyID")).thenReturn(mock(CassandraColumnMetadata.class));
    when(tableMetadata.columnMetadata("firstName")).thenReturn(mock(CassandraColumnMetadata.class));
    when(tableMetadata.columnMetadata("lastName")).thenReturn(mock(CassandraColumnMetadata.class));
    when(this.builder.session.tableMetadata("foo")).thenReturn(tableMetadata);

    this.builder.build("foo", keySchema, valueSchema);
    verify(this.session, times(1)).executeStatement(any(Alter.Options.class));
}
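The mocked table metadata reports username, companyID, firstName, and lastName as existing columns, while the value schema additionally carries created and updated. The test therefore verifies that exactly one ALTER statement is executed, presumably adding the two missing Timestamp columns.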
Example 4: createComplexPrimaryKey
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void createComplexPrimaryKey() {
    final Schema keySchema = SchemaBuilder.struct()
            .field("username", Schema.STRING_SCHEMA)
            .field("companyID", Schema.INT64_SCHEMA)
            .build();
    final Schema valueSchema = SchemaBuilder.struct()
            .field("username", Schema.STRING_SCHEMA)
            .field("companyID", Schema.INT64_SCHEMA)
            .field("firstName", Schema.STRING_SCHEMA)
            .field("lastName", Schema.STRING_SCHEMA)
            .field("created", Timestamp.SCHEMA)
            .field("updated", Timestamp.SCHEMA)
            .build();

    this.builder.build("foo", keySchema, valueSchema);
    verify(this.session, times(1)).executeStatement(any(Create.class));
}
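In contrast to Example 3, no table metadata is mocked here, so the builder falls back to issuing a CREATE statement. As the test name suggests, the two key-schema fields presumably become the compound primary key of the generated table.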
Example 5: configure
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Override
protected void configure(Map<String, Object> config) {
    String valueFieldName;
    if (config.get(FILE_READER_TEXT_FIELD_NAME_VALUE) == null ||
            config.get(FILE_READER_TEXT_FIELD_NAME_VALUE).toString().equals("")) {
        valueFieldName = FIELD_NAME_VALUE_DEFAULT;
    } else {
        valueFieldName = config.get(FILE_READER_TEXT_FIELD_NAME_VALUE).toString();
    }
    this.schema = SchemaBuilder.struct()
            .field(valueFieldName, Schema.STRING_SCHEMA)
            .build();

    if (config.get(FILE_READER_TEXT_ENCODING) == null ||
            config.get(FILE_READER_TEXT_ENCODING).toString().equals("")) {
        this.charset = Charset.defaultCharset();
    } else {
        this.charset = Charset.forName(config.get(FILE_READER_TEXT_ENCODING).toString());
    }
}
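For reference, a minimal sketch of a config map this method would accept (the keys are the constants referenced above; the values are illustrative assumptions, not defaults from the source):

import java.util.HashMap;
import java.util.Map;

Map<String, Object> config = new HashMap<>();
config.put(FILE_READER_TEXT_FIELD_NAME_VALUE, "line"); // assumed field name; empty or absent falls back to FIELD_NAME_VALUE_DEFAULT
config.put(FILE_READER_TEXT_ENCODING, "UTF-8");        // any name accepted by Charset.forName; empty or absent falls back to the platform default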
Example 6: DelimitedTextFileReader
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
public DelimitedTextFileReader(FileSystem fs, Path filePath, Map<String, Object> config) throws IOException {
    super(fs, filePath, new DelimitedTxtToStruct(), config);

    // map the delimited reader's encoding option onto the inner text file reader
    if (config.get(FILE_READER_DELIMITED_ENCODING) != null) {
        config.put(TextFileReader.FILE_READER_TEXT_ENCODING, config.get(FILE_READER_DELIMITED_ENCODING));
    }
    this.inner = new TextFileReader(fs, filePath, config);
    this.offset = new DelimitedTextOffset(0, hasHeader);

    SchemaBuilder schemaBuilder = SchemaBuilder.struct();
    if (hasNext()) {
        String firstLine = inner.nextRecord().getValue();
        String[] columns = firstLine.split(token);
        IntStream.range(0, columns.length).forEach(index -> {
            String columnName = hasHeader ? columns[index] : DEFAULT_COLUMN_NAME + "_" + (index + 1);
            schemaBuilder.field(columnName, SchemaBuilder.STRING_SCHEMA);
        });
        if (!hasHeader) {
            // the first line is data, not a header: seek back so it is read again
            inner.seek(this.offset);
        }
    }
    this.schema = schemaBuilder.build();
}
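Note that the schema is inferred from the first line of the file: with a header, that line is consumed to name the columns; without one, synthetic names (DEFAULT_COLUMN_NAME plus _1, _2, ...) are generated and the reader seeks back to the start so the first line is still delivered as data.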
Example 7: SequenceFileReader
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
public SequenceFileReader(FileSystem fs, Path filePath, Map<String, Object> config) throws IOException {
    super(fs, filePath, new SeqToStruct(), config);
    this.reader = new SequenceFile.Reader(fs.getConf(),
            SequenceFile.Reader.file(filePath),
            SequenceFile.Reader.bufferSize(fs.getConf().getInt(FILE_READER_BUFFER_SIZE, DEFAULT_BUFFER_SIZE)));
    this.key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), fs.getConf());
    this.value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), fs.getConf());
    this.schema = SchemaBuilder.struct()
            .field(keyFieldName, getSchema(this.key))
            .field(valueFieldName, getSchema(this.value))
            .build();
    this.offset = new SeqOffset(0);
    this.recordIndex = this.hasNextIndex = -1;
    this.hasNext = false;
}
Example 8: getSchema
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
private Schema getSchema(Writable writable) {
    if (writable instanceof ByteWritable) {
        return SchemaBuilder.INT8_SCHEMA;
    } else if (writable instanceof ShortWritable) {
        return SchemaBuilder.INT16_SCHEMA;
    } else if (writable instanceof IntWritable) {
        return SchemaBuilder.INT32_SCHEMA;
    } else if (writable instanceof LongWritable) {
        return SchemaBuilder.INT64_SCHEMA;
    } else if (writable instanceof FloatWritable) {
        return SchemaBuilder.FLOAT32_SCHEMA;
    } else if (writable instanceof DoubleWritable) {
        return SchemaBuilder.FLOAT64_SCHEMA; // a double is a 64-bit floating-point value, not INT64
    } else if (writable instanceof BytesWritable) {
        return SchemaBuilder.BYTES_SCHEMA;
    } else if (writable instanceof BooleanWritable) {
        return SchemaBuilder.BOOLEAN_SCHEMA;
    }
    return SchemaBuilder.STRING_SCHEMA;
}
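Each Hadoop Writable wrapper maps to the Connect primitive of matching width, with unrecognized types falling back to STRING_SCHEMA. The *_SCHEMA constants are declared on the Schema interface, which SchemaBuilder implements, so SchemaBuilder.INT8_SCHEMA and Schema.INT8_SCHEMA name the same schema.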
Example 9: parseStringValueTest
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void parseStringValueTest() {
    Schema schema = SchemaBuilder.string().build();
    ByteBuffer actual = DataUtility.parseValue(schema, "Testing Kinesis-Kafka Connector");
    // java.nio.charset.StandardCharsets.UTF_8 avoids the checked UnsupportedEncodingException of getBytes(String)
    ByteBuffer expected = ByteBuffer.wrap("Testing Kinesis-Kafka Connector".getBytes(StandardCharsets.UTF_8));
    Assert.assertEquals(expected, actual);
}
Example 10: convertFieldType
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
private SchemaBuilder convertFieldType(Schema.Type type) {
    switch (type) {
        case INT8:
            return SchemaBuilder.int8();
        case INT16:
            return SchemaBuilder.int16();
        case INT32:
            return SchemaBuilder.int32();
        case INT64:
            return SchemaBuilder.int64();
        case FLOAT32:
            return SchemaBuilder.float32();
        case FLOAT64:
            return SchemaBuilder.float64();
        case BOOLEAN:
            return SchemaBuilder.bool();
        case STRING:
            return SchemaBuilder.string();
        default:
            throw new DataException("Unexpected type in Cast transformation: " + type);
    }
}
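Because convertFieldType returns a bare SchemaBuilder, a caller would typically re-apply the source schema's metadata before building. A sketch under that assumption (originalSchema and castValue are hypothetical, not from the source):

SchemaBuilder builder = convertFieldType(Schema.Type.INT64);
if (originalSchema.isOptional()) {
    builder.optional();
}
if (originalSchema.defaultValue() != null) {
    builder.defaultValue(castValue(originalSchema.defaultValue())); // hypothetical conversion helper
}
Schema casted = builder.build();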
Example 11: makeUpdatedSchema
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
private Schema makeUpdatedSchema(Schema schema) {
    final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());

    for (Field field : schema.fields()) {
        builder.field(field.name(), field.schema());
    }

    if (topicField != null) {
        builder.field(topicField.name, topicField.optional ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA);
    }
    if (partitionField != null) {
        builder.field(partitionField.name, partitionField.optional ? Schema.OPTIONAL_INT32_SCHEMA : Schema.INT32_SCHEMA);
    }
    if (offsetField != null) {
        builder.field(offsetField.name, offsetField.optional ? Schema.OPTIONAL_INT64_SCHEMA : Schema.INT64_SCHEMA);
    }
    if (timestampField != null) {
        builder.field(timestampField.name, timestampField.optional ? OPTIONAL_TIMESTAMP_SCHEMA : Timestamp.SCHEMA);
    }
    if (staticField != null) {
        builder.field(staticField.name, staticField.optional ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA);
    }

    return builder.build();
}
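Each metadata field is added with either the required or the optional variant of its primitive schema, so one transform configuration can serve records that may or may not carry the attribute. The Schema interface has no constant for an optional logical Timestamp, so OPTIONAL_TIMESTAMP_SCHEMA is presumably a local constant along the lines of Timestamp.builder().optional().build().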
Example 12: apply
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Override
public R apply(R record) {
    final Schema schema = operatingSchema(record);
    final Object value = operatingValue(record);

    if (schema == null) {
        return newRecord(record, null, Collections.singletonMap(fieldName, value));
    } else {
        Schema updatedSchema = schemaUpdateCache.get(schema);
        if (updatedSchema == null) {
            updatedSchema = SchemaBuilder.struct().field(fieldName, schema).build();
            schemaUpdateCache.put(schema, updatedSchema);
        }
        final Struct updatedValue = new Struct(updatedSchema).put(fieldName, value);
        return newRecord(record, updatedSchema, updatedValue);
    }
}
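To make the schemaful branch concrete, a quick illustration (the field name "line" is hypothetical) of a plain string value being hoisted into a one-field struct:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

// A string value hoisted under a single struct field; "line" is an illustrative field name.
Schema updatedSchema = SchemaBuilder.struct().field("line", Schema.STRING_SCHEMA).build();
Struct updatedValue = new Struct(updatedSchema).put("line", "hello world");
// updatedValue.getString("line") now returns "hello world".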
Example 13: schemaNameAndVersionUpdate
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void schemaNameAndVersionUpdate() {
    final Map<String, String> props = new HashMap<>();
    props.put("schema.name", "foo");
    props.put("schema.version", "42");

    final SetSchemaMetadata<SinkRecord> xform = new SetSchemaMetadata.Value<>();
    xform.configure(props);

    final SinkRecord record = new SinkRecord("", 0, null, null, SchemaBuilder.struct().build(), null, 0);
    final SinkRecord updatedRecord = xform.apply(record);
    assertEquals("foo", updatedRecord.valueSchema().name());
    assertEquals(Integer.valueOf(42), updatedRecord.valueSchema().version());
}
Example 14: updateSchemaOfStruct
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void updateSchemaOfStruct() {
    final String fieldName1 = "f1";
    final String fieldName2 = "f2";
    final String fieldValue1 = "value1";
    final int fieldValue2 = 1;

    final Schema schema = SchemaBuilder.struct()
            .name("my.orig.SchemaDefn")
            .field(fieldName1, Schema.STRING_SCHEMA)
            .field(fieldName2, Schema.INT32_SCHEMA)
            .build();
    final Struct value = new Struct(schema).put(fieldName1, fieldValue1).put(fieldName2, fieldValue2);

    final Schema newSchema = SchemaBuilder.struct()
            .name("my.updated.SchemaDefn")
            .field(fieldName1, Schema.STRING_SCHEMA)
            .field(fieldName2, Schema.INT32_SCHEMA)
            .build();

    Struct newValue = (Struct) SetSchemaMetadata.updateSchemaIn(value, newSchema);
    assertMatchingSchema(newValue, newSchema);
}
Example 15: testJsonSchemaMetadataTranslation
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
@Test
public void testJsonSchemaMetadataTranslation() {
    JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());

    converted = parse(converter.fromConnectData(TOPIC, Schema.OPTIONAL_BOOLEAN_SCHEMA, null));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": true }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isNull());

    converted = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().defaultValue(true).build(), true));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"default\": true }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());

    converted = parse(converter.fromConnectData(TOPIC, SchemaBuilder.bool().required().name("bool").version(3).doc("the documentation").parameter("foo", "bar").build(), true));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false, \"name\": \"bool\", \"version\": 3, \"doc\": \"the documentation\", \"parameters\": { \"foo\": \"bar\" }}"),
            converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(true, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
}