This page collects typical usage examples of the Java method org.apache.kafka.connect.data.SchemaBuilder.int32, gathered from open-source projects. If you are wondering what SchemaBuilder.int32 does, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.kafka.connect.data.SchemaBuilder.
The following shows 4 code examples of the SchemaBuilder.int32 method, ordered by popularity by default.
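Before the examples, a minimal standalone sketch (not taken from any of the projects below) of what SchemaBuilder.int32 returns: a fluent builder for a 32-bit integer schema that can be refined with optional() and defaultValue() before build() is called.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class Int32SchemaDemo {
    public static void main(String[] args) {
        // A plain, required int32 schema.
        Schema required = SchemaBuilder.int32().build();

        // An optional int32 schema with a default; builder calls chain fluently.
        Schema optional = SchemaBuilder.int32().optional().defaultValue(0).build();

        System.out.println(required.type());         // INT32
        System.out.println(optional.defaultValue()); // 0
    }
}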
Example 1: convertFieldType
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
import org.apache.kafka.connect.errors.DataException;
private SchemaBuilder convertFieldType(Schema.Type type) {
    switch (type) {
        case INT8:
            return SchemaBuilder.int8();
        case INT16:
            return SchemaBuilder.int16();
        case INT32:
            return SchemaBuilder.int32();
        case INT64:
            return SchemaBuilder.int64();
        case FLOAT32:
            return SchemaBuilder.float32();
        case FLOAT64:
            return SchemaBuilder.float64();
        case BOOLEAN:
            return SchemaBuilder.bool();
        case STRING:
            return SchemaBuilder.string();
        default:
            throw new DataException("Unexpected type in Cast transformation: " + type);
    }
}
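A hypothetical call site (not part of the original snippet), showing why the method returns a SchemaBuilder rather than a finished Schema: callers can keep refining the builder before calling build().

Schema fieldSchema = convertFieldType(Schema.Type.INT32)
        .optional()
        .build();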
Example 2: parseIntValueTest
import java.nio.ByteBuffer;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
import org.junit.Assert;
import org.junit.Test;
@Test
public void parseIntValueTest() {
    Schema schemaInt32 = SchemaBuilder.int32();
    ByteBuffer actualInt32 = DataUtility.parseValue(schemaInt32, 2);
    ByteBuffer expectedInt32 = ByteBuffer.allocate(4).putInt(2);
    Assert.assertEquals(expectedInt32, actualInt32);
}
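DataUtility itself is not shown on this page; the following is a hedged reconstruction of what its INT32 branch presumably does, so the test above reads in context. The class name, method signature, and switch structure are assumptions, not the project's confirmed implementation.

import java.nio.ByteBuffer;
import org.apache.kafka.connect.data.Schema;

public class DataUtility {
    // Hypothetical sketch covering only the INT32 case exercised by the test.
    public static ByteBuffer parseValue(Schema schema, Object value) {
        switch (schema.type()) {
            case INT32:
                // After putInt() the position is 4, matching the expected
                // buffer built the same way in parseIntValueTest.
                return ByteBuffer.allocate(4).putInt((Integer) value);
            default:
                throw new UnsupportedOperationException(
                        "Type not covered by this sketch: " + schema.type());
        }
    }
}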
Example 3: testPutPrimitives
import java.util.ArrayList;
import java.util.List;

import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
import org.apache.kafka.connect.sink.SinkRecord;
import org.junit.Test;
/** Tests that an exception is thrown when the schema of the value is not BYTES. */
@Test
public void testPutPrimitives() {
    task.start(props);
    SinkRecord record8 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.int8(), (byte) 5, -1);
    SinkRecord record16 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.int16(), (short) 5, -1);
    SinkRecord record32 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.int32(), 5, -1);
    SinkRecord record64 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.int64(), (long) 5, -1);
    SinkRecord recordFloat32 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.float32(), (float) 8, -1);
    SinkRecord recordFloat64 =
        new SinkRecord(null, -1, null, null, SchemaBuilder.float64(), (double) 8, -1);
    SinkRecord recordBool =
        new SinkRecord(null, -1, null, null, SchemaBuilder.bool(), true, -1);
    SinkRecord recordString =
        new SinkRecord(null, -1, null, null, SchemaBuilder.string(), "Test put.", -1);
    List<SinkRecord> list = new ArrayList<>();
    list.add(record8);
    list.add(record16);
    list.add(record32);
    list.add(record64);
    list.add(recordFloat32);
    list.add(recordFloat64);
    list.add(recordBool);
    list.add(recordString);
    task.put(list);
}
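The Javadoc above implies the sink task rejects every non-BYTES value schema, so each record in the list should trigger the failure path. A hedged sketch of what such a check inside the task might look like (validateSchema is a hypothetical name; the real task's internals are not shown on this page):

private void validateSchema(SinkRecord record) {
    if (record.valueSchema().type() != Schema.Type.BYTES) {
        throw new DataException(
                "Value schema must be BYTES, got: " + record.valueSchema().type());
    }
}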
Example 4: buildDecimalSchema
import java.math.BigDecimal;

import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder; // import the required package/class
/**
 * Build a decimal schema from scale and precision.
 *
 * @param scale     The numeric scale
 * @param precision The numeric precision
 * @param optional  Whether the field is optional
 * @param backwards Whether this field needs to be backwards compatible
 *
 * @return Kafka decimal schema
 */
private Schema buildDecimalSchema(
        int scale,
        int precision,
        boolean optional,
        boolean backwards) {
    SchemaBuilder fieldBuilder;
    if (scale > 0
            || precision <= 0
            || precision - scale >= ColumnDataDecoder.NUMBER_LONG_MAX_PRECISION) {
        /* decimal type */
        fieldBuilder = Decimal.builder(scale);
        if (optional) {
            fieldBuilder.optional();
        }
        if (backwards) {
            /* add zero default for missing values */
            fieldBuilder.defaultValue(BigDecimal.ZERO.setScale(scale));
        }
    } else if (scale <= 0
            && precision - Math.abs(scale) < ColumnDataDecoder.NUMBER_INTEGER_MAX_PRECISION) {
        /* integer data type */
        fieldBuilder = SchemaBuilder.int32();
        if (optional) {
            fieldBuilder.optional();
        }
        if (backwards) {
            /* add zero default for missing values */
            fieldBuilder.defaultValue(0);
        }
    } else {
        /* long data type */
        fieldBuilder = SchemaBuilder.int64();
        if (optional) {
            fieldBuilder.optional();
        }
        if (backwards) {
            /* add zero default for missing values */
            fieldBuilder.defaultValue(0L);
        }
    }
    return fieldBuilder.build();
}
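A hypothetical usage of the helper, to make the three branches concrete. ColumnDataDecoder's NUMBER_* thresholds are not shown on this page, so the comments assume typical cutoffs of roughly 9 digits for int and 18 for long; treat the branch taken for each call as illustrative only.

Schema money   = buildDecimalSchema(2, 38, true, false);  // scale > 0, so Decimal.builder(2)
Schema smallId = buildDecimalSchema(0, 5, false, true);   // small precision, likely int32 with default 0
Schema bigId   = buildDecimalSchema(0, 15, false, false); // too wide for int32, likely int64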