本文整理汇总了Java中org.apache.flink.table.api.TableSchema类的典型用法代码示例。如果您正苦于以下问题:Java TableSchema类的具体用法?Java TableSchema怎么用?Java TableSchema使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
TableSchema类属于org.apache.flink.table.api包,在下文中一共展示了TableSchema类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testTableSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
@Test
public void testTableSchema() {
	KafkaTableSource.Builder builder = getBuilder();
	configureBuilder(builder);
	KafkaTableSource source = builder.build();

	// The built source must expose the 5-column schema set up by configureBuilder().
	TableSchema schema = source.getTableSchema();
	assertNotNull(schema);

	String[] expectedNames = {"field1", "field2", "time1", "time2", "field3"};
	Object[] expectedTypes = {
		Types.LONG(), Types.STRING(), Types.SQL_TIMESTAMP(), Types.SQL_TIMESTAMP(), Types.DOUBLE()};

	assertEquals(expectedNames.length, schema.getColumnNames().length);
	// Verify name and type column-by-column.
	for (int i = 0; i < expectedNames.length; i++) {
		assertEquals(expectedNames[i], schema.getColumnNames()[i]);
		assertEquals(expectedTypes[i], schema.getTypes()[i]);
	}
}
示例2: getTableSchema
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
@Override
public TableSchema getTableSchema() {
	// Column names of the taxi-ride table, paired index-by-index with the types below.
	String[] fieldNames = {
		"rideId", "isStart", "startLon", "startLat",
		"endLon", "endLat", "passengerCnt", "eventTime"
	};
	TypeInformation<?>[] fieldTypes = {
		Types.LONG, Types.BOOLEAN, Types.FLOAT, Types.FLOAT,
		Types.FLOAT, Types.FLOAT, Types.SHORT, Types.SQL_TIMESTAMP
	};
	return new TableSchema(fieldNames, fieldTypes);
}
示例3: mockExternalCatalogTable
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Builds a Mockito mock of {@link ExternalCatalogTable} with a single INT column "foo",
 * the given properties, and table type "kafka+json".
 */
private static ExternalCatalogTable mockExternalCatalogTable(Map<String, String> props) {
	// A one-column schema is sufficient for these tests.
	TableSchema fooSchema = new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
	ExternalCatalogTable mockTable = mock(ExternalCatalogTable.class);
	doReturn("kafka+json").when(mockTable).tableType();
	doReturn(fooSchema).when(mockTable).schema();
	doReturn(props).when(mockTable).properties();
	return mockTable;
}
示例4: Kafka09TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the result type. The field names are used
 *                 to parse the JSON file and so are the types.
 */
public Kafka09TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		TableSchema schema,
		TypeInformation<Row> typeInfo) {
	// Topic, schema and result type are handled by the base class; only the
	// deserialization schema is kept locally.
	super(topic, properties, schema, typeInfo);
	this.deserializationSchema = deserializationSchema;
}
示例5: Kafka09JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 JSON {@link StreamTableSource} that decodes JSON-encoded
 * Kafka records into table rows.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 */
public Kafka09JsonTableSource(String topic, Properties properties, TableSchema tableSchema, TableSchema jsonSchema) {
	super(topic, properties, tableSchema, jsonSchema);
}
示例6: Kafka09AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 Avro {@link StreamTableSource} backed by a given
 * {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Avro specific record.
 */
public Kafka09AvroTableSource(String topic, Properties properties, TableSchema schema, Class<? extends SpecificRecordBase> record) {
	super(topic, properties, schema, record);
}
示例7: Kafka08TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the result type. The field names are used
 *                 to parse the JSON file and so are the types.
 */
public Kafka08TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		TableSchema schema,
		TypeInformation<Row> typeInfo) {
	// Topic, schema and result type are handled by the base class; only the
	// deserialization schema is kept locally.
	super(topic, properties, schema, typeInfo);
	this.deserializationSchema = deserializationSchema;
}
示例8: Kafka08AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 Avro {@link StreamTableSource} backed by a given
 * {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Avro specific record.
 */
public Kafka08AvroTableSource(String topic, Properties properties, TableSchema schema, Class<? extends SpecificRecordBase> record) {
	super(topic, properties, schema, record);
}
示例9: Kafka08JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 JSON {@link StreamTableSource} that decodes JSON-encoded
 * Kafka records into table rows.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 */
public Kafka08JsonTableSource(String topic, Properties properties, TableSchema tableSchema, TableSchema jsonSchema) {
	super(topic, properties, tableSchema, jsonSchema);
}
示例10: Kafka011JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 JSON {@link StreamTableSource} that decodes JSON-encoded
 * Kafka records into table rows.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 */
public Kafka011JsonTableSource(String topic, Properties properties, TableSchema tableSchema, TableSchema jsonSchema) {
	super(topic, properties, tableSchema, jsonSchema);
}
示例11: Kafka011TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the result type. The field names are used
 *                 to parse the JSON file and so are the types.
 */
public Kafka011TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		TableSchema schema,
		TypeInformation<Row> typeInfo) {
	// Topic, schema and result type are handled by the base class; only the
	// deserialization schema is kept locally.
	super(topic, properties, schema, typeInfo);
	this.deserializationSchema = deserializationSchema;
}
示例12: Kafka011AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.11 Avro {@link StreamTableSource} backed by a given
 * {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Avro specific record.
 */
public Kafka011AvroTableSource(String topic, Properties properties, TableSchema schema, Class<? extends SpecificRecordBase> record) {
	super(topic, properties, schema, record);
}
示例13: Kafka010AvroTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 Avro {@link StreamTableSource} backed by a given
 * {@link SpecificRecord} class.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param schema Schema of the produced table.
 * @param record Avro specific record.
 */
public Kafka010AvroTableSource(String topic, Properties properties, TableSchema schema, Class<? extends SpecificRecordBase> record) {
	super(topic, properties, schema, record);
}
示例14: Kafka010JsonTableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 JSON {@link StreamTableSource} that decodes JSON-encoded
 * Kafka records into table rows.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param tableSchema The schema of the table.
 * @param jsonSchema The schema of the JSON messages to decode from Kafka.
 */
public Kafka010JsonTableSource(String topic, Properties properties, TableSchema tableSchema, TableSchema jsonSchema) {
	super(topic, properties, tableSchema, jsonSchema);
}
示例15: Kafka010TableSource
import org.apache.flink.table.api.TableSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 {@link StreamTableSource}.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param schema Schema of the produced table.
 * @param typeInfo Type information describing the result type. The field names are used
 *                 to parse the JSON file and so are the types.
 */
public Kafka010TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		TableSchema schema,
		TypeInformation<Row> typeInfo) {
	// Topic, schema and result type are handled by the base class; only the
	// deserialization schema is kept locally.
	super(topic, properties, schema, typeInfo);
	this.deserializationSchema = deserializationSchema;
}