本文整理汇总了Java中org.apache.flink.streaming.util.serialization.DeserializationSchema类的典型用法代码示例。如果您正苦于以下问题:Java DeserializationSchema类的具体用法?Java DeserializationSchema怎么用?Java DeserializationSchema使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
DeserializationSchema类属于org.apache.flink.streaming.util.serialization包,在下文中一共展示了DeserializationSchema类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: KafkaTableSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka {@link StreamTableSource}.
 *
 * <p>Every argument is validated eagerly so a misconfigured source fails at
 * construction time rather than at job submission.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
KafkaTableSource(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema,
        String[] fieldNames,
        TypeInformation<?>[] fieldTypes) {
    this.topic = Preconditions.checkNotNull(topic, "Topic");
    this.properties = Preconditions.checkNotNull(properties, "Properties");
    this.deserializationSchema =
            Preconditions.checkNotNull(deserializationSchema, "Deserialization schema");
    this.fieldNames = Preconditions.checkNotNull(fieldNames, "Field names");
    this.fieldTypes = Preconditions.checkNotNull(fieldTypes, "Field types");
    // Names and types are parallel arrays; they must line up one-to-one.
    Preconditions.checkArgument(
            fieldNames.length == fieldTypes.length,
            "Number of provided field names and types does not match.");
    this.typeInfo = new RowTypeInfo(fieldTypes, fieldNames);
}
示例2: KafkaTableSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka {@link StreamTableSource}.
 *
 * <p>Arguments are null-checked up front; the field-name and field-type
 * arrays must be the same length since they describe the same row schema.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
KafkaTableSource(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema,
        String[] fieldNames,
        TypeInformation<?>[] fieldTypes) {
    this.topic = Preconditions.checkNotNull(topic, "Topic");
    this.properties = Preconditions.checkNotNull(properties, "Properties");
    this.deserializationSchema =
            Preconditions.checkNotNull(deserializationSchema, "Deserialization schema");
    this.fieldNames = Preconditions.checkNotNull(fieldNames, "Field names");
    this.fieldTypes = Preconditions.checkNotNull(fieldTypes, "Field types");
    // Parallel arrays: one type per field name.
    Preconditions.checkArgument(
            fieldNames.length == fieldTypes.length,
            "Number of provided field names and types does not match.");
}
示例3: testDeserializationSchema_withValidInput
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Test case for {@link KafkaConsumerBuilder#deserializationSchema(org.apache.flink.streaming.util.serialization.DeserializationSchema)}
 * being provided a valid schema: the schema is unset on a fresh builder and
 * the exact instance handed in is returned afterwards.
 */
@Test
public void testDeserializationSchema_withValidInput() {
    final DeserializationSchema<String> stringSchema = new SimpleStringSchema();
    KafkaConsumerBuilder<String> consumerBuilder = KafkaConsumerBuilder.getInstance();
    // A fresh builder has no schema configured yet.
    Assert.assertNull(consumerBuilder.getDeserializationSchema());
    consumerBuilder = consumerBuilder.deserializationSchema(stringSchema);
    // The builder must hand back exactly the schema it was given.
    Assert.assertEquals(stringSchema, consumerBuilder.getDeserializationSchema());
}
示例4: AMQSourceConfig
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates the configuration for an ActiveMQ source.
 *
 * <p>Fails fast with a descriptive message if any collaborator is missing.
 *
 * @param connectionFactory     factory used to open the ActiveMQ connection.
 * @param destinationName       name of the queue or topic to consume.
 * @param deserializationSchema schema used to turn raw messages into records.
 * @param runningChecker        cooperative shutdown flag for the source loop.
 * @param destinationType       whether the destination is a queue or a topic.
 */
AMQSourceConfig(ActiveMQConnectionFactory connectionFactory, String destinationName,
        DeserializationSchema<OUT> deserializationSchema, RunningChecker runningChecker,
        DestinationType destinationType) {
    this.connectionFactory =
            Preconditions.checkNotNull(connectionFactory, "connectionFactory not set");
    this.destinationName =
            Preconditions.checkNotNull(destinationName, "destinationName not set");
    this.deserializationSchema =
            Preconditions.checkNotNull(deserializationSchema, "deserializationSchema not set");
    this.runningChecker =
            Preconditions.checkNotNull(runningChecker, "runningChecker not set");
    this.destinationType =
            Preconditions.checkNotNull(destinationType, "destinationType not set");
}
示例5: ZMQSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a ZeroMQ-backed source.
 *
 * @param zmqConnectionConfig   connection settings for the ZMQ endpoint.
 * @param queueName             name of the queue to read from.
 * @param usesCorrelationId     whether incoming messages carry a correlation id.
 * @param deserializationSchema schema used to deserialize received bytes.
 */
protected ZMQSource(ZMQConnectionConfig zmqConnectionConfig, String queueName,
        boolean usesCorrelationId, DeserializationSchema<OUT> deserializationSchema) {
    // NOTE(review): String.class is passed to the superclass even though the
    // element type is the generic OUT — confirm the superclass contract.
    super(String.class);
    this.schema = deserializationSchema;
    this.zmqConnectionConfig = zmqConnectionConfig;
    this.queueName = queueName;
    this.usesCorrelationId = usesCorrelationId;
}
示例6: Kafka09TableSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 {@link StreamTableSource}.
 *
 * <p>All argument validation is performed by the base-class constructor;
 * this subclass only selects the Kafka 0.9 consumer implementation.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames Row field names.
 * @param fieldTypes Row field types.
 */
public Kafka09TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
String[] fieldNames,
TypeInformation<?>[] fieldTypes) {
super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
示例7: Kafka08TableSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 {@link StreamTableSource}.
 *
 * <p>All argument validation is performed by the base-class constructor;
 * this subclass only selects the Kafka 0.8 consumer implementation.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames Row field names.
 * @param fieldTypes Row field types.
 */
public Kafka08TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
String[] fieldNames,
TypeInformation<?>[] fieldTypes) {
super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
示例8: Kafka010TableSource
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 {@link StreamTableSource}.
 *
 * <p>All argument validation is performed by the base-class constructor;
 * this subclass only selects the Kafka 0.10 consumer implementation.
 *
 * @param topic Kafka topic to consume.
 * @param properties Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames Row field names.
 * @param fieldTypes Row field types.
 */
public Kafka010TableSource(
String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
String[] fieldNames,
TypeInformation<?>[] fieldTypes) {
super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
示例9: printTopic
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Reads up to {@code stopAfter} records from the given topic and logs each
 * deserialized message together with its partition and offset.
 *
 * @param topicName             topic to read from.
 * @param config                Kafka consumer configuration (supplies the group id).
 * @param deserializationSchema schema used to deserialize each raw record.
 * @param stopAfter             maximum number of records to read.
 * @throws IOException if deserialization of a record fails.
 */
private static void printTopic(String topicName, ConsumerConfig config,
        DeserializationSchema<?> deserializationSchema,
        int stopAfter) throws IOException {
    List<MessageAndMetadata<byte[], byte[]>> contents = readTopicToList(topicName, config, stopAfter);
    // Fixed typo in the log message: "grouo" -> "group".
    LOG.info("Printing contents of topic {} in consumer group {}", topicName, config.groupId());
    for (MessageAndMetadata<byte[], byte[]> message : contents) {
        Object out = deserializationSchema.deserialize(message.message());
        // Pass 'out' directly: SLF4J renders null safely, whereas
        // out.toString() would throw an NPE if the schema returns null.
        LOG.info("Message: partition: {} offset: {} msg: {}", message.partition(), message.offset(), out);
    }
}
示例10: ConstructorTestClass
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Test helper: constructs the RMQ source under test and replaces its
 * connection factory with a spy whose {@code newConnection()} always throws,
 * so connection-failure paths can be exercised.
 *
 * @param rmqConnectionConfig   connection settings forwarded to the superclass.
 * @param queueName             queue name forwarded to the superclass.
 * @param usesCorrelationId     correlation-id flag forwarded to the superclass.
 * @param deserializationSchema schema forwarded to the superclass.
 * @throws Exception if the superclass constructor fails.
 */
public ConstructorTestClass(RMQConnectionConfig rmqConnectionConfig,
        String queueName,
        boolean usesCorrelationId,
        DeserializationSchema<String> deserializationSchema) throws Exception {
    super(rmqConnectionConfig, queueName, usesCorrelationId, deserializationSchema);
    final RMQConnectionConfig.Builder connectionConfigBuilder = new RMQConnectionConfig.Builder();
    connectionConfigBuilder.setHost("hostTest").setPort(999).setUserName("userTest").setPassword("passTest").setVirtualHost("/");
    factory = Mockito.spy(connectionConfigBuilder.build().getConnectionFactory());
    try {
        // Force every connection attempt through the spy to fail.
        Mockito.doThrow(new RuntimeException()).when(factory).newConnection();
    } catch (IOException e) {
        fail("Failed to stub connection method");
    }
}
示例11: FlinkConsumer
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a consumer for the given topic, delegating to the superclass.
 *
 * <p>NOTE(review): {@code DeserializationSchema} is used here as a raw type;
 * it should carry this consumer's element type parameter — confirm the
 * enclosing class's type parameter and parameterize accordingly.
 *
 * @param topic Kafka topic to consume.
 * @param schema schema used to deserialize Kafka records.
 * @param props Kafka consumer properties.
 */
public FlinkConsumer(String topic, DeserializationSchema schema,
Properties props) {
super(topic, schema, props);
}
示例12: getDeserializationSchema
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Returns the configured deserialization schema.
 *
 * <p>NOTE(review): may be {@code null} if no schema has been set — the
 * field's initialization is outside this view; confirm in the enclosing class.
 */
public DeserializationSchema<OUT> getDeserializationSchema() {
return deserializationSchema;
}
示例13: setDeserializationSchema
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Sets the deserialization schema used to turn raw messages into records.
 *
 * @param deserializationSchema schema to use; must not be {@code null}.
 * @return this builder, for call chaining.
 */
public AMQSourceConfigBuilder<OUT> setDeserializationSchema(DeserializationSchema<OUT> deserializationSchema) {
    // Include a message, matching the style of AMQSourceConfig's own null checks.
    this.deserializationSchema =
            Preconditions.checkNotNull(deserializationSchema, "deserializationSchema not set");
    return this;
}
示例14: getKafkaConsumer
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates the version-specific Kafka consumer (0.10) for the given topic.
 *
 * @param topic Kafka topic to consume.
 * @param properties Kafka consumer properties.
 * @param deserializationSchema schema used to turn Kafka records into rows.
 * @return a {@code FlinkKafkaConsumer010} wired with the given arguments.
 */
@Override
FlinkKafkaConsumerBase<Row> getKafkaConsumer(String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
return new FlinkKafkaConsumer010<>(topic, deserializationSchema, properties);
}
示例15: getKafkaConsumer
import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates the version-specific Kafka consumer (0.9) for the given topic.
 *
 * @param topic Kafka topic to consume.
 * @param properties Kafka consumer properties.
 * @param deserializationSchema schema used to turn Kafka records into rows.
 * @return a {@code FlinkKafkaConsumer09} wired with the given arguments.
 */
@Override
FlinkKafkaConsumerBase<Row> getKafkaConsumer(String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}