当前位置: 首页>>代码示例>>Java>>正文


Java DeserializationSchema类代码示例

本文整理汇总了Java中org.apache.flink.streaming.util.serialization.DeserializationSchema的典型用法代码示例。如果您正苦于以下问题:Java DeserializationSchema类的具体用法?Java DeserializationSchema怎么用?Java DeserializationSchema使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


DeserializationSchema类属于org.apache.flink.streaming.util.serialization包,在下文中一共展示了DeserializationSchema类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: KafkaTableSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka {@link StreamTableSource}.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
KafkaTableSource(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema,
        String[] fieldNames,
        TypeInformation<?>[] fieldTypes) {

    // Fail fast on null arguments. The check order determines which
    // NullPointerException surfaces first, so it is deliberately kept as written.
    this.topic = Preconditions.checkNotNull(topic, "Topic");
    this.properties = Preconditions.checkNotNull(properties, "Properties");
    this.deserializationSchema = Preconditions.checkNotNull(deserializationSchema, "Deserialization schema");
    this.fieldNames = Preconditions.checkNotNull(fieldNames, "Field names");
    this.fieldTypes = Preconditions.checkNotNull(fieldTypes, "Field types");

    // fieldNames and fieldTypes are parallel arrays describing the produced Row schema.
    Preconditions.checkArgument(fieldNames.length == fieldTypes.length,
            "Number of provided field names and types does not match.");
    
    // Pre-build the row type info exposed to the Table API for this schema.
    this.typeInfo = new RowTypeInfo(fieldTypes, fieldNames);
}
 
开发者ID:datafibers-community,项目名称:df_data_service,代码行数:28,代码来源:KafkaTableSource.java

示例2: KafkaTableSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a generic Kafka {@link StreamTableSource}.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
KafkaTableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		String[] fieldNames,
		TypeInformation<?>[] fieldTypes) {

	// Fail fast on null arguments. The check order determines which
	// NullPointerException surfaces first, so it is deliberately kept as written.
	this.topic = Preconditions.checkNotNull(topic, "Topic");
	this.properties = Preconditions.checkNotNull(properties, "Properties");
	this.deserializationSchema = Preconditions.checkNotNull(deserializationSchema, "Deserialization schema");
	this.fieldNames = Preconditions.checkNotNull(fieldNames, "Field names");
	this.fieldTypes = Preconditions.checkNotNull(fieldTypes, "Field types");

	// fieldNames and fieldTypes are parallel arrays describing the produced Row schema.
	Preconditions.checkArgument(fieldNames.length == fieldTypes.length,
			"Number of provided field names and types does not match.");
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:26,代码来源:KafkaTableSource.java

示例3: testDeserializationSchema_withValidInput

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Verifies that {@link KafkaConsumerBuilder#deserializationSchema(org.apache.flink.streaming.util.serialization.DeserializationSchema)}
 * stores a valid schema: the builder starts with no schema, and after the call
 * the exact same schema instance is returned by the getter.
 */
@Test
public void testDeserializationSchema_withValidInput() {
	// A fresh builder must not carry any schema yet.
	KafkaConsumerBuilder<String> consumerBuilder = KafkaConsumerBuilder.getInstance();
	Assert.assertNull(consumerBuilder.getDeserializationSchema());

	// Providing a valid schema must make the builder expose that very instance.
	DeserializationSchema<String> stringSchema = new SimpleStringSchema();
	consumerBuilder = consumerBuilder.deserializationSchema(stringSchema);
	Assert.assertEquals(stringSchema, consumerBuilder.getDeserializationSchema());
}
 
开发者ID:ottogroup,项目名称:flink-operator-library,代码行数:13,代码来源:KafkaConsumerBuilderTest.java

示例4: AMQSourceConfig

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a configuration holder for an ActiveMQ source.
 *
 * @param connectionFactory     factory used to open the ActiveMQ connection
 * @param destinationName       name of the queue or topic to consume from
 * @param deserializationSchema schema used to turn raw messages into OUT records
 * @param runningChecker        cooperative flag used to signal the source to stop
 * @param destinationType       whether the destination is a queue or a topic
 */
AMQSourceConfig(ActiveMQConnectionFactory connectionFactory, String destinationName,
                DeserializationSchema<OUT> deserializationSchema, RunningChecker runningChecker,
                DestinationType destinationType) {
    // Fail fast on null arguments; the check order fixes which error is reported first.
    this.connectionFactory = Preconditions.checkNotNull(connectionFactory, "connectionFactory not set");
    this.destinationName = Preconditions.checkNotNull(destinationName, "destinationName not set");
    this.deserializationSchema = Preconditions.checkNotNull(deserializationSchema, "deserializationSchema not set");
    this.runningChecker = Preconditions.checkNotNull(runningChecker, "runningChecker not set");
    this.destinationType = Preconditions.checkNotNull(destinationType, "destinationType not set");
}
 
开发者ID:apache,项目名称:bahir-flink,代码行数:10,代码来源:AMQSourceConfig.java

示例5: ZMQSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a ZeroMQ streaming source.
 *
 * @param zmqConnectionConfig   connection settings for the ZeroMQ endpoint
 * @param queueName             name of the queue to consume from
 * @param usesCorrelationId     whether incoming messages carry a correlation id
 * @param deserializationSchema schema used to turn raw bytes into OUT records
 */
protected ZMQSource(ZMQConnectionConfig zmqConnectionConfig, String queueName,
					boolean usesCorrelationId, DeserializationSchema<OUT> deserializationSchema) {
	// NOTE(review): the super type is always told String.class here even though the
	// source is generic in OUT — confirm against the base class that this is intended.
	super(String.class);
	this.schema = deserializationSchema;
	this.queueName = queueName;
	this.zmqConnectionConfig = zmqConnectionConfig;
	this.usesCorrelationId = usesCorrelationId;
}
 
开发者ID:omaralvarez,项目名称:flink-zeromq,代码行数:9,代码来源:ZMQSource.java

示例6: Kafka09TableSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.9 {@link StreamTableSource}.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
public Kafka09TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		String[] fieldNames,
		TypeInformation<?>[] fieldTypes) {

	// All validation and field setup is delegated to the shared base class.
	super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:19,代码来源:Kafka09TableSource.java

示例7: Kafka08TableSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.8 {@link StreamTableSource}.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
public Kafka08TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		String[] fieldNames,
		TypeInformation<?>[] fieldTypes) {

	// All validation and field setup is delegated to the shared base class.
	super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:19,代码来源:Kafka08TableSource.java

示例8: Kafka010TableSource

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a Kafka 0.10 {@link StreamTableSource}.
 *
 * @param topic                 Kafka topic to consume.
 * @param properties            Properties for the Kafka consumer.
 * @param deserializationSchema Deserialization schema to use for Kafka records.
 * @param fieldNames            Row field names.
 * @param fieldTypes            Row field types.
 */
public Kafka010TableSource(
		String topic,
		Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		String[] fieldNames,
		TypeInformation<?>[] fieldTypes) {

	// All validation and field setup is delegated to the shared base class.
	super(topic, properties, deserializationSchema, fieldNames, fieldTypes);
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:19,代码来源:Kafka010TableSource.java

示例9: printTopic

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Reads up to {@code stopAfter} messages from the given topic and logs each
 * deserialized record together with its partition and offset.
 *
 * @param topicName             topic to read from
 * @param config                consumer configuration (group id is logged)
 * @param deserializationSchema schema used to decode each raw message payload
 * @param stopAfter             maximum number of messages to read
 * @throws IOException if deserialization fails
 */
private static void printTopic(String topicName, ConsumerConfig config,
							DeserializationSchema<?> deserializationSchema,
							int stopAfter) throws IOException {

	List<MessageAndMetadata<byte[], byte[]>> contents = readTopicToList(topicName, config, stopAfter);
	// Fixed typo in the log message: "grouo" -> "group".
	LOG.info("Printing contents of topic {} in consumer group {}", topicName, config.groupId());

	for (MessageAndMetadata<byte[], byte[]> message: contents) {
		Object out = deserializationSchema.deserialize(message.message());
		LOG.info("Message: partition: {} offset: {} msg: {}", message.partition(), message.offset(), out.toString());
	}
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:13,代码来源:KafkaConsumerTestBase.java

示例10: ConstructorTestClass

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Test subclass whose connection factory is a Mockito spy stubbed to throw a
 * {@link RuntimeException} on {@code newConnection()}, so connection failures
 * can be exercised deterministically.
 *
 * @throws Exception if the superclass constructor fails
 */
public ConstructorTestClass(RMQConnectionConfig rmqConnectionConfig,
							String queueName,
							boolean usesCorrelationId,
							DeserializationSchema<String> deserializationSchema) throws Exception {
	super(rmqConnectionConfig, queueName, usesCorrelationId, deserializationSchema);

	// Build a throwaway connection config purely to obtain a factory to spy on.
	RMQConnectionConfig.Builder configBuilder = new RMQConnectionConfig.Builder();
	configBuilder.setHost("hostTest")
			.setPort(999)
			.setUserName("userTest")
			.setPassword("passTest")
			.setVirtualHost("/");
	factory = Mockito.spy(configBuilder.build().getConnectionFactory());

	// Stub newConnection() to always fail; an IOException here means stubbing broke.
	try {
		Mockito.doThrow(new RuntimeException()).when(factory).newConnection();
	} catch (IOException e) {
		fail("Failed to stub connection method");
	}
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:15,代码来源:RMQSourceTest.java

示例11: FlinkConsumer

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates a consumer for the given topic, delegating entirely to the superclass.
 *
 * NOTE(review): {@code schema} is a raw {@code DeserializationSchema}; consider
 * parameterizing it once the class's own type parameters are confirmed.
 *
 * @param topic  Kafka topic to consume
 * @param schema deserialization schema for the records
 * @param props  Kafka consumer properties
 */
public FlinkConsumer(String topic, DeserializationSchema schema,
        Properties props) {
  super(topic, schema, props);
}
 
开发者ID:hopshadoop,项目名称:hops-util,代码行数:5,代码来源:FlinkConsumer.java

示例12: getDeserializationSchema

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Returns the deserialization schema configured for this source.
 *
 * @return the schema used to turn raw messages into OUT records
 */
public DeserializationSchema<OUT> getDeserializationSchema() {
    return deserializationSchema;
}
 
开发者ID:apache,项目名称:bahir-flink,代码行数:4,代码来源:AMQSourceConfig.java

示例13: setDeserializationSchema

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Sets the deserialization schema to use for incoming messages.
 *
 * @param deserializationSchema schema used to turn raw messages into OUT records; must not be null
 * @return this builder, for call chaining
 */
public AMQSourceConfigBuilder<OUT> setDeserializationSchema(DeserializationSchema<OUT> deserializationSchema) {
    // Include an error message, matching the AMQSourceConfig constructor's checks
    // in this same file ("deserializationSchema not set").
    this.deserializationSchema = Preconditions.checkNotNull(deserializationSchema, "deserializationSchema not set");
    return this;
}
 
开发者ID:apache,项目名称:bahir-flink,代码行数:5,代码来源:AMQSourceConfig.java

示例14: getKafkaConsumer

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates the version-specific consumer: a {@link FlinkKafkaConsumer010}
 * for the given topic, properties, and deserialization schema.
 */
@Override
FlinkKafkaConsumerBase<Row> getKafkaConsumer(String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer010<>(topic, deserializationSchema, properties);
}
 
开发者ID:datafibers-community,项目名称:df_data_service,代码行数:5,代码来源:Kafka010AvroTableSource.java

示例15: getKafkaConsumer

import org.apache.flink.streaming.util.serialization.DeserializationSchema; //导入依赖的package包/类
/**
 * Creates the version-specific consumer: a {@link FlinkKafkaConsumer09}
 * for the given topic, properties, and deserialization schema.
 */
@Override
FlinkKafkaConsumerBase<Row> getKafkaConsumer(String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}
 
开发者ID:datafibers-community,项目名称:df_data_service,代码行数:5,代码来源:Kafka09AvroTableSource.java


注:本文中的org.apache.flink.streaming.util.serialization.DeserializationSchema类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。