本文整理汇总了Java中org.apache.kafka.common.serialization.Serializer.configure方法的典型用法代码示例。如果您正苦于以下问题：Java Serializer.configure方法的具体用法？Java Serializer.configure怎么用？Java Serializer.configure使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.kafka.common.serialization.Serializer
的用法示例。
在下文中一共展示了Serializer.configure方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: defaultSerdeShouldBeConfigured
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
@Test
public void defaultSerdeShouldBeConfigured() {
    // Distinct encodings for keys and values so the two paths are distinguishable.
    final Map<String, Object> encodingOverrides = new HashMap<>();
    encodingOverrides.put("key.serializer.encoding", "UTF8");
    encodingOverrides.put("value.serializer.encoding", "UTF-16");

    final Serializer<String> serializer = Serdes.String().serializer();
    final String payload = "my string for testing";
    final String topic = "my topic";

    // Key path: configure as a key serializer, then round-trip through the
    // default key serde's deserializer.
    serializer.configure(encodingOverrides, true);
    assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
        payload,
        streamsConfig.defaultKeySerde().deserializer().deserialize(topic, serializer.serialize(topic, payload)));

    // Value path: reconfigure as a value serializer and repeat against the
    // default value serde.
    serializer.configure(encodingOverrides, false);
    assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
        payload,
        streamsConfig.defaultValueSerde().deserializer().deserialize(topic, serializer.serialize(topic, payload)));
}
示例2: main
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
/**
 * Starts a Kafka Streams topology that reads {@code Messung} records from
 * topic "produktion", keeps only measurements of type "Biogas", and writes
 * the survivors to topic "produktion2".
 *
 * @param args unused command-line arguments
 * @throws InterruptedException if the calling thread is interrupted
 */
public static void main(String[] args) throws InterruptedException {
    final Properties streamProps = new Properties();
    streamProps.put(APPLICATION_ID_CONFIG, "my-stream-processing-application");
    streamProps.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    streamProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    streamProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    streamProps.put("value.serializer", JsonPOJOSerializer.class.getName());
    streamProps.put("value.deserializer", JsonPOJODeserializer.class.getName());

    // Tell the JSON (de)serializer which POJO class to bind payloads to.
    final Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put("JsonPOJOClass", Messung.class);

    final Serializer<Messung> messungSerializer = new JsonPOJOSerializer<>();
    messungSerializer.configure(serdeConfig, false);
    final Deserializer<Messung> messungDeserializer = new JsonPOJODeserializer<>();
    messungDeserializer.configure(serdeConfig, false);
    final Serde<Messung> messungSerde = Serdes.serdeFrom(messungSerializer, messungDeserializer);

    final KStreamBuilder builder = new KStreamBuilder();
    builder.stream(Serdes.String(), messungSerde, "produktion")
        .filter((k, v) -> v.type.equals("Biogas"))
        .to(Serdes.String(), messungSerde, "produktion2");

    final KafkaStreams streams = new KafkaStreams(builder, new StreamsConfig(streamProps));
    streams.start();
}
示例3: LiKafkaProducerImpl
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
@SuppressWarnings("unchecked")
private LiKafkaProducerImpl(LiKafkaProducerConfig configs,
Serializer<K> keySerializer,
Serializer<V> valueSerializer,
Serializer<LargeMessageSegment> largeMessageSegmentSerializer,
Auditor<K, V> auditor) {
// Instantiate the open source producer, which always sents raw bytes.
_producer = new KafkaProducer<>(configs.originals(), new ByteArraySerializer(), new ByteArraySerializer());
// Instantiate the key serializer if necessary.
_keySerializer = keySerializer != null ? keySerializer :
configs.getConfiguredInstance(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, Serializer.class);
_keySerializer.configure(configs.originals(), true);
// Instantiate the key serializer if necessary.
_valueSerializer = valueSerializer != null ? valueSerializer :
configs.getConfiguredInstance(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Serializer.class);
_valueSerializer.configure(configs.originals(), false);
// prepare to handle large messages.
_largeMessageEnabled = configs.getBoolean(LiKafkaProducerConfig.LARGE_MESSAGE_ENABLED_CONFIG);
_maxMessageSegmentSize = configs.getInt(LiKafkaProducerConfig.MAX_MESSAGE_SEGMENT_BYTES_CONFIG);
Serializer<LargeMessageSegment> segmentSerializer = largeMessageSegmentSerializer != null ? largeMessageSegmentSerializer :
configs.getConfiguredInstance(LiKafkaProducerConfig.SEGMENT_SERIALIZER_CLASS_CONFIG, Serializer.class);
segmentSerializer.configure(configs.originals(), false);
_messageSplitter = new MessageSplitterImpl(_maxMessageSegmentSize, segmentSerializer);
// Instantiate auditor if necessary
_auditor = auditor != null ? auditor :
configs.getConfiguredInstance(LiKafkaProducerConfig.AUDITOR_CLASS_CONFIG, Auditor.class);
_auditor.configure(configs.configsWithCurrentProducer(_producer));
_auditor.start();
_numThreadsInSend = new AtomicInteger(0);
_closed = false;
}
示例4: getGenericRowSerde
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
boolean isInternal,
SchemaRegistryClient schemaRegistryClient) {
Map<String, Object> serdeProps = new HashMap<>();
serdeProps.put("JsonPOJOClass", GenericRow.class);
final Serializer<GenericRow> genericRowSerializer = new KsqlJsonSerializer(schema);
genericRowSerializer.configure(serdeProps, false);
final Deserializer<GenericRow> genericRowDeserializer = new KsqlJsonDeserializer(schema);
genericRowDeserializer.configure(serdeProps, false);
return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
示例5: getGenericRowSerde
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
boolean isInternal,
SchemaRegistryClient schemaRegistryClient) {
Map<String, Object> serdeProps = new HashMap<>();
final Serializer<GenericRow> genericRowSerializer = new KsqlDelimitedSerializer(schema);
genericRowSerializer.configure(serdeProps, false);
final Deserializer<GenericRow> genericRowDeserializer = new KsqlDelimitedDeserializer(schema);
genericRowDeserializer.configure(serdeProps, false);
return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
示例6: LiKafkaProducerImpl
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
@SuppressWarnings("unchecked")
private LiKafkaProducerImpl(LiKafkaProducerConfig configs,
Serializer<K> keySerializer,
Serializer<V> valueSerializer,
Serializer<LargeMessageSegment> largeMessageSegmentSerializer,
Auditor<K, V> auditor) {
// Instantiate the open source producer, which always sents raw bytes.
_producer = new KafkaProducer<>(configs.originals(), new ByteArraySerializer(), new ByteArraySerializer());
try {
// Instantiate the key serializer if necessary.
_keySerializer = keySerializer != null ? keySerializer
: configs.getConfiguredInstance(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, Serializer.class);
_keySerializer.configure(configs.originals(), true);
// Instantiate the key serializer if necessary.
_valueSerializer = valueSerializer != null ? valueSerializer
: configs.getConfiguredInstance(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Serializer.class);
_valueSerializer.configure(configs.originals(), false);
// prepare to handle large messages.
_largeMessageEnabled = configs.getBoolean(LiKafkaProducerConfig.LARGE_MESSAGE_ENABLED_CONFIG);
_maxMessageSegmentSize = configs.getInt(LiKafkaProducerConfig.MAX_MESSAGE_SEGMENT_BYTES_CONFIG);
Serializer<LargeMessageSegment> segmentSerializer = largeMessageSegmentSerializer != null ? largeMessageSegmentSerializer
: configs.getConfiguredInstance(LiKafkaProducerConfig.SEGMENT_SERIALIZER_CLASS_CONFIG, Serializer.class);
segmentSerializer.configure(configs.originals(), false);
_uuidFactory = configs.getConfiguredInstance(LiKafkaProducerConfig.UUID_FACTORY_CLASS_CONFIG, UUIDFactory.class);
_messageSplitter = new MessageSplitterImpl(_maxMessageSegmentSize, segmentSerializer, _uuidFactory);
// Instantiate auditor if necessary
if (auditor != null) {
_auditor = auditor;
_auditor.configure(configs.configsWithCurrentProducer(_producer));
} else {
_auditor = configs.getConfiguredInstance(LiKafkaProducerConfig.AUDITOR_CLASS_CONFIG, Auditor.class, _producer);
}
_auditor.start();
_numThreadsInSend = new AtomicInteger(0);
_closed = false;
} catch (Exception e) {
_producer.close();
throw e;
}
}
示例7: getJsonSerializer
import org.apache.kafka.common.serialization.Serializer; //导入方法依赖的package包/类
/**
 * Creates a JSON serializer configured for key or value use with no extra
 * configuration options.
 *
 * @param isKey true if the serializer will handle record keys
 * @return a configured {@code KafkaJsonSerializer}
 */
private static <T> Serializer<T> getJsonSerializer(boolean isKey) {
    final Serializer<T> jsonSerializer = new KafkaJsonSerializer<>();
    jsonSerializer.configure(Collections.emptyMap(), isKey);
    return jsonSerializer;
}