This article collects typical usage examples of the Java method org.apache.kafka.common.serialization.Serializer.configure. If you are wondering what exactly Serializer.configure does, how to call it, or what real code that uses it looks like, the curated method examples below should help. You can also read further about the enclosing class, org.apache.kafka.common.serialization.Serializer.
The following shows 7 code examples of Serializer.configure, ordered by popularity by default.
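Before the examples, here is a minimal, self-contained sketch of the configure contract, using Kafka's built-in StringSerializer (the topic name and encoding value are illustrative choices, not taken from the examples below):

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class ConfigureSketch {
    public static void main(String[] args) {
        // configure(Map, boolean) is called once before the first serialize();
        // the boolean argument is true when the serializer handles record keys.
        Map<String, Object> configs = new HashMap<>();
        configs.put("key.serializer.encoding", "UTF-16");

        Serializer<String> serializer = new StringSerializer();
        serializer.configure(configs, true); // isKey = true, so the key.* setting applies

        byte[] bytes = serializer.serialize("demo-topic", "hello");
        System.out.println(bytes.length); // 12 with UTF-16 (2-byte BOM + 5 two-byte chars), vs 5 with the default UTF-8
        serializer.close();
    }
}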
Example 1: defaultSerdeShouldBeConfigured

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

@Test
public void defaultSerdeShouldBeConfigured() {
    final Map<String, Object> serializerConfigs = new HashMap<>();
    serializerConfigs.put("key.serializer.encoding", "UTF8");
    serializerConfigs.put("value.serializer.encoding", "UTF-16");
    final Serializer<String> serializer = Serdes.String().serializer();
    final String str = "my string for testing";
    final String topic = "my topic";
    serializer.configure(serializerConfigs, true);
    assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
        str, streamsConfig.defaultKeySerde().deserializer().deserialize(topic, serializer.serialize(topic, str)));
    serializer.configure(serializerConfigs, false);
    assertEquals("Should get the original string after serialization and deserialization with the configured encoding",
        str, streamsConfig.defaultValueSerde().deserializer().deserialize(topic, serializer.serialize(topic, str)));
}
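The second argument to configure() tells the serializer whether it will handle record keys (true) or values (false); for StringSerializer that choice determines whether key.serializer.encoding or value.serializer.encoding takes effect.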
Example 2: main

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

public static void main(String[] args) throws InterruptedException {
    Properties props = new Properties();
    props.put(APPLICATION_ID_CONFIG, "my-stream-processing-application");
    props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.serializer", JsonPOJOSerializer.class.getName());
    props.put("value.deserializer", JsonPOJODeserializer.class.getName());
    Map<String, Object> serdeProps = new HashMap<>();
    serdeProps.put("JsonPOJOClass", Messung.class);
    final Serializer<Messung> serializer = new JsonPOJOSerializer<>();
    serializer.configure(serdeProps, false);
    final Deserializer<Messung> deserializer = new JsonPOJODeserializer<>();
    deserializer.configure(serdeProps, false);
    final Serde<Messung> serde = Serdes.serdeFrom(serializer, deserializer);
    StreamsConfig config = new StreamsConfig(props);
    KStreamBuilder builder = new KStreamBuilder();
    builder.stream(Serdes.String(), serde, "produktion")
        .filter((k, v) -> v.type.equals("Biogas"))
        .to(Serdes.String(), serde, "produktion2");
    KafkaStreams streams = new KafkaStreams(builder, config);
    streams.start();
}
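Note that KStreamBuilder and the stream(...)/to(...) overloads that take Serde arguments belong to the pre-1.0 Kafka Streams API; current releases replace them with StreamsBuilder together with Consumed.with(...) and Produced.with(...).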
Example 3: LiKafkaProducerImpl

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

@SuppressWarnings("unchecked")
private LiKafkaProducerImpl(LiKafkaProducerConfig configs,
                            Serializer<K> keySerializer,
                            Serializer<V> valueSerializer,
                            Serializer<LargeMessageSegment> largeMessageSegmentSerializer,
                            Auditor<K, V> auditor) {
    // Instantiate the open source producer, which always sends raw bytes.
    _producer = new KafkaProducer<>(configs.originals(), new ByteArraySerializer(), new ByteArraySerializer());
    // Instantiate the key serializer if necessary.
    _keySerializer = keySerializer != null ? keySerializer :
        configs.getConfiguredInstance(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, Serializer.class);
    _keySerializer.configure(configs.originals(), true);
    // Instantiate the value serializer if necessary.
    _valueSerializer = valueSerializer != null ? valueSerializer :
        configs.getConfiguredInstance(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Serializer.class);
    _valueSerializer.configure(configs.originals(), false);
    // Prepare to handle large messages.
    _largeMessageEnabled = configs.getBoolean(LiKafkaProducerConfig.LARGE_MESSAGE_ENABLED_CONFIG);
    _maxMessageSegmentSize = configs.getInt(LiKafkaProducerConfig.MAX_MESSAGE_SEGMENT_BYTES_CONFIG);
    Serializer<LargeMessageSegment> segmentSerializer = largeMessageSegmentSerializer != null ? largeMessageSegmentSerializer :
        configs.getConfiguredInstance(LiKafkaProducerConfig.SEGMENT_SERIALIZER_CLASS_CONFIG, Serializer.class);
    segmentSerializer.configure(configs.originals(), false);
    _messageSplitter = new MessageSplitterImpl(_maxMessageSegmentSize, segmentSerializer);
    // Instantiate the auditor if necessary.
    _auditor = auditor != null ? auditor :
        configs.getConfiguredInstance(LiKafkaProducerConfig.AUDITOR_CLASS_CONFIG, Auditor.class);
    _auditor.configure(configs.configsWithCurrentProducer(_producer));
    _auditor.start();
    _numThreadsInSend = new AtomicInteger(0);
    _closed = false;
}
Example 4: getGenericRowSerde

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
    Map<String, Object> serdeProps = new HashMap<>();
    serdeProps.put("JsonPOJOClass", GenericRow.class);
    final Serializer<GenericRow> genericRowSerializer = new KsqlJsonSerializer(schema);
    genericRowSerializer.configure(serdeProps, false);
    final Deserializer<GenericRow> genericRowDeserializer = new KsqlJsonDeserializer(schema);
    genericRowDeserializer.configure(serdeProps, false);
    return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
Example 5: getGenericRowSerde

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
    Map<String, Object> serdeProps = new HashMap<>();
    final Serializer<GenericRow> genericRowSerializer = new KsqlDelimitedSerializer(schema);
    genericRowSerializer.configure(serdeProps, false);
    final Deserializer<GenericRow> genericRowDeserializer = new KsqlDelimitedDeserializer(schema);
    genericRowDeserializer.configure(serdeProps, false);
    return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
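Unlike the JSON serde in Example 4, the delimited serializer needs no target POJO class, so serdeProps stays empty here; configure() is still called so the Serializer contract is honored.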
Example 6: LiKafkaProducerImpl

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

@SuppressWarnings("unchecked")
private LiKafkaProducerImpl(LiKafkaProducerConfig configs,
                            Serializer<K> keySerializer,
                            Serializer<V> valueSerializer,
                            Serializer<LargeMessageSegment> largeMessageSegmentSerializer,
                            Auditor<K, V> auditor) {
    // Instantiate the open source producer, which always sends raw bytes.
    _producer = new KafkaProducer<>(configs.originals(), new ByteArraySerializer(), new ByteArraySerializer());
    try {
        // Instantiate the key serializer if necessary.
        _keySerializer = keySerializer != null ? keySerializer
            : configs.getConfiguredInstance(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, Serializer.class);
        _keySerializer.configure(configs.originals(), true);
        // Instantiate the value serializer if necessary.
        _valueSerializer = valueSerializer != null ? valueSerializer
            : configs.getConfiguredInstance(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, Serializer.class);
        _valueSerializer.configure(configs.originals(), false);
        // Prepare to handle large messages.
        _largeMessageEnabled = configs.getBoolean(LiKafkaProducerConfig.LARGE_MESSAGE_ENABLED_CONFIG);
        _maxMessageSegmentSize = configs.getInt(LiKafkaProducerConfig.MAX_MESSAGE_SEGMENT_BYTES_CONFIG);
        Serializer<LargeMessageSegment> segmentSerializer = largeMessageSegmentSerializer != null ? largeMessageSegmentSerializer
            : configs.getConfiguredInstance(LiKafkaProducerConfig.SEGMENT_SERIALIZER_CLASS_CONFIG, Serializer.class);
        segmentSerializer.configure(configs.originals(), false);
        _uuidFactory = configs.getConfiguredInstance(LiKafkaProducerConfig.UUID_FACTORY_CLASS_CONFIG, UUIDFactory.class);
        _messageSplitter = new MessageSplitterImpl(_maxMessageSegmentSize, segmentSerializer, _uuidFactory);
        // Instantiate the auditor if necessary.
        if (auditor != null) {
            _auditor = auditor;
            _auditor.configure(configs.configsWithCurrentProducer(_producer));
        } else {
            _auditor = configs.getConfiguredInstance(LiKafkaProducerConfig.AUDITOR_CLASS_CONFIG, Auditor.class, _producer);
        }
        _auditor.start();
        _numThreadsInSend = new AtomicInteger(0);
        _closed = false;
    } catch (Exception e) {
        _producer.close();
        throw e;
    }
}
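Compared with Example 3, this variant wraps construction in try/catch and closes the already-created byte-array producer if any later step throws, so a failing constructor does not leak the underlying KafkaProducer.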
Example 7: getJsonSerializer

import org.apache.kafka.common.serialization.Serializer; // import the package/class that the method depends on

private static <T> Serializer<T> getJsonSerializer(boolean isKey) {
    Serializer<T> result = new KafkaJsonSerializer<>();
    result.configure(Collections.emptyMap(), isKey);
    return result;
}