This article collects typical usage examples of the Java method org.apache.kafka.common.serialization.Serdes.String. If you have been wondering what exactly Serdes.String does and how to use it, the curated code examples below may help. You can also explore further usage of the method's enclosing class, org.apache.kafka.common.serialization.Serdes.
Fifteen code examples of the Serdes.String method are shown below, sorted by popularity by default.
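Before turning to the collected examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what Serdes.String() actually returns: a Serde<String> bundling a UTF-8 String Serializer and Deserializer. The class name SerdesStringDemo and the topic name "demo-topic" are purely illustrative.
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;

public class SerdesStringDemo {
    public static void main(String[] args) {
        // Serdes.String() returns a Serde<String> backed by UTF-8 encoding.
        final Serde<String> stringSerde = Serdes.String();
        final Serializer<String> serializer = stringSerde.serializer();
        final Deserializer<String> deserializer = stringSerde.deserializer();

        // Round-trip a value. The String serde ignores the topic argument,
        // but the argument is part of the Serializer/Deserializer contract.
        final byte[] bytes = serializer.serialize("demo-topic", "hello");
        System.out.println(deserializer.deserialize("demo-topic", bytes)); // prints "hello"
    }
}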
Example 1: before
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Before
public void before() {
final SessionKeySchema schema = new SessionKeySchema();
schema.init("topic");
bytesStore = new RocksDBSegmentedBytesStore(storeName,
retention,
numSegments,
schema);
stateDir = TestUtils.tempDirectory();
context = new MockProcessorContext(
stateDir,
Serdes.String(),
Serdes.Long(),
new NoOpRecordCollector(),
new ThreadCache("testCache", 0, new MockStreamsMetrics(new Metrics())));
bytesStore.init(context, bytesStore);
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 19, Source: RocksDBSegmentedBytesStoreTest.java
Example 2: createIterator
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
private MergedSortedCacheWindowStoreKeyValueIterator<String, String> createIterator(
final Iterator<KeyValue<Windowed<Bytes>, byte[]>> storeKvs,
final Iterator<KeyValue<Bytes, LRUCacheEntry>> cacheKvs
) {
final DelegatingPeekingKeyValueIterator<Windowed<Bytes>, byte[]> storeIterator
= new DelegatingPeekingKeyValueIterator<>("store", new KeyValueIteratorStub<>(storeKvs));
final PeekingKeyValueIterator<Bytes, LRUCacheEntry> cacheIterator
= new DelegatingPeekingKeyValueIterator<>("cache", new KeyValueIteratorStub<>(cacheKvs));
return new MergedSortedCacheWindowStoreKeyValueIterator<>(
cacheIterator,
storeIterator,
new StateSerdes<>("name", Serdes.String(), Serdes.String()),
WINDOW_SIZE,
SINGLE_SEGMENT_CACHE_FUNCTION
);
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 18, Source: MergedSortedCacheWrappedWindowStoreKeyValueIteratorTest.java
Example 3: shouldNotBeLoggingEnabledStoreWhenLogginNotEnabled
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void shouldNotBeLoggingEnabledStoreWhenLogginNotEnabled() throws Exception {
store = createStore(false, false);
final List<ProducerRecord> logged = new ArrayList<>();
final NoOpRecordCollector collector = new NoOpRecordCollector() {
@Override
public <K, V> void send(final String topic,
K key,
V value,
Integer partition,
Long timestamp,
Serializer<K> keySerializer,
Serializer<V> valueSerializer) {
logged.add(new ProducerRecord<K, V>(topic, partition, timestamp, key, value));
}
};
final MockProcessorContext context = new MockProcessorContext(TestUtils.tempDirectory(),
Serdes.String(),
Serdes.String(),
collector,
cache);
context.setTime(1);
store.init(context, store);
store.put("a", "b");
assertTrue(logged.isEmpty());
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 27, Source: RocksDBWindowStoreSupplierTest.java
Example 4: shouldCreateLoggingEnabledStoreWhenStoreLogged
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void shouldCreateLoggingEnabledStoreWhenStoreLogged() throws Exception {
store = createStore(true, false);
final List<ProducerRecord> logged = new ArrayList<>();
final NoOpRecordCollector collector = new NoOpRecordCollector() {
@Override
public <K, V> void send(final String topic,
K key,
V value,
Integer partition,
Long timestamp,
Serializer<K> keySerializer,
Serializer<V> valueSerializer) {
logged.add(new ProducerRecord<K, V>(topic, partition, timestamp, key, value));
}
};
final MockProcessorContext context = new MockProcessorContext(TestUtils.tempDirectory(),
Serdes.String(),
Serdes.String(),
collector,
cache);
context.setTime(1);
store.init(context, store);
store.put("a", "b");
assertFalse(logged.isEmpty());
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 27, Source: RocksDBKeyValueStoreSupplierTest.java
Example 5: test
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void test() throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
Properties config = new Properties();
config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-app");
config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, senderProps.get("bootstrap.servers"));
config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
Producer<Integer, String> producer = createProducer();
ProducerRecord<Integer, String> record = new ProducerRecord<>("stream-test", 1, "test");
producer.send(record);
final Serde<String> stringSerde = Serdes.String();
final Serde<Integer> intSerde = Serdes.Integer();
KStreamBuilder builder = new KStreamBuilder();
KStream<Integer, String> kStream = builder
.stream(intSerde, stringSerde, "stream-test");
kStream.map((key, value) -> new KeyValue<>(key, value + "map")).to("stream-out");
KafkaStreams streams = new KafkaStreams(builder, new StreamsConfig(config),
new TracingKafkaClientSupplier(mockTracer));
streams.start();
await().atMost(15, TimeUnit.SECONDS).until(reportedSpansSize(), equalTo(3));
streams.close();
producer.close();
List<MockSpan> spans = mockTracer.finishedSpans();
assertEquals(3, spans.size());
checkSpans(spans);
assertNull(mockTracer.activeSpan());
}
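A note on Example 5: config.put(..., Serdes.String().getClass()) registers the serde class rather than an instance, so Kafka Streams instantiates and configures the default serde reflectively; explicit serdes, such as the intSerde/stringSerde pair passed to builder.stream(...), override those defaults per operation. Below is a minimal sketch of just the default-serde configuration; the application id "demo-app" and bootstrap address "localhost:9092" are placeholder values.
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

public class DefaultSerdeConfigSketch {
    public static Properties defaultStringSerdes() {
        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "demo-app");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // Register serde classes; Streams instantiates them via reflection.
        config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        return config;
    }
}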
Example 6: createIterator
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
private MergedSortedCacheSessionStoreIterator<String, String> createIterator(final Iterator<KeyValue<Windowed<Bytes>, byte[]>> storeKvs,
final Iterator<KeyValue<Bytes, LRUCacheEntry>> cacheKvs) {
final DelegatingPeekingKeyValueIterator<Windowed<Bytes>, byte[]> storeIterator
= new DelegatingPeekingKeyValueIterator<>("store", new KeyValueIteratorStub<>(storeKvs));
final PeekingKeyValueIterator<Bytes, LRUCacheEntry> cacheIterator
= new DelegatingPeekingKeyValueIterator<>("cache", new KeyValueIteratorStub<>(cacheKvs));
return new MergedSortedCacheSessionStoreIterator<>(
cacheIterator, storeIterator, new StateSerdes<>("name", Serdes.String(), Serdes.String()),
SINGLE_SEGMENT_CACHE_FUNCTION
);
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 13, Source: MergedSortedCacheWrappedSessionStoreIteratorTest.java
Example 7: successfulAllQuery
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void successfulAllQuery() throws Exception {
SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>("localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());
Map<String, Long> result = client.getAllKeyValues();
assertThat(result.entrySet(), hasSize(4));
assertThat(result, hasEntry("key1", 3L));
assertThat(result, hasEntry("key2", 6L));
assertThat(result, hasEntry("key3", 9L));
assertThat(result, hasEntry("key4", 12L));
}
Example 8: initializeStore
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Before
public void initializeStore() {
final File stateDir = TestUtils.tempDirectory();
context = new MockProcessorContext(stateDir,
Serdes.String(), Serdes.String(), new NoOpRecordCollector(), new ThreadCache("testCache", 100000, new MockStreamsMetrics(new Metrics()))) {
@Override
public <K, V> void forward(final K key, final V value) {
results.add(KeyValue.pair(key, value));
}
};
initStore(true);
processor.init(context);
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 16, Source: KStreamSessionWindowAggregateProcessorTest.java
Example 9: initStore
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
private void initStore(final boolean enableCaching) {
final RocksDBSessionStoreSupplier<String, Long> supplier =
new RocksDBSessionStoreSupplier<>(STORE_NAME,
GAP_MS * 3,
Serdes.String(),
Serdes.Long(),
false,
Collections.<String, String>emptyMap(),
enableCaching);
sessionStore = (SessionStore<String, Long>) supplier.get();
sessionStore.init(context, sessionStore);
}
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines: 13, Source: KStreamSessionWindowAggregateProcessorTest.java
Example 10: doShouldReduce
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
private void doShouldReduce(final KTable<String, Integer> reduced, final String topic) throws Exception {
final Map<String, Integer> results = new HashMap<>();
reduced.foreach(new ForeachAction<String, Integer>() {
@Override
public void apply(final String key, final Integer value) {
results.put(key, value);
}
});
driver = new KStreamTestDriver(builder, TestUtils.tempDirectory(), Serdes.String(), Serdes.Integer());
driver.setTime(10L);
driver.process(topic, "A", 1.1);
driver.process(topic, "B", 2.2);
driver.flushState();
assertEquals(Integer.valueOf(1), results.get("A"));
assertEquals(Integer.valueOf(2), results.get("B"));
driver.process(topic, "A", 2.6);
driver.process(topic, "B", 1.3);
driver.process(topic, "A", 5.7);
driver.process(topic, "B", 6.2);
driver.flushState();
assertEquals(Integer.valueOf(5), results.get("A"));
assertEquals(Integer.valueOf(6), results.get("B"));
}
Example 11: doTestNotSendingOldValue
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
private void doTestNotSendingOldValue(final KStreamBuilder builder,
final KTableImpl<String, Integer, Integer> table1,
final KTableImpl<String, Integer, Integer> table2,
final String topic1) throws IOException {
MockProcessorSupplier<String, Integer> proc1 = new MockProcessorSupplier<>();
MockProcessorSupplier<String, Integer> proc2 = new MockProcessorSupplier<>();
builder.addProcessor("proc1", proc1, table1.name);
builder.addProcessor("proc2", proc2, table2.name);
driver = new KStreamTestDriver(builder, stateDir, Serdes.String(), Serdes.Integer());
driver.process(topic1, "A", 1);
driver.process(topic1, "B", 1);
driver.process(topic1, "C", 1);
driver.flushState();
proc1.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
proc2.checkAndClearProcessResult("A:(null<-null)", "B:(null<-null)", "C:(null<-null)");
driver.process(topic1, "A", 2);
driver.process(topic1, "B", 2);
driver.flushState();
proc1.checkAndClearProcessResult("A:(2<-null)", "B:(2<-null)");
proc2.checkAndClearProcessResult("A:(2<-null)", "B:(2<-null)");
driver.process(topic1, "A", 3);
driver.flushState();
proc1.checkAndClearProcessResult("A:(3<-null)");
proc2.checkAndClearProcessResult("A:(null<-null)");
driver.process(topic1, "A", null);
driver.process(topic1, "B", null);
driver.flushState();
proc1.checkAndClearProcessResult("A:(null<-null)", "B:(null<-null)");
proc2.checkAndClearProcessResult("A:(null<-null)", "B:(null<-null)");
}
Example 12: shouldNotAddTableToOffsetResetLists
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void shouldNotAddTableToOffsetResetLists() {
final String topicName = "topic-1";
final String storeName = "test-store";
final Serde<String> stringSerde = Serdes.String();
builder.table(stringSerde, stringSerde, topicName, storeName);
assertFalse(builder.latestResetTopicsPattern().matcher(topicName).matches());
assertFalse(builder.earliestResetTopicsPattern().matcher(topicName).matches());
}
Example 13: testRegexMatchesTopicsAWhenDeleted
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Test
public void testRegexMatchesTopicsAWhenDeleted() throws Exception {
final Serde<String> stringSerde = Serdes.String();
final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B");
final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-B");
final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);
CLUSTER.createTopics("TEST-TOPIC-A", "TEST-TOPIC-B");
final KStreamBuilder builder = new KStreamBuilder();
final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-[A-Z]"));
pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
final Field streamThreadsField = streams.getClass().getDeclaredField("threads");
streamThreadsField.setAccessible(true);
final StreamThread[] streamThreads = (StreamThread[]) streamThreadsField.get(streams);
final StreamThread originalThread = streamThreads[0];
final TestStreamThread testStreamThread = new TestStreamThread(builder, streamsConfig,
new DefaultKafkaClientSupplier(),
originalThread.applicationId, originalThread.clientId, originalThread.processId, new Metrics(), Time.SYSTEM);
streamThreads[0] = testStreamThread;
final TestCondition bothTopicsAdded = new TestCondition() {
@Override
public boolean conditionMet() {
return testStreamThread.assignedTopicPartitions.equals(expectedFirstAssignment);
}
};
streams.start();
TestUtils.waitForCondition(bothTopicsAdded, STREAM_TASKS_NOT_UPDATED);
CLUSTER.deleteTopic("TEST-TOPIC-A");
final TestCondition oneTopicRemoved = new TestCondition() {
@Override
public boolean conditionMet() {
return testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment);
}
};
TestUtils.waitForCondition(oneTopicRemoved, STREAM_TASKS_NOT_UPDATED);
streams.close();
}
Example 14: shouldFetchAndIterateOverExactKeys
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@SuppressWarnings("unchecked")
@Test
public void shouldFetchAndIterateOverExactKeys() throws Exception {
final long windowSize = 0x7a00000000000000L;
final long retentionPeriod = 0x7a00000000000000L;
final RocksDBWindowStoreSupplier<String, String> supplier =
new RocksDBWindowStoreSupplier<>(
"window",
retentionPeriod, 2,
true,
Serdes.String(),
Serdes.String(),
windowSize,
true,
Collections.<String, String>emptyMap(),
false);
windowStore = supplier.get();
windowStore.init(context, windowStore);
windowStore.put("a", "0001", 0);
windowStore.put("aa", "0002", 0);
windowStore.put("a", "0003", 1);
windowStore.put("aa", "0004", 1);
windowStore.put("a", "0005", 0x7a00000000000000L - 1);
final List expected = Utils.mkList("0001", "0003", "0005");
assertThat(toList(windowStore.fetch("a", 0, Long.MAX_VALUE)), equalTo(expected));
List<KeyValue<Windowed<String>, String>> list = StreamsTestUtils.toList(windowStore.fetch("a", "a", 0, Long.MAX_VALUE));
assertThat(list, equalTo(Utils.mkList(
windowedPair("a", "0001", 0, windowSize),
windowedPair("a", "0003", 1, windowSize),
windowedPair("a", "0005", 0x7a00000000000000L - 1, windowSize)
)));
list = StreamsTestUtils.toList(windowStore.fetch("aa", "aa", 0, Long.MAX_VALUE));
assertThat(list, equalTo(Utils.mkList(
windowedPair("aa", "0002", 0, windowSize),
windowedPair("aa", "0004", 1, windowSize)
)));
}
Example 15: setUp
import org.apache.kafka.common.serialization.Serdes; // import the package/class the method depends on
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
unitUnderTest = new SpecificBlockingRestKiqrClientImpl<>(clientMock, "store", String.class, Long.class, Serdes.String(), Serdes.Long());
}