This article collects typical usage examples of the Java method org.apache.kafka.streams.KeyValue.pair. If you are wondering what KeyValue.pair does, how to call it, or where to find real-world examples, the curated code samples below should help. You can also read further about the enclosing class, org.apache.kafka.streams.KeyValue.
The following lists 15 code examples of KeyValue.pair, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
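Before the numbered examples, here is a minimal standalone sketch of what KeyValue.pair does. It is not taken from the Kafka source; the class name and the sample keys and values are illustrative assumptions. KeyValue.pair(key, value) is a static factory equivalent to new KeyValue<>(key, value), and it is most often used to build the return value of callbacks such as KeyValueMapper or Transformer, as the examples below show.

import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KeyValueMapper;

public class KeyValuePairSketch {

    public static void main(String[] args) {
        // KeyValue.pair(key, value) simply wraps the two arguments in an immutable KeyValue;
        // the key and value fields are public and final.
        KeyValue<String, Integer> kv = KeyValue.pair("word", 42);
        System.out.println(kv.key + " -> " + kv.value); // word -> 42

        // Typical use: returning a new pair from a KeyValueMapper, here swapping key and value
        // in the same style as several of the test examples below.
        KeyValueMapper<String, Integer, KeyValue<Integer, String>> swap =
            new KeyValueMapper<String, Integer, KeyValue<Integer, String>>() {
                @Override
                public KeyValue<Integer, String> apply(String key, Integer value) {
                    return KeyValue.pair(value, key);
                }
            };
        System.out.println(swap.apply("word", 42)); // prints the swapped pair, e.g. KeyValue(42, word)
    }
}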
Example 1: shouldReduce
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Test
public void shouldReduce() throws Exception {
    final String topic = "input";
    final KeyValueMapper<String, Number, KeyValue<String, Integer>> intProjection =
        new KeyValueMapper<String, Number, KeyValue<String, Integer>>() {
            @Override
            public KeyValue<String, Integer> apply(String key, Number value) {
                return KeyValue.pair(key, value.intValue());
            }
        };
    final KTable<String, Integer> reduced = builder.table(Serdes.String(), Serdes.Double(), topic, "store")
        .groupBy(intProjection)
        .reduce(MockReducer.INTEGER_ADDER, MockReducer.INTEGER_SUBTRACTOR, "reduced");

    doShouldReduce(reduced, topic);
    assertEquals(reduced.queryableStoreName(), "reduced");
}
Example 2: shouldReduceWithInternalStoreName
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Test
public void shouldReduceWithInternalStoreName() throws Exception {
    final String topic = "input";
    final KeyValueMapper<String, Number, KeyValue<String, Integer>> intProjection =
        new KeyValueMapper<String, Number, KeyValue<String, Integer>>() {
            @Override
            public KeyValue<String, Integer> apply(String key, Number value) {
                return KeyValue.pair(key, value.intValue());
            }
        };
    final KTable<String, Integer> reduced = builder.table(Serdes.String(), Serdes.Double(), topic, "store")
        .groupBy(intProjection)
        .reduce(MockReducer.INTEGER_ADDER, MockReducer.INTEGER_SUBTRACTOR);

    doShouldReduce(reduced, topic);
    assertNull(reduced.queryableStoreName());
}
Example 3: nextCacheValue
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

private KeyValue<K, V> nextCacheValue(Bytes nextCacheKey) {
    final KeyValue<Bytes, LRUCacheEntry> next = cacheIterator.next();
    if (!next.key.equals(nextCacheKey)) {
        throw new IllegalStateException("Next record key is not the peeked key value; this should not happen");
    }
    return KeyValue.pair(deserializeCacheKey(next.key), deserializeCacheValue(next.value));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 10, Source file: AbstractMergedSortedCacheStoreIterator.java
Example 4: testMap
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Test
public void testMap() {
    KStreamBuilder builder = new KStreamBuilder();

    KeyValueMapper<Integer, String, KeyValue<String, Integer>> mapper =
        new KeyValueMapper<Integer, String, KeyValue<String, Integer>>() {
            @Override
            public KeyValue<String, Integer> apply(Integer key, String value) {
                return KeyValue.pair(value, key);
            }
        };

    final int[] expectedKeys = new int[]{0, 1, 2, 3};

    KStream<Integer, String> stream = builder.stream(intSerde, stringSerde, topicName);
    MockProcessorSupplier<String, Integer> processor;

    processor = new MockProcessorSupplier<>();
    stream.map(mapper).process(processor);

    driver = new KStreamTestDriver(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, "V" + expectedKey);
    }

    assertEquals(4, processor.processed.size());

    String[] expected = new String[]{"V0:0", "V1:1", "V2:2", "V3:3"};
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], processor.processed.get(i));
    }
}
Example 5: testTypeVariance
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Test
public void testTypeVariance() throws Exception {
    KeyValueMapper<Number, Object, KeyValue<Number, String>> stringify = new KeyValueMapper<Number, Object, KeyValue<Number, String>>() {
        @Override
        public KeyValue<Number, String> apply(Number key, Object value) {
            return KeyValue.pair(key, key + ":" + value);
        }
    };

    new KStreamBuilder()
        .<Integer, String>stream("numbers")
        .map(stringify)
        .to("strings");
}
Example 6: toList
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

private List<KeyValue<Windowed<String>, Long>> toList(final KeyValueIterator<Bytes, byte[]> iterator) {
    final List<KeyValue<Windowed<String>, Long>> results = new ArrayList<>();
    while (iterator.hasNext()) {
        final KeyValue<Bytes, byte[]> next = iterator.next();
        final KeyValue<Windowed<String>, Long> deserialized
            = KeyValue.pair(SessionKeySerde.from(next.key.get(), Serdes.String().deserializer(), "dummy"), Serdes.Long().deserializer().deserialize("", next.value));
        results.add(deserialized);
    }
    return results;
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 11, Source file: RocksDBSegmentedBytesStoreTest.java
Example 7: deserializeStorePair
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Override
public KeyValue<Windowed<K>, AGG> deserializeStorePair(final KeyValue<Windowed<Bytes>, byte[]> pair) {
    return KeyValue.pair(deserializeStoreKey(pair.key), serdes.valueFrom(pair.value));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 5, Source file: MergedSortedCacheSessionStoreIterator.java
Example 8: FilteredCacheIterator
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

FilteredCacheIterator(final PeekingKeyValueIterator<Bytes, LRUCacheEntry> cacheIterator,
                      final HasNextCondition hasNextCondition,
                      final CacheFunction cacheFunction) {
    this.cacheIterator = cacheIterator;
    this.hasNextCondition = hasNextCondition;
    this.wrappedIterator = new PeekingKeyValueIterator<Bytes, LRUCacheEntry>() {
        @Override
        public KeyValue<Bytes, LRUCacheEntry> peekNext() {
            return cachedPair(cacheIterator.peekNext());
        }

        @Override
        public void close() {
            cacheIterator.close();
        }

        @Override
        public Bytes peekNextKey() {
            return cacheFunction.key(cacheIterator.peekNextKey());
        }

        @Override
        public boolean hasNext() {
            return cacheIterator.hasNext();
        }

        @Override
        public KeyValue<Bytes, LRUCacheEntry> next() {
            return cachedPair(cacheIterator.next());
        }

        private KeyValue<Bytes, LRUCacheEntry> cachedPair(KeyValue<Bytes, LRUCacheEntry> next) {
            return KeyValue.pair(cacheFunction.key(next.key), next.value);
        }

        @Override
        public void remove() {
            cacheIterator.remove();
        }
    };
}
Example 9: deserializeStorePair
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Override
KeyValue<Windowed<K>, V> deserializeStorePair(final KeyValue<Windowed<Bytes>, byte[]> pair) {
    return KeyValue.pair(deserializeStoreKey(pair.key), serdes.valueFrom(pair.value));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 5, Source file: MergedSortedCacheWindowStoreKeyValueIterator.java
Example 10: deserializeStorePair
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Override
public KeyValue<K, V> deserializeStorePair(final KeyValue<Bytes, byte[]> pair) {
    return KeyValue.pair(serdes.keyFrom(pair.key.get()), serdes.valueFrom(pair.value));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 5, Source file: MergedSortedCacheKeyValueStoreIterator.java
Example 11: deserializeStorePair
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Override
public KeyValue<Long, V> deserializeStorePair(final KeyValue<Long, byte[]> pair) {
    return KeyValue.pair(pair.key, serdes.valueFrom(pair.value));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 5, Source file: MergedSortedCacheWindowStoreIterator.java
Example 12: Input
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

Input(final String topic, final V value) {
    this.topic = topic;
    record = KeyValue.pair(anyUniqueKey, value);
}
Example 13: testTransform
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

@Test
public void testTransform() {
    KStreamBuilder builder = new KStreamBuilder();

    TransformerSupplier<Number, Number, KeyValue<Integer, Integer>> transformerSupplier =
        new TransformerSupplier<Number, Number, KeyValue<Integer, Integer>>() {
            public Transformer<Number, Number, KeyValue<Integer, Integer>> get() {
                return new Transformer<Number, Number, KeyValue<Integer, Integer>>() {
                    private int total = 0;

                    @Override
                    public void init(ProcessorContext context) {
                    }

                    @Override
                    public KeyValue<Integer, Integer> transform(Number key, Number value) {
                        total += value.intValue();
                        return KeyValue.pair(key.intValue() * 2, total);
                    }

                    @Override
                    public KeyValue<Integer, Integer> punctuate(long timestamp) {
                        return KeyValue.pair(-1, (int) timestamp);
                    }

                    @Override
                    public void close() {
                    }
                };
            }
        };

    final int[] expectedKeys = {1, 10, 100, 1000};

    MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    KStream<Integer, Integer> stream = builder.stream(intSerde, intSerde, topicName);
    stream.transform(transformerSupplier).process(processor);

    driver = new KStreamTestDriver(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, expectedKey * 10);
    }

    driver.punctuate(2);
    driver.punctuate(3);

    assertEquals(6, processor.processed.size());

    String[] expected = {"2:10", "20:110", "200:1110", "2000:11110", "-1:2", "-1:3"};
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], processor.processed.get(i));
    }
}
Example 14: windowedPair
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

private static <K, V> KeyValue<Windowed<K>, V> windowedPair(K key, V value, long timestamp, long windowSize) {
    return KeyValue.pair(new Windowed<>(key, WindowStoreUtils.timeWindowForSize(timestamp, windowSize)), value);
}
Example 15: toStringKeyValue
import org.apache.kafka.streams.KeyValue; // import the package/class this method depends on

private KeyValue<String, String> toStringKeyValue(final KeyValue<Bytes, byte[]> binaryKv) {
    return KeyValue.pair(new String(binaryKv.key.get()), new String(binaryKv.value));
}