This article collects typical usage examples of the Java method org.apache.kafka.common.serialization.Serdes.ByteArray. If you are unsure what Serdes.ByteArray does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage of its enclosing class, org.apache.kafka.common.serialization.Serdes.
The following presents 10 code examples of the Serdes.ByteArray method, ordered by popularity by default.
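Before the project examples, here is a minimal, self-contained sketch (not taken from the project above) of what Serdes.ByteArray() returns: a Serde&lt;byte[]&gt; whose serializer and deserializer hand byte arrays through unchanged. The class name and topic string are illustrative only.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class ByteArraySerdeDemo {
    public static void main(String[] args) {
        final Serde<byte[]> serde = Serdes.ByteArray();

        final byte[] original = "hello".getBytes(StandardCharsets.UTF_8);
        // The serializer and deserializer are pass-through: the same bytes come back out.
        final byte[] onTheWire = serde.serializer().serialize("demo-topic", original);
        final byte[] restored = serde.deserializer().deserialize("demo-topic", onTheWire);

        System.out.println(Arrays.equals(original, restored)); // prints "true"
        serde.close();
    }
}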
Example 1: setUp
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Before
public void setUp() throws Exception {
    keySchema = new WindowKeySchema();
    final int retention = 30000;
    final int numSegments = 3;
    underlying = new RocksDBSegmentedBytesStore("test", retention, numSegments, keySchema);
    final RocksDBWindowStore<Bytes, byte[]> windowStore =
        new RocksDBWindowStore<>(underlying, Serdes.Bytes(), Serdes.ByteArray(), false, WINDOW_SIZE);
    cacheListener = new CachingKeyValueStoreTest.CacheFlushListenerStub<>();
    cachingStore = new CachingWindowStore<>(windowStore,
                                            Serdes.String(),
                                            Serdes.String(),
                                            WINDOW_SIZE,
                                            Segments.segmentInterval(retention, numSegments));
    cachingStore.setFlushListener(cacheListener);
    cache = new ThreadCache("testCache", MAX_CACHE_SIZE_BYTES, new MockStreamsMetrics(new Metrics()));
    topic = "topic";
    context = new MockProcessorContext(TestUtils.tempDirectory(), null, null, (RecordCollector) null, cache);
    context.setRecordContext(new ProcessorRecordContext(DEFAULT_TIMESTAMP, 0, 0, topic));
    cachingStore.init(context, cachingStore);
}
Example 2: setUp
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Before
public void setUp() throws Exception {
    final SessionKeySchema schema = new SessionKeySchema();
    schema.init("topic");
    final int retention = 60000;
    final int numSegments = 3;
    underlying = new RocksDBSegmentedBytesStore("test", retention, numSegments, schema);
    final RocksDBSessionStore<Bytes, byte[]> sessionStore =
        new RocksDBSessionStore<>(underlying, Serdes.Bytes(), Serdes.ByteArray());
    cachingStore = new CachingSessionStore<>(sessionStore,
                                             Serdes.String(),
                                             Serdes.Long(),
                                             Segments.segmentInterval(retention, numSegments));
    cache = new ThreadCache("testCache", MAX_CACHE_SIZE_BYTES, new MockStreamsMetrics(new Metrics()));
    context = new MockProcessorContext(TestUtils.tempDirectory(), null, null, (RecordCollector) null, cache);
    context.setRecordContext(new ProcessorRecordContext(DEFAULT_TIMESTAMP, 0, 0, "topic"));
    cachingStore.init(context, cachingStore);
}
Example 3: get
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
public KeyValueStore get() {
    if (!cached && !logged) {
        return new MeteredKeyValueStore<>(
            new RocksDBStore<>(name, keySerde, valueSerde), METRICS_SCOPE, time);
    }
    // when cached, logged, or both, we use a bytes store as the innermost store
    final RocksDBStore<Bytes, byte[]> rocks = new RocksDBStore<>(name,
                                                                 Serdes.Bytes(),
                                                                 Serdes.ByteArray());
    if (cached && logged) {
        return new CachingKeyValueStore<>(
            new MeteredKeyValueStore<>(
                new ChangeLoggingKeyValueBytesStore(rocks),
                METRICS_SCOPE,
                time),
            keySerde,
            valueSerde);
    }
    if (cached) {
        return new CachingKeyValueStore<>(
            new MeteredKeyValueStore<>(rocks, METRICS_SCOPE, time),
            keySerde,
            valueSerde);
    } else {
        // logged
        return new MeteredKeyValueStore<>(
            new ChangeLoggingKeyValueStore<>(rocks, keySerde, valueSerde),
            METRICS_SCOPE,
            time);
    }
}
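Note the layering in this example: the byte-level RocksDBStore built with Serdes.Bytes() and Serdes.ByteArray() is only used when caching and/or change logging is enabled; in those cases the user-facing keySerde and valueSerde are applied by the outer wrappers rather than by the RocksDB store itself.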
Example 4: shouldPeekNextKey
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Test
public void shouldPeekNextKey() throws Exception {
    final KeyValueStore<Bytes, byte[]> kv = new InMemoryKeyValueStore<>("one", Serdes.Bytes(), Serdes.ByteArray());
    final ThreadCache cache = new ThreadCache("testCache", 1000000L, new MockStreamsMetrics(new Metrics()));
    byte[][] bytes = {{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}};
    final String namespace = "one";
    for (int i = 0; i < bytes.length - 1; i += 2) {
        kv.put(Bytes.wrap(bytes[i]), bytes[i]);
        cache.put(namespace, Bytes.wrap(bytes[i + 1]), new LRUCacheEntry(bytes[i + 1]));
    }
    final Bytes from = Bytes.wrap(new byte[]{2});
    final Bytes to = Bytes.wrap(new byte[]{9});
    final KeyValueIterator<Bytes, byte[]> storeIterator = kv.range(from, to);
    final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.range(namespace, from, to);
    final MergedSortedCacheKeyValueStoreIterator<byte[], byte[]> iterator =
        new MergedSortedCacheKeyValueStoreIterator<>(cacheIterator,
                                                     storeIterator,
                                                     serdes);
    final byte[][] values = new byte[8][];
    int index = 0;
    int bytesIndex = 2;
    while (iterator.hasNext()) {
        final byte[] keys = iterator.peekNextKey();
        values[index++] = keys;
        assertArrayEquals(bytes[bytesIndex++], keys);
        iterator.next();
    }
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 31, Source file: MergedSortedCacheKeyValueStoreIteratorTest.java
Example 5: setUp
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Before
public void setUp() throws Exception {
    final String storeName = "store";
    underlyingStore = new InMemoryKeyValueStore<>(storeName, Serdes.Bytes(), Serdes.ByteArray());
    cacheFlushListener = new CacheFlushListenerStub<>();
    store = new CachingKeyValueStore<>(underlyingStore, Serdes.String(), Serdes.String());
    store.setFlushListener(cacheFlushListener);
    cache = new ThreadCache("testCache", maxCacheSizeBytes, new MockStreamsMetrics(new Metrics()));
    context = new MockProcessorContext(null, null, null, (RecordCollector) null, cache);
    topic = "topic";
    context.setRecordContext(new ProcessorRecordContext(10, 0, 0, topic));
    store.init(context, null);
}
Example 6: RocksDBWindowBytesStore
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
RocksDBWindowBytesStore(final SegmentedBytesStore inner, final boolean retainDuplicates, final long windowSize) {
    super(inner, Serdes.Bytes(), Serdes.ByteArray(), retainDuplicates, windowSize);
}
Example 7: init
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Override
public void init(final String topic) {
    serdes = new StateSerdes<>(topic, Serdes.Bytes(), Serdes.ByteArray());
}
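Because both serdes here operate at the byte level, this StateSerdes performs no real format conversion; it only wraps keys in Bytes and passes values through unchanged, which is the typical setup for the bytes-based store layers shown in the earlier examples.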
Example 8: RocksDBSessionBytesStore
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
RocksDBSessionBytesStore(final SegmentedBytesStore inner) {
    super(inner, Serdes.Bytes(), Serdes.ByteArray());
}
Example 9: setUp
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
@Before
public void setUp() throws Exception {
    store = new InMemoryKeyValueStore<>(namespace, Serdes.Bytes(), Serdes.ByteArray());
    cache = new ThreadCache("testCache", 10000L, new MockStreamsMetrics(new Metrics()));
}
Developer ID: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 6, Source file: MergedSortedCacheKeyValueStoreIteratorTest.java
Example 10: KStreamTestDriver
import org.apache.kafka.common.serialization.Serdes; // import the package/class this method depends on
public KStreamTestDriver(final KStreamBuilder builder) {
    this(builder, null, Serdes.ByteArray(), Serdes.ByteArray());
}
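As a closing, end-to-end illustration (not part of the project above), the sketch below wires Serdes.ByteArray() into a simple pass-through topology using the same pre-1.0 KStreamBuilder API that KStreamTestDriver targets. The application id, broker address, and topic names are assumptions chosen for the example.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;

public class ByteArrayPassThrough {
    public static void main(String[] args) {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "bytearray-passthrough"); // assumed application id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");     // assumed broker address

        final KStreamBuilder builder = new KStreamBuilder();
        // Consume and produce raw byte arrays; Serdes.ByteArray() passes the payload through unchanged.
        final KStream<byte[], byte[]> raw =
            builder.stream(Serdes.ByteArray(), Serdes.ByteArray(), "input-topic"); // assumed topic name
        raw.to(Serdes.ByteArray(), Serdes.ByteArray(), "output-topic");            // assumed topic name

        new KafkaStreams(builder, props).start();
    }
}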