当前位置: 首页>>代码示例>>Java>>正文


Java Materialized类代码示例

本文整理汇总了Java中org.apache.kafka.streams.kstream.Materialized的典型用法代码示例。如果您正苦于以下问题:Java Materialized类的具体用法?Java Materialized怎么用?Java Materialized使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


Materialized类属于org.apache.kafka.streams.kstream包,在下文中一共展示了Materialized类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: main

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Builds and runs a Kafka Streams topology that groups incoming words,
 * counts them over 10-second tumbling windows (state store
 * "windowed-counts"), and hands each windowed count to a Postgres sink.
 *
 * <p>The checked exceptions are propagated from {@code AggregatorConfig},
 * which assembles the broker/SSL configuration.
 */
public static void main(String[] args) throws CertificateException, NoSuchAlgorithmException,
    KeyStoreException, IOException, URISyntaxException {
  final Properties config = new AggregatorConfig().getProperties();

  final StreamsBuilder topology = new StreamsBuilder();

  // Topic name carries the Heroku Kafka prefix required by the add-on plan.
  final KStream<Windowed<String>, String> wordStream =
      topology.stream(String.format("%swords", HEROKU_KAFKA_PREFIX));

  wordStream
      .groupBy((key, word) -> word)
      .windowedBy(TimeWindows.of(TimeUnit.SECONDS.toMillis(10)))
      .count(Materialized.as("windowed-counts"))
      .toStream()
      .process(PostgresSink::new);

  final KafkaStreams kafkaStreams = new KafkaStreams(topology.build(), config);

  // Wipe local state before starting so a restart begins from a clean store.
  kafkaStreams.cleanUp();
  kafkaStreams.start();

  Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));
}
 
开发者ID:kissaten,项目名称:kafka-streams-on-heroku,代码行数:24,代码来源:Aggregator.java

示例2: aggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
@SuppressWarnings("unchecked")
/**
 * Applies the given UDAF to this grouped stream and wraps the result in a
 * {@code SchemaKTable}.
 *
 * <p>With a window expression present, aggregation is delegated to the
 * window's own {@code applyAggregate}, backed by a named window store using
 * String keys and the supplied row serde. Without one, a plain (non-windowed)
 * aggregate is performed with serde-only materialization.
 */
@SuppressWarnings("unchecked")
public SchemaKTable aggregate(final Initializer initializer,
                              final UdafAggregator aggregator,
                              final WindowExpression windowExpression,
                              final Serde<GenericRow> topicValueSerDe,
                              final String storeName) {
  final boolean isWindowed = windowExpression != null;

  final KTable aggKtable;
  if (isWindowed) {
    // Named windowed state store: String keys, topic row serde for values.
    final Materialized<String, GenericRow, ?> materialized =
        Materialized.<String, GenericRow, WindowStore<Bytes, byte[]>>as(storeName)
            .withKeySerde(Serdes.String())
            .withValueSerde(topicValueSerDe);
    aggKtable = windowExpression.getKsqlWindowExpression()
        .applyAggregate(kgroupedStream, initializer, aggregator, materialized);
  } else {
    aggKtable = kgroupedStream.aggregate(
        initializer, aggregator, Materialized.with(Serdes.String(), topicValueSerDe));
  }

  return new SchemaKTable(schema, aggKtable, keyField, sourceSchemaKStreams, isWindowed,
                          SchemaKStream.Type.AGGREGATE, functionRegistry, schemaRegistryClient);
}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:23,代码来源:SchemaKGroupedStream.java

示例3: shouldCreateTumblingWindowAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Verifies that a {@code TumblingWindowExpression} windows the grouped stream
 * with a 10-second tumbling window and forwards the exact same initializer,
 * aggregator, and store instances to the windowed aggregate call.
 */
@Test
public void shouldCreateTumblingWindowAggregate() {
  final KGroupedStream groupedStream = EasyMock.createNiceMock(KGroupedStream.class);
  final TimeWindowedKStream timeWindowedStream = EasyMock.createNiceMock(TimeWindowedKStream.class);
  final UdafAggregator udaf = EasyMock.createNiceMock(UdafAggregator.class);
  final Initializer zeroInitializer = () -> 0;
  final Materialized<String, GenericRow, WindowStore<Bytes, byte[]>> store = Materialized.as("store");
  final TumblingWindowExpression expression = new TumblingWindowExpression(10, TimeUnit.SECONDS);

  // 10 seconds must be translated to a 10000 ms tumbling window.
  EasyMock.expect(groupedStream.windowedBy(TimeWindows.of(10000L))).andReturn(timeWindowedStream);
  EasyMock.expect(timeWindowedStream.aggregate(same(zeroInitializer), same(udaf), same(store)))
      .andReturn(null);
  EasyMock.replay(groupedStream, timeWindowedStream);

  expression.applyAggregate(groupedStream, zeroInitializer, udaf, store);

  EasyMock.verify(groupedStream, timeWindowedStream);
}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:17,代码来源:TumblingWindowExpressionTest.java

示例4: shouldCreateHoppingWindowAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Verifies that a {@code HoppingWindowExpression} windows the grouped stream
 * with a 10-second window hopping forward by 4 ms, and forwards the exact
 * same initializer, aggregator, and store instances to the aggregate call.
 */
@Test
public void shouldCreateHoppingWindowAggregate() {
  final KGroupedStream groupedStream = EasyMock.createNiceMock(KGroupedStream.class);
  final TimeWindowedKStream timeWindowedStream = EasyMock.createNiceMock(TimeWindowedKStream.class);
  final UdafAggregator udaf = EasyMock.createNiceMock(UdafAggregator.class);
  final Initializer zeroInitializer = () -> 0;
  final Materialized<String, GenericRow, WindowStore<Bytes, byte[]>> store = Materialized.as("store");
  final HoppingWindowExpression expression =
      new HoppingWindowExpression(10, TimeUnit.SECONDS, 4, TimeUnit.MILLISECONDS);

  // 10 s window size -> 10000 ms, advancing by 4 ms per hop.
  EasyMock.expect(groupedStream.windowedBy(TimeWindows.of(10000L).advanceBy(4L)))
      .andReturn(timeWindowedStream);
  EasyMock.expect(timeWindowedStream.aggregate(same(zeroInitializer), same(udaf), same(store)))
      .andReturn(null);
  EasyMock.replay(groupedStream, timeWindowedStream);

  expression.applyAggregate(groupedStream, zeroInitializer, udaf, store);

  EasyMock.verify(groupedStream, timeWindowedStream);
}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:17,代码来源:HoppingWindowExpressionTest.java

示例5: main

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Classic word-count topology: reads lines from "streams-plaintext-input",
 * splits them into lower-cased words, counts occurrences into the
 * "counts-store" state store, and writes (word, count) pairs to
 * "streams-wordcount-output". Blocks until the JVM shuts down (Ctrl-C).
 */
public static void main(String[] args) throws Exception {
    final Properties config = new Properties();
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount");
    config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    final StreamsBuilder builder = new StreamsBuilder();

    builder.<String, String>stream("streams-plaintext-input")
        // Split each line on non-word characters; lower-case for case-insensitive counting.
        .flatMapValues(line -> Arrays.asList(line.toLowerCase(Locale.getDefault()).split("\\W+")))
        .groupBy((ignoredKey, word) -> word)
        .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("counts-store"))
        .toStream()
        .to("streams-wordcount-output", Produced.with(Serdes.String(), Serdes.Long()));

    final Topology topology = builder.build();
    final KafkaStreams streams = new KafkaStreams(topology, config);
    final CountDownLatch shutdownLatch = new CountDownLatch(1);

    // Close the streams app and release main() when the JVM receives Ctrl-C.
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        streams.close();
        shutdownLatch.countDown();
    }, "streams-shutdown-hook"));

    try {
        streams.start();
        shutdownLatch.await();
    } catch (Throwable e) {
        System.exit(1);
    }
    System.exit(0);
}
 
开发者ID:smarcu,项目名称:datastreaming-presentation,代码行数:38,代码来源:WordCount.java

示例6: main

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Watches the log-lines topic for failed logins, counts failures per user in
 * 10-second tumbling windows, and routes each windowed count: more than one
 * failure raises an alert, a single failure only sends an email.
 *
 * <p>The checked exceptions are propagated from {@code AnomalyDetectorConfig},
 * which assembles the broker/SSL configuration.
 */
public static void main(String[] args) throws CertificateException, NoSuchAlgorithmException,
    KeyStoreException, IOException, URISyntaxException {
  final Properties config = new AnomalyDetectorConfig().getProperties();

  final StreamsBuilder builder = new StreamsBuilder();

  // Topic name carries the Heroku Kafka prefix required by the add-on plan.
  final KStream<String, String> logLines =
      builder.stream(String.format("%sloglines", HEROKU_KAFKA_PREFIX));

  // Re-key each failed-login line by the field before the first '|'
  // (presumably the user id — confirm against the producer's line format),
  // then count failures per key in 10-second tumbling windows.
  final KStream<Windowed<String>, Long> failureCounts = logLines
      .filter((key, line) -> line.contains("login failed"))
      .selectKey((key, line) -> line.split("\\|")[0])
      .groupByKey()
      .windowedBy(TimeWindows.of(TimeUnit.SECONDS.toMillis(10)))
      .count(Materialized.as("windowed-counts"))
      .toStream();

  // branch() routes a record to the FIRST predicate it matches:
  // index 0 -> more than one failure (alert), index 1 -> exactly one (email).
  @SuppressWarnings("unchecked")
  final KStream<Windowed<String>, Long>[] routed = failureCounts.branch(
      (key, count) -> count > 1,
      (key, count) -> count > 0);

  routed[0].process(AlertSink::new);
  routed[1].process(EmailSink::new);

  final KafkaStreams streams = new KafkaStreams(builder.build(), config);

  // Wipe local state before starting so a restart begins from a clean store.
  streams.cleanUp();
  streams.start();

  Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
开发者ID:kissaten,项目名称:kafka-streams-on-heroku,代码行数:35,代码来源:AnomalyDetector.java

示例7: applyAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
@SuppressWarnings("unchecked")
@Override
public KTable applyAggregate(final KGroupedStream groupedStream,
                             final Initializer initializer,
                             final UdafAggregator aggregator,
                             final Materialized<String, GenericRow, ?> materialized) {
  return groupedStream.windowedBy(SessionWindows.with(sizeUnit.toMillis(gap)))
      .aggregate(initializer, aggregator, aggregator.getMerger(),
          materialized);
}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:11,代码来源:SessionWindowExpression.java

示例8: applyAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
@SuppressWarnings("unchecked")
@Override
public KTable applyAggregate(final KGroupedStream groupedStream,
                             final Initializer initializer,
                             final UdafAggregator aggregator,
                             final Materialized<String, GenericRow, ?> materialized) {
  return groupedStream.windowedBy(TimeWindows.of(sizeUnit.toMillis(size)))
      .aggregate(initializer, aggregator, materialized);

}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:11,代码来源:TumblingWindowExpression.java

示例9: applyAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
@SuppressWarnings("unchecked")
@Override
public KTable applyAggregate(KGroupedStream groupedStream, Initializer initializer, UdafAggregator aggregator, Materialized<String, GenericRow, ?> materialized) {
  return groupedStream.windowedBy(TimeWindows.of(
      sizeUnit.toMillis(size))
      .advanceBy(
          advanceByUnit.toMillis(advanceBy)))
      .aggregate(initializer, aggregator, materialized);
}
 
开发者ID:confluentinc,项目名称:ksql,代码行数:10,代码来源:HoppingWindowExpression.java

示例10: applyAggregate

import org.apache.kafka.streams.kstream.Materialized; //导入依赖的package包/类
/**
 * Applies this window definition to the given grouped stream, producing the
 * windowed aggregation as a {@code KTable}.
 *
 * @param groupedStream the stream, already grouped by the aggregation key
 * @param initializer   supplies the initial aggregate value
 * @param aggregator    the UDAF that folds rows into the aggregate
 * @param materialized  state-store configuration for the aggregation
 * @return the aggregated, windowed table
 */
public abstract KTable applyAggregate(final KGroupedStream groupedStream,
                                      final Initializer initializer,
                                      final UdafAggregator aggregator,
                                      final Materialized<String, GenericRow, ?> materialized);
 
开发者ID:confluentinc,项目名称:ksql,代码行数:5,代码来源:KsqlWindowExpression.java


注:本文中的org.apache.kafka.streams.kstream.Materialized类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。