本文整理汇总了Scala中org.apache.flink.streaming.api.windowing.time.Time类的典型用法代码示例。如果您正苦于以下问题:Scala Time类的具体用法?Scala Time怎么用?Scala Time使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Time类的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: StatisticsQuery
//设置package包名称以及导入依赖的类
package org.hpi.esb.flink.query
import org.apache.flink.api.common.functions.FoldFunction
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.api.windowing.time.Time
import org.hpi.esb.flink.utils.Statistics
class StatisticsQuery extends Query[String, String] {

  /**
   * Computes per-second statistics over the incoming stream.
   *
   * Each String element is parsed as a Long, gathered into 1000 ms
   * all-window buckets, folded into a [[Statistics]] accumulator and
   * emitted as that accumulator's string representation.
   */
  override def execute(stream: DataStream[String]): DataStream[String] = {
    val parsed     = stream.map(_.toLong)
    val windowed   = parsed.timeWindowAll(Time.milliseconds(1000))
    val aggregated = windowed.fold(new Statistics(), new StatisticsFoldFunction())
    aggregated.map(stats => stats.toString())
  }
}
class StatisticsFoldFunction extends FoldFunction[Long, Statistics] {
  // Incremental aggregation step: delegates the actual fold logic to the
  // Statistics companion so the windowing code stays free of domain math.
  override def fold(acc: Statistics, value: Long): Statistics = {
    Statistics.fold(acc, value)
  }
}
示例2: TwitterStream
//设置package包名称以及导入依赖的类
package flink
import flink.parsers.JsonTweetParser
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.twitter.TwitterSource
import org.joda.time.DateTime
import sinks.ElasticsearchSinks
object TwitterStream extends App with ElasticsearchSinks {

  // Streaming job: counts tweets per user language over 5-second windows
  // and writes (timestamp, language, count) triples to Elasticsearch.
  val streamEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

  // NOTE(review): TwitterSource reads OAuth credentials from this path at
  // runtime — presumably a java.util.Properties file; confirm it is deployed.
  val streamSource = streamEnvironment.addSource(new TwitterSource("config/twitter/twitter-auth.properties"))

  // Parse raw JSON into tweet objects and drop tweets without text.
  val tweets = streamSource
    .flatMap(new JsonTweetParser).name("Filtering tweets")
    .filter(_.text.nonEmpty)

  // Fix: the original bound the DataStreamSink returned by addSink to an
  // unused `val tweets2`; the sink terminates the pipeline, so the chain
  // is now a plain statement with no dead binding.
  tweets
    .map(tweet => (tweet.userLang, 1))
    .keyBy(0)
    .timeWindow(Time.seconds(5))
    .sum(1)
    .map(x => (DateTime.now(), x._1, x._2))
    .addSink(esPopularLanguagesSink)

  streamEnvironment.execute("Twitter Stream")
}
示例3: ConsoleReporterTestJob
//设置package包名称以及导入依赖的类
package com.jgrier.flinkstuff.jobs
import com.jgrier.flinkstuff.sources.IntegerSource
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.api.scala._
object ConsoleReporterTestJob {

  /**
   * Runs a local job whose only purpose is to exercise a custom metrics
   * reporter: integers from a 100 ms source are summed over 1-second
   * windows and printed while ConsoleReporter emits metrics every 10 s.
   */
  def main(args: Array[String]) {
    // Register the custom reporter through the local environment's config.
    val config = new Configuration()
    config.setString("metrics.reporters", "consoleReporter")
    config.setString("metrics.reporter.consoleReporter.class", "com.jgrier.flinkstuff.metrics.ConsoleReporter")
    config.setString("metrics.reporter.consoleReporter.interval", "10 SECONDS")

    val env = new StreamExecutionEnvironment(new LocalStreamEnvironment(config))
    // NOTE(review): EventTime is selected but no timestamp/watermark assigner
    // is attached here — presumably IntegerSource emits timestamps itself;
    // confirm, otherwise the windows may never fire.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val integers = env.addSource(new IntegerSource(100))
    integers
      .timeWindowAll(Time.seconds(1))
      .sum(0)
      .print

    env.execute("ConsoleReporterTestJob")
  }
}
示例4: Flink
//设置package包名称以及导入依赖的类
package uk.co.bitcat.streaming.flink
import java.util.Properties
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer09
import org.apache.flink.util.Collector
import uk.co.bitcat.streaming.flink.domain.{Measurement, MeasurementSchema}
import uk.co.bitcat.streaming.flink.watermark.TwoSecondDelayWatermark
object Flink {
  // Consumes Measurement records from the Kafka "pollution" topic, averages
  // the pollution value over 10-second event-time windows, and prints every
  // window whose mean exceeds 75.0.
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Kafka 0.9 consumer configuration; assumes a broker on localhost:9092.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "localhost:9092")
    properties.setProperty("group.id", "flink_consumer")

    env
      .addSource(new FlinkKafkaConsumer09[Measurement]("pollution", new MeasurementSchema(), properties))
      // NOTE(review): presumably watermarks lag event time by two seconds to
      // tolerate late records — confirm against TwoSecondDelayWatermark.
      .assignTimestampsAndWatermarks(new TwoSecondDelayWatermark())
      .timeWindowAll(Time.seconds(10))
      // Incremental fold + window function: the fold keeps a running
      // (placeholder, pollution sum, record count) triple per window; the
      // window function then fills in the real window end time and the mean.
      .apply(
        (0L, 0.0, 0), // (Window End Time, To Store Mean, Count)
        (acc: (Long, Double, Int), m: Measurement) => { (0L, acc._2 + m.pollution, acc._3 + 1) },
        ( window: TimeWindow,
          accs: Iterable[(Long, Double, Int)],
          out: Collector[(Long, Double, Int)] ) =>
        {
          // With an incremental fold there is exactly one accumulator per
          // window; emit (window end timestamp, mean pollution, count).
          val acc = accs.iterator.next()
          out.collect((window.getEnd, acc._2/acc._3, acc._3))
        }
      )
      .filter(_._2 > 75.0)
      .print() // Replace with call to custom sink to raise alert for pollution level
    env.execute()
  }
}
示例5: EventTimeWithWaterMarkAllowedLateness
//设置package包名称以及导入依赖的类
package com.vishnuviswanath.eventtime
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import Util._
import org.apache.flink.streaming.api.TimeCharacteristic
import com.vishnuviswanath.processtime.Util.ValueAndTimestamp
object EventTimeWithWaterMarkAllowedLateness {

  /**
   * Event-time word counting with watermarks plus 5 seconds of allowed
   * lateness, so records arriving after the watermark has passed can still
   * update an already-fired 10s/5s sliding window.
   */
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Source emits delayed value+timestamp strings; TimestampExtractor
    // derives event-time timestamps and watermarks from them.
    // (The original comment on this line was mojibake and unrecoverable.)
    val events = env.addSource(new ValueAndTimestampWithDelay).assignTimestampsAndWatermarks(new TimestampExtractor)

    // Key by the word (first comma-separated field) and count occurrences
    // in 10-second windows sliding every 5 seconds, accepting records up
    // to 5 seconds late.
    val wordCounts = events
      .map { (x: String) => (x.split(",")(0), 1) }
      .keyBy(0)
      .timeWindow(Time.seconds(10), Time.seconds(5)).allowedLateness(Time.seconds(5))
      .sum(1)

    wordCounts.print
    // NOTE(review): job name looks copy-pasted from a processing-time example.
    env.execute("ProcessingTime processing example")
  }
}
示例6: EventTimeWithWaterMark
//设置package包名称以及导入依赖的类
package com.vishnuviswanath.eventtime
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import Util._
import org.apache.flink.streaming.api.TimeCharacteristic
import com.vishnuviswanath.processtime.Util.ValueAndTimestamp
object EventTimeWithWaterMark {

  /**
   * Event-time word counting where watermarks deliberately trail event
   * time (TimestampExtractorWithWaterMarkDelay) so slightly late records
   * are still assigned to their 10s/5s sliding windows.
   */
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Source emits delayed value+timestamp strings; the extractor derives
    // timestamps and delayed watermarks from them.
    // (The original comment on this line was mojibake and unrecoverable.)
    val events = env.addSource(new ValueAndTimestampWithDelay).assignTimestampsAndWatermarks(new TimestampExtractorWithWaterMarkDelay)

    // Key by the word (first comma-separated field) and count occurrences
    // in 10-second windows sliding every 5 seconds.
    val wordCounts = events
      .map { (x: String) => (x.split(",")(0), 1) }
      .keyBy(0)
      .timeWindow(Time.seconds(10), Time.seconds(5))
      .sum(1)

    wordCounts.print
    // NOTE(review): job name looks copy-pasted from a processing-time example.
    env.execute("ProcessingTime processing example")
  }
}
示例7: EventTime
//设置package包名称以及导入依赖的类
package com.vishnuviswanath.eventtime
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import Util._
import org.apache.flink.streaming.api.TimeCharacteristic
object EventTime {

  /**
   * Baseline event-time word counting example: timestamps come from the
   * records themselves (via TimestampExtractor) and counts are produced
   * over 10-second windows sliding every 5 seconds.
   */
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // The two original comments here were mojibake; they appear to have
    // described which (word, count) pairs land in which window when some
    // records arrive late — unrecoverable, see the source example upstream.
    val events = env.addSource(new ValueAndTimestampWithDelay).assignTimestampsAndWatermarks(new TimestampExtractor)

    // Key by the word (first comma-separated field) and count occurrences
    // per sliding window.
    val wordCounts = events
      .map { (x: String) => (x.split(",")(0), 1) }
      .keyBy(0)
      .timeWindow(Time.seconds(10), Time.seconds(5))
      .sum(1)

    wordCounts.print
    // NOTE(review): job name looks copy-pasted from a processing-time example.
    env.execute("ProcessingTime processing example")
  }
}
示例8: WikipediaAnalysis
//设置package包名称以及导入依赖的类
package phu.quang.le.Stream
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.wikiedits._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010
import org.apache.flink.streaming.util.serialization.SimpleStringSchema
object WikipediaAnalysis extends App {
  // Streams Wikipedia edit events, accumulates each user's byte diff over
  // 5-second windows, and publishes the resulting tuples as plain strings
  // to the Kafka topic "wiki_results".
  val env = StreamExecutionEnvironment.createLocalEnvironment()

  val editEvents: DataStream[WikipediaEditEvent] = env.addSource(new WikipediaEditsSource())

  // Key by user, then fold each window's edits into a running
  // (user, total byte diff) pair starting from ("", 0L).
  val perUserByteDiff = editEvents
    .keyBy(_.getUser)
    .timeWindow(Time.seconds(5))
    .fold(("", 0L))((acc, event) => (event.getUser, acc._2 + event.getByteDiff))

  perUserByteDiff
    .map(_.toString())
    .addSink(new FlinkKafkaProducer010("localhost:9092", "wiki_results", new SimpleStringSchema()))

  env.execute()
}