本文整理汇总了Scala中org.apache.spark.streaming.Milliseconds类的典型用法代码示例。如果您正苦于以下问题:Scala Milliseconds类的具体用法?Scala Milliseconds怎么用?Scala Milliseconds使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Milliseconds类的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: Combinators
//设置package包名称以及导入依赖的类
package com.stratio.ioft.streaming.transformations
import com.stratio.ioft.domain._
import com.stratio.ioft.domain.measures.{Acceleration, Attitude}
import com.stratio.ioft.domain.states.AttitudeHistory
import com.stratio.ioft.util.Math.Geometry._
import org.apache.spark.streaming.dstream.DStream
import com.stratio.ioft.domain.measures.VectorMeasure._
import com.stratio.ioft.streaming.transformations.Aggregators.attitudeHistoryStream
import org.apache.spark.streaming.Milliseconds
object Combinators {

  /**
   * Joins a stream of desired attitudes with the drone's actual attitude history and,
   * for each desired attitude, picks the recorded actual attitude closest in time.
   *
   * Both input streams are re-windowed to the same tumbling window of `timeRange`
   * milliseconds (window length == slide interval) before joining by drone id.
   *
   * @param desiredAttitudeStream stream of (droneId, (timestamp, desired attitude))
   * @param attitudeStream        stream of (droneId, (timestamp, measured attitude))
   * @param timeRange             window length and slide interval, in milliseconds
   * @return stream of (droneId, (timestamp, desired attitude, closest actual attitude));
   *         entries with no actual attitude candidates are dropped
   */
  def desiredAndActualAttitudeStream(
    desiredAttitudeStream: DStream[(DroneIdType, (BigInt, Attitude))],
    attitudeStream: DStream[(DroneIdType, (BigInt, Attitude))],
    timeRange: Long
  ): DStream[(DroneIdType, (BigInt, Attitude, Attitude))] = {

    // Tumbling window: window duration equals slide duration, so each event is seen once.
    def windowed(stream: DStream[(DroneIdType, (BigInt, Attitude))]) = {
      val windowDuration = Milliseconds(timeRange)
      stream.window(windowDuration, windowDuration)
    }

    windowed(desiredAttitudeStream) join attitudeHistoryStream(windowed(attitudeStream)) flatMap {
      case (id, ((ts, desired), actualAttitudeHistory)) =>
        // Candidate actual attitudes around timestamp `ts`; may be empty.
        val closestAttitudes = actualAttitudeHistory.attitudeAt(ts)
        closestAttitudes.headOption map { _ =>
          // BUGFIX: compare distances in BigInt space. The previous code did
          // math.abs((actual_ts - ts).toLong), and BigInt.toLong truncates values
          // outside the Long range, which could corrupt the ordering (and
          // math.abs(Long.MinValue) is itself negative).
          val (_, actualAttitude: Attitude) = closestAttitudes.minBy {
            case (actual_ts, _) => (actual_ts - ts).abs
          }
          id -> (ts, desired, actualAttitude)
        }
    }
  }
}
示例2: StreamingContextFactory
//设置package包名称以及导入依赖的类
package spark.jobserver.context
import com.typesafe.config.Config
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Milliseconds, StreamingContext}
import spark.jobserver.{api, ContextLike, SparkStreamingJob}
class StreamingContextFactory extends ScalaContextFactory {

  type C = StreamingContext with ContextLike

  /** A job is runnable in this context only if it is a streaming job. */
  def isValidJob(job: api.SparkJobBase): Boolean = job.isInstanceOf[SparkStreamingJob]

  /**
   * Builds a [[StreamingContext]] configured from `config`.
   *
   * Expected config keys:
   *  - `streaming.batch_interval`: batch interval in milliseconds
   *  - `streaming.stopGracefully`: whether to finish processing received data on stop
   *  - `streaming.stopSparkContext`: whether stopping also stops the underlying SparkContext
   */
  def makeContext(sparkConf: SparkConf, config: Config, contextName: String): C = {
    val batchMillis  = config.getInt("streaming.batch_interval")
    val gracefulStop = config.getBoolean("streaming.stopGracefully")
    val stopSpark    = config.getBoolean("streaming.stopSparkContext")

    new StreamingContext(sparkConf, Milliseconds(batchMillis)) with ContextLike {
      // Delegates to the two-argument overload using the configured stop semantics.
      def stop(): Unit = stop(stopSpark, gracefulStop)
    }
  }
}