This article collects typical usage examples of the Scala class org.apache.spark.util.AccumulatorV2. If you are unsure what the AccumulatorV2 class does or how to use it in Scala, the selected examples below should help.
Two code examples using the AccumulatorV2 class are shown below, ordered by popularity by default.
Example 1: CollectionAccumulatorAPISparkImpl
// Package declaration and imported dependencies
package com.datawizards.sparklocal.impl.spark.accumulator

import com.datawizards.sparklocal.accumulator.{AccumulatorV2API, CollectionAccumulatorAPI}
import org.apache.spark.util.AccumulatorV2

// Wraps a Spark AccumulatorV2 that collects elements into a java.util.List
// and exposes it through the sparklocal CollectionAccumulatorAPI.
class CollectionAccumulatorAPISparkImpl[T](acc: AccumulatorV2[T, java.util.List[T]])
  extends AccumulatorV2APISparkImpl[T, java.util.List[T]](acc)
    with CollectionAccumulatorAPI[T] {

  override def copyAndReset(): AccumulatorV2API[T, java.util.List[T]] =
    new CollectionAccumulatorAPISparkImpl(acc.copyAndReset())

  override def copy(): AccumulatorV2API[T, java.util.List[T]] =
    new CollectionAccumulatorAPISparkImpl(acc.copy())

  // Merging is delegated to the wrapped Spark accumulator; merging with any
  // other AccumulatorV2API implementation is unsupported.
  override def merge(other: AccumulatorV2API[T, java.util.List[T]]): Unit = other match {
    case a: CollectionAccumulatorAPISparkImpl[T] => acc.merge(a.acc)
    case _ =>
      throw new UnsupportedOperationException(
        s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }
}
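To show how this wrapper might be constructed, here is a minimal sketch: it obtains a standard Spark CollectionAccumulator (which is an AccumulatorV2[T, java.util.List[T]]) from the SparkContext and hands it to the wrapper. The local SparkSession, the accumulator name and the sample data are assumptions made for this example, not part of the original snippet.

import org.apache.spark.sql.SparkSession
import com.datawizards.sparklocal.impl.spark.accumulator.CollectionAccumulatorAPISparkImpl

object CollectionAccumulatorDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical local session used only for this demo.
    val spark = SparkSession.builder().master("local[*]").appName("collection-acc-demo").getOrCreate()
    val sc = spark.sparkContext

    // sc.collectionAccumulator[T] returns a registered CollectionAccumulator[T],
    // i.e. an AccumulatorV2[T, java.util.List[T]] suitable for the wrapper above.
    val underlying = sc.collectionAccumulator[String]("bad-records")
    val wrapped = new CollectionAccumulatorAPISparkImpl[String](underlying)

    // Executors add to the underlying Spark accumulator as usual;
    // `wrapped` would be handed to code written against the sparklocal API.
    sc.parallelize(Seq("ok", "", "ok")).foreach { s =>
      if (s.isEmpty) underlying.add("empty record")
    }

    println(underlying.value) // java.util.List containing the collected entries
    spark.stop()
  }
}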
Example 2: AtomicLongAccumulator
// Package declaration and imported dependencies
package org.hammerlab.spark

import java.util.concurrent.atomic.AtomicLong

import org.apache.spark.util.AccumulatorV2

// A Long accumulator backed by AtomicLong, so concurrent add() calls
// within one JVM are thread-safe.
case class AtomicLongAccumulator(initialValue: Long = 0)
  extends AccumulatorV2[Long, Long] {

  private var _value = new AtomicLong(initialValue)

  override def value: Long = _value.get
  override def isZero: Boolean = value == 0
  override def copy(): AccumulatorV2[Long, Long] = AtomicLongAccumulator(value)
  override def reset(): Unit = _value = new AtomicLong(0)
  override def add(v: Long): Unit = _value.addAndGet(v)
  override def merge(other: AccumulatorV2[Long, Long]): Unit = add(other.value)
}
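A minimal usage sketch, assuming a local SparkSession: custom AccumulatorV2 implementations must be registered with SparkContext.register before they are used in a job. The session setup, accumulator name and sample RDD below are illustrative assumptions, not part of the original snippet.

import org.apache.spark.sql.SparkSession
import org.hammerlab.spark.AtomicLongAccumulator

object AtomicLongAccumulatorDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("atomic-acc-demo").getOrCreate()
    val sc = spark.sparkContext

    // Register the custom accumulator so Spark can track it across tasks.
    val evenCount = AtomicLongAccumulator()
    sc.register(evenCount, "evenCount")

    sc.parallelize(1 to 100).foreach { n =>
      if (n % 2 == 0) evenCount.add(1) // thread-safe within a task thanks to AtomicLong
    }

    println(evenCount.value) // 50
    spark.stop()
  }
}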