

Scala RandomRDDs Class Code Examples

This article collects typical usage examples of the Scala class org.apache.spark.mllib.random.RandomRDDs. If you are wondering what the RandomRDDs class does, how to use it, or where to find example code, the selected class examples below may help.


A total of 3 code examples of the RandomRDDs class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
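Before the examples, here is a minimal sketch of the RandomRDDs factory API itself. The method names (uniformRDD, normalRDD, poissonRDD, normalVectorRDD) are part of Spark MLlib; the sizes, mean, seed, and app name below are illustrative values only, not taken from the examples.

import org.apache.spark.SparkContext
import org.apache.spark.mllib.random.RandomRDDs

object RandomRDDsSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[*]", "random-rdds-sketch")
    // 1 million doubles drawn uniformly from [0, 1), with a fixed seed for reproducibility
    val uniform = RandomRDDs.uniformRDD(sc, size = 1000000L, numPartitions = 4, seed = 42L)
    // 1 million draws from the standard normal distribution N(0, 1)
    val normal = RandomRDDs.normalRDD(sc, size = 1000000L)
    // 1 million draws from a Poisson distribution with mean 5.0
    val poisson = RandomRDDs.poissonRDD(sc, mean = 5.0, size = 1000000L)
    // 10,000 rows of 3-dimensional vectors with standard normal entries
    val vectors = RandomRDDs.normalVectorRDD(sc, numRows = 10000L, numCols = 3)
    println(s"uniform mean ≈ ${uniform.mean()}, normal mean ≈ ${normal.mean()}, poisson count = ${poisson.count()}")
    sc.stop()
  }
}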

Example 1: GenerateScalingData

// Package declaration and imported dependencies
package com.highperformancespark.examples.tools

import com.highperformancespark.examples.dataframe.RawPanda

import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.mllib.random.RandomRDDs
import org.apache.spark.mllib.linalg.Vector

object GenerateScalingData {

  /**
   * Generate an RDD of RawPanda records for scaling experiments.
   * Zip codes are drawn from an exponential distribution and the
   * numeric attributes from a standard normal distribution.
   */
  def generateGoldilocks(sc: SparkContext, rows: Long, numCols: Int):
      RDD[RawPanda] = {
    val zipRDD = RandomRDDs.exponentialRDD(sc, mean = 1000, size = rows)
      .map(_.toInt.toString)
    val valuesRDD = RandomRDDs.normalVectorRDD(
      sc, numRows = rows, numCols = numCols)
    zipRDD.zip(valuesRDD).map { case (z, v) =>
      RawPanda(1, z, "giant", v(0) > 0.5, v.toArray)
    }
  }
}
Developer: gourimahapatra, Project: high-performance-spark, Lines of code: 25, Source file: GenerateScalingData.scala
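As a hedged usage sketch (the driver object, SparkContext setup, and sizes below are illustrative and not part of the original project), generateGoldilocks could be driven like this:

import org.apache.spark.{SparkConf, SparkContext}
import com.highperformancespark.examples.tools.GenerateScalingData

object GenerateScalingDataDriver {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[*]").setAppName("goldilocks-gen"))
    // Generate 100,000 synthetic RawPanda rows with 10 numeric attributes each
    val pandas = GenerateScalingData.generateGoldilocks(sc, rows = 100000L, numCols = 10)
    println(s"generated ${pandas.count()} pandas")
    sc.stop()
  }
}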

Example 2: CustomDecaySpec

// Package declaration and imported dependencies
package io.flatmap.ml.som

import io.flatmap.ml.som.SelfOrganizingMap.Shape
import org.apache.spark.mllib.random.RandomRDDs
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import util.TestSparkContext

class CustomDecaySpec extends FlatSpec with Matchers with BeforeAndAfterEach with TestSparkContext {

  def SOM(width: Int, height: Int, _sigma: Double, _learningRate: Double) =
    new SelfOrganizingMap with CustomDecay with GaussianNeighborboodKernel with QuantizationErrorMetrics {
      override val shape: Shape = (width, height)
      override val learningRate: Double = _learningRate
      override val sigma: Double = _sigma
    }

  "decay" should "decrease sigma and learningRate to one half each" in {
    val data = RandomRDDs.normalVectorRDD(sparkSession.sparkContext, numRows = 512L, numCols = 3)
    val sigma = 0.5
    val learningRate = 0.3
    val iterations = 20.0
    val (_, params) =
      SOM(6, 6, sigma, learningRate)
        .initialize(data)
        .train(data, iterations.toInt)
    params.sigma should equal (sigma / (1.0 + (iterations - 1.0) / iterations))
    params.learningRate should equal (learningRate / (1.0 + (iterations - 1.0) / iterations))
  }

} 
Developer: ShokuninSan, Project: som, Lines of code: 31, Source file: CustomDecaySpec.scala
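The assertions in this spec imply a hyperbolic decay schedule. The following standalone sketch is an assumption reconstructed from the test's arithmetic, not the project's CustomDecay implementation:

// Assumed decay schedule, inferred from the assertions above: after iteration t
// (0-based) out of T total iterations, a hyperparameter h becomes h / (1 + t / T).
def decay(initial: Double, iteration: Int, iterations: Int): Double =
  initial / (1.0 + iteration.toDouble / iterations.toDouble)

// With T = 20 the last iteration uses t = 19, so both values are divided by 1.95,
// i.e. decreased to roughly one half of their initial values:
// decay(0.5, 19, 20) == 0.5 / 1.95 ≈ 0.2564
// decay(0.3, 19, 20) == 0.3 / 1.95 ≈ 0.1538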

Example 3: SelfOrganizingMapSpec

// Package declaration and imported dependencies
package io.flatmap.ml.som

import breeze.numerics.closeTo
import breeze.linalg.DenseMatrix
import io.flatmap.ml.som.SelfOrganizingMap.Shape
import org.apache.spark.mllib.linalg.DenseVector
import org.apache.spark.mllib.random.RandomRDDs
import org.scalatest._
import util.{FakeDecayFunction, FakeMetrics, FakeNeighborhoodKernel, TestSparkContext}

class SelfOrganizingMapSpec extends FlatSpec with Matchers with BeforeAndAfterEach with TestSparkContext {

  def SOM(width: Int, height: Int) =
    new SelfOrganizingMap with FakeNeighborhoodKernel with FakeDecayFunction with FakeMetrics {
      override val shape: Shape = (width, height)
      override val learningRate: Double = 0.1
      override val sigma: Double = 0.2
    }

  "instantiation" should "create a SOM with codebook of zeros" in {
    val som = SOM(6, 6)
    som.codeBook should === (DenseMatrix.fill[Array[Double]](6, 6)(Array.emptyDoubleArray))
  }

  "initialize" should "copy random data points from RDD into codebook" in {
    val data = RandomRDDs.normalVectorRDD(sparkSession.sparkContext, numRows = 512L, numCols = 3)
    val som = SOM(6, 6)
    som.initialize(data).codeBook should !== (DenseMatrix.fill[Array[Double]](6, 6)(Array.emptyDoubleArray))
  }

  "winner" should "return best matching unit (BMU)" in {
    val som = SOM(6, 6)
    som.codeBook.keysIterator.foreach { case (x, y) => som.codeBook(x, y) = Array(0.2, 0.2, 0.2) }
    som.codeBook(3, 3) = Array(0.3, 0.3, 0.3)
    som.winner(new DenseVector(Array(2.0, 2.0, 2.0)), som.codeBook) should equal ((3, 3))
    som.winner(new DenseVector(Array(0.26, 0.26, 0.26)), som.codeBook) should equal ((3, 3))
  }

  "winner" should "return last best matching unit (BMU) index in case of multiple BMUs" in {
    val som = SOM(6, 6)
    som.codeBook.keysIterator.foreach { case (x, y) => som.codeBook(x, y) = Array(0.2, 0.2, 0.2) }
    som.codeBook(3, 3) = Array(0.3, 0.3, 0.3)
    som.winner(new DenseVector(Array(0.25, 0.25, 0.25)), som.codeBook) should equal ((5, 5))
  }

  "classify" should "return the best matching unit along with Euclidean distance" in {
    val som = SOM(6, 6)
    som.codeBook.keysIterator.foreach { case (x, y) => som.codeBook(x, y) = Array(0.2, 0.2, 0.2) }
    som.codeBook(3, 3) = Array(0.3, 0.3, 0.3)
    val (bmu, distance) = som.classify(new DenseVector(Array(0.26, 0.26, 0.26)))
    bmu should === ((3, 3))
    assert(closeTo(distance, 0.06, relDiff = 1e-2))
  }

} 
Developer: ShokuninSan, Project: som, Lines of code: 56, Source file: SelfOrganizingMapSpec.scala
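For reference, here is a self-contained sketch (not the project's implementation) of the best-matching-unit search the "winner" tests describe: scan the codebook, compute the Euclidean distance of each unit's weight vector to the input, and keep the last unit visited that attains the minimum, so ties resolve to the last index as asserted in the "last best matching unit" test above.

import breeze.linalg.DenseMatrix

object BmuSketch {
  def euclidean(a: Array[Double], b: Array[Double]): Double =
    math.sqrt(a.zip(b).map { case (x, y) => (x - y) * (x - y) }.sum)

  // Scan all (x, y) positions of the codebook; '<=' keeps the last unit visited on ties.
  def winner(input: Array[Double], codeBook: DenseMatrix[Array[Double]]): (Int, Int) =
    codeBook.keysIterator.foldLeft(((0, 0), Double.MaxValue)) {
      case ((best, bestDist), (x, y)) =>
        val d = euclidean(input, codeBook(x, y))
        if (d <= bestDist) ((x, y), d) else (best, bestDist)
    }._1
}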


Note: The org.apache.spark.mllib.random.RandomRDDs class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please refer to each project's license before distributing or using the code, and do not reproduce this article without permission.