当前位置: 首页>>代码示例>>Scala>>正文


Scala ALS类代码示例

本文整理汇总了Scala中org.apache.spark.mllib.recommendation.ALS的典型用法代码示例。如果您正苦于以下问题:Scala ALS类的具体用法?Scala ALS怎么用?Scala ALS使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了ALS类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。

示例1: RankingDataProvider

// Package declaration and imported dependency classes
package com.github.jongwook

import org.apache.spark.SparkConf
import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}
import org.apache.spark.sql.SparkSession
import org.scalatest._

object RankingDataProvider {

  /** Trains an implicit-feedback ALS model on 90% of `ratings` and returns
    * the model's top-k recommendations for the held-out users alongside the
    * held-out ratings themselves.
    *
    * @param ratings full set of observed ratings
    * @param k       number of products to recommend per user
    * @return (predicted ratings for test users, ground-truth test ratings)
    */
  def apply(ratings: Seq[Rating], k: Int = 100): (Seq[Rating], Seq[Rating]) = {
    val master = new SparkConf().get("spark.master", "local[8]")
    val session = SparkSession.builder().master(master).getOrCreate()
    val context = session.sparkContext

    // 90/10 train/test split with a fixed seed (0) for reproducibility
    val splits = context.parallelize(ratings).cache().randomSplit(Array(0.9, 0.1), 0)
    val trainSet = splits(0)
    val testSet = splits(1)

    val model = ALS.trainImplicit(trainSet, rank = 10, iterations = 2, lambda = 2, blocks = 100, alpha = 10)

    // Restrict the user-factor matrix to users that actually occur in the
    // test split, so recommendations are only computed where needed.
    val heldOutUsers = testSet.map(_.user).collect().toSet
    val heldOutUsersBc = context.broadcast(heldOutUsers)
    val heldOutUserFeatures = model.userFeatures
      .filter { case (user, _) => heldOutUsersBc.value.contains(user) }
      .repartition(100)
      .cache()

    val heldOutModel = new MatrixFactorizationModel(
      model.rank, heldOutUserFeatures, model.productFeatures.repartition(100).cache())

    // Top-k recommendations per held-out user, flattened into plain ratings
    val recommended = heldOutModel.recommendProductsForUsers(k)
    val predicted = recommended.values.flatMap(rs => rs).collect()
    val actual = testSet.collect()

    (predicted, actual)
  }
}

class RankingDataProvider extends FlatSpec with Matchers {
  "Ranking Data Provider" should "calculate the rankings" in {
    // Every user present in the ground truth must also appear in the
    // predictions, and vice versa.
    val (prediction, groundTruth) = RankingDataProvider(MovieLensLoader.load())
    val predictedUsers = prediction.map(_.user).distinct.sorted
    val actualUsers = groundTruth.map(_.user).distinct.sorted
    predictedUsers should equal (actualUsers)
  }
}
开发者ID:jongwook,项目名称:spark-ranking-metrics,代码行数:43,代码来源:RankingDataProvider.scala

示例2: SparkAlsPredictor

// Package declaration and imported dependency classes
package com.rikima.ml.recommend

import org.apache.spark.SparkContext
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel


object SparkAlsPredictor {

  /** Loads ratings from `input`, restores a previously-saved
    * [[MatrixFactorizationModel]] from `model_path`, and prints the mean
    * squared error of the model's predictions against the observed ratings.
    *
    * Input lines are expected as "user\titem\trate" optionally followed by a
    * '#'-prefixed trailing comment, which is stripped before parsing.
    */
  def execute(sc: SparkContext, input: String, model_path: String): Unit = {
    // Strip the trailing "#..." comment. The original code called
    // l.substring(0, l.indexOf("#")) unconditionally, which throws
    // StringIndexOutOfBoundsException when a line has no '#' (indexOf == -1);
    // keep the whole line in that case.
    val data = sc.textFile(input).map { l =>
      val p = l.indexOf("#")
      if (p >= 0) l.substring(0, p) else l
    }
    val ratings = data.map(_.split('\t') match { case Array(user, item, rate) =>
      Rating(user.toInt, item.toInt, rate.toDouble)
    })

    // The model was trained and saved by a separate job; only load it here.
    val model = MatrixFactorizationModel.load(sc, model_path)

    // Predict a rating for every observed (user, product) pair.
    val usersProducts = ratings.map { case Rating(user, product, _) =>
      (user, product)
    }
    val predictions =
      model.predict(usersProducts).map { case Rating(user, product, rate) =>
        ((user, product), rate)
      }

    // Join observed and predicted ratings on (user, product).
    val ratesAndPreds = ratings.map { case Rating(user, product, rate) =>
      ((user, product), rate)
    }.join(predictions)

    // Mean squared error over all joined pairs.
    val MSE = ratesAndPreds.map { case (_, (r1, r2)) =>
      val err = r1 - r2
      err * err
    }.mean()
    println("Mean Squared Error = " + MSE)
  }

  /** Entry point: parses -i/--input and -m/--model, then runs [[execute]]. */
  def main(args: Array[String]): Unit = {
    // Pair each flag with its following value instead of mutable index
    // bookkeeping; a flag in last position (no value) is simply ignored,
    // where the original indexed args(i + 1) out of bounds.
    val (input, model_path) = args.sliding(2).foldLeft(("", "")) {
      case ((_, mp), Array("-i" | "--input", value)) => (value, mp)
      case ((in, _), Array("-m" | "--model", value)) => (in, value)
      case (acc, _)                                  => acc
    }
    val sc = new SparkContext()
    execute(sc, input, model_path)
  }
}
开发者ID:rikima,项目名称:spark_als_work,代码行数:61,代码来源:SparkAlsPredictor.scala

示例3: RecommendationExample

// Package declaration and imported dependency classes
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel
import org.apache.spark.mllib.recommendation.Rating

object RecommendationExample {
  /** Trains an explicit-feedback ALS model on a local CSV of ratings,
    * reports its MSE on the training data, then round-trips the model
    * through disk to demonstrate save/load.
    */
  def main(args: Array[String]): Unit = {
    PropertyConfigurator.configure("file/log4j.properties")
    val conf = new SparkConf().setAppName("CollaborativeFilteringExample").setMaster("local")
    val sc = new SparkContext(conf)

    // Parse "user,item,rate" lines into Rating records (malformed lines
    // fail with a MatchError, exactly as in the pattern-match form).
    val rawLines = sc.textFile("file/test.data")
    val ratings = rawLines.map { line =>
      val Array(user, item, rate) = line.split(',')
      Rating(user.toInt, item.toInt, rate.toDouble)
    }

    // Train an explicit-feedback ALS model.
    val rank = 10
    val numIterations = 10
    val model = ALS.train(ratings, rank, numIterations, 0.01)

    // Predict a rating for every observed (user, product) pair and join
    // observed against predicted values keyed by that pair.
    val userProductPairs = ratings.map(r => (r.user, r.product))
    val predicted = model.predict(userProductPairs).map(r => ((r.user, r.product), r.rating))
    val observed = ratings.map(r => ((r.user, r.product), r.rating))
    val joined = observed.join(predicted)

    // Mean squared error over all joined pairs.
    val MSE = joined.values.map { case (actual, guess) =>
      val diff = actual - guess
      diff * diff
    }.mean()
    println("----------------------------------------")
    println("-------Mean Squared Error = " + MSE)
    println("----------------------------------------")

    // Save and reload the model to demonstrate persistence.
    model.save(sc, "target/tmp/myCollaborativeFilter")
    val sameModel = MatrixFactorizationModel.load(sc, "target/tmp/myCollaborativeFilter")
    sameModel.userFeatures.foreach(println)
    val proFCounts = sameModel.productFeatures.count()
    println(proFCounts)

  }
}
// scalastyle:on println 
开发者ID:Larry3z,项目名称:SparkPractice,代码行数:52,代码来源:RecommendationExample.scala

示例4: cf

// Package declaration and imported dependency classes
package spark

import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory

/**
  * Created by I311352 on 3/29/2017.
  */
// NOTE(review): both `cf` definitions below are empty placeholders —
// presumably scaffolding left over from project creation; all logic lives
// in RecommendationExample. The lowercase name also violates Scala's
// UpperCamelCase class-naming convention, but renaming could break
// external references, so it is only flagged here.
class cf {

}

object cf extends App {

}

object RecommendationExample {
  /** Trains an explicit-feedback ALS model on a local CSV of ratings and
    * logs its mean squared error on the training data.
    */
  def main(args: Array[String]): Unit = {
    val LOG = LoggerFactory.getLogger(getClass)

    val conf = new SparkConf().setAppName("mltest").setMaster("local[2]")
    val sc = new SparkContext(conf)

    // Echo every raw input line to the log before parsing.
    val data = sc.textFile("data/test.data")
    data.foreach(line => LOG.warn(line))

    // Parse "user,item,rate" triples into Rating records (malformed lines
    // fail with a MatchError, exactly as in the pattern-match form).
    val rating = data.map { line =>
      val Array(user, item, rate) = line.split(",")
      Rating(user.toInt, item.toInt, rate.toDouble)
    }

    LOG.warn(rating.toString())

    // Train an explicit-feedback ALS model.
    val rank = 10
    val numIterations = 20
    val model = ALS.train(rating, rank, numIterations, 0.01)

    // Observed vs. predicted ratings, joined on the (user, product) key.
    val userProducts = rating.map(r => (r.user, r.product))
    val predictions = model.predict(userProducts).map(r => ((r.user, r.product), r.rating))
    val ratesAndPreds = rating.map(r => ((r.user, r.product), r.rating)).join(predictions)

    // Mean squared error over all joined pairs.
    val MSE = ratesAndPreds.values.map { case (observed, predicted) =>
      val err = observed - predicted
      err * err
    }.mean()

    LOG.warn("Mean Squared Error = " + MSE)

  }
}
开发者ID:compasses,项目名称:elastic-spark,代码行数:53,代码来源:cf.scala


注:本文中的org.apache.spark.mllib.recommendation.ALS类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。