本文整理汇总了Scala中org.apache.spark.ml.classification.MultilayerPerceptronClassifier类的典型用法代码示例。如果您正苦于以下问题:Scala MultilayerPerceptronClassifier类的具体用法?Scala MultilayerPerceptronClassifier怎么用?Scala MultilayerPerceptronClassifier使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了MultilayerPerceptronClassifier类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: buildMultiPerpectronNetwork
//设置package包名称以及导入依赖的类
package com.sircamp.algorithms.neuralnetwork
import java.io.{BufferedWriter, FileOutputStream, OutputStreamWriter}
import com.sircamp.Application
import org.apache.spark.ml.classification.{MultilayerPerceptronClassificationModel, MultilayerPerceptronClassifier}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.linalg.VectorUDT
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.types._
// NOTE(review): this is the tail of a method whose signature was cut off above
// this excerpt — it appears to dump `trainingData` to TEMP_FILE_PATH in LIBSVM
// text format ("label idx:value idx:value ..."); confirm against the full file.
val file = new java.io.File(TEMP_FILE_PATH)
// Remove any stale dump from a previous run before writing a fresh one.
if( file.exists){
file.delete()
}
val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file)))
// NOTE(review): `sb` is never used below — candidate for removal.
var sb = new StringBuilder()
// collect() pulls the whole dataset to the driver — assumes the training set
// fits in driver memory; TODO confirm for large inputs.
trainingData.collect().foreach(f=>{
var arr = f.features.toArray
var features = ""
// LIBSVM feature indices are 1-based, hence (i+1).
for(i <- arr.indices){
features = features +((i+1)+":"+arr(i))+" "
}
// One line per example: "<label> <1:v1> <2:v2> ... "
writer.write((f.label+" "+features) + "\n")
})
writer.close()
}
/**
 * Trains a multilayer perceptron on the supplied dataset.
 *
 * Block size and random seed come from the enclosing scope's `blockSize`
 * and `seed` members; only the topology and iteration budget vary per call.
 *
 * @param trainingData labeled rows to fit on
 * @param layers       layer sizes, input first and output (class count) last
 * @param maxIter      maximum number of training iterations
 * @return the fitted classification model
 */
def buildMultiPerpectronNetwork(trainingData:Dataset[Row], layers:Array[Int], maxIter:Int):MultilayerPerceptronClassificationModel = {
  val classifier = new MultilayerPerceptronClassifier()
  classifier.setLayers(layers)
  classifier.setBlockSize(blockSize)
  classifier.setSeed(seed)
  classifier.setMaxIter(maxIter)
  classifier.fit(trainingData)
}
}
示例2: MLPTest
//设置package包名称以及导入依赖的类
package cn.edu.bjtu
import org.apache.spark.SparkConf
import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.SparkSession
object MLPTest {

  /**
   * Entry point: trains a multilayer perceptron on LIBSVM data from HDFS and
   * prints sensitivity, specificity, and accuracy on a held-out split.
   *
   * Fixes over the previous version:
   *  - `predictionAndLabels` is cached: it previously re-ran the whole
   *    prediction pipeline for each of the four filter/count jobs plus the
   *    evaluator pass.
   *  - the SparkSession is stopped on exit so executors are released.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setAppName("MLPTest")
      .setMaster("spark://master:7077")
      .setJars(Array("/home/hadoop/MLP.jar"))

    val spark = SparkSession.builder()
      .config(sparkConf)
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    // Load the data stored in LIBSVM format as a DataFrame.
    val data = spark.read.format("libsvm")
      .load("hdfs://master:9000/sample_formatted.txt")

    // Split the data into train and test sets; fixed seed for reproducibility.
    val splits = data.randomSplit(Array(0.7, 0.3), seed = 14L)
    val train = splits(0)
    val test = splits(1)

    // Topology: 20 input features, one hidden layer of 20 units, 2 classes.
    val layers = Array[Int](20, 20, 2)

    // Create the trainer and set its parameters.
    val trainer = new MultilayerPerceptronClassifier()
      .setLayers(layers)
      .setBlockSize(128)
      .setSeed(14L)
      .setMaxIter(100)

    // Train the model.
    val model = trainer.fit(train)

    // Compute metrics on the test set. cache() avoids recomputing the
    // transform for every count below and for the evaluator.
    val result = model.transform(test)
    val predictionAndLabels = result.select("prediction", "label").cache()

    // Each count is computed exactly once instead of inline in the println.
    val truePositives   = predictionAndLabels.filter(x => x(0) == x(1) && x(0) == 1.0).count().toDouble
    val actualPositives = predictionAndLabels.filter(x => x(1) == 1.0).count().toDouble
    val trueNegatives   = predictionAndLabels.filter(x => x(0) == x(1) && x(0) == 0.0).count().toDouble
    val actualNegatives = predictionAndLabels.filter(x => x(1) == 0.0).count().toDouble

    val evaluator = new MulticlassClassificationEvaluator()
      .setMetricName("accuracy")

    println("Sensitivity = " + truePositives / actualPositives)
    println("Specificity = " + trueNegatives / actualNegatives)
    println("Test set accuracy = " + evaluator.evaluate(predictionAndLabels))

    // Release cluster resources.
    spark.stop()
  }
}
示例3: NeuralNetworkSpec
//设置package包名称以及导入依赖的类
package io.spinor.sparkdemo.mllib
import io.spinor.sparkdemo.data.MNISTData
import io.spinor.sparkdemo.util.DemoUtil
import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{FlatSpec, Matchers}
import org.slf4j.LoggerFactory
class NeuralNetworkSpec extends FlatSpec with DemoUtil with Matchers {
  /** Logger for reporting the evaluation result. */
  val logger = LoggerFactory.getLogger(classOf[NeuralNetworkSpec])

  "Training on MNIST data" should " run" in {
    val sparkConf = new SparkConf()
    sparkConf.setAppName("NeuralNetworkDemo")
    sparkConf.setMaster("local[2]")
    val sparkContext = new SparkContext(sparkConf)
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    val sqlContext = sparkSession.sqlContext
    // Needed for the .toDF() conversions below.
    import sqlContext.implicits._
    val mNISTData = new MNISTData()
    val trainingData = mNISTData.getTrainingData()
    // Each training entry is (pixels, label); wrap as LabeledPoint rows.
    val trainingPoints = sparkContext.parallelize(trainingData.map(entry => LabeledPoint(entry._2, Vectors.dense(entry._1)))).toDF()
    val classifier = new MultilayerPerceptronClassifier()
    classifier
      // FIX: the original used Array(784, 100), which makes the *output*
      // layer 100 units wide. The last entry of the layers array must equal
      // the number of classes — MNIST has 10 digit classes — so the network
      // is input(784) -> hidden(100) -> output(10).
      .setLayers(Array(784, 100, 10))
      .setBlockSize(125)
      .setSeed(1234L)
      .setMaxIter(10)
    val model = classifier.fit(trainingPoints)
    val testData = mNISTData.getTestData()
    val testPoints = sparkContext.parallelize(testData.map(entry => {
      LabeledPoint(entry._2, Vectors.dense(entry._1))})).toDF()
    val result = model.transform(testPoints)
    val predictionAndLabels = result.select("prediction", "label")
    val evaluator = new MulticlassClassificationEvaluator().setMetricName("accuracy")
    logger.info("accuracy:" + evaluator.evaluate(predictionAndLabels))
    // Release the local Spark context so later specs can create their own.
    sparkSession.stop()
  }
}
示例4: MlpcHelpers
//设置package包名称以及导入依赖的类
package com.zobot.ai.spark.helpers
import org.apache.spark.ml.classification.{MultilayerPerceptronClassificationModel, MultilayerPerceptronClassifier}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.DataFrame
/** Convenience helpers for building, training, and evaluating Spark MLPC models. */
object MlpcHelpers {

  /**
   * Describes a feed-forward topology: input size, hidden layer sizes,
   * and output (class) layer size.
   */
  case class NeuralNetworkLayers (
    featureLayerSize: Int,
    intermediateLayerSizes: List[Int],
    classLayerSize: Int
  )

  /** Flattens the layer description into the Array[Int] form Spark expects. */
  def layersToArray(layers: NeuralNetworkLayers): Array[Int] = {
    val allSizes = layers.featureLayerSize +: (layers.intermediateLayerSizes :+ layers.classLayerSize)
    allSizes.toArray
  }

  /**
   * Builds a configured (but untrained) classifier.
   *
   * @param seed optional RNG seed; when absent, Spark's default seed is kept.
   */
  def createTrainer(layers: NeuralNetworkLayers, blockSize: Int, maxIterations: Int, seed: Option[Long]): MultilayerPerceptronClassifier = {
    val trainer = new MultilayerPerceptronClassifier()
      .setLayers(layersToArray(layers))
      .setBlockSize(blockSize)
      .setMaxIter(maxIterations)
    // setSeed returns the classifier itself, so fold yields the trainer either way.
    seed.fold(trainer)(trainer.setSeed)
  }

  /** Fits the trainer on the given data and returns the resulting model. */
  def trainModel(trainer: MultilayerPerceptronClassifier, trainingData: DataFrame): MultilayerPerceptronClassificationModel =
    trainer.fit(trainingData)

  /** Runs the model over the test data, producing predictions. */
  def testModel(model: MultilayerPerceptronClassificationModel, testData: DataFrame): DataFrame =
    model.transform(testData)

  /** Computes multiclass accuracy from a transformed test-result frame. */
  def getModelAccuracy(testResults: DataFrame): Double = {
    val evaluator = new MulticlassClassificationEvaluator().setMetricName("accuracy")
    evaluator.evaluate(testResults.select("prediction", "label"))
  }
}