

Scala MultilayerPerceptronClassificationModel Class Code Examples

This article collects typical usage examples of the Scala class org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel. If you are wondering what MultilayerPerceptronClassificationModel does or how to use it, the curated examples below should help.


Four code examples of the MultilayerPerceptronClassificationModel class are presented below, sorted by popularity.

Example 1: buildMultiPerpectronNetwork

// Package declaration and imported dependencies
package com.sircamp.algorithms.neuralnetwork

import java.io.{BufferedWriter, FileOutputStream, OutputStreamWriter}

import com.sircamp.Application
import org.apache.spark.ml.classification.{MultilayerPerceptronClassificationModel, MultilayerPerceptronClassifier}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.linalg.VectorUDT
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.types._


// NOTE: the original snippet is truncated at this point; the enclosing
// object, the writeLibSvmFile signature, and the TEMP_FILE_PATH / blockSize /
// seed fields below are reconstructed placeholders, not the original code.
object NeuralNetworkBuilder {

  private val TEMP_FILE_PATH = "/tmp/training-data.libsvm" // placeholder path
  private val blockSize = 128  // placeholder; Spark's default MLP block size
  private val seed = 1234L     // placeholder RNG seed

  // Writes the training set to disk in LibSVM format ("label 1:v1 2:v2 ..."),
  // which MLUtils.loadLibSVMFile can read back.
  def writeLibSvmFile(trainingData: RDD[LabeledPoint]): Unit = {
    val file = new java.io.File(TEMP_FILE_PATH)
    if (file.exists) {
      file.delete()
    }

    val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file)))

    trainingData.collect().foreach { f =>
      // LibSVM feature indices are 1-based
      val features = f.features.toArray.zipWithIndex
        .map { case (v, i) => s"${i + 1}:$v" }
        .mkString(" ")
      writer.write(s"${f.label} $features\n")
    }
    writer.close()
  }

  // Trains a multilayer perceptron with the given layer sizes
  // ("Perpectron" is the original project's spelling).
  def buildMultiPerpectronNetwork(trainingData: Dataset[Row], layers: Array[Int], maxIter: Int): MultilayerPerceptronClassificationModel = {

    val trainer = new MultilayerPerceptronClassifier()
      .setLayers(layers)
      .setBlockSize(blockSize)
      .setSeed(seed)
      .setMaxIter(maxIter)

    trainer.fit(trainingData)

  }
} 
Developer: sirCamp, Project: mushrooms-ml-classfier-scala-spark, Lines of code: 47, Source file: NeuralNetworkBuilder.scala
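
A minimal usage sketch for Example 1, assuming a SparkSession and LibSVM-formatted data; the file path, layer sizes, and iteration count are illustrative placeholders, not part of the original project:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("mlp-example").getOrCreate()
// Placeholder path; any LibSVM file whose feature count matches layers(0) works.
val training = spark.read.format("libsvm").load("data/sample.libsvm")

// First layer size must equal the feature count, last the number of classes.
val layers = Array(4, 8, 8, 3)
val model = buildMultiPerpectronNetwork(training, layers, maxIter = 100)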

Example 2: LocalMultilayerPerceptronClassificationModel

// Package declaration and imported dependencies
package io.hydrosphere.spark_ml_serving.classification

import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
import org.apache.spark.ml.linalg.{Vector, Vectors}

class LocalMultilayerPerceptronClassificationModel(
  override val sparkTransformer: MultilayerPerceptronClassificationModel
) extends LocalTransformer[MultilayerPerceptronClassificationModel] {

  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        val method = classOf[MultilayerPerceptronClassificationModel].getMethod("predict", classOf[Vector])
        method.setAccessible(true)
        val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data map { feature =>
          method.invoke(sparkTransformer, feature.asInstanceOf[Vector]).asInstanceOf[Double]
        })
        localData.withColumn(newColumn)
      case None => localData
    }
  }
}

object LocalMultilayerPerceptronClassificationModel extends LocalModel[MultilayerPerceptronClassificationModel] {
  override def load(metadata: Metadata, data: Map[String, Any]): MultilayerPerceptronClassificationModel = {
    // Rebuild the model through its private (uid, layers, weights) constructor.
    val constructor = classOf[MultilayerPerceptronClassificationModel]
      .getDeclaredConstructor(classOf[String], classOf[Array[Int]], classOf[Vector])
    constructor.setAccessible(true)
    constructor
      .newInstance(
        metadata.uid,
        data("layers").asInstanceOf[List[Int]].to[Array],
        Vectors.dense(data("weights").asInstanceOf[Map[String, Any]]("values").asInstanceOf[List[Double]].toArray)
      )
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
  }

  override implicit def getTransformer(
    transformer: MultilayerPerceptronClassificationModel
  ): LocalTransformer[MultilayerPerceptronClassificationModel] =
    new LocalMultilayerPerceptronClassificationModel(transformer)
} 
Developer: Hydrospheredata, Project: spark-ml-serving, Lines of code: 35, Source file: LocalMultilayerPerceptronClassificationModel.scala
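
Both the transform and load methods above resort to reflection because predict(Vector) and the model's constructor are not publicly accessible in the Spark versions these serving libraries target. A standalone sketch of the predict-by-reflection trick, assuming a trained model; the layer sizes and feature values are illustrative:

import org.apache.spark.ml.classification.{MultilayerPerceptronClassificationModel, MultilayerPerceptronClassifier}
import org.apache.spark.ml.linalg.{Vector, Vectors}

// `training` is assumed to be a DataFrame with "features"/"label" columns.
val model: MultilayerPerceptronClassificationModel =
  new MultilayerPerceptronClassifier().setLayers(Array(4, 5, 3)).fit(training)

// Look up predict(Vector) reflectively, mirroring the wrapper above.
val predict = classOf[MultilayerPerceptronClassificationModel]
  .getMethod("predict", classOf[Vector])
predict.setAccessible(true)
val label = predict.invoke(model, Vectors.dense(5.1, 3.5, 1.4, 0.2)).asInstanceOf[Double]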

Example 3: LocalMultilayerPerceptronClassificationModel

// Package declaration and imported dependencies
package io.hydrosphere.mist.api.ml.classification

import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
import org.apache.spark.ml.linalg.{Vector, Vectors}

class LocalMultilayerPerceptronClassificationModel(
  override val sparkTransformer: MultilayerPerceptronClassificationModel
) extends LocalTransformer[MultilayerPerceptronClassificationModel] {

  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        val method = classOf[MultilayerPerceptronClassificationModel].getMethod("predict", classOf[Vector])
        method.setAccessible(true)
        val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data map { feature =>
          method.invoke(sparkTransformer, feature.asInstanceOf[Vector]).asInstanceOf[Double]
        })
        localData.withColumn(newColumn)
      case None => localData
    }
  }
}

object LocalMultilayerPerceptronClassificationModel extends LocalModel[MultilayerPerceptronClassificationModel] {
  override def load(metadata: Metadata, data: Map[String, Any]): MultilayerPerceptronClassificationModel = {
    // Rebuild the model through its private (uid, layers, weights) constructor.
    val constructor = classOf[MultilayerPerceptronClassificationModel]
      .getDeclaredConstructor(classOf[String], classOf[Array[Int]], classOf[Vector])
    constructor.setAccessible(true)
    constructor
      .newInstance(
        metadata.uid,
        data("layers").asInstanceOf[List[Int]].to[Array],
        Vectors.dense(data("weights").asInstanceOf[Map[String, Any]]("values").asInstanceOf[List[Double]].toArray)
      )
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
  }

  override implicit def getTransformer(
    transformer: MultilayerPerceptronClassificationModel
  ): LocalTransformer[MultilayerPerceptronClassificationModel] =
    new LocalMultilayerPerceptronClassificationModel(transformer)
} 
Developer: Hydrospheredata, Project: mist, Lines of code: 35, Source file: LocalMultilayerPerceptronClassificationModel.scala
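
Example 3 is the same wrapper carried over into the mist project; its load path is worth isolating. The model is rebuilt without any training by invoking the private (uid, layers, weights) constructor. A hedged sketch of just that step; the uid, layer sizes, and weight values are illustrative, and the weight vector's length must match the topology:

import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
import org.apache.spark.ml.linalg.{Vector, Vectors}

val ctor = classOf[MultilayerPerceptronClassificationModel]
  .getDeclaredConstructor(classOf[String], classOf[Array[Int]], classOf[Vector])
ctor.setAccessible(true)

// A [2, 2] topology needs 2*2 weights + 2 biases = 6 values (illustrative).
val restored = ctor.newInstance("mlpc-restored", Array(2, 2), Vectors.dense(Array.fill(6)(0.5)))
  .setFeaturesCol("features")
  .setPredictionCol("prediction")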

Example 4: MlpcHelpers

// Package declaration and imported dependencies
package com.zobot.ai.spark.helpers

import org.apache.spark.ml.classification.{MultilayerPerceptronClassificationModel, MultilayerPerceptronClassifier}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.sql.DataFrame

object MlpcHelpers {
  case class NeuralNetworkLayers (
    featureLayerSize: Int,
    intermediateLayerSizes: List[Int],
    classLayerSize: Int
  )

  def layersToArray(layers: NeuralNetworkLayers): Array[Int] = {
    (layers.featureLayerSize :: layers.intermediateLayerSizes ::: List(layers.classLayerSize)).toArray
  }

  def createTrainer(layers: NeuralNetworkLayers, blockSize: Int, maxIterations: Int, seed: Option[Long]): MultilayerPerceptronClassifier = {
    val mlpcClassifier = new MultilayerPerceptronClassifier()
      .setLayers(layersToArray(layers))
      .setBlockSize(blockSize)
      .setMaxIter(maxIterations)
    seed match {
      case Some(n) => mlpcClassifier.setSeed(n)
      case None => mlpcClassifier
    }
  }

  def trainModel(trainer: MultilayerPerceptronClassifier, trainingData: DataFrame): MultilayerPerceptronClassificationModel = {
    trainer.fit(trainingData)
  }

  def testModel(model: MultilayerPerceptronClassificationModel, testData: DataFrame): DataFrame = model.transform(testData)

  def getModelAccuracy(testResults: DataFrame): Double = {
    val predictionAndLabels = testResults.select("prediction", "label")
    val evaluator = new MulticlassClassificationEvaluator().setMetricName("accuracy")
    evaluator.evaluate(predictionAndLabels)
  }
} 
Developer: BecauseNoReason, Project: zobot, Lines of code: 41, Source file: MlpcHelpers.scala
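
To round off Example 4, a sketch wiring the helpers together end to end; the split ratio, layer sizes, block size, and iteration count are illustrative, and `data` stands for any DataFrame with "features" and "label" columns:

import com.zobot.ai.spark.helpers.MlpcHelpers._

val Array(training, test) = data.randomSplit(Array(0.8, 0.2), seed = 42L)

val layers = NeuralNetworkLayers(
  featureLayerSize = 4,               // must equal the feature-vector length
  intermediateLayerSizes = List(8, 8),
  classLayerSize = 3                  // must equal the number of classes
)

val trainer = createTrainer(layers, blockSize = 128, maxIterations = 100, seed = Some(42L))
val model = trainModel(trainer, training)
val results = testModel(model, test)
val accuracy = getModelAccuracy(results)
println(s"Test accuracy: $accuracy")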


Note: The org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community open-source projects; copyright in the source code remains with the original authors, and redistribution or use should follow each project's license. Please do not reproduce this article without permission.