本文整理汇总了Scala中scala.collection.immutable.ListMap类的典型用法代码示例。如果您正苦于以下问题:Scala ListMap类的具体用法?Scala ListMap怎么用?Scala ListMap使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了ListMap类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.spark_ml_serving.clustering
import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {

  /**
   * The underlying mllib model, pulled out of the private `parentModel`
   * field of the ml wrapper via runtime reflection.
   */
  lazy val parent: OldKMeansModel = {
    val classMirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val fieldSymbol = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    classMirror.reflect(sparkTransformer).reflectField(fieldSymbol).get.asInstanceOf[OldKMeansModel]
  }

  /**
   * Predicts a cluster index for every row of the features column.
   * If the features column is absent, the data is returned untouched.
   */
  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(featuresColumn) =>
        val predictions = featuresColumn.data
          .map(row => Vectors.dense(row.asInstanceOf[Array[Double]]))
          .map(vector => parent.predict(vector))
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
      case None =>
        localData
    }
  }
}
object LocalKMeansModel extends LocalModel[KMeansModel] {

  /**
   * Reconstructs a Spark ML [[KMeansModel]] from serialized metadata and cluster-center data.
   *
   * `data` maps the string index of each cluster center to a map holding the center's
   * coordinates under the "values" key. Centers are ordered by their numeric index so the
   * rebuilt model keeps the original cluster ordering.
   */
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Sort by the numeric value of the key, not lexicographically ("10" must follow "9").
    val sorted = ListMap(data.toSeq.sortBy { case (key, _) => key.toInt }: _*)
    val centers = sorted.map { case (_, value) =>
      val center = value.asInstanceOf[Map[String, Any]]
      Vectors.dense(center("values").asInstanceOf[List[Double]].toArray)
    }

    // Both constructors are private in Spark, so they are invoked reflectively.
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)

    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    val inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    // `set` returns the model itself (`this.type`), so the params can be chained
    // instead of repeatedly reassigning a var.
    inst
      .set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
      .set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
      .set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
      .set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
      .set(inst.seed, metadata.paramMap("seed").toString.toLong)
      .set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
  }

  /** Implicit bridge so a KMeansModel can be used where a LocalTransformer is expected. */
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] =
    new LocalKMeansModel(transformer)
}
示例2: apply
//设置package包名称以及导入依赖的类
package pl.ekodo.json.files
import pl.ekodo.json.model._
import scala.collection.immutable.ListMap
/**
 * Renders a [[CaseClass]] model as Scala `case class` source text.
 * Fields are sorted by name so the generated source is stable across runs.
 */
def apply(cc: CaseClass): String = {
  val renderedFields = ListMap(cc.fields.toSeq.sortBy(_._1): _*)
    .map { case (name, tpe) => s" $name: ${print(tpe)}" }
    .mkString(",\n")
  s"case class ${cc.name}(\n" + renderedFields + "\n)\n"
}
/** Maps an inferred [[ScalaType]] to the Scala source literal used in generated code. */
private def print(scalaType: ScalaType): String = scalaType match {
  case AnyType                => "Any"
  case BigDecimalType         => "BigDecimal"
  case BooleanType            => "Boolean"
  case DoubleType             => "Double"
  case IntType                => "Int"
  case LongType               => "Long"
  case StringType             => "String"
  case caseClass: CaseClass   => caseClass.name
  case optional: OptionalType => s"Option[${print(optional.scalaType)}]"
  case seq: SeqType           => s"List[${print(seq.scalaType)}]"
}
}
示例3: CustomBundle
//设置package包名称以及导入依赖的类
package barstools.tapeout.transforms
import chisel3._
import scala.collection.immutable.ListMap
// A Record whose fields are supplied at construction time as (name, element) pairs.
// ListMap preserves insertion order, so field order matches the argument order.
class CustomBundle[T <: Data](elts: (String, T)*) extends Record {
  // `elements` is the field map required by the Record API; each element is cloned
  // so the bundle holds its own copies rather than the caller's nodes.
  val elements = ListMap(elts map { case (field, elt) => field -> elt.chiselCloneType }: _*)
  // Look up a field by name.
  def apply(elt: String): T = elements(elt)
  // Convenience lookup: an integer index is converted to its string field name.
  def apply(elt: Int): T = elements(elt.toString)
  override def cloneType = (new CustomBundle(elements.toList: _*)).asInstanceOf[this.type]
}
// A Record keyed by Int indices, allowing "vectors" whose elements differ in type/width.
class CustomIndexedBundle[T <: Data](elts: (Int, T)*) extends Record {
  // Must be String, Data
  // Record requires String keys, so the Int indices are stringified here.
  val elements = ListMap(elts map { case (field, elt) => field.toString -> elt.chiselCloneType }: _*)
  // TODO: Make an equivalent to the below work publicly (or only on subclasses?)
  // Same mapping but keeping the original Int keys; used by cloneType below.
  def indexedElements = ListMap(elts map { case (field, elt) => field -> elt.chiselCloneType }: _*)
  // Look up an element by its integer index.
  def apply(elt: Int): T = elements(elt.toString)
  override def cloneType = (new CustomIndexedBundle(indexedElements.toList: _*)).asInstanceOf[this.type]
}
object CustomIndexedBundle {
  // Builds a bundle in which every index in `idxs` maps to the same generator element.
  def apply[T <: Data](gen: T, idxs: Seq[Int]) = new CustomIndexedBundle(idxs.map(idx => idx -> gen): _*)
  // Allows Vecs of elements of different types/widths
  def apply[T <: Data](gen: Seq[T]) = new CustomIndexedBundle(gen.zipWithIndex.map(_.swap): _*)
}
示例4: CoercedScalaResultMarshaller
//设置package包名称以及导入依赖的类
package sangria.marshalling
import scala.collection.immutable.ListMap
// Marshaller that passes already-coerced Scala values through unchanged and
// assembles result maps as insertion-ordered ListMaps.
class CoercedScalaResultMarshaller extends RawResultMarshaller {
  type Node = Any
  type MapBuilder = ArrayMapBuilder[Node]

  // Scalars arrive pre-coerced, so no conversion is needed.
  override def rawScalarNode(rawValue: Any) = rawValue

  def arrayNode(values: Vector[Node]) = values
  def optionalArrayNodeValue(value: Option[Node]) = value

  def addMapNodeElem(builder: MapBuilder, key: String, value: Node, optional: Boolean) = {
    // Optional fields are wrapped in Option, except that a `None` value stays None
    // rather than becoming Some(None). Required fields are stored as-is.
    val stored =
      if (!optional) value
      else if (value.isInstanceOf[None.type]) None
      else Some(value)
    builder.add(key, stored)
  }

  def emptyMapNode(keys: Seq[String]) = new ArrayMapBuilder[Node](keys)
  def mapNode(keyValues: Seq[(String, Node)]) = ListMap(keyValues: _*)
  def mapNode(builder: MapBuilder) = builder.toListMap

  def nullNode = None

  def renderCompact(node: Any) = "" + node
  def renderPretty(node: Any) = "" + node
}
object CoercedScalaResultMarshaller {
  // Shared instance; the class declares no mutable state, so reuse is safe.
  val default = new CoercedScalaResultMarshaller
}
示例5: MovieAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object MovieAgesChart {
  // Renders a bar chart of movie counts per age bucket, in ascending age order.
  def main(args: Array[String]) {
    val movieData = Util.getMovieData()
    val movieAges = Util.getMovieAges(movieData)
    // ListMap keeps the buckets sorted by age for the chart's x-axis.
    val sortedAges = ListMap(movieAges.toSeq.sortBy(_._1): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    for ((age, count) <- sortedAges) dataset.addValue(count, "Movies", age)
    //0 -> 65, 1 -> 286, 2 -> 355, 3 -> 219, 4 -> 214, 5 -> 126
    val chart = ChartFactories.BarChart(dataset)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:21,代码来源:MovieAgesChart.scala
示例6: CountByRatingChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
object CountByRatingChart {
  // Plots how many ratings were given for each rating value.
  def main(args: Array[String]) {
    val rawRatingData = Util.sc.textFile("../../data/ml-100k/u.data")
    val ratingFields = rawRatingData.map(line => line.split("\t"))
    // Column 2 of u.data is the rating value.
    val ratingCounts = ratingFields.map(fields => fields(2).toInt).countByValue()
    // Sort buckets by rating value so the x-axis ascends.
    val sortedCounts = ListMap(ratingCounts.toSeq.sortBy(_._1): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    for ((rating, count) <- sortedCounts) dataset.addValue(count, "Rating Values", rating)
    val chart = ChartFactories.BarChart(dataset)
    // Shrink and rotate category labels so every bucket stays readable.
    val labelFont = new Font("Dialog", Font.PLAIN, 5)
    chart.peer.getCategoryPlot.getDomainAxis().setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(labelFont)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:31,代码来源:CountByRatingChart.scala
示例7: UserRatingsChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserRatingsChart {
  /**
   * Plots a histogram of the number of ratings submitted per user.
   *
   * Bug fix: the bucket test previously read `ages(i)` (user ages parsed from u.user)
   * while the counted value was `input(i)` (ratings per user) — two unrelated arrays
   * that need not even have the same length, so rows were binned by the wrong value.
   * Both bounds now test the same value, and the now-unneeded user-age loading is gone.
   */
  def main(args: Array[String]) {
    val rating_data_raw = Util.sc.textFile("../../data/ml-100k/u.data")
    val rating_data = rating_data_raw.map(line => line.split("\t"))
    val user_ratings_grouped = rating_data.map(
      fields => (fields(0).toInt, fields(2).toInt)).groupByKey()
    val user_ratings_byuser = user_ratings_grouped.map(v => (v._1, v._2.size))
    // Ratings-per-user counts; this is the quantity being histogrammed.
    val input = user_ratings_byuser.map(v => v._2).collect()
    val max = 500
    val bins = 200
    val step = max / bins
    // Zero-initialise a bucket for every lower edge: 0, step, ..., max.
    var mx = Map(0 -> 0)
    for (i <- step until (max + step) by step) {
      mx += (i -> 0)
    }
    // Count each user's rating total into the bucket [j, j + step).
    for (i <- 0 until input.length) {
      for (j <- 0 until (max + step) by step) {
        if (input(i) >= j && input(i) < (j + step)) {
          mx = mx + (j -> (mx(j) + 1))
        }
      }
    }
    val mx_sorted = ListMap(mx.toSeq.sortBy(_._1): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    mx_sorted.foreach { case (k, v) => ds.addValue(v, "Ratings", k) }
    val chart = ChartFactories.BarChart(ds)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:49,代码来源:UserRatingsChart.scala
示例8: UserAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserAgesChart {
  // Draws a histogram of user ages bucketed into fixed-width bins.
  def main(args: Array[String]) {
    val userData = Util.getUserData()
    val userFields = userData.map(line => line.split("\\|"))
    // Column 1 of u.user is the age.
    val ages = userFields.map(fields => fields(1).toInt).collect()
    println(ages.getClass.getName)
    val min = 0
    val max = 80
    val bins = 16
    val step = (80 / bins).toInt
    // Zero-initialise one bucket per lower edge: 0, step, ..., max.
    var histogram = Map(0 -> 0)
    for (edge <- step until (max + step) by step) {
      histogram += (edge -> 0)
    }
    // Increment the bucket [edge, edge + step) containing each age.
    for (age <- ages; edge <- 0 until (max + step) by step) {
      if (age >= edge && age < (edge + step)) {
        histogram += (edge -> (histogram(edge) + 1))
      }
    }
    val sortedHistogram = ListMap(histogram.toSeq.sortBy(_._1): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    sortedHistogram.foreach { case (edge, count) => dataset.addValue(count, "UserAges", edge) }
    val chart = ChartFactories.BarChart(dataset)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:42,代码来源:UserAgesChart.scala
示例9: UserOccupationChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
object UserOccupationChart {
  // Plots how many users hold each occupation, least common first.
  def main(args: Array[String]) {
    val userData = Util.getUserData()
    val userFields = userData.map(line => line.split("\\|"))
    // Column 3 of u.user is the occupation.
    val countByOccupation = userFields
      .map(fields => (fields(3), 1))
      .reduceByKey(_ + _)
      .collect()
    println(countByOccupation)
    // Sort by count ascending so the chart ramps up left to right.
    val sortedCounts = ListMap(countByOccupation.toSeq.sortBy(_._2): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    // NOTE(review): the series label "UserAges" looks copy-pasted from another chart;
    // kept byte-identical to preserve output.
    sortedCounts.foreach { case (occupation, count) => dataset.addValue(count, "UserAges", occupation) }
    val chart = ChartFactories.BarChart(dataset)
    // Rotate and shrink labels so every occupation name fits on the axis.
    val labelFont = new Font("Dialog", Font.PLAIN, 5)
    chart.peer.getCategoryPlot.getDomainAxis().setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(labelFont)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:32,代码来源:UserOccupationChart.scala
示例10: UserAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserAgesChart {
  // Histogram of user ages, read via the DataFrame API instead of raw RDDs.
  def main(args: Array[String]) {
    val userDataFrame = Util.getUserFieldDataFrame()
    val collectedAges = userDataFrame.select("age").collect()
    val min = 0
    val max = 80
    val bins = 16
    val step = (80 / bins).toInt
    // One zero bucket per lower edge: 0, step, ..., max.
    var histogram = Map(0 -> 0)
    for (edge <- step until (max + step) by step) {
      histogram += (edge -> 0)
    }
    // Increment the bucket [edge, edge + step) containing each age.
    for (row <- collectedAges) {
      val age = Integer.parseInt(row(0).toString)
      for (edge <- 0 until (max + step) by step) {
        if (age >= edge && age < (edge + step)) {
          histogram += (edge -> (histogram(edge) + 1))
        }
      }
    }
    val sortedHistogram = ListMap(histogram.toSeq.sortBy(_._1): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    sortedHistogram.foreach { case (edge, count) => dataset.addValue(count, "UserAges", edge) }
    val chart = ChartFactories.BarChart(dataset)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:41,代码来源:UserAgesChart.scala
示例11: PlotLogData
//设置package包名称以及导入依赖的类
package org.sparksamples
//import org.sparksamples.Util
//import _root_.scalax.chart.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object PlotLogData {
  /**
   * Plots a histogram of log-transformed target values.
   *
   * Bug fix: the original initialised bucket keys from a Float range but incremented
   * them from a Double range (`0.0 until (max + step) by step`), so the stringified
   * keys could disagree (e.g. "1.4" vs "1.3999999761581421") and `mx(j.toString)`
   * could throw NoSuchElementException. Both passes now share one list of bucket
   * edges, so keys always match. Also removes the unused `var i`, the unused
   * `min`/`min_1` locals, and the duplicate RDD collect.
   */
  def main(args: Array[String]) {
    val records = Util.getRecords()._1
    // Last field of each record is the target; histogram its natural log.
    val logTargets = records.map(r => Math.log(r(r.length - 1).toDouble))
    print(logTargets.first())
    // Collect once and truncate to Int, matching the original binning.
    val truncated = logTargets.collect().map(_.toInt)
    val max = truncated.max.toFloat
    val bins = 10
    val step = max / bins
    // A single shared edge list guarantees init and increment use identical keys.
    val edges = (0.0f until (max + step) by step).toList
    var histogram = edges.map(edge => edge.toString -> 0).toMap
    for (value <- truncated; edge <- edges) {
      if (value >= edge && value < (edge + step)) {
        histogram += (edge.toString -> (histogram(edge.toString) + 1))
      }
    }
    // Keys are stringified floats, so sort numerically rather than lexicographically.
    val sortedHistogram = ListMap(histogram.toSeq.sortBy(_._1.toFloat): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    sortedHistogram.foreach { case (k, v) => ds.addValue(v, "", k) }
    val chart = ChartFactories.BarChart(ds)
    val font = new Font("Dialog", Font.PLAIN, 4)
    chart.peer.getCategoryPlot.getDomainAxis().setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(font)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:62,代码来源:PlotLogData.scala
示例12: UseCaseBuilderSpec
//设置package包名称以及导入依赖的类
package org.cddcore.enginecomponents
import org.cddcore.utilities.{CddSpec, HierarchyBuilder, NullLifeCycle}
import scala.collection.immutable.ListMap
// Spec for HierarchyBuilder over UseCase components: verifies adding scenarios as
// children, nesting new parent use cases (which increases depth), and popping back
// up to an outer use case.
class UseCaseBuilderSpec extends CddSpec {
  // Helper: builds a UseCase with the given name and children, using fixed defaults
  // for the remaining constructor arguments.
  def uc(s: String, ec: EngineComponent[Int, String]*) = UseCase[Int, String](s, ec.toList, None, DefinedInSourceCodeAt.definedInSourceCodeAt(1), ListMap(), List())
  val useCase1 = uc("useCase1")
  val useCase2 = uc("useCase2")
  val useCase3 = uc("useCase3")
  val useCase4 = uc("useCase4")
  import Scenario._
  // Scenario construction needs a LifeCycle in scope; these tests ignore its effects.
  implicit def nullLifeCycle[C] = new NullLifeCycle[C]
  // `produces` comes from the Scenario import above.
  val s1 = 1 produces "result"
  val s2 = 2 produces "result"
  val s3 = 3 produces "result"
  type UC = UseCase[Int, String]
  type Child = EngineComponent[Int, String]
  "A UseCaseBuilder with no operations" should "have the passed in use case and depth 0" in {
    val holder1 = new HierarchyBuilder[UC, Child](useCase1)
    holder1.holder shouldBe useCase1
    holder1.depth shouldBe 0
  }
  // Note: children accumulate in reverse order of addition (s3, s2, s1).
  "A UseCaseBuilder addChild method with depth 0" should "add children to the use case and not mess with depth" in {
    val holder1 = new HierarchyBuilder[UC, Child](useCase1)
    val holder2 = holder1.addChild(s1).addChild(s2).addChild(s3)
    holder2.holder shouldBe useCase1.copy(components = List(s3, s2, s1))
    holder2.depth shouldBe 0
  }
  "A UseCaseBuilder addNewParent method " should "nest children with new usecases increasing depth" in {
    val holder1 = new HierarchyBuilder[UC, Child](useCase1)
    val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3)
    holder2.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase3")))
    holder2.depth shouldBe 2
  }
  it should "allow scenarios to be added to current use case" in {
    val holder1 = new HierarchyBuilder[UC, Child](useCase1)
    val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3).addChild(s1).addChild(s2).addChild(s3)
    holder2.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase3", s3, s2, s1)))
    holder2.depth shouldBe 2
  }
  // popParent closes the innermost use case; later additions go to its parent.
  it should "allow scenarios to be added to current use case, then a pop and another use case added" in {
    val holder1 = new HierarchyBuilder[UC, Child](useCase1)
    val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3).addChild(s1).popParent
    holder2.depth shouldBe 1
    val holder3 = holder2.addNewParent(useCase4).addChild(s2).addChild(s3)
    holder3.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase4", s3, s2), uc("useCase3", s1)))
    holder3.depth shouldBe 2
  }
}
示例13: StoreWordsCountsOrderedActor
//设置package包名称以及导入依赖的类
package actors
import akka.actor.Actor
import akka.event.Logging
import dataTire.file.WordsCountFile
import enteties.WordsCount
import utils.FileUtiles
import scala.collection.immutable.ListMap
// Actor that persists a word-count map to disk, ordered by descending count.
class StoreWordsCountsOrderedActor extends Actor {
  val log = Logging(context.system, this)
  def receive = {
    case wordsCount: WordsCount =>
      log.info(s"Executing actor StoreWordsCountsOrderedActor")
      // Negate the count so sortBy yields descending order; ListMap preserves it.
      val ordered: ListMap[String, Integer] = ListMap(wordsCount.data.toList.sortBy(pair => -pair._2): _*)
      FileUtiles.writeToFile(s"${wordsCount.fileFullPath}.wordsCounter", WordsCountFile.storeWordsCount(ordered))
    case any =>
      log.error(s"Handle not found for the actor: StoreWordsCountsOrderedActor, data: $any")
  }
}
示例14: GlobalConst
//设置package包名称以及导入依赖的类
package global
import scala.collection.immutable.ListMap
// Immutable bag of application-wide editor settings.
// AVAILABLE_SYNTAX maps a display name to a syntax-highlighter identifier.
class GlobalConst(val NAME: String,
                  val MIN_FONT_SIZE: Int,
                  val MAX_FONT_SIZE: Int,
                  val DEFAULT_THEME: String,
                  val DEFAULT_FONT_STYLE: String,
                  val DEFAULT_FONT_SIZE: Int,
                  val DEFAULT_TAB_SIZE: Int,
                  val DEFAULT_MAX_FILE_SIZE: Int,
                  val AVAILABLE_SYNTAX: Map[String, String])
object GlobalConst {
  // Display name -> highlighter id; ListMap keeps entries in this declared order.
  val syntaxLanguages = ListMap(
    "Bash" -> "bash",
    "C" -> "c",
    "C++" -> "cpp",
    "Clojure" -> "clojure",
    "DOSBatch" -> "dosbatch",
    "Groovy" -> "groovy",
    "Java" -> "java",
    "Javascript" -> "javascript",
    "JFlex" -> "jflex",
    "JSON" -> "json",
    "Lua" -> "lua",
    "Properties" -> "properties",
    "Python" -> "python",
    "Ruby" -> "ruby",
    "Scala" -> "scala",
    "SQL" -> "sql",
    "TAL" -> "tal",
    "XHTML" -> "xhtml",
    "XML" -> "xml",
    "XPath" -> "xpath")

  // Factory with sensible defaults for every setting.
  def apply(
      NAME: String = "LithePad v0.0.1.1 ",
      MIN_FONT_SIZE: Int = 8,
      MAX_FONT_SIZE: Int = 185,
      DEFAULT_THEME: String = "Monokai",
      DEFAULT_FONT_STYLE: String = "Monospaced",
      DEFAULT_FONT_SIZE: Int = 16,
      DEFAULT_TAB_SIZE: Int = 2,
      DEFAULT_MAX_FILE_SIZE: Int = 10000,
      AVAILABLE_SYNTAX: Map[String, String] = syntaxLanguages): GlobalConst =
    new GlobalConst(
      NAME = NAME,
      MIN_FONT_SIZE = MIN_FONT_SIZE,
      MAX_FONT_SIZE = MAX_FONT_SIZE,
      DEFAULT_THEME = DEFAULT_THEME,
      DEFAULT_FONT_STYLE = DEFAULT_FONT_STYLE,
      DEFAULT_FONT_SIZE = DEFAULT_FONT_SIZE,
      DEFAULT_TAB_SIZE = DEFAULT_TAB_SIZE,
      DEFAULT_MAX_FILE_SIZE = DEFAULT_MAX_FILE_SIZE,
      AVAILABLE_SYNTAX = AVAILABLE_SYNTAX)
}
示例15: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.mist.api.ml.clustering
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {

  /**
   * The underlying mllib model, pulled out of the private `parentModel`
   * field of the ml wrapper via runtime reflection.
   */
  lazy val parent: OldKMeansModel = {
    val classMirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val fieldSymbol = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    classMirror.reflect(sparkTransformer).reflectField(fieldSymbol).get.asInstanceOf[OldKMeansModel]
  }

  /**
   * Predicts a cluster index for every row of the features column.
   * If the features column is absent, the data is returned untouched.
   */
  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(featuresColumn) =>
        val predictions = featuresColumn.data
          .map(row => Vectors.dense(row.asInstanceOf[Array[Double]]))
          .map(vector => parent.predict(vector))
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
      case None =>
        localData
    }
  }
}
object LocalKMeansModel extends LocalModel[KMeansModel] {

  /**
   * Reconstructs a Spark ML [[KMeansModel]] from serialized metadata and cluster-center data.
   *
   * `data` maps the string index of each cluster center to a map holding the center's
   * coordinates under the "values" key. Centers are ordered by their numeric index so the
   * rebuilt model keeps the original cluster ordering.
   */
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Sort by the numeric value of the key, not lexicographically ("10" must follow "9").
    val sorted = ListMap(data.toSeq.sortBy { case (key, _) => key.toInt }: _*)
    val centers = sorted.map { case (_, value) =>
      val center = value.asInstanceOf[Map[String, Any]]
      Vectors.dense(center("values").asInstanceOf[List[Double]].toArray)
    }

    // Both constructors are private in Spark, so they are invoked reflectively.
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)

    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    val inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    // `set` returns the model itself (`this.type`), so the params can be chained
    // instead of repeatedly reassigning a var.
    inst
      .set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
      .set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
      .set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
      .set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
      .set(inst.seed, metadata.paramMap("seed").toString.toLong)
      .set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
  }

  /** Implicit bridge so a KMeansModel can be used where a LocalTransformer is expected. */
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] =
    new LocalKMeansModel(transformer)
}