本文整理汇总了Scala中breeze.linalg.DenseVector类的典型用法代码示例。如果您正苦于以下问题:Scala DenseVector类的具体用法?Scala DenseVector怎么用?Scala DenseVector使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DenseVector类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: MllibLBFGS
//设置package包名称以及导入依赖的类
package optimizers
import breeze.linalg.{DenseVector, Vector}
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.optimization.{L1Updater, SimpleUpdater, SquaredL2Updater, Updater}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import utils.Functions._
class MllibLBFGS(val data: RDD[LabeledPoint],
                 loss: LossFunction,
                 regularizer: Regularizer,
                 params: LBFGSParameters
                ) extends Optimizer(loss, regularizer) {

  // Underlying MLlib L-BFGS-based logistic regression; configured below from `params`.
  val opt = new LogisticRegressionWithLBFGS

  // Map the project's regularizer onto the matching MLlib Updater.
  val reg: Updater = (regularizer: Regularizer) match {
    case _: L1Regularizer => new L1Updater
    case _: L2Regularizer => new SquaredL2Updater
    case _: Unregularized => new SimpleUpdater
  }

  opt.optimizer
    .setNumIterations(params.iterations)
    .setConvergenceTol(params.convergenceTol)
    .setNumCorrections(params.numCorrections)
    .setRegParam(regularizer.lambda)
    .setUpdater(reg)

  /**
   * Trains the model on `data` and returns the learned weights as a breeze vector.
   * The last expression is the result — the explicit `return` was removed
   * (idiomatic Scala; `return` inside closures can even throw NonLocalReturnControl).
   */
  override def optimize(): Vector[Double] = {
    val model = opt.run(data)
    DenseVector(model.weights.toArray)
  }
}
示例2: rddvector
//设置package包名称以及导入依赖的类
package breeze
import breeze.linalg.{DenseMatrix, DenseVector}
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.LoggerFactory
import spark.RecommendationExample.getClass
/**
* Created by I311352 on 4/5/2017.
*/
// Demonstrates wrapping RDD rows as breeze DenseVectors and scaling them,
// both row-by-row and partition-at-a-time via glom()/DenseMatrix.
// NOTE(review): `App` trait entry points have initialization-order pitfalls;
// kept as-is because this is a demo script.
object rddvector extends App {
val LOG = LoggerFactory.getLogger(getClass)
// Local 2-core Spark context; input is a CSV of doubles, one row per line.
val conf = new SparkConf().setAppName("vector").setMaster("local[2]")
val sc = new SparkContext(conf)
val data = sc.textFile("data/testdata.txt")
println(data.take(10).toList)
// Parse each CSV line into a breeze DenseVector[Double].
val vectorRDD = data.map(value => {
val columns = value.split(",").map(value => value.toDouble)
new DenseVector(columns)
})
println(vectorRDD.take(100).toList)
// multiply each row by a constant vector
// Broadcast the scalar so every executor reuses one copy.
val constant = 5.0
val broadcastConstant = sc.broadcast(constant)
// Element-wise scale each row vector (breeze `:*` operator).
val scaledRDD = vectorRDD.map(row => {
row :* broadcastConstant.value
})
println(scaledRDD.take(10).toList)
// Same scaling, but one partition at a time: glom() yields each partition as an
// array of vectors, which is flattened into a single column-major DenseMatrix,
// scaled in place (`:*=`), then linearized back to one long vector per partition.
// NOTE(review): assumes every partition is non-empty (`value(0)` would throw on
// an empty partition) and that all rows have equal length — confirm for real data.
val scaledRDDByPartition = vectorRDD.glom().map((value:Array[DenseVector[Double]]) => {
val arrayValues = value.map(denseVector => denseVector.data).flatten
val denseMatrix = new DenseMatrix[Double](value.length,value(0).length,arrayValues)
denseMatrix :*= broadcastConstant.value
denseMatrix.toDenseVector
})
println(scaledRDDByPartition.take(10).toList)
}
示例3: ColorApp
//设置package包名称以及导入依赖的类
package com.esri
import java.awt.Color
import breeze.linalg.DenseVector
/**
 * Trains a self-organizing map (SOM) on 200 random colors (HSB space)
 * and writes the resulting map as a PNG.
 */
object ColorApp extends App {
  val rnd = new java.security.SecureRandom()

  // 200 random colors converted RGB -> HSB. Bound is 256 because
  // nextInt(bound) is exclusive: nextInt(255) would silently exclude the
  // maximum channel value 255 (off-by-one in the original).
  val colorSeq = for (_ <- 0 until 200)
    yield {
      val r = rnd.nextInt(256)
      val g = rnd.nextInt(256)
      val b = rnd.nextInt(256)
      val hsb = Color.RGBtoHSB(r, g, b, null).map(_.toDouble)
      DenseVector[Double](hsb)
    }
  val colorLen = colorSeq.length

  // Build an 8x8 grid of SOM nodes, each seeded with a random training color.
  val somSize = 8
  val nodes = for {
    q <- 0 until somSize
    r <- 0 until somSize
  } yield Node(q, r, colorSeq(rnd.nextInt(colorLen)))

  val epochMax = colorLen * 100
  implicit val pb = TerminalProgressBar(epochMax)
  val som = SOM(nodes)
  // Initial neighborhood radius covers half the grid; learning rate 0.3.
  som.train(colorSeq, epochMax, somSize / 2, initialAlpha = 0.3)
  som.saveAsPNG("/tmp/som.png", 20)
}
示例4: SOMSpec
//设置package包名称以及导入依赖的类
package com.esri
import breeze.linalg.DenseVector
import org.scalatest.{FlatSpec, Matchers}
// Smoke test: builds a 10-node 1-D SOM and runs one training step.
class SOMSpec extends FlatSpec with Matchers {
it should "train the SOM" in {
// Ten nodes along the q axis, each initialized with weight vector (q, 0).
val nodes = for (q <- 0 until 10) yield {
Node(q, 0, new DenseVector[Double](Array(q, 0.0)))
}
nodes.length shouldBe 10
val som = SOM(nodes)
// Single training step with input (5, 0); only checks it does not throw.
// NOTE(review): the trailing args are presumably radius/alpha — confirm
// against SOM.train's signature.
som.train(new DenseVector[Double](Array(5.0, 0.0)), 1.0, 0.1)
}
}
示例5: MllibSGD
//设置package包名称以及导入依赖的类
package optimizers
import breeze.linalg.{DenseVector, Vector}
import org.apache.spark.mllib.classification.{LogisticRegressionWithSGD, SVMWithSGD}
import org.apache.spark.mllib.optimization.{L1Updater, SimpleUpdater, SquaredL2Updater, Updater}
import org.apache.spark.mllib.regression.{LabeledPoint, LinearRegressionWithSGD}
import org.apache.spark.rdd.RDD
import utils.Functions._
import scala.tools.cmd.gen.AnyVals.D
class MllibSGD(val data: RDD[LabeledPoint],
               loss: LossFunction,
               regularizer: Regularizer,
               params: SGDParameters,
               ctype: String
              ) extends Optimizer(loss, regularizer) {

  // Select the MLlib algorithm by classifier type.
  // NOTE(review): any other `ctype` value throws MatchError at construction.
  val opt = ctype match {
    case "SVM" => new SVMWithSGD()
    case "LR" => new LogisticRegressionWithSGD()
    case "Regression" => new LinearRegressionWithSGD()
  }

  // Map the project's regularizer onto the matching MLlib Updater.
  val reg: Updater = (regularizer: Regularizer) match {
    case _: L1Regularizer => new L1Updater
    case _: L2Regularizer => new SquaredL2Updater
    case _: Unregularized => new SimpleUpdater
  }

  // All three algorithms expose the same GradientDescent optimizer; select it
  // once instead of duplicating the five-setter configuration in each branch
  // (the original repeated the identical block three times).
  private val sgdOptimizer = ctype match {
    case "SVM" => opt.asInstanceOf[SVMWithSGD].optimizer
    case "LR" => opt.asInstanceOf[LogisticRegressionWithSGD].optimizer
    case "Regression" => opt.asInstanceOf[LinearRegressionWithSGD].optimizer
  }

  sgdOptimizer
    .setNumIterations(params.iterations)
    .setMiniBatchFraction(params.miniBatchFraction)
    .setStepSize(params.stepSize)
    .setRegParam(regularizer.lambda)
    .setUpdater(reg)

  /** Trains the selected model on `data` and returns its weights as a breeze vector. */
  override def optimize(): Vector[Double] = {
    val model = opt.run(data)
    DenseVector(model.weights.toArray)
  }
}
示例6: CocoaParameters
//设置package包名称以及导入依赖的类
package optimizers
import java.io.Serializable
import breeze.linalg.DenseVector
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
/**
 * Hyper-parameters for the CoCoA distributed optimizer.
 *
 * @param n             number of training examples
 * @param numRounds     number of communication rounds
 * @param localIterFrac fraction of n (per partition) used as local iterations
 * @param lambda        regularization strength
 * @param beta          aggregation parameter
 * @param gamma         step-size parameter
 * @param numParts      number of data partitions
 * @param wInit         initial weight vector
 */
class CocoaParameters(var n: Int,
                      var numRounds: Int,
                      var localIterFrac: Double,
                      var lambda: Double,
                      var beta: Double,
                      var gamma: Double,
                      var numParts: Int,
                      var wInit: DenseVector[Double]) extends Serializable {

  // Convenience constructor with defaults derived from the training set.
  // NOTE(review): `test` is accepted but never read — kept only so existing
  // call sites keep compiling.
  def this(train: RDD[LabeledPoint], test: RDD[LabeledPoint]) {
    this(train.count().toInt,
      200,   // numRounds
      1.0,   // localIterFrac
      0.01,  // lambda
      1.0,   // beta
      1.0,   // gamma
      train.partitions.length,
      DenseVector.zeros[Double](train.first().features.size))
  }

  /** Local iterations per round: localIterFrac * (examples per partition). */
  def getLocalIters() = (localIterFrac * n / numParts).toInt

  /** Bridges these parameters to distopt's Params, fixing hinge loss. */
  def getDistOptPar(): distopt.utils.Params = {
    val loss = distopt.utils.OptUtils.hingeLoss _
    // Last expression is the result; explicit `return` removed (idiomatic Scala).
    distopt.utils.Params(loss, n, wInit, numRounds, getLocalIters, lambda, beta, gamma)
  }

  override def toString = s"CocoaParameters(n: $n, numRounds: $numRounds, localIters: $getLocalIters, " +
    s"lambda: $lambda, beta: $beta, gamma: $gamma, wInit: $wInit)"
}
示例7: weights
//设置package包名称以及导入依赖的类
package regression
import breeze.linalg.DenseVector
import breeze.numerics.{log, sigmoid}
/**
 * Base contract for regression models: subtypes supply the hypothesis
 * (`predict`), the per-example cost, and the training routine (`learn`).
 */
trait Regressor {

  // Train exactly once, lazily, on first access of weights/iterations.
  private lazy val weightsWithIterations = learn

  /** Weights produced by `learn`. */
  def weights: DenseVector[Double] = weightsWithIterations._1

  /** Per-iteration cost history produced by `learn`. */
  def iterations: Seq[Double] = weightsWithIterations._2

  protected def predict(x: DenseVector[Double], weights: DenseVector[Double]): Double
  protected def costOfPrediction(h: Double, y: Double): Double
  protected def learn: (DenseVector[Double], Seq[Double])

  /** Prediction for `x` using the learned weights. */
  def predict(x: DenseVector[Double]): Double = predict(x, weights)

  /** Cost of the model's prediction on a single labelled example. */
  def cost(x: DenseVector[Double], y: Double): Double = costOfPrediction(predict(x), y)

  /** Mean cost over a dataset, accumulated in a single fold. */
  def meanCost(data: Iterable[(DenseVector[Double], Double)]): Double = {
    val (totalCost, count) = data.foldLeft((0.0, 0L)) {
      case ((accCost, accN), (x, y)) =>
        (accCost + costOfPrediction(predict(x), y), accN + 1L)
    }
    totalCost / count
  }
}
/** Linear-regression hypothesis and cost, mixed into a Regressor. */
trait LinearLike {

  /** Linear hypothesis: inner product of features and weights. */
  protected def predict(x: DenseVector[Double], weights: DenseVector[Double]): Double =
    x dot weights

  /** Half squared error; the 1/2 factor makes the gradient simply (h - y). */
  protected def costOfPrediction(h: Double, y: Double): Double = {
    val diff = h - y
    0.5 * diff * diff
  }
}
/** Logistic-regression hypothesis and cost, mixed into a Regressor. */
trait LogisticLike {

  /** Logistic hypothesis: sigmoid of the linear score. */
  protected def predict(x: DenseVector[Double], weights: DenseVector[Double]): Double =
    sigmoid(x dot weights)

  /** Cross-entropy (negative log-likelihood) of prediction h for label y in {0,1}. */
  protected def costOfPrediction(h: Double, y: Double): Double =
    -(y * log(h)) - (1.0 - y) * log(1.0 - h)
}
示例8: Evaluation
//设置package包名称以及导入依赖的类
package util
import breeze.linalg.{DenseMatrix, DenseVector, sum}
import regression.Regressor
/** Evaluation helpers for binary classifiers built on Regressor. */
object Evaluation {

  /**
   * Builds a 2x2 confusion matrix: rows indexed by truth, columns by
   * prediction (threshold 0.5 on the regressor's output).
   */
  def confusion(lr: Regressor, data: Iterable[(DenseVector[Double], Double)]): DenseMatrix[Double] = {
    val matrix = DenseMatrix.zeros[Double](2, 2)
    data.foreach { case (x, y) =>
      val predicted = if (lr.predict(x) > 0.5) 1 else 0
      matrix(y.toInt, predicted) += 1.0
    }
    matrix
  }

  /** Prints accuracy and false positive/negative rates for a confusion matrix. */
  def printConfusionMtx(confusion: DenseMatrix[Double]): Unit = {
    // Row sums give class totals; off-diagonal entries are the errors.
    val negatives = confusion(0, 0) + confusion(0, 1)
    val positives = confusion(1, 0) + confusion(1, 1)
    val total = sum(confusion)
    val falseNegatives = confusion(1, 0)
    val falsePositives = confusion(0, 1)
    val accuracy = (confusion(0, 0) + confusion(1, 1)) / total

    println("============= Stats =============\n")
    println(f"Positive examples: $positives%1.0f")
    println(f"Negative examples: $negatives%1.0f")
    println(f"Total: $total%1.0f")
    println(f"Pos/Neg ratio: ${positives/negatives}%1.2f")

    println("\n============= Results =============\n")
    println("Confusion Matrix:")
    println(confusion)
    println(f"Accuracy: ${accuracy * 100}%2.2f%%")
    println(f"False positives: ${falsePositives * 100 / negatives}%2.2f%%")
    println(f"False negatives: ${falseNegatives * 100 / positives}%2.2f%%")
  }
}
示例9: Transformations
//设置package包名称以及导入依赖的类
package util
import breeze.linalg.DenseVector
/** Feature-vector transformations (each is curried for use as a mapper). */
object Transformations {

  /** Min-max scaling: (x - midpoint) / halfRange, elementwise. */
  def minMax(med: DenseVector[Double], halfRange: DenseVector[Double])(features: DenseVector[Double]): DenseVector[Double] = {
    (features :- med) / halfRange
  }

  /** Z-score standardisation: (x - mean) / stddev, elementwise. */
  def zScore(means: DenseVector[Double], stddevs: DenseVector[Double])(features: DenseVector[Double]): DenseVector[Double] = {
    (features :- means) :/ stddevs
  }

  /** Keeps only the entries whose index is in `indices`, in ascending index order. */
  def filter(indices: Set[Int])(features: DenseVector[Double]): DenseVector[Double] = {
    val result = features.keysIterator.collect {
      case i if indices.contains(i) => features(i)
    }.toArray
    DenseVector(result)
  }

  /**
   * Appends powers 2..maxPower of the masked features to the original vector.
   * For maxPower < 2 nothing is appended and `features` is returned unchanged
   * (the original code threw NegativeArraySizeException for maxPower < 1).
   */
  def addPolynomialFeatures(mask: DenseVector[Boolean], maxPower: Int)(features: DenseVector[Double]): DenseVector[Double] = {
    val f = features(mask).toArray
    // Guard: (maxPower - 1) can be negative; clamp so the array size is valid.
    val len = math.max(0, (maxPower - 1) * f.length)
    val polyFeatures = new Array[Double](len)
    var j = 0
    for (p <- 2 to maxPower) {
      f.indices.foreach { i =>
        polyFeatures(j) = math.pow(f(i), p)
        j += 1
      }
    }
    DenseVector.vertcat(features, DenseVector(polyFeatures))
  }
}
示例10: TabularSpec
//设置package包名称以及导入依赖的类
import org.scalatest._
import breeze.linalg.{DenseVector, argmax}
import scarla.domain.{Fixtures => F, State}
import scarla.mapping.Tabular
// Verifies the Tabular state mapping: correct feature-space size and
// collision-free one-hot encodings in 1, 2, and 3 dimensions.
class TabularSpec extends FlatSpec with Matchers {
"A tabular mapping" should "have the correct dimensionality" in {
// nd state dimensions with 20 bins each => 20^3 tabular features.
val m = new Tabular(F.spec(nd=3), 20)
m.dimensionality should be (scala.math.pow(20, 3))
}
it should "have no collisions in 1d" in {
val m = new Tabular(F.spec(nd=1), 10)
// Each state i must map to exactly the i-th one-hot basis vector.
for (i <- 0 until 10) {
val p = DenseVector.zeros[Double](10)
p(i) = 1.0
m._phi(Vector(i)) should be (p)
}
}
it should "have no collisions in 2d" in {
val m = new Tabular(F.spec(nd=2), 10)
// Mark the active index of each encoding; any repeat means a collision.
val ps = DenseVector.zeros[Double](100)
for (i <- 0 until 10; j <- 0 until 10) {
val l = argmax(m._phi(Vector(i, j)))
ps(l) should be (0.0)
ps(l) = 1.0
}
}
it should "have no collisions in 3d" in {
val m = new Tabular(F.spec(nd=3), 10)
// NOTE(review): vector size is 10000 but 10^3 states only need 1000 slots;
// presumably intentional slack — confirm against Tabular's index layout.
val ps = DenseVector.zeros[Double](10000)
for (i <- 0 until 10; j <- 0 until 10; k <- 0 until 10) {
val l = argmax(m._phi(Vector(i, j, k)))
ps(l) should be (0.0)
ps(l) = 1.0
}
}
}
示例11: sampleFeature
//设置package包名称以及导入依赖的类
package glintlda.naive
import breeze.linalg.{DenseVector, Vector}
import breeze.stats.distributions.Multinomial
import glintlda.LDAConfig
import glintlda.util.FastRNG
// Draws a new topic for a word via collapsed-Gibbs-style sampling: builds an
// unnormalized probability per topic, normalizes, then samples multinomially.
// NOTE(review): the enclosing class of this method is not visible in this
// excerpt; documentCounts/wordCounts/globalCounts/config come from it.
// NOTE(review): the `?` tokens below are mojibake from the original source —
// presumably the LDA smoothing hyperparameters (alpha, beta, and beta*V as
// "?Sum"). This will NOT compile as-is; restore the symbols from the
// original repository before use.
def sampleFeature(feature: Int, oldTopic: Int): Int = {
var i = 0
val p = DenseVector.zeros[Double](config.topics)
var sum = 0.0
// Unnormalized topic weight: (doc count + alpha?) * (word count + beta?) /
// (global count + betaSum?) — standard LDA full conditional, pending the
// garbled constants above.
while (i < config.topics) {
p(i) = (documentCounts(i) + ?) * ((wordCounts(i) + ?) / (globalCounts(i) + ?Sum))
sum += p(i)
i += 1
}
// Normalize into a probability vector and draw one topic index.
p /= sum
Multinomial(p).draw()
}
}
示例12: StreamingSimpleModel
//设置package包名称以及导入依赖的类
package com.bigchange.streaming
import breeze.linalg.DenseVector
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, StreamingLinearRegressionWithSGD}
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Streaming linear regression demo: trains on a socket text stream and
// prints per-batch MSE/RMSE of the latest model.
object StreamingSimpleModel {
def main(args: Array[String]) {
// 10-second micro-batches, reading "label\tf1,f2,..." lines from port 9999.
val ssc = new StreamingContext("local","test",Seconds(10))
val stream = ssc.socketTextStream("localhost",9999)
// Model dimensionality is fixed; weights start at zero.
val numberFeatures = 100
val zeroVector = DenseVector.zeros[Double](numberFeatures)
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(zeroVector.data))
.setNumIterations(1)
.setStepSize(0.01)
// Parse each event: tab separates the label from the comma-separated features.
val labeledStream = stream.map { event =>
val split = event.split("\t")
val y = split(0).toDouble
val features = split(1).split(",").map(_.toDouble)
LabeledPoint(label = y, features = Vectors.dense(features))
}
// Continuously update the model on each incoming batch.
model.trainOn(labeledStream)
// Compute prediction error per point using the most recent model snapshot.
val predictAndTrue = labeledStream.transform { rdd =>
val latestModel = model.latestModel()
rdd.map { point =>
val predict = latestModel.predict(point.features)
predict - point.label
}
}
// Report MSE and RMSE for each batch.
predictAndTrue.foreachRDD { rdd =>
val mse = rdd.map(x => x * x).mean()
val rmse = math.sqrt(mse)
println(s"current batch, MSE: $mse, RMSE:$rmse")
}
ssc.start()
ssc.awaitTermination()
}
}
示例13: LazyWindower
//设置package包名称以及导入依赖的类
package nodes
import breeze.linalg.DenseVector
import org.apache.spark.rdd.RDD
import pipelines.FunctionNode
import utils.{ImageMetadata, ChannelMajorArrayVectorizedImage, Image}
// Slides a windowSize x windowSize window over each image with the given
// stride, emitting each window as its own channel-major image. "Lazy"
// because windows are produced through iterators inside flatMap rather
// than materialized per image.
class LazyWindower(
stride: Int,
windowSize: Int) extends FunctionNode[RDD[Image], RDD[Image]] {
def apply(in: RDD[Image]) = {
in.flatMap(getImageWindow)
}
// Returns an iterator over all windows of `image`; windows that would
// extend past the right/bottom edge are skipped (hence "- windowSize + 1").
def getImageWindow(image: Image) = {
val xDim = image.metadata.xDim
val yDim = image.metadata.yDim
val numChannels = image.metadata.numChannels
// Start at (0,0) in (x, y) and
(0 until xDim - windowSize + 1 by stride).toIterator.flatMap { x =>
(0 until yDim - windowSize + 1 by stride).toIterator.map { y =>
// Extract the window.
val pool = new DenseVector[Double](windowSize * windowSize * numChannels)
val startX = x
val endX = x + windowSize
val startY = y
val endY = y + windowSize
// Copy pixels channel-major: index = c + dx*numChannels + dy*width*numChannels.
// The triple while-loop mirrors that layout exactly; do not reorder.
var c = 0
while (c < numChannels) {
var s = startX
while (s < endX) {
var b = startY
while (b < endY) {
pool(c + (s-startX)*numChannels +
(b-startY)*(endX-startX)*numChannels) = image.get(s, b, c)
b = b + 1
}
s = s + 1
}
c = c + 1
}
// Wrap the flat buffer as a channel-major image of the window's size.
ChannelMajorArrayVectorizedImage(pool.toArray,
ImageMetadata(windowSize, windowSize, numChannels))
}
}
}
}
示例14: PassiveAggressiveBinaryModelEvaluation
//设置package包名称以及导入依赖的类
package hu.sztaki.ilab.ps.test.utils
import breeze.linalg.{DenseVector, SparseVector}
import hu.sztaki.ilab.ps.passive.aggressive.algorithm.PassiveAggressiveBinaryAlgorithm
import org.slf4j.LoggerFactory
class PassiveAggressiveBinaryModelEvaluation

/** Offline accuracy evaluation for a trained passive-aggressive binary model. */
object PassiveAggressiveBinaryModelEvaluation {

  private val log = LoggerFactory.getLogger(classOf[PassiveAggressiveBinaryModelEvaluation])

  /**
   * Computes classification accuracy (percent) of `model` on `testLines`.
   *
   * @param model        learned dense weight vector
   * @param testLines    sparse feature vectors with their (optional) true labels
   * @param featureCount NOTE(review): currently unused; kept for interface compatibility
   * @param pac          algorithm instance providing `predict`
   * @throws IllegalStateException if any example is missing its label
   */
  def accuracy(model: DenseVector[Double],
               testLines: Traversable[(SparseVector[Double], Option[Boolean])],
               featureCount: Int,
               pac: PassiveAggressiveBinaryAlgorithm): Double = {
    // Confusion counters: true/false positives and negatives.
    var truePos = 0
    var trueNeg = 0
    var falseNeg = 0
    var falsePos = 0
    var count = 0
    testLines.foreach { case (vector, label) => label match {
      case Some(real) =>
        val predicted = pac.predict(vector, model)
        (real, predicted) match {
          case (true, true)   => truePos += 1
          case (false, false) => trueNeg += 1
          case (true, false)  => falseNeg += 1
          case (false, true)  => falsePos += 1
        }
        count += 1
      case _ =>
        // Typo fixed: "shold" -> "should".
        throw new IllegalStateException("Labels should not be missing.")
    }
    }
    // Accuracy = correctly classified / total, as a percentage.
    ((truePos + trueNeg).toDouble / count) * 100
  }
}
开发者ID:gaborhermann,项目名称:flink-parameter-server,代码行数:45,代码来源:PassiveAggressiveBinaryModelEvaluation.scala
示例15: QuadraticObjectiveFunction
//设置package包名称以及导入依赖的类
package cvx
import breeze.linalg.{DenseMatrix, DenseVector}
import MatrixUtils._
/**
 * Quadratic objective f(x) = r + a.x + x.Px/2 with gradient a + Px and
 * constant Hessian P. P must be a symmetric dim x dim matrix.
 */
class QuadraticObjectiveFunction(
  override val dim: Int,
  val r: Double,
  val a: DenseVector[Double],
  val P: DenseMatrix[Double]
)
  extends ObjectiveFunction(dim) {

  // Validate constructor arguments eagerly (messages identical to the original).
  if (a.length != dim) {
    val msg = s"Vector a must be of dimension $dim but length(a) ${a.length}"
    throw new IllegalArgumentException(msg)
  }
  // `&&` (short-circuit) replaces the non-idiomatic bitwise `&` on Booleans.
  if (!(P.rows == dim && P.cols == dim)) {
    val msg = s"Matrix P must be square of dimension $dim but is ${P.rows}x${P.cols}"
    throw new IllegalArgumentException(msg)
  }
  // Enforce symmetry up to a tight numerical tolerance.
  checkSymmetric(P, 1e-13)

  /** f(x) = r + a.x + (x.Px)/2 */
  def valueAt(x: DenseVector[Double]) = { checkDim(x); r + (a dot x) + (x dot (P * x)) / 2 }

  /** grad f(x) = a + Px (valid because P is symmetric). */
  def gradientAt(x: DenseVector[Double]) = { checkDim(x); a + P * x }

  /** Hessian is the constant matrix P. */
  def hessianAt(x: DenseVector[Double]) = { checkDim(x); P }
}