This article collects typical usage examples of the Scala class breeze.numerics.abs. If you are unsure what the abs class is for, or how to use abs in Scala, the hand-picked class examples below should help.
Two code examples of the abs class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Scala code examples.
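Before the examples, a minimal sketch of what breeze.numerics.abs does (the values here are arbitrary illustrations): it is a Breeze UFunc, so it works on plain numbers as well as element-wise on Breeze vectors and matrices.

import breeze.linalg.DenseVector
import breeze.numerics.abs

// abs on a scalar and, element-wise, on a DenseVector
val s = abs(-3.5)                            // 3.5
val v = abs(DenseVector(-1.0, 2.0, -3.0))    // DenseVector(1.0, 2.0, 3.0)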
Example 1: TensorLDAModelTest
// Set up the package name and import the dependent classes
package edu.uci.eecs.spectralLDA.algorithm

import breeze.linalg.{DenseMatrix, DenseVector, SparseVector, norm}
import breeze.numerics.abs
import org.scalatest._
import org.apache.spark.SparkContext
import edu.uci.eecs.spectralLDA.testharness.Context

class TensorLDAModelTest extends FlatSpec with Matchers {
  private val sc: SparkContext = Context.getSparkContext

  "Multinomial log-likelihood" should "be correct" in {
    val p = DenseVector[Double](0.2, 0.5, 0.3)
    val x1 = DenseVector[Double](20, 50, 30)
    val x2 = DenseVector[Double](40, 40, 20)

    // abs turns the difference from the precomputed reference values into an absolute error bound
    abs(TensorLDAModel.multinomialLogLikelihood(p, x1) - (-4.697546)) should be <= 1e-6
    abs(TensorLDAModel.multinomialLogLikelihood(p, x2) - (-15.42038)) should be <= 1e-6
  }
}
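For reference, the expected constants in the test above can be reproduced with Breeze alone, assuming the standard multinomial log-likelihood formula lgamma(n + 1) - sum_i lgamma(x_i + 1) + sum_i x_i * log(p_i). This is only a sketch of where the numbers come from, not necessarily the exact code inside TensorLDAModel.

import breeze.linalg.{DenseVector, sum}
import breeze.numerics.{lgamma, log}

// Standalone multinomial log-likelihood under the assumed formula above
def multinomialLogLik(p: DenseVector[Double], x: DenseVector[Double]): Double =
  lgamma(sum(x) + 1.0) - sum(lgamma(x + 1.0)) + sum(x *:* log(p))

val p = DenseVector(0.2, 0.5, 0.3)
multinomialLogLik(p, DenseVector[Double](20, 50, 30))  // ≈ -4.697546
multinomialLogLik(p, DenseVector[Double](40, 40, 20))  // ≈ -15.42038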
Example 2: RandomSampling
// Set up the package name and import the dependent classes
package com.burness.algorithm.preprocess

import breeze.numerics.abs
import com.burness.utils.AbstractParams
import org.apache.spark.SparkContext
import org.apache.spark.sql.hive.HiveContext
import scopt.OptionParser

class RandomSampling(sc: SparkContext) {

  case class Params(samplingRatio: Double = 1.0,
                    inputTableName: String = null,
                    outputTableName: String = null)
    extends AbstractParams[Params]

  def parseParams(args: Array[String]): Params = {
    val defaultParams = Params()
    val parser = new OptionParser[Params]("RandomSampling") {
      head("Random Sampling Params parse")
      opt[String]("inputTableName")
        .text("data input path")
        .action((x, c) => c.copy(inputTableName = x))
      opt[String]("outputTableName")
        .text("data output path")
        .action((x, c) => c.copy(outputTableName = x))
      opt[Double]("samplingRatio")
        .text("random sampling ratio")
        .action((x, c) => c.copy(samplingRatio = x))
    }
    parser.parse(args, defaultParams) match {
      case Some(params) =>
        params
      case None =>
        defaultParams
    }
  }

  def run(params: Params): Unit = {
    val hiveContext = new HiveContext(sc)
    import hiveContext.implicits._
    import hiveContext.sql

    // Sample the input table without replacement at the requested ratio
    val result = sql(s"select * from ${params.inputTableName}").sample(withReplacement = false, params.samplingRatio)

    // Register the sample under a randomly named temp table; abs keeps the numeric suffix non-negative
    val r = scala.util.Random
    r.setSeed(System.currentTimeMillis())
    val tempNum = abs(r.nextInt())
    val tempName = "random_" + tempNum.toString + "_sample_table"
    result.registerTempTable(tempName)
    sql(s"create table ${params.outputTableName} as select * from $tempName")
  }
}
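A hypothetical end-to-end invocation of the class above: the SparkContext sc is assumed to exist, and the table names and sampling ratio are placeholders chosen for illustration. The class requires a Hive-enabled Spark deployment.

// Hypothetical driver code; sc, table names, and the ratio are illustrative only
val sampler = new RandomSampling(sc)
val params = sampler.parseParams(Array(
  "--inputTableName", "events_raw",
  "--outputTableName", "events_sampled",
  "--samplingRatio", "0.1"))
sampler.run(params)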