本文整理汇总了Scala中scala.reflect.runtime.universe类的典型用法代码示例。如果您正苦于以下问题:Scala universe类的具体用法?Scala universe怎么用?Scala universe使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了universe类的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: RuntimeCompilationTest
//设置package包名称以及导入依赖的类
package com.github.cuzfrog.webdriver
import scala.reflect.runtime.universe
import scala.tools.reflect.ToolBox
/** Demonstrates compiling Scala source code at runtime with a reflection ToolBox. */
private object RuntimeCompilationTest {
  // ToolBox bound to this object's classloader; performs runtime parse + compile.
  val toolbox = universe.runtimeMirror(getClass.getClassLoader).mkToolBox()

  // Parse a snippet whose final expression evaluates to the runtime Class of
  // the freshly defined MyParser (via classTag).
  val parsedTree = toolbox.parse {
"""
|private class MyParser extends Function[String,String]{
| override def apply(v1: String): String = v1 + "123"
|}
|
|scala.reflect.classTag[MyParser].runtimeClass
""".stripMargin
  }

  // Compile and evaluate the tree, then cast the evaluation result to a Class
  // of the expected function type (unchecked at runtime).
  val parserClass = toolbox.compile(parsedTree).apply().asInstanceOf[Class[Function[String, String]]]

  // Instantiate via the no-arg constructor and exercise the generated parser.
  val parser = parserClass.getConstructor().newInstance()
  println(parser.apply("asdf"))
}
示例2: SameTypeMatcher
//设置package包名称以及导入依赖的类
package knot.testKit
import org.scalatest.matchers.{MatchResult, Matcher}
import scala.reflect.runtime.universe
import scala.reflect.runtime.universe._
/** ScalaTest matchers for comparing scala-reflect runtime Types. */
trait ReflectTypeMatchers {
  /** Matcher asserting that the matched Type is `=:=` (same type as) `right`. */
  case class SameTypeMatcher(right: Type) extends Matcher[Type] {
    override def apply(left: universe.Type): MatchResult = {
      val sameType = left =:= right
      MatchResult(
        sameType,
        s"universe.Type $left not same type with $right",
        s"universe.Type $left same type with $right")
    }
  }

  /** Convenience factory: `tpe should sameType(other)`. */
  def sameType(tpe: Type) = SameTypeMatcher(tpe)
}

/** Importable standalone instance of [[ReflectTypeMatchers]]. */
object ReflectTypeMatchers extends ReflectTypeMatchers
示例3: JavaDecoder
//设置package包名称以及导入依赖的类
package knot.data.serialization.j
import java.io.{ByteArrayInputStream, InputStream, ObjectInputStream}
import knot.data.serialization.{Decoder, Deserializer, DeserializerFactory}
import scala.reflect.runtime.universe
// Decoder backed by standard Java object deserialization. The Decoder trait is
// mixed in so this stream satisfies the factory's type bound — the trait's own
// contents are not visible in this file.
// NOTE(review): a plain ObjectInputStream deserializes arbitrary classes from
// the stream; only use on trusted input.
class JavaDecoder(in: InputStream) extends ObjectInputStream(in) with Decoder{
}
/** DeserializerFactory producing Java-object-stream backed deserializers. */
class JavaDeserializerFactory extends DeserializerFactory[JavaDecoder] {

  /** Builds a deserializer that reads a value of T via Java object streams. */
  override def get[T: universe.TypeTag](): Deserializer[JavaDecoder, T] =
    new Deserializer[JavaDecoder, T] {
      // Wraps the bytes in a decoder, delegates, and always closes the stream.
      override def deserialize(bytes: Array[Byte]): T = {
        val decoder = new JavaDecoder(new ByteArrayInputStream(bytes))
        try deserialize(decoder)
        finally decoder.close()
      }

      // Reads the next object and casts it to the requested type; the cast is
      // unchecked at runtime due to type erasure.
      override def deserialize(decoder: JavaDecoder): T =
        decoder.readObject().asInstanceOf[T]
    }

  /** No-op: Java serialization needs no per-type registration. */
  override def register[T: universe.TypeTag](): Unit = {}

  /** No-op: custom deserializers are ignored by this factory. */
  override def register[T: universe.TypeTag](deser: Deserializer[JavaDecoder, T]): Unit = {}
}
示例4: JavaEncoder
//设置package包名称以及导入依赖的类
package knot.data.serialization.j
import java.io._
import knot.data.serialization.{Encoder, Serializer, SerializerFactory}
import scala.reflect.runtime.universe
// Encoder backed by standard Java object serialization. The Encoder trait is
// mixed in so this stream satisfies the factory's type bound — the trait's own
// contents are not visible in this file.
class JavaEncoder(out: OutputStream) extends ObjectOutputStream(out) with Encoder {
}
/** SerializerFactory producing Java-object-stream backed serializers. */
class JavaSerializerFactory extends SerializerFactory[JavaEncoder] {

  /** Builds a serializer that writes values of T via Java object streams. */
  override def get[T: universe.TypeTag](): Serializer[JavaEncoder, T] =
    new Serializer[JavaEncoder, T] {
      // Serializes obj into a fresh byte array; the encoder is always closed.
      override def serialize(obj: T): Array[Byte] = {
        val buffer = new ByteArrayOutputStream()
        val encoder = new JavaEncoder(buffer)
        try {
          serialize(encoder, obj)
          // BUG FIX: ObjectOutputStream buffers block data internally, so
          // reading the underlying byte array before a flush could return a
          // truncated payload. Flush first to drain the stream's buffer.
          encoder.flush()
          buffer.toByteArray()
        } finally {
          encoder.close()
        }
      }

      // Delegates to ObjectOutputStream; obj must be Serializable at runtime.
      override def serialize(encoder: JavaEncoder, obj: T): Unit =
        encoder.writeObject(obj)
    }

  /** No-op: Java serialization needs no per-type registration. */
  override def register[T: universe.TypeTag](): Unit = {}

  /** No-op: custom serializers are ignored by this factory. */
  override def register[T: universe.TypeTag](ser: Serializer[JavaEncoder, T]): Unit = {}
}
示例5: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.spark_ml_serving.clustering
import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
/** Local (Spark-free) transformer wrapping a fitted KMeansModel. */
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {

  // The underlying mllib model lives in the non-public "parentModel" field of
  // the ml wrapper; extract it reflectively since no public accessor exists.
  lazy val parent: OldKMeansModel = {
    val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val term = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    mirror.reflect(sparkTransformer).reflectField(term).get.asInstanceOf[OldKMeansModel]
  }

  /** Predicts a cluster index for every row of the features column, if present. */
  override def transform(localData: LocalData): LocalData =
    localData.column(sparkTransformer.getFeaturesCol) match {
      case None => localData
      case Some(column) =>
        // Each row is assumed to be an Array[Double] of feature values.
        val predictions = column.data
          .map(row => Vectors.dense(row.asInstanceOf[Array[Double]]))
          .map(vector => parent.predict(vector))
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
    }
}
object LocalKMeansModel extends LocalModel[KMeansModel] {

  // JSON deserialization may box numeric params as Int, Long, Double, or even
  // String depending on the parser; normalize to Number before extracting a
  // primitive so a fractional seed ("42.0") or an Integer-boxed tol no longer
  // crashes load() (the previous toString.toLong / asInstanceOf[Double] did).
  private def numericParam(metadata: Metadata, name: String): Number =
    metadata.paramMap(name) match {
      case n: Number => n
      case other     => java.lang.Double.valueOf(other.toString)
    }

  /**
   * Rebuilds a KMeansModel from persisted metadata and cluster-center data.
   *
   * @param metadata model uid and param map (featuresCol, predictionCol, k, ...)
   * @param data     map keyed by stringified center index; each value is a map
   *                 with a "values" entry holding the center's coordinates
   */
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Sort center entries numerically by key so cluster order is preserved.
    val sorted = ListMap(data.toSeq.sortBy { case (key, _) => key.toInt }: _*)
    val centers = sorted map { case (_, value) =>
      val center = value.asInstanceOf[Map[String, Any]]
      Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
    }
    // Both constructors are private in Spark; open them up reflectively.
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)
    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    var inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
    inst = inst.set(inst.k, numericParam(metadata, "k").intValue())
    inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
    inst = inst.set(inst.maxIter, numericParam(metadata, "maxIter").intValue())
    inst = inst.set(inst.initSteps, numericParam(metadata, "initSteps").intValue())
    inst = inst.set(inst.seed, numericParam(metadata, "seed").longValue())
    inst = inst.set(inst.tol, numericParam(metadata, "tol").doubleValue())
    inst
  }

  /** Implicit conversion so a KMeansModel can be used as a LocalTransformer. */
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
}
示例6: untilTimeout
//设置package包名称以及导入依赖的类
package scorex
import java.security.SecureRandom
import scala.annotation.tailrec
import scala.concurrent.duration._
import scala.reflect.runtime.universe
import scala.util._
package object utils {

  /**
   * Repeatedly evaluates `fn` until it succeeds or the time budget runs out.
   *
   * @param timeout   total time budget for retries
   * @param delay     pause between attempts (also the budget step)
   * @param onFailure side effect executed before rethrowing the final failure
   * @param fn        computation to attempt
   * @return the first successful result of `fn`
   */
  @tailrec
  final def untilTimeout[T](timeout: FiniteDuration,
                            delay: FiniteDuration = 100.milliseconds,
                            onFailure: => Unit = {})(fn: => T): T =
    Try(fn) match {
      case Success(value) => value
      case _ if timeout > delay =>
        // Budget remains: wait, then retry with the budget reduced by delay.
        // NOTE(review): onFailure fires only for the terminal failure, not on
        // intermediate retries — original behavior, preserved as-is.
        Thread.sleep(delay.toMillis)
        untilTimeout(timeout - delay, delay, onFailure)(fn)
      case Failure(e) =>
        onFailure
        throw e
    }

  /** Returns `howMany` cryptographically strong random bytes. */
  def randomBytes(howMany: Int = 32): Array[Byte] = {
    val buffer = new Array[Byte](howMany)
    new SecureRandom().nextBytes(buffer) // fills buffer in place
    buffer
  }

  /** Loads the Scala `object` named by `fullClassName` and casts it to T. */
  def objectFromString[T](fullClassName: String): Try[T] = Try {
    val mirror = universe.runtimeMirror(getClass.getClassLoader)
    val moduleSymbol = mirror.staticModule(fullClassName)
    mirror.reflectModule(moduleSymbol).instance.asInstanceOf[T]
  }
}
示例7: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.mist.api.ml.clustering
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
/** Local (Spark-free) transformer wrapping a fitted KMeansModel. */
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {

  // The underlying mllib model lives in the non-public "parentModel" field of
  // the ml wrapper; extract it reflectively since no public accessor exists.
  lazy val parent: OldKMeansModel = {
    val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val term = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    mirror.reflect(sparkTransformer).reflectField(term).get.asInstanceOf[OldKMeansModel]
  }

  /** Predicts a cluster index for every row of the features column, if present. */
  override def transform(localData: LocalData): LocalData =
    localData.column(sparkTransformer.getFeaturesCol) match {
      case None => localData
      case Some(column) =>
        // Each row is assumed to be an Array[Double] of feature values.
        val predictions = column.data
          .map(row => Vectors.dense(row.asInstanceOf[Array[Double]]))
          .map(vector => parent.predict(vector))
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
    }
}
object LocalKMeansModel extends LocalModel[KMeansModel] {

  // JSON deserialization may box numeric params as Int, Long, Double, or even
  // String depending on the parser; normalize to Number before extracting a
  // primitive so a fractional seed ("42.0") or an Integer-boxed tol no longer
  // crashes load() (the previous toString.toLong / asInstanceOf[Double] did).
  private def numericParam(metadata: Metadata, name: String): Number =
    metadata.paramMap(name) match {
      case n: Number => n
      case other     => java.lang.Double.valueOf(other.toString)
    }

  /**
   * Rebuilds a KMeansModel from persisted metadata and cluster-center data.
   *
   * @param metadata model uid and param map (featuresCol, predictionCol, k, ...)
   * @param data     map keyed by stringified center index; each value is a map
   *                 with a "values" entry holding the center's coordinates
   */
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Sort center entries numerically by key so cluster order is preserved.
    val sorted = ListMap(data.toSeq.sortBy { case (key, _) => key.toInt }: _*)
    val centers = sorted map { case (_, value) =>
      val center = value.asInstanceOf[Map[String, Any]]
      Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
    }
    // Both constructors are private in Spark; open them up reflectively.
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)
    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    var inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
    inst = inst.set(inst.k, numericParam(metadata, "k").intValue())
    inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
    inst = inst.set(inst.maxIter, numericParam(metadata, "maxIter").intValue())
    inst = inst.set(inst.initSteps, numericParam(metadata, "initSteps").intValue())
    inst = inst.set(inst.seed, numericParam(metadata, "seed").longValue())
    inst = inst.set(inst.tol, numericParam(metadata, "tol").doubleValue())
    inst
  }

  /** Implicit conversion so a KMeansModel can be used as a LocalTransformer. */
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
}
示例8: ScalaPBReadSupport
//设置package包名称以及导入依赖的类
package com.trueaccord.scalapb.parquet
import java.util
import com.trueaccord.scalapb.{GeneratedMessage, GeneratedMessageCompanion, Message}
import org.apache.hadoop.conf.Configuration
import org.apache.parquet.hadoop.api.{InitContext, ReadSupport}
import org.apache.parquet.hadoop.api.ReadSupport.ReadContext
import org.apache.parquet.io.api.{GroupConverter, RecordMaterializer}
import org.apache.parquet.schema.MessageType
/** Parquet ReadSupport that materializes ScalaPB messages of type T. */
class ScalaPBReadSupport[T <: GeneratedMessage with Message[T]] extends ReadSupport[T] {

  /** Resolves the companion of the persisted proto class and builds a record materializer. */
  override def prepareForRead(
      configuration: Configuration,
      keyValueMetaData: util.Map[String, String],
      fileSchema: MessageType,
      readContext: ReadContext): RecordMaterializer[T] = {
    // The writer stored the generated message's class name in file metadata;
    // without it we cannot locate a companion, so fail fast.
    val protoClass = Option(keyValueMetaData.get(ScalaPBReadSupport.PB_CLASS))
      .getOrElse(throw new RuntimeException(s"Value for ${ScalaPBReadSupport.PB_CLASS} not found."))
    // Look up the generated companion object reflectively; it knows how to
    // build messages of type T.
    val companion = {
      import scala.reflect.runtime.universe
      val mirror = universe.runtimeMirror(getClass.getClassLoader)
      val moduleSymbol = mirror.staticModule(protoClass)
      mirror.reflectModule(moduleSymbol).instance.asInstanceOf[GeneratedMessageCompanion[T]]
    }
    new RecordMaterializer[T] {
      val root = new ProtoMessageConverter[T](companion, fileSchema, onEnd = _ => ())
      override def getRootConverter: GroupConverter = root
      override def getCurrentRecord: T = root.getCurrentRecord
    }
  }

  /** Requests the full file schema; no column projection is applied. */
  override def init(context: InitContext): ReadContext =
    new ReadContext(context.getFileSchema)
}

object ScalaPBReadSupport {
  /** Metadata key under which the writer records the generated class name. */
  val PB_CLASS = "parquet.scalapb.class"
}
示例9: Util
//设置package包名称以及导入依赖的类
package io.ics.disciple.util
import io.ics.disciple.dep._
import io.ics.disciple.labels._
import scala.reflect.runtime.universe
/** Reflection helpers shared across the dependency-injection machinery. */
object Util {
  type TT[T] = universe.TypeTag[T]
  type Type = universe.Type
  type L = PossibleLabel

  /**
   * Resolves the runtime Type for T. String is special-cased so that every
   * String lookup yields the same canonical Type instance.
   */
  def typeOf[T: TT]: Type = {
    val resolved = universe.typeOf[T]
    if (resolved =:= universe.typeOf[String]) universe.typeOf[String]
    else resolved
  }

  /** Builds a dependency id: name-qualified when a label is present, type-only otherwise. */
  def getId[P: TT](name: PossibleLabel) = name match {
    case Label(n) => NamedId(n, this.typeOf[P])
    case NoLabel  => TTId(this.typeOf[P])
  }
}
示例10: WebDescriptionClassesRegister
//设置package包名称以及导入依赖的类
package programEvaluator
import leon.purescala.Definitions.CaseClassDef
import leon.webDSL.webDescription._
import scala.reflect.runtime.universe
/** Registry mapping leon web-DSL class names to reflective constructor mirrors. */
object WebDescriptionClassesRegister {
  // The boolean flags whether the class extends WebElement.
  val fullNameToConstructorMap: Map[String, (universe.MethodMirror, Boolean)] = Map(
    "leon.webDSL.webDescription.WebPage" -> ((getReflectConstructor[WebPage], false)),
    "leon.webDSL.webDescription.Element" -> ((getReflectConstructor[Element], true)),
    "leon.webDSL.webDescription.TextElement" -> ((getReflectConstructor[TextElement], true)),
    "leon.webDSL.webDescription.WebAttribute" -> ((getReflectConstructor[WebAttribute], false)),
    "leon.webDSL.webDescription.WebStyle" -> ((getReflectConstructor[WebStyle], false)),
    "leon.collection.Cons" -> ((getReflectConstructor[leon.collection.Cons[_]], false)),
    "leon.collection.Nil" -> ((getReflectConstructor[leon.collection.Nil[_]], false))//,
    // ("leon.lang.Map", getReflectConstructor[leon.lang.Map[_,_]])
  )

  /** Reflectively obtains a mirror for T's primary constructor. */
  private def getReflectConstructor[T: universe.TypeTag] = {
    val mirror = universe.runtimeMirror(getClass.getClassLoader)
    val classSymbol = universe.typeOf[T].typeSymbol.asClass
    val ctorSymbol = universe.typeOf[T].decl(universe.termNames.CONSTRUCTOR).asMethod
    mirror.reflectClass(classSymbol).reflectConstructor(ctorSymbol)
  }
}
示例11: VarStub
//设置package包名称以及导入依赖的类
package webby.route.v2
import webby.route._
/** Maps route-variable types to placeholder (stub) values and Var factories. */
object VarStub {
  import scala.reflect.runtime.universe

  /** A placeholder-value generator plus a Var factory for one supported type. */
  trait Stub {
    /** Produces a placeholder value encoding `index`. */
    def makeStub(index: Int): AnyRef
    /** Builds the route Var for this type. */
    def toVar(name: String, pat: Option[String]): Var[_]
  }

  object IntStub extends Stub {
    override def makeStub(index: Int): AnyRef = index.asInstanceOf[AnyRef]
    override def toVar(name: String, pat: Option[String]): Var[_] = new IntVar(name, pat)
  }
  object LongStub extends Stub {
    override def makeStub(index: Int): AnyRef = index.asInstanceOf[AnyRef]
    override def toVar(name: String, pat: Option[String]): Var[_] = new LongVar(name, pat)
  }
  object StringStub extends Stub {
    override def makeStub(index: Int): AnyRef = index.toString
    override def toVar(name: String, pat: Option[String]): Var[_] = new StringVar(name, pat)
  }
  object BigDecimalStub extends Stub {
    override def makeStub(index: Int): AnyRef = BigDecimal(index)
    override def toVar(name: String, pat: Option[String]): Var[_] = new BigDecimalVar(name, pat)
  }

  // Lookup table for resolve(): Type -> Stub, compared with =:=.
  private val stubsByType: Seq[(universe.Type, Stub)] = Seq(
    universe.typeOf[scala.Int] -> IntStub,
    universe.typeOf[scala.Long] -> LongStub,
    universe.typeOf[java.lang.String] -> StringStub,
    universe.typeOf[scala.BigDecimal] -> BigDecimalStub)

  /** Picks the stub matching `tpe`, failing with the offending method's name. */
  def resolve(tpe: universe.Type, method: => String): Stub =
    stubsByType.collectFirst { case (t, stub) if tpe =:= t => stub }.getOrElse {
      sys.error("Unknown variable type for route handler " + tpe + " in method " + method)
    }

  /** Recovers the encoded index from a stub value produced by makeStub. */
  def indexFromStub(stub: Any): Int = stub match {
    case i: Int        => i
    case l: Long       => l.toInt
    case s: String     => s.toInt
    case b: BigDecimal => b.toInt
    case other         => sys.error("Cannot recognize stub value: " + other)
  }
}
示例12: SampleResourcesTyped
//设置package包名称以及导入依赖的类
package org.elastic.rest.scala.driver.test_utils
import org.elastic.rest.scala.driver.RestBaseRuntimeTyped._
import org.elastic.rest.scala.driver.test_utils.SampleResources.{InWrapper, OutWrapper}
import scala.reflect.runtime.universe
// $COVERAGE-OFF$This is test code included here purely to allow x-project test artefact sharing
/**
* Useful sample REST resources for testing
*/
object SampleResourcesTyped {
// Typed-to-string helper: casts the typed input to InWrapper and delegates to
// its fromTyped rendering. Assumes every T used in tests is an InWrapper —
// the cast is unchecked at runtime.
implicit val myTypedToStringHelper = new RuntimeTypedToStringHelper {
override def fromTyped[T](t: T)(implicit ct: universe.WeakTypeTag[T]): String =
t.asInstanceOf[InWrapper].fromTyped
}
// String-to-typed helper: wraps the raw response string in an OutWrapper and
// casts it to the requested T (unchecked; tests request T = OutWrapper).
implicit val myStringToTypedHelper = new RuntimeStringToTypedHelper {
override def toType[T](s: String)(implicit ct: universe.WeakTypeTag[T]): T =
OutWrapper(s).asInstanceOf[T]
}
}
// $COVERAGE-ON$
示例13: RestResourcesTypedTests
//设置package包名称以及导入依赖的类
package org.elastic.rest.scala.driver
import org.elastic.rest.scala.driver.RestBase._
import org.elastic.rest.scala.driver.RestBaseRuntimeTyped._
import org.elastic.rest.scala.driver.test_utils.SampleResources._
import org.elastic.rest.scala.driver.test_utils.SampleResourcesTyped._
import utest._
import scala.reflect.runtime.universe
// Verifies that every typed REST operation's inferred output type is OutWrapper.
object RestResourcesTypedTests extends TestSuite {
// Captures T's WeakTypeTag at the call site and checks it equals OutWrapper.
// This is a compile-time/type-level check; the operation is never executed.
def checkOutputType[T](op: TypedDriverOp[T])(implicit ct: universe.WeakTypeTag[T]): Boolean =
ct.tpe =:= universe.typeOf[OutWrapper]
val tests = this {
"Test all resources, typed output (output type)" - {
// Output types:
checkOutputType(`/$resource_ut`("/").check()) ==> true
checkOutputType(`/$resource_ut`("/").read()) ==> true
checkOutputType(`/$resource_ut`("/").readS("body")) ==> true
checkOutputType(`/$resource_ut`("/").readJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_ut`("/").send()) ==> true
checkOutputType(`/$resource_ut`("/").sendS("body")) ==> true
checkOutputType(`/$resource_ut`("/").sendJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_ut`("/").write()) ==> true
checkOutputType(`/$resource_ut`("/").writeS("body")) ==> true
checkOutputType(`/$resource_ut`("/").writeJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_ut`("/").delete()) ==> true
checkOutputType(`/$resource_ut`("/").deleteS("body")) ==> true
checkOutputType(`/$resource_ut`("/").deleteJ(MockJson("body"))) ==> true
}
"Test all resources, typed input and output (output type)" - {
// Output types:
checkOutputType(`/$resource_tt`("/").check()) ==> true
checkOutputType(`/$resource_tt`("/").read()) ==> true
checkOutputType(`/$resource_tt`("/").readS("body")) ==> true
checkOutputType(`/$resource_tt`("/").readJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_tt`("/").send()) ==> true
checkOutputType(`/$resource_tt`("/").sendS("body")) ==> true
checkOutputType(`/$resource_tt`("/").sendJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_tt`("/").write()) ==> true
checkOutputType(`/$resource_tt`("/").writeS("body")) ==> true
checkOutputType(`/$resource_tt`("/").writeJ(MockJson("body"))) ==> true
checkOutputType(`/$resource_tt`("/").delete()) ==> true
checkOutputType(`/$resource_tt`("/").deleteS("body")) ==> true
checkOutputType(`/$resource_tt`("/").deleteJ(MockJson("body"))) ==> true
}
}
}