本文整理汇总了Scala中com.esotericsoftware.kryo.Kryo类的典型用法代码示例。如果您正苦于以下问题:Scala Kryo类的具体用法?Scala Kryo怎么用?Scala Kryo使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Kryo类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: Configuration
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import org.apache.hadoop.conf
// The original `Configuration ? HadoopConfiguration` was a mis-encoded
// import-rename arrow (`=>`); restored so the file compiles.
import org.apache.hadoop.conf.{ Configuration => HadoopConfiguration }
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.hammerlab.hadoop.kryo.WritableSerializer
import org.hammerlab.kryo.serializeAs
// Serializable wrapper around a Hadoop Configuration (which is not itself
// java.io.Serializable), so it can be shipped in Spark closures/broadcasts.
// `value` is @transient: it is written/read via the hooks below instead of
// default Java field serialization.
class Configuration(@transient var value: HadoopConfiguration)
extends Serializable {
// Java-serialization hook: delegate to Hadoop's own Writable encoding.
private def writeObject(out: ObjectOutputStream): Unit = {
value.write(out)
}
// Java-serialization hook: rebuild the wrapped config from the stream.
// `false` skips loading default resources; readFields restores all entries.
private def readObject(in: ObjectInputStream): Unit = {
value = new HadoopConfiguration(false)
value.readFields(in)
}
}
object Configuration {
// Build a fresh Hadoop config; the result is wrapped via the implicit
// `wrapConfiguration` conversion below.
def apply(loadDefaults: Boolean = true): Configuration =
new HadoopConfiguration(loadDefaults)
// Wrap an existing Hadoop config.
def apply(conf: HadoopConfiguration): Configuration =
new Configuration(conf)
// Implicit conversions so Hadoop configs, broadcasts of wrapped configs,
// and SparkContexts can all be used where a (wrapped) config is expected.
implicit def wrapConfiguration(conf: HadoopConfiguration): Configuration =
apply(conf)
implicit def unwrapConfiguration(conf: Configuration): HadoopConfiguration =
conf.value
implicit def unwrapConfigurationBroadcast(confBroadcast: Broadcast[Configuration]): Configuration =
confBroadcast.value
implicit def sparkContextToHadoopConfiguration(sc: SparkContext): Configuration =
sc.hadoopConfiguration
// Zero-allocation `.serializable` syntax on raw Hadoop configs.
implicit class ConfWrapper(val conf: HadoopConfiguration) extends AnyVal {
def serializable: Configuration =
Configuration(conf)
}
// Register Kryo serializers: raw Hadoop configs use Writable encoding;
// wrapped ones are serialized as (converted to/from) raw ones.
def register(kryo: Kryo): Unit = {
kryo.register(
classOf[conf.Configuration],
new WritableSerializer[conf.Configuration]
)
kryo.register(
classOf[Configuration],
serializeAs[Configuration, conf.Configuration]
)
}
}
示例2: WritableSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ DataInputStream, DataOutputStream }
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.hadoop.io.Writable
// Kryo serializer for Hadoop Writables: delegates to the Writable's own
// write/readFields binary protocol.
// NOTE(review): `ctorArgs` is accepted but never used — kept for source
// compatibility with existing callers; confirm whether it can be dropped.
class WritableSerializer[T <: Writable](ctorArgs: Any*) extends Serializer[T] {
override def read(kryo: Kryo, input: Input, clz: Class[T]): T = {
// getDeclaredConstructor().newInstance() replaces the deprecated
// Class.newInstance(), which propagated checked exceptions unchecked;
// requires (as before) a no-arg constructor on T.
val t = clz.getDeclaredConstructor().newInstance()
t.readFields(new DataInputStream(input))
t
}
override def write(kryo: Kryo, output: Output, t: T): Unit = {
t.write(new DataOutputStream(output))
}
}
示例3: SerializableSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ ObjectInputStream, ObjectOutputStream }
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
// Kryo serializer that falls back to plain Java serialization: useful for
// classes that implement java.io.Serializable but have no Kryo serializer.
case class SerializableSerializer[T <: Serializable]()
extends Serializer[T] {
// Read: wrap the Kryo Input (an InputStream) in an ObjectInputStream and
// let Java deserialization reconstruct the object, then cast back to T.
override def read(kryo: Kryo, input: Input, `type`: Class[T]): T = {
val ois = new ObjectInputStream(input)
ois.readObject().asInstanceOf[T]
}
// Write: hand the value to Java serialization over the Kryo Output stream.
override def write(kryo: Kryo, output: Output, t: T): Unit = {
val oos = new ObjectOutputStream(output)
oos.writeObject(t)
}
}
示例4: serializeAs
//设置package包名称以及导入依赖的类
package org.hammerlab
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
package object kryo {
// Build a Serializer[T] that serializes T by converting it to U (and back).
// The original `T ? U` / `U ? T` were mis-encoded function arrows (`=>`);
// restored so the definition compiles. `to`/`from` are implicit conversions
// forming a round-trip between T and its wire representation U.
def serializeAs[T, U](implicit to: T => U, from: U => T): Serializer[T] =
new Serializer[T] {
override def read(kryo: Kryo, input: Input, `type`: Class[T]): T =
from(
kryo
.readClassAndObject(input)
.asInstanceOf[U]
)
override def write(kryo: Kryo, output: Output, t: T): Unit =
kryo.writeClassAndObject(output, to(t))
}
}
示例5: SerializableSerializerTest
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import org.hammerlab.test.Suite
class SerializableSerializerTest
extends Suite {
test("serde") {
val kryo = new Kryo()
// Require explicit registration so the first write below fails loudly.
kryo.setRegistrationRequired(true)
val baos = new ByteArrayOutputStream()
val output = new Output(baos)
val foo = new Foo
foo.n = 123
foo.s = "abc"
// Unregistered class must be rejected before we register a serializer.
intercept[IllegalArgumentException] {
kryo.writeClassAndObject(output, foo)
}
.getMessage should startWith("Class is not registered: org.hammerlab.hadoop.kryo.Foo")
kryo.register(classOf[Foo], SerializableSerializer[Foo]())
kryo.writeClassAndObject(output, foo)
output.close()
val bytes = baos.toByteArray
// NOTE(review): pinning the exact byte count (93) is brittle — it ties the
// test to Kryo's class-header framing and Java serialization's stream
// format; expect this to change if either dependency is bumped.
bytes.length should be(93)
val bais = new ByteArrayInputStream(bytes)
val input = new Input(bais)
// Round-trip: fields restored by Foo's custom readObject.
val after = kryo.readClassAndObject(input).asInstanceOf[Foo]
after.n should be(foo.n)
after.s should be(foo.s)
}
}
// Minimal Serializable fixture with custom writeObject/readObject hooks.
// The read order below must mirror the write order exactly (int, then UTF).
class Foo
extends Serializable {
var n = 0
var s = ""
private def writeObject(out: ObjectOutputStream): Unit = {
out.writeInt(n)
out.writeUTF(s)
}
private def readObject(in: ObjectInputStream): Unit = {
n = in.readInt()
s = in.readUTF()
}
}
示例6: BasesSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.bases
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
// Kryo serializer for Bases: a var-length count followed by one byte per base.
// The original `base ?` / `_ ?` were mis-encoded function arrows (`=>`);
// restored so the class compiles.
class BasesSerializer
extends Serializer[Bases] {
override def write(kryo: Kryo, output: Output, bases: Bases): Unit = {
// `true` = optimize-for-positive var-length int encoding of the count.
output.writeInt(bases.bytes.length, true)
bases.foreach(base => output.writeByte(base.byte))
}
override def read(kryo: Kryo, input: Input, cls: Class[Bases]): Bases = {
val count: Int = input.readInt(true)
val bases = Bases.newBuilder
(0 until count).foreach(_ => bases += input.readByte())
bases.result()
}
}
示例7: NCKryoRegistrator
//设置package包名称以及导入依赖的类
package se.kth.climate.fast.netcdf
import org.apache.spark.serializer.KryoRegistrator
import com.esotericsoftware.kryo.{ Kryo, KryoException, Serializer }
import com.esotericsoftware.kryo.io.Output
import com.esotericsoftware.kryo.io.Input
import ucar.nc2.NetcdfFile
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import ucar.nc2.iosp.netcdf3.N3outputStreamWriter;
import java.util.UUID;
class NCKryoRegistrator extends KryoRegistrator {
// Spark entry point: register the NetCDF serializer plus the array
// serializers its payload needs. Explicit `: Unit =` replaces the
// deprecated Scala procedure syntax.
override def registerClasses(kryo: Kryo): Unit = {
println("Registering custom NetCDF serializers")
// NOTE(review): enabling TRACE-level minlog output here is extremely
// verbose; left in place to preserve behavior, but consider removing
// (or gating) it for production jobs.
com.esotericsoftware.minlog.Log.TRACE()
kryo.register(classOf[NetcdfFile], new NetcdfFileSerializer())
kryo.register(classOf[Array[String]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.StringArraySerializer())
kryo.register(classOf[Array[Int]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.IntArraySerializer())
//kryo.setRegistrationRequired(true);
}
}
// Serializes a NetcdfFile by writing its entire NetCDF-3 byte image
// (header + all record data) length-prefixed into the Kryo stream, and
// reading it back via NetcdfFile.openInMemory.
class NetcdfFileSerializer extends Serializer[NetcdfFile] {
// Explicit `: Unit =` replaces the deprecated procedure syntax.
override def write(kryo: Kryo, output: Output, ncfile: NetcdfFile): Unit = {
val baos = new ByteArrayOutputStream()
val out = new DataOutputStream(baos)
val w = new N3outputStreamWriter(ncfile)
// numrec: number of records along the unlimited dimension (0 if none).
val ulim = ncfile.getUnlimitedDimension()
val numrec = if (ulim == null) 0 else ncfile.getUnlimitedDimension().getLength()
w.writeHeader(out, numrec)
w.writeDataAll(out)
out.flush()
out.close()
val byteArray = baos.toByteArray()
// Length prefix lets read() know exactly how many bytes to consume.
output.writeInt(byteArray.length)
output.write(byteArray)
println(s"******** Wrote ncfile (size=${byteArray.length}) **********")
}
override def read(kryo: Kryo, input: Input, `type`: Class[NetcdfFile]): NetcdfFile = {
val len = input.readInt()
println(s"******** Reading ncfile (size=${len}) **********")
val raw = new Array[Byte](len)
// Kryo's Input.read may return fewer bytes than requested; loop until the
// full length-prefixed payload has been consumed, failing on early EOF.
var readBytes = 0
do {
val res = input.read(raw, readBytes, len - readBytes)
if (res > -1) {
readBytes += res
} else {
throw new KryoException(s"Read only $readBytes bytes when $len bytes were expected!")
}
} while (readBytes < len)
println(s"******** Read ncfile (read=${readBytes}, size=${len}) **********")
// Random UUID gives the in-memory file a unique (throwaway) location name.
NetcdfFile.openInMemory(UUID.randomUUID().toString(), raw)
}
}
示例8: KafkaSparkStreamingRegistrator
//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization
import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator
class KafkaSparkStreamingRegistrator extends KryoRegistrator {
// Explicit `: Unit =` replaces the deprecated procedure syntax.
override def registerClasses(kryo: Kryo): Unit = {
// Registers a serializer for any generic Avro records. The kafka-storm-starter project does not yet include
// examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
// Registers a serializer specifically for the, well, specific Avro record `Tweet`
kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
// Explicit unit value: discard the Registration returned by the last call.
()
}
}
示例9: KryoSerializer
//设置package包名称以及导入依赖的类
package com.flipkart.connekt.commons.serializers
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.pool.{KryoFactory, KryoPool}
import org.objenesis.strategy.StdInstantiatorStrategy
// Thread-safe Kryo-backed Serializer: Kryo instances are NOT thread-safe,
// so each call borrows one from a pool and returns it when done.
object KryoSerializer extends Serializer {
val factory = new KryoFactory() {
override def create(): Kryo = {
val kryo = new Kryo()
// StdInstantiatorStrategy (Objenesis) lets Kryo instantiate classes
// that lack a no-arg constructor.
kryo.setInstantiatorStrategy(new StdInstantiatorStrategy())
kryo
}
}
// Soft references allow pooled instances to be GC'd under memory pressure.
val kryoPool = new KryoPool.Builder(factory).softReferences().build()
override def serialize(obj: AnyRef): Array[Byte] = {
val stream = new ByteArrayOutputStream()
val output = new Output(stream)
val kryoInstance = kryoPool.borrow()
try {
kryoInstance.writeClassAndObject(output, obj)
output.close()
stream.toByteArray
} finally
// Release even when serialization throws, so failures can't drain
// the pool (the original leaked the instance on exception).
kryoPool.release(kryoInstance)
}
override def deserialize[T](bytes: Array[Byte])(implicit cTag: reflect.ClassTag[T]): T = {
val stream = new ByteArrayInputStream(bytes)
val input = new Input(stream)
val kryoInstance = kryoPool.borrow()
try
kryoInstance.readClassAndObject(input).asInstanceOf[T]
finally
kryoPool.release(kryoInstance)
}
}
示例10: KafkaSparkStreamingRegistrator
//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization
import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator
import scala.trace.{Pos, implicitlyFormatable}
class KafkaSparkStreamingRegistrator extends KryoRegistrator {
// Explicit `: Unit =` replaces the deprecated procedure syntax.
override def registerClasses(kryo: Kryo): Unit = {
// Registers a serializer for any generic Avro records. The kafka-storm-starter project does not yet include
// examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
// Registers a serializer specifically for the, well, specific Avro record `Tweet`
kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
// Explicit unit value: discard the Registration returned by the last call.
()
}
}
示例11: NTMTrainedModel
//设置package包名称以及导入依赖的类
package ru.ispras.modis.tm.chinesetm
import java.io.{FileInputStream, FileOutputStream}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import org.objenesis.strategy.StdInstantiatorStrategy
import ru.ispras.modis.tm.attribute.{DefaultAttributeType, AttributeType}
import ru.ispras.modis.tm.matrix.{AttributedPhi, Background, Theta}
import ru.ispras.modis.tm.plsa.TrainedModel
// Trained topic model extended with per-attribute noise and background
// distributions (single-attribute convenience accessors below).
class NTMTrainedModel(phi: Map[AttributeType, AttributedPhi],
theta: Theta,
val noise: Map[AttributeType, Background],
val background: Map[AttributeType, Background],
perplexity: Double) extends TrainedModel(phi, theta, perplexity) {
def getNoise() = {
require(noise.contains(DefaultAttributeType), "there is no default attribute in collection")
require(noise.keys.size == 1, "Do not use this method in case of multiattribute collection")
noise(DefaultAttributeType)
}
// BUG FIX: this accessor previously checked and returned `noise` (a
// copy-paste of getNoise), so callers never saw the background map.
def getBackground() = {
require(background.contains(DefaultAttributeType), "there is no default attribute in collection")
require(background.keys.size == 1, "Do not use this method in case of multiattribute collection")
background(DefaultAttributeType)
}
}
object NTMTrainedModel {
// Serialize a trained model to `path` with Kryo.
// Explicit `: Unit =` replaces the deprecated procedure syntax.
def save(model: NTMTrainedModel, path: String): Unit = {
val kryo = new Kryo
// Objenesis-based instantiation: model classes need no no-arg constructor.
kryo.setInstantiatorStrategy(new StdInstantiatorStrategy)
val output = new Output(new FileOutputStream(path))
// Close (and flush) the file even if writing throws; the original
// leaked the file handle on failure.
try kryo.writeObject(output, model)
finally output.close()
}
// Deserialize a model previously written by `save`.
def load(path: String): NTMTrainedModel = {
val kryo = new Kryo
kryo.setInstantiatorStrategy(new StdInstantiatorStrategy)
val input = new Input(new FileInputStream(path))
try kryo.readObject(input, classOf[NTMTrainedModel])
finally input.close()
}
}
示例12: overlaps
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
def overlaps(other: Region): Boolean =
other.contigName == contigName &&
(overlapsLocus(other.start) || other.overlapsLocus(start))
override def toString: String = s"$contigName:$start-$end"
}
object Region {
// Partial ordering of regions: comparable only within the same contig.
// Explicit return type added — implicit definitions should always be
// annotated (required by Scala 3, warned by 2.13).
implicit def intraContigPartialOrdering[R <: Region]: PartialOrdering[R] =
new PartialOrdering[R] {
override def tryCompare(x: R, y: R): Option[Int] = {
if (x.contigName == y.contigName)
Some(x.start.compare(y.start))
else
None
}
override def lteq(x: R, y: R): Boolean = {
x.contigName == y.contigName && x.start <= y.start
}
}
// Explicit tuple construction: the original relied on deprecated
// argument auto-tupling for RegionImpl's single tuple parameter.
def apply(contigName: ContigName, start: Locus, end: Locus): Region =
RegionImpl((contigName, start, end))
def apply(contigName: ContigName, interval: Interval): Region =
RegionImpl((contigName, interval.start, interval.end))
def unapply(region: Region): Option[(ContigName, Locus, Locus)] =
Some(
(
region.contigName,
region.start,
region.end
)
)
def register(kryo: Kryo): Unit = {
kryo.register(classOf[RegionImpl])
}
}
// Value-class Region backed by a single tuple field: avoids allocating a
// wrapper beyond the tuple itself while still satisfying the Region trait.
private case class RegionImpl(t: (ContigName, Locus, Locus))
extends AnyVal
with Region {
override def contigName: ContigName = t._1
override def start: Locus = t._2
override def end: Locus = t._3
}
示例13: Position
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
// A single-locus Region: start == locus, end == locus.next (half-open,
// length-1 interval).
case class Position(contigName: ContigName, locus: Locus)
extends Region
with HasLocus {
def start = locus
def end = locus.next
override def toString: String = s"$contigName:$locus"
}
object Position {
def registerKryo(kryo: Kryo): Unit = {
kryo.register(classOf[Position])
kryo.register(classOf[Array[Position]])
}
private implicit val tupleOrdering: Ordering[(ContigName, Locus)] = Ordering.Tuple2[ContigName, Locus]
// Total order: by contig name, then by locus.
val totalOrdering: Ordering[Position] =
new Ordering[Position] {
override def compare(x: Position, y: Position): Int =
tupleOrdering.compare(
(x.contigName, x.locus),
(y.contigName, y.locus)
)
}
// Partial order: positions on different contigs are incomparable.
// Explicit type annotation added for the public val.
val partialOrdering: PartialOrdering[Position] =
new PartialOrdering[Position] {
// Result type corrected to Option[Int], the PartialOrdering contract;
// the original declared Option[KmerLength] (an unrelated alias of Int).
override def tryCompare(x: Position, y: Position): Option[Int] =
if (x.contigName == y.contigName)
// NOTE(review): comparing a Position against a Locus — presumably
// via an implicit HasLocus conversion in scope; confirm.
Some(x.compare(y.locus))
else
None
override def lteq(x: Position, y: Position): Boolean =
x.contigName == y.contigName && x.locus <= y.locus
}
}
示例14: RegistrarI
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.genomics.reference.ContigName.{ Normalization, Factory }
// Base Kryo registrator parameterized by a ContigName factory, which is
// exposed implicitly so ContigNameSerializer picks up the right
// normalization policy during registration.
abstract class RegistrarI(factory: Factory) extends KryoRegistrator {
// Explicit type annotation added: implicit vals should always be
// annotated (required by Scala 3, warned by 2.13).
implicit val n: Factory = factory
override def registerClasses(kryo: Kryo): Unit = {
kryo.register(classOf[Locus])
kryo.register(classOf[ContigName], new ContigNameSerializer)
kryo.register(classOf[Position])
}
}
// Registrar with strict contig-name normalization.
class Registrar extends RegistrarI(Normalization.Strict)
// Registrar with lenient contig-name normalization; the companion object
// provides a ready-made singleton instance.
class PermissiveRegistrar extends RegistrarI(Normalization.Lenient)
object PermissiveRegistrar extends PermissiveRegistrar
示例15: CustomKryoRegistrator
//设置package包名称以及导入依赖的类
package com.paypal.risk.smunf.util
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import com.paypal.risk.smunf.math.quantile._
import com.paypal.risk.smunf.math.stats._
import com.paypal.risk.smunf.sanity._
import com.paypal.risk.smunf.woe._
class CustomKryoRegistrator extends KryoRegistrator {
// Register every WOE / quantile / stats / sanity model class with Kryo so
// Spark serializes them efficiently instead of falling back to Java
// serialization. Registration order is preserved from the original list.
override def registerClasses(kryo: Kryo): Unit = {
val classes: Seq[Class[_]] =
Seq(
classOf[BinCount],
classOf[WoeBin],
classOf[VariableBin],
classOf[VariableRecord],
classOf[WoeResult],
classOf[NumWoeMap],
classOf[CharWoeMap],
classOf[WoeMap],
classOf[SanityRecord],
classOf[LabelResult],
classOf[VariableResult],
classOf[StreamStatsAggregator],
classOf[TDigestUnit],
classOf[TDigestEstimator],
classOf[TDigestTree],
classOf[AVLTreeIndex],
classOf[HistogramUnit],
classOf[StreamingHistogramEstimator],
classOf[HistogramTree]
)
classes.foreach(c => kryo.register(c))
}
}