

Scala ObjectOutputStream Class Code Examples

This article collects typical usage examples of the java.io.ObjectOutputStream class in Scala. If you are wondering what ObjectOutputStream is for, how to use it from Scala, or what real-world usage looks like, the curated class examples below should help.


The sections below present 15 code examples of the ObjectOutputStream class, ordered roughly by popularity.
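
Before the examples, here is a minimal round-trip sketch (not taken from any of the projects below) showing the pattern most of them build on: write an object graph to a byte array with ObjectOutputStream, then read it back with ObjectInputStream. The Greeting case class and RoundTripSketch object are made up for illustration.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

// Anything written this way must implement java.io.Serializable;
// Scala case classes do so out of the box.
case class Greeting(text: String)

object RoundTripSketch extends App {
  // Serialize: write the object graph into an in-memory byte buffer.
  val baos = new ByteArrayOutputStream()
  val oos  = new ObjectOutputStream(baos)
  try oos.writeObject(Greeting("hello")) finally oos.close()

  // Deserialize: read the same bytes back and downcast to the expected type.
  val ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
  val restored =
    try ois.readObject().asInstanceOf[Greeting]
    finally ois.close()

  assert(restored == Greeting("hello"))
}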

Example 1: MQProducer

//Package declaration and imported dependencies
package mq

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

import com.fasterxml.jackson.databind.ObjectMapper
import twitter4j.Status
import com.rabbitmq.client.{Channel, ConnectionFactory}



object MQProducer {

  private val EXCHANGE_TWITTER = "twitter_exchange"

  def initiateRabbitMQProducer: Channel = {
    val factory = new ConnectionFactory
    factory.setHost("localhost")
    val connection = factory.newConnection
    val channel = connection.createChannel()
    channel.exchangeDeclare(EXCHANGE_TWITTER, "topic")
    channel
  }

  def produce(channel: Channel, topic: String, status: Status): Unit = {
    println(status.getText)
    channel.basicPublish(EXCHANGE_TWITTER,topic,null,serialise(status))
  }


  def serialise(value: Any): Array[Byte] = {
    val stream: ByteArrayOutputStream = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(stream)
    oos.writeObject(value)
    oos.close
    stream.toByteArray
  }

} 
Author: neruti-developers, Project: neruti-demo-backend, Lines: 39, Source: MQProducer.scala
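
For completeness, a consumer-side counterpart would reverse the process with ObjectInputStream. This is only a sketch; the MQConsumerSketch object and its deserialise method are hypothetical and not part of the project above.

import java.io.{ByteArrayInputStream, ObjectInputStream}

import twitter4j.Status

object MQConsumerSketch {
  // Rebuild the Status that MQProducer.serialise turned into bytes.
  def deserialise(bytes: Array[Byte]): Status = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(bytes))
    try ois.readObject().asInstanceOf[Status]
    finally ois.close()
  }
}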

Example 2: encode

//Package declaration and imported dependencies
package game_of_life.util

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

trait Codec {

  def encode(obj: AnyRef): Array[Byte]

  def decode[A >: Null](array: Array[Byte]): A
}

trait SerializationCodec extends Codec {

  def encode(obj: AnyRef): Array[Byte] = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    try {
      oos writeObject obj
    } finally {
      oos close ()
    }
    baos.toByteArray
  }

  def decode[A >: Null](array: Array[Byte]): A = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(array))
    try {
      (ois readObject ()).asInstanceOf[A]
    } finally {
      ois close ()
    }
  }
}

object SerializationCodec extends SerializationCodec 
Author: christian-schlichtherle, Project: akka-game-of-life, Lines: 36, Source: Codec.scala
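
A quick usage sketch (the Cell case class and CodecDemo object are hypothetical, not part of the project): any Serializable value can be round-tripped through the codec.

import game_of_life.util.SerializationCodec

// Hypothetical payload type; case classes are Serializable by default.
case class Cell(x: Int, y: Int, alive: Boolean)

object CodecDemo extends App {
  val original = Cell(1, 2, alive = true)
  val bytes    = SerializationCodec.encode(original)
  val restored = SerializationCodec.decode[Cell](bytes)
  assert(restored == original)
}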

Example 3: Configuration

//Package declaration and imported dependencies
package org.hammerlab.hadoop

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import org.apache.hadoop.conf
import org.apache.hadoop.conf.{ Configuration => HadoopConfiguration }
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.hammerlab.hadoop.kryo.WritableSerializer
import org.hammerlab.kryo.serializeAs

class Configuration(@transient var value: HadoopConfiguration)
  extends Serializable {
  private def writeObject(out: ObjectOutputStream): Unit = {
    value.write(out)
  }

  private def readObject(in: ObjectInputStream): Unit = {
    value = new HadoopConfiguration(false)
    value.readFields(in)
  }
}

object Configuration {

  def apply(loadDefaults: Boolean = true): Configuration =
    new HadoopConfiguration(loadDefaults)

  def apply(conf: HadoopConfiguration): Configuration =
    new Configuration(conf)

  implicit def wrapConfiguration(conf: HadoopConfiguration): Configuration =
    apply(conf)

  implicit def unwrapConfiguration(conf: Configuration): HadoopConfiguration =
    conf.value

  implicit def unwrapConfigurationBroadcast(confBroadcast: Broadcast[Configuration]): Configuration =
    confBroadcast.value

  implicit def sparkContextToHadoopConfiguration(sc: SparkContext): Configuration =
    sc.hadoopConfiguration

  implicit class ConfWrapper(val conf: HadoopConfiguration) extends AnyVal {
    def serializable: Configuration =
      Configuration(conf)
  }

  def register(kryo: Kryo): Unit = {
    kryo.register(
      classOf[conf.Configuration],
      new WritableSerializer[conf.Configuration]
    )

    kryo.register(
      classOf[Configuration],
      serializeAs[Configuration, conf.Configuration]
    )
  }
} 
Author: hammerlab, Project: spark-util, Lines: 62, Source: Configuration.scala
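
A usage sketch (assuming a live SparkContext named sc; the BroadcastConfSketch object and broadcastConf helper are hypothetical): the implicit wrapConfiguration conversion lets the non-serializable Hadoop configuration be wrapped and broadcast to executors.

import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.hammerlab.hadoop.Configuration

object BroadcastConfSketch {
  // The type ascription triggers the implicit wrapConfiguration conversion,
  // producing the Serializable wrapper that Spark can broadcast.
  def broadcastConf(sc: SparkContext): Broadcast[Configuration] = {
    val conf: Configuration = sc.hadoopConfiguration
    sc.broadcast(conf)
  }
}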

Example 4: SerializableSerializer

//Package declaration and imported dependencies
package org.hammerlab.hadoop.kryo

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }

case class SerializableSerializer[T <: Serializable]()
  extends Serializer[T] {
  override def read(kryo: Kryo, input: Input, `type`: Class[T]): T =
    new ObjectInputStream(input)
      .readObject()
      .asInstanceOf[T]

  override def write(kryo: Kryo, output: Output, t: T): Unit =
    new ObjectOutputStream(output)
      .writeObject(t)
} 
Author: hammerlab, Project: spark-util, Lines: 19, Source: SerializableSerializer.scala

Example 5: SerializableSerializerTest

//Package declaration and imported dependencies
package org.hammerlab.hadoop.kryo

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import org.hammerlab.test.Suite

class SerializableSerializerTest
  extends Suite {
  test("serde") {
    val kryo = new Kryo()
    kryo.setRegistrationRequired(true)
    val baos = new ByteArrayOutputStream()
    val output = new Output(baos)

    val foo = new Foo
    foo.n = 123
    foo.s = "abc"

    intercept[IllegalArgumentException] {
      kryo.writeClassAndObject(output, foo)
    }
    .getMessage should startWith("Class is not registered: org.hammerlab.hadoop.kryo.Foo")

    kryo.register(classOf[Foo], SerializableSerializer[Foo]())

    kryo.writeClassAndObject(output, foo)

    output.close()

    val bytes = baos.toByteArray
    bytes.length should be(93)

    val bais = new ByteArrayInputStream(bytes)

    val input = new Input(bais)
    val after = kryo.readClassAndObject(input).asInstanceOf[Foo]

    after.n should be(foo.n)
    after.s should be(foo.s)
  }
}

class Foo
  extends Serializable {

  var n = 0
  var s = ""

  private def writeObject(out: ObjectOutputStream): Unit = {
    out.writeInt(n)
    out.writeUTF(s)
  }

  private def readObject(in: ObjectInputStream): Unit = {
    n = in.readInt()
    s = in.readUTF()
  }
} 
Author: hammerlab, Project: spark-util, Lines: 61, Source: SerializableSerializerTest.scala

Example 6: OnDiskHeap

//Package declaration and imported dependencies
package offheap

import java.io.ObjectInputStream
import java.nio.channels.Channels
import java.io.RandomAccessFile
import java.io.ObjectOutputStream

class OnDiskHeap[T <: Node[T]](filename : String) extends Heap[T] {
  val file = new RandomAccessFile(filename, "rw")
  
  def append(node : T, reserve : Int = 1) : Long = {
    file.seek(file.length())
    val pointer = file.getFilePointer
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
    var current = file.getFilePointer()
    file.seek(file.length());
    while(current < reserve) {
      file.writeByte(0)
      current += 1
    }
    pointer
  }
  
  def write(pointer : Long, node : T) : Unit = {
    file.seek(pointer)
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
  }
  
  def read(pointer : Long) : T = {
    file.seek(pointer)
    val ois = new ObjectInputStream(Channels.newInputStream(file.getChannel))
    val value = ois.readObject
    value.asInstanceOf[T]
  }
  
  def commit() = {
    file.getFD.sync()
  }
} 
Author: utwente-fmt, Project: lazy-persistent-trie, Lines: 44, Source: OnDiskHeap.scala

Example 7: PacketKeySerializer

//Package declaration and imported dependencies
package edu.uw.at.iroberts.wirefugue.kafka.serdes

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.util

import org.apache.kafka.common.serialization._


class PacketKeySerializer extends Serializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def serialize(topic: String, data: PacketKey): Array[Byte] = {
    val bs = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bs)
    oos.writeObject(data)

    bs.toByteArray
  }
}

class PacketKeyDeserializer extends Deserializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def deserialize(topic: String, data: Array[Byte]): PacketKey = {
    val bs = new ByteArrayInputStream(data)
    val ois = new ObjectInputStream(bs)

    ois.readObject().asInstanceOf[PacketKey]
  }
}

class PacketKeySerde extends Serde[PacketKey] {
  val serializer = new PacketKeySerializer
  val deserializer = new PacketKeyDeserializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    serializer.configure(configs, isKey)
    deserializer.configure(configs, isKey)
  }
  def close(): Unit = {
    serializer.close()
    deserializer.close()
  }
} 
Author: robertson-tech, Project: wirefugue, Lines: 47, Source: PacketKeySerializer.scala
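
A wiring sketch for completeness (hypothetical: the ProducerConfigSketch object is made up, the broker address is a placeholder, and ByteArraySerializer for values is just one possible choice): a custom Serializer like this is registered with a Kafka producer through the standard client properties.

import java.util.Properties

import org.apache.kafka.clients.producer.ProducerConfig

object ProducerConfigSketch {
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  // Register PacketKeySerializer as the key serializer by class name.
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
    classOf[edu.uw.at.iroberts.wirefugue.kafka.serdes.PacketKeySerializer].getName)
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
    "org.apache.kafka.common.serialization.ByteArraySerializer")
}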

Example 8: SeqSerialiser

//Package declaration and imported dependencies
package serialisation

import java.io.{ByteArrayOutputStream, ObjectOutputStream}
import java.util

import org.apache.kafka.common.serialization.Serializer

class SeqSerialiser[ELEMENT] extends Serializer[Seq[ELEMENT]] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
  }

  override def serialize(topic: String, data: Seq[ELEMENT]): Array[Byte] = {
    val byteStream = new ByteArrayOutputStream()
    val objectStream = new ObjectOutputStream(byteStream)
    data.foreach(objectStream.writeObject(_))
    objectStream.close()
    byteStream.toByteArray
  }

  override def close(): Unit = {
  }
} 
Author: benwheeler, Project: kafka-streams-poc, Lines: 24, Source: SeqSerialiser.scala
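
The matching deserializer is not shown in the project; a sketch of one (the SeqDeserialiser name is hypothetical) reads objects back until the byte array is exhausted:

import java.io.{ByteArrayInputStream, EOFException, ObjectInputStream}
import java.util

import scala.collection.mutable.ArrayBuffer

import org.apache.kafka.common.serialization.Deserializer

class SeqDeserialiser[ELEMENT] extends Deserializer[Seq[ELEMENT]] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
  }

  override def deserialize(topic: String, data: Array[Byte]): Seq[ELEMENT] = {
    val objectStream = new ObjectInputStream(new ByteArrayInputStream(data))
    val elements = ArrayBuffer.empty[ELEMENT]
    try {
      // The serializer wrote the elements one by one, so keep reading
      // until the end of the stream is reached.
      while (true) elements += objectStream.readObject().asInstanceOf[ELEMENT]
    } catch {
      case _: EOFException => // all elements consumed
    } finally {
      objectStream.close()
    }
    elements.toList
  }

  override def close(): Unit = {
  }
}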

Example 9: Test10

//Package declaration and imported dependencies
package chapter09

import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}
import scala.collection.mutable.ArrayBuffer

object Test10 extends App {

  case class Person(val name: String) extends Serializable {
    val friends = new ArrayBuffer[Person]

    def addFriend(p: Person) {
      friends += p
    }

    def isFriendOf(p: Person): Boolean = {
      friends.contains(p)
    }
  }

  val tom = Person("tom")
  val jerry = Person("jerry")
  val johnny = Person("johnny")

  tom.addFriend(johnny)
  jerry.addFriend(johnny)

  val persons = Array(tom, jerry, johnny)

  val out = new ObjectOutputStream(new FileOutputStream("src/Chapter09/10.obj"))
  out.writeObject(persons)
  out.close()

  val in = new ObjectInputStream(new FileInputStream("src/Chapter09/10.obj"))
  val Array(_tom,_jerry,_johnny) = in.readObject().asInstanceOf[Array[Person]]

  assert(_tom isFriendOf _johnny)
  assert(_jerry isFriendOf _johnny)
  // assert(_tom isFriendOf _jerry)
} 
Author: johnnyqian, Project: scala-for-the-impatient, Lines: 40, Source: 10.scala

Example 10: Serialisation

//Package declaration and imported dependencies
package root.core.serialisation

import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}

import scala.util.control.NonFatal


object Serialisation extends App {

  val fileName = "tempData.ser"

  val obj = Object
  val map = Map("key" -> obj)

  // serialize and deserialize an object that has a non-serializable field

  serializeObject(obj)
  val desObj = deserializeObj
  assert(desObj.get eq obj, "deserialized obj should be the same")


//  serializeObject(map)
//  val desMap = deserializeObj
//  assert(desMap.get.asInstanceOf[map.type]("key") == map, "deserialized map should be the same")

  private def deserializeObj: Option[AnyRef] = {
    try {
      val fis = new FileInputStream(fileName)
      val ois = new ObjectInputStream(fis)
      Some(ois.readObject())
    } catch { case NonFatal(e) =>
        println(s"Deserialization fail $e")
        None
    }
  }

  private def serializeObject(obj: AnyRef) {
    try {
      val fos = new FileOutputStream(fileName)
      val oos = new ObjectOutputStream(fos)
      oos.writeObject(obj)
      oos.close()
    } catch { case NonFatal(e) =>
        println(s"Serialization fail $e")
    }
  }

}

object Object extends Serializable {
  @transient private val logger = new Logger()
}

class Logger {
  def log() = println("log")
} 
Author: RicoGit, Project: scala-core, Lines: 57, Source: Serialisation.scala

Example 11: SerializationTest

//Package declaration and imported dependencies
package wvlet.log

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import wvlet.log.io.IOUtil

object SerializationTest {

  trait A extends LogSupport {
    info("new A")
    def hello = info("hello")
  }
}


class SerializationTest extends Spec {

  import SerializationTest._

  "Logger" should {
    "serializable" in {
      val a = new A {}
      val b = new ByteArrayOutputStream()
      IOUtil.withResource(new ObjectOutputStream(b)) {out =>
        out.writeObject(a)
      }
      val ser = b.toByteArray
      IOUtil.withResource(new ObjectInputStream(new ByteArrayInputStream(ser))) { in =>
        info("deserialization")
        val a = in.readObject().asInstanceOf[A]
        a.hello
      }
    }
  }
} 
Author: wvlet, Project: log, Lines: 36, Source: SerializationTest.scala

Example 12: bytesToValue

//Package declaration and imported dependencies
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.io.{Serializable => JavaSerializable}

import scala.collection.mutable
import scala.reflect.ClassTag
import scala.reflect._

package object hedgehog {
  private[hedgehog] def bytesToValue[T <: JavaSerializable: ClassTag](bytes: Array[Byte]): T = {
    if (classTag[T].runtimeClass == classOf[String]) {
      new String(bytes).asInstanceOf[T]
    } else {
      val in = new ByteArrayInputStream(bytes)
      try {
        new ObjectInputStream(in).readObject.asInstanceOf[T]
      } finally {
        in.close()
      }
    }
  }

  private[hedgehog] def valueToBytes[T <: JavaSerializable: ClassTag](value: T): Array[Byte] =
    value match {
      case str: String => str.getBytes
      case _ =>
        val out = new ByteArrayOutputStream()
        try {
          new ObjectOutputStream(out).writeObject(value)
          out.toByteArray
        } finally {
          out.close()
        }
    }

  private[hedgehog] object Timer {
    private val times = new mutable.ArrayBuffer[Long]

    def time[T](func: => T): T = {
      val st = System.currentTimeMillis
      try {
        func
      } finally {
        times += (System.currentTimeMillis - st)
      }
    }

    def printStats(): Unit = {
      if (times.nonEmpty) {
        val mean = times.sum.toDouble / times.size.toDouble
        val std = math.sqrt(times.map(t => math.pow(t - mean, 2)).sum / times.size)
        println(s"max: ${times.max}, min: ${times.min}, avg: $mean, std: $std")
      } else {
        println("No timings!")
      }
    }

    def average: Double = times.sum.toDouble / times.size.toDouble

    def reset(): Unit = times.clear()
  }
} 
Author: aluketa, Project: hedgehog, Lines: 62, Source: package.scala
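
A round-trip sketch for the two helpers (the RoundTripDemo object is hypothetical; it has to sit in the hedgehog package itself because both methods are private[hedgehog]):

package hedgehog

object RoundTripDemo extends App {
  // Strings short-circuit to getBytes / new String rather than Java serialization.
  val stringBytes = valueToBytes("payload")
  assert(bytesToValue[String](stringBytes) == "payload")

  // Any other java.io.Serializable value goes through ObjectOutputStream.
  val boxed: java.lang.Integer = 42
  assert(bytesToValue[java.lang.Integer](valueToBytes(boxed)) == boxed)
}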

Example 13: serializeRoundTrip

//Package declaration and imported dependencies
package deaktator.pops

import java.io.{ObjectInputStream, ByteArrayInputStream, ObjectOutputStream, ByteArrayOutputStream}


trait SerializabilityTest {
  def serializeRoundTrip[A](a: A): A = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    oos.writeObject(a)
    val bais = new ByteArrayInputStream(baos.toByteArray)
    oos.close()
    val ois = new ObjectInputStream(bais)
    val newA: A = ois.readObject().asInstanceOf[A]
    ois.close()
    newA
  }
} 
Author: deaktator, Project: pops, Lines: 19, Source: SerializabilityTest.scala

Example 14: Of

//Package declaration and imported dependencies
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.expressions.UnsafeMapData
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedMapType])
case class EmbeddedMap(elements: Map[Any, Any]) extends Serializable {
  override def hashCode(): Int = 1

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedMap => that.elements == this.elements
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedMapType extends UserDefinedType[EmbeddedMap] {

  override def sqlType: DataType = MapType(StringType, StringType)

  override def serialize(obj: EmbeddedMap): Any = {
    ArrayBasedMapData(obj.elements.keySet.map{ elem =>
      val outKey = new ByteArrayOutputStream()
      val osKey = new ObjectOutputStream(outKey)
      osKey.writeObject(elem)
      UTF8String.fromBytes(outKey.toByteArray)
    }.toArray,
      obj.elements.values.map{ elem =>
        val outValue = new ByteArrayOutputStream()
        val osValue = new ObjectOutputStream(outValue)
        osValue.writeObject(elem)
        UTF8String.fromBytes(outValue.toByteArray)
      }.toArray)
  }

  override def deserialize(datum: Any): EmbeddedMap = {
    datum match {
      case values: UnsafeMapData =>
        new EmbeddedMap(values.keyArray().toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        }.zip(values.valueArray().toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        }).toMap)
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedMap] = classOf[EmbeddedMap]
}

object EmbeddedMapType extends EmbeddedMapType 
Author: orientechnologies, Project: spark-orientdb, Lines: 61, Source: EmbeddedMapType.scala

Example 15: Of

//Package declaration and imported dependencies
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedSetType])
case class EmbeddedSet(elements: Array[Any]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedSet => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedSetType extends UserDefinedType[EmbeddedSet] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: EmbeddedSet): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): EmbeddedSet = {
    datum match {
      case values: ArrayData =>
        new EmbeddedSet(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedSet] = classOf[EmbeddedSet]
}

object EmbeddedSetType extends EmbeddedSetType 
Author: orientechnologies, Project: spark-orientdb, Lines: 60, Source: EmbeddedSetType.scala


Note: The java.io.ObjectOutputStream class examples in this article were compiled from open-source code hosted on platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors. When redistributing or using the code, please follow the corresponding project's license; do not reproduce this article without permission.