本文整理汇总了Scala中org.json4s.jackson.Serialization.read类的典型用法代码示例。如果您正苦于以下问题:Scala read类的具体用法?Scala read怎么用?Scala read使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了read类的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: Segment
//设置package包名称以及导入依赖的类
package com.crystal
package models
// Kinesis
import stores.KinesisStream
// JSON
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{read, write}
// Rules
import rule_engine.rules.Rule
/**
 * A named user segment defined by a rule. Provides membership checks against a
 * user's attributes and publishes enter/exit events to the outbound Kinesis stream.
 *
 * Note: the original declared the first parameter as `val name` — the `val` is
 * redundant on a case-class parameter and has been dropped (interface unchanged).
 */
case class Segment(name: String, rule: Rule) {
  import Segment._

  /** True when the user's current attribute map satisfies this segment's rule. */
  def containsUser(user: User): Boolean =
    rule.fulfilledBy(user.toMap)

  /** True when the user already satisfied the rule before any newly arrived events. */
  def alreadyContainedUser(user: User): Boolean = {
    // Rebuild the user from persisted actions only, excluding not-yet-persisted events.
    val userWithoutNewEvents = User(user.id, user.persistedActions)
    rule.fulfilledBy(userWithoutNewEvents.toMap)
  }

  /** Publishes an EnterSegmentEvent for the user, keyed by user id, as UTF-8 JSON. */
  def publishUserEntrance(user: User): Unit = {
    // ShortTypeHints embeds the event class name so consumers can distinguish event types.
    implicit val formats = Serialization.formats(ShortTypeHints(List(classOf[EnterSegmentEvent])))
    val event = EnterSegmentEvent(user.id, name)
    stream.put(user.id, write(event).getBytes("UTF-8"))
  }

  /** Publishes an ExitSegmentEvent for the user, keyed by user id, as UTF-8 JSON. */
  def publishUserExit(user: User): Unit = {
    implicit val formats = Serialization.formats(ShortTypeHints(List(classOf[ExitSegmentEvent])))
    val event = ExitSegmentEvent(user.id, name)
    stream.put(user.id, write(event).getBytes("UTF-8"))
  }
}
object Segment {
// Application config loaded once at class-load time; `.get` fails fast here if
// configuration is missing rather than at first publish. NOTE(review): consider
// surfacing a clearer error than the bare `.get` NoSuchElementException.
val config = AppConfig.load().get
// Shared outbound Kinesis stream that all Segment instances publish events to.
val stream = KinesisStream(config.outStreamName)
// Wire-format payloads for segment membership changes; snake_case field names
// are intentional — they define the published JSON schema.
case class EnterSegmentEvent(user_id: String, segment_name: String)
case class ExitSegmentEvent(user_id: String, segment_name: String)
}
示例2: TaskStatus
//设置package包名称以及导入依赖的类
package com.pagerduty.scheduler.model
import java.time.Instant
import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization.{read, write}
/**
 * Outcome of a task's scheduling attempts: how many attempts have been made,
 * the completion result, and the next retry time (if any).
 */
case class TaskStatus(numberOfAttempts: Int, completionResult: CompletionResult, nextAttemptAt: Option[Instant]) {
  // Formats for serialization; mirrored in the companion object for fromJson.
  implicit val formats = DefaultFormats + new TaskKeyTimeSerializer + new CompletionResultSerializer

  /** Whether the task has reached a terminal state. */
  def isComplete: Boolean = completionResult.isComplete

  /** Renders this status as a JSON string (inverse of TaskStatus.fromJson). */
  def toJson: String = write(this)
}
object TaskStatus {
  // Formats for deserialization; mirrors the instance-side formats used by toJson.
  implicit val formats = DefaultFormats + new TaskKeyTimeSerializer + new CompletionResultSerializer

  /** Status for a task that was dropped before any attempt was made. */
  val Dropped: TaskStatus =
    TaskStatus(numberOfAttempts = 0, completionResult = CompletionResult.Dropped, nextAttemptAt = None)

  /** Initial status: zero attempts, incomplete, no retry scheduled. */
  val NeverAttempted: TaskStatus =
    TaskStatus(numberOfAttempts = 0, completionResult = CompletionResult.Incomplete, nextAttemptAt = None)

  /** Terminal success after the given number of attempts. */
  def successful(numberOfAttempts: Int): TaskStatus =
    TaskStatus(numberOfAttempts, completionResult = CompletionResult.Failure, nextAttemptAt = None).copy(completionResult = CompletionResult.Success)

  /** Terminal failure after the given number of attempts. */
  def failed(numberOfAttempts: Int): TaskStatus =
    TaskStatus(numberOfAttempts, completionResult = CompletionResult.Failure, nextAttemptAt = None)

  /** Parses a TaskStatus from JSON (inverse of toJson). */
  def fromJson(taskStatus: String): TaskStatus = read[TaskStatus](taskStatus)
}
示例3: TaskAttempt
//设置package包名称以及导入依赖的类
package com.pagerduty.scheduler.model
import java.time.Instant
import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization.{read, write}
/**
 * Record of a single execution attempt of a task: timing, result, and —
 * when the attempt failed — the captured exception details.
 */
case class TaskAttempt(
    attemptNumber: Int,
    startedAt: Instant,
    finishedAt: Instant,
    taskResult: CompletionResult,
    taskResultUpdatedAt: Instant,
    exceptionClass: Option[String],
    exceptionMessage: Option[String],
    exceptionStackTrace: Option[String]) {
  // Formats for serialization; mirrored in the companion object for fromJson.
  implicit val formats = DefaultFormats + new TaskKeyTimeSerializer + new CompletionResultSerializer

  /** Renders this attempt as a JSON string (inverse of TaskAttempt.fromJson). */
  def toJson: String = write(this)
}
object TaskAttempt {

  /**
   * Builds a TaskAttempt from an optional exception, expanding it into the
   * class-name, message, and stack-trace string columns.
   */
  def apply(
      attemptNumber: Int,
      startedAt: Instant,
      finishedAt: Instant,
      taskResult: CompletionResult,
      taskResultUpdatedAt: Instant,
      exception: Option[Throwable]
  ): TaskAttempt = {
    TaskAttempt(
      attemptNumber,
      startedAt,
      finishedAt,
      taskResult,
      taskResultUpdatedAt,
      exceptionClass = exception.map(_.getClass.getName),
      exceptionMessage = exception.map(_.getMessage),
      exceptionStackTrace = exception.map(stackTraceString)
    )
  }

  // Replacement for `Throwable.getStackTraceString`, which is deprecated since
  // Scala 2.11 and removed in 2.13. Reproduces RichException's exact output:
  // one frame per line, each line terminated by the platform line separator.
  private def stackTraceString(t: Throwable): String =
    t.getStackTrace.mkString("", System.lineSeparator(), System.lineSeparator())

  // Formats for deserialization; mirrors the instance-side formats used by toJson.
  implicit val formats = DefaultFormats + new TaskKeyTimeSerializer + new CompletionResultSerializer

  /** Parses a TaskAttempt from JSON (inverse of toJson). */
  def fromJson(taskAttempt: String): TaskAttempt = read[TaskAttempt](taskAttempt)
}
示例4: OrderTaggingEventAdapter
//设置package包名称以及导入依赖的类
package poc.persistence.write
import java.nio.charset.Charset
import akka.actor.ExtendedActorSystem
import akka.event.Logging
import akka.persistence.journal.{Tagged, WriteEventAdapter}
import org.json4s.DefaultFormats
import poc.persistence.events.{OrderCancelled, OrderInitialized}
/**
 * Write-side event adapter that tags order events with "UserEvent" so they can
 * be consumed via tagged journal queries.
 */
class OrderTaggingEventAdapter(actorSystem: ExtendedActorSystem) extends WriteEventAdapter {
  private val log = Logging.getLogger(actorSystem, this)

  /**
   * Wraps known order events in Tagged; any other event type passes through
   * unchanged. The original match was non-exhaustive, so an unrecognized event
   * would abort persistence with a MatchError.
   */
  override def toJournal(event: Any): Any = event match {
    case e: OrderInitialized =>
      log.debug("tagging OrderInitialized event")
      Tagged(e, Set("UserEvent"))
    case e: OrderCancelled =>
      log.debug("tagged OrderCancelled event")
      Tagged(e, Set("UserEvent"))
    case other =>
      // Untagged passthrough keeps the journal working for new event types.
      other
  }

  // Tagging does not change the serialized representation, so no manifest is needed.
  override def manifest(event: Any): String = ""
}
import akka.serialization.Serializer
/**
 * Akka serializer that round-trips events through UTF-8 JSON via json4s,
 * using the manifest class to drive deserialization.
 */
class EventSerialization(actorSystem: ExtendedActorSystem) extends Serializer {
  import org.json4s.jackson.Serialization.{read, write}

  private val log = Logging.getLogger(actorSystem, this)

  val UTF8: Charset = Charset.forName("UTF-8")
  implicit val formats = DefaultFormats

  // Completely unique value to identify this implementation of Serializer, used to optimize network traffic.
  // Values from 0 to 16 are reserved for Akka internal usage.
  // Make sure this does not conflict with any other kind of serializer or you will have problems
  override def identifier: Int = 90020001

  // The manifest (target class) is required so fromBinary knows what to deserialize into.
  override def includeManifest = true

  /** Decodes the UTF-8 JSON payload into an instance of the manifest class. */
  override def fromBinary(bytes: Array[Byte], manifestOpt: Option[Class[_]]): AnyRef = {
    // json4s `read` needs an implicit Manifest to pick the target type;
    // fall back to AnyRef when no manifest was provided.
    implicit val manifest = manifestOpt match {
      case Some(x) => Manifest.classType(x)
      case None => Manifest.AnyRef
    }
    read(new String(bytes, UTF8))
  }

  /**
   * Encodes the object as UTF-8 JSON bytes. The original serialized twice:
   * it computed `write(o)` into an unused val and then called `write(o)` again.
   */
  override def toBinary(o: AnyRef): Array[Byte] =
    write(o).getBytes(UTF8)
}
示例5: WorldPipeline
//设置package包名称以及导入依赖的类
package springnz.sparkplug.examples
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
import org.json4s._
import org.json4s.jackson.Serialization.read
import springnz.sparkplug.core._
import springnz.sparkplug.data.JdbcDataFrameSource
import springnz.sparkplug.examples.WorldDataTypes._
import scala.util.Try
object WorldPipeline {
  /** Default json4s formats shared by every pipeline instance. */
  implicit val formats: Formats = DefaultFormats
}
/**
 * Pipeline that loads the Country table over JDBC and parses each row's JSON
 * representation into a Country, dropping rows that fail to parse.
 *
 * Fix: the for-comprehension generator arrow and the lambda arrow had been
 * garbled to `?` by an encoding error, which does not compile; restored to
 * `<-` and `=>`.
 */
trait WorldPipeline {
  import WorldPipeline._

  /** Source operation producing the raw "Country" DataFrame from the "world" database. */
  def dataSource: SparkOperation[DataFrame] = JdbcDataFrameSource("world", "Country")

  /** Countries parsed from the JSON form of each row; unparseable rows are filtered out. */
  lazy val countriesOperation: SparkOperation[RDD[Country]] = for {
    dataFrame <- dataSource
  } yield {
    dataFrame.toJSON.mapWithFilter {
      // Try + toOption turns parse failures into None, which mapWithFilter drops.
      jsonString => Try { read[Country](jsonString) }.toOption
    }
  }
}