This article collects typical usage examples of the Scala class org.apache.avro.generic.GenericDatumReader. If you are unsure what GenericDatumReader is for, or how to use it from Scala, the curated class code examples below may help.
The following shows 5 code examples of the GenericDatumReader class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
Example 1: Tip
// Set up the package name and import the required classes
package com.alvin.niagara.model

import java.io.ByteArrayOutputStream

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.{DecoderFactory, EncoderFactory}

import scala.io.Source

case class Tip(business_id: String, date: String, likes: Long, text: String, `type`: String, user_id: String)

object TipSerde {

  // Load the Avro schema bundled with the application resources.
  val avroSchema = Source.fromInputStream(getClass.getResourceAsStream("/schema/tip.avsc")).mkString
  val schema = new Schema.Parser().parse(avroSchema)

  val reader = new GenericDatumReader[GenericRecord](schema)
  val writer = new GenericDatumWriter[GenericRecord](schema)

  def serialize(tip: Tip): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val encoder = EncoderFactory.get.binaryEncoder(out, null)
    val avroRecord = new GenericData.Record(schema)

    avroRecord.put("business_id", tip.business_id)
    avroRecord.put("date", tip.date)
    avroRecord.put("likes", tip.likes)
    avroRecord.put("text", tip.text)
    avroRecord.put("type", tip.`type`)
    avroRecord.put("user_id", tip.user_id)

    writer.write(avroRecord, encoder)
    encoder.flush()
    out.close()
    out.toByteArray
  }

  def deserialize(bytes: Array[Byte]): Tip = {
    val decoder = DecoderFactory.get.binaryDecoder(bytes, null)
    val record = reader.read(null, decoder)
    Tip(
      record.get("business_id").toString,
      record.get("date").toString,
      record.get("likes").asInstanceOf[Long],
      record.get("text").toString,
      record.get("type").toString,
      record.get("user_id").toString
    )
  }
}
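A minimal round-trip sketch for this serializer; the field values are made up for illustration:

val tip = Tip(
  business_id = "abc123",
  date = "2016-01-01",
  likes = 3L,
  text = "Try the espresso",
  `type` = "tip",
  user_id = "user42"
)

val bytes = TipSerde.serialize(tip)        // raw Avro binary, no schema embedded
val restored = TipSerde.deserialize(bytes) // decoded with the same tip.avsc schema
assert(restored == tip)

Note that binaryEncoder produces bare Avro binary with no embedded schema, so unlike the DataFileWriter-based examples below, reader and writer must agree on tip.avsc out of band.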
Example 2: AvroUtils
// Set up the package name and import the required classes
package pulse.kafka.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File}

import com.twitter.util.Future
import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.DecoderFactory
import pulse.kafka.extensions.managedByteArrayInputStream
import pulse.kafka.extensions.managedByteArrayOutputStream
import pulse.kafka.extensions.catsStdInstancesForFuture

import scala.concurrent.ExecutionContext.Implicits._

object AvroUtils {

  import pulse.common.syntax._

  def jsonToAvroBytes(json: String, schemaFile: File): Future[Array[Byte]] =
    use(new ByteArrayOutputStream()) { output =>
      for {
        s <- loadSchema(schemaFile)
        _ <- convertImpl(json, output, s)
      } yield output.toByteArray
    }

  private def convertImpl(json: String, output: ByteArrayOutputStream, schemaSpec: Schema): Future[GenericDatumReader[GenericRecord]] =
    use(new ByteArrayInputStream(json.getBytes)) { input =>
      for {
        w <- getWriter(output, schemaSpec)
        r <- getReader(input, schemaSpec, w)
      } yield r
    }

  def getReader(input: ByteArrayInputStream, schemaSpec: Schema, w: DataFileWriter[GenericRecord]) = Future.value {
    // Decode the JSON input against the schema, then append the record to the container writer.
    val reader = new GenericDatumReader[GenericRecord](schemaSpec)
    val datum = reader.read(null, getJsonDecoder(input, schemaSpec))
    w.append(datum)
    w.flush()
    reader
  }

  private def getJsonDecoder(input: ByteArrayInputStream, schema: Schema) =
    DecoderFactory.get.jsonDecoder(schema, new DataInputStream(input))

  private def getWriter(output: ByteArrayOutputStream, schemaSpec: Schema) =
    Future.value {
      val writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
      writer.create(schemaSpec, output)
    }

  private def loadSchema(schemaFile: File): Future[Schema] =
    Future {
      new Schema.Parser().parse(schemaFile)
    }
}
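A hedged usage sketch; the schema path and JSON payload are hypothetical, and com.twitter.util.Await is used only to block for the result in a test setting:

import java.io.File
import com.twitter.util.Await

val schemaFile = new File("/path/to/schema.avsc") // hypothetical schema file
val json = """{"business_id": "abc123", "likes": 3}"""

val bytesF = AvroUtils.jsonToAvroBytes(json, schemaFile)
val bytes = Await.result(bytesF) // Avro object-container bytes, schema embedded in the header

Because the record goes through DataFileWriter, the result is Avro's object-container format rather than the bare binary of Example 1.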
Example 3: AvroUtils
// Set up the package name and import the required classes
package pulse.services.example.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File}

import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.DecoderFactory
import pulse.services.example.extensions._

object AvroUtils {

  def jsonToAvroBytes(json: String, schemaFile: File) = {
    use(new ByteArrayOutputStream())(output => {
      val schemaSpec = loadSchema(schemaFile)
      use(new ByteArrayInputStream(json.getBytes))(input => {
        val writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
        writer.create(schemaSpec, output)
        // Decode the JSON against the schema and write it out as an Avro container record.
        val reader = new GenericDatumReader[GenericRecord](schemaSpec)
        val datum = reader.read(null, getJsonDecoder(input, schemaSpec))
        writer.append(datum)
        writer.flush()
      })
      output.toByteArray
    })
  }

  def getJsonDecoder(input: ByteArrayInputStream, schema: Schema) =
    DecoderFactory.get.jsonDecoder(schema, new DataInputStream(input))

  def loadSchema(schemaFile: File) =
    new Schema.Parser().parse(schemaFile)
}
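Since DataFileWriter emits Avro's object-container format with the schema embedded in the header, the output can be read back without supplying the writer schema. A minimal read-back sketch, assuming bytes holds the Array[Byte] returned by jsonToAvroBytes above:

import java.io.ByteArrayInputStream
import org.apache.avro.file.DataFileStream
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}

val stream = new DataFileStream[GenericRecord](
  new ByteArrayInputStream(bytes),
  new GenericDatumReader[GenericRecord]() // schema is taken from the container header
)
val record: GenericRecord = stream.next()
stream.close()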
Example 4: props
// Set up the package name and import the required classes
package com.kakao.cuesheet.convert

import java.util.Arrays.copyOfRange

import kafka.serializer.Decoder
import kafka.utils.VerifiableProperties
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}

sealed trait AvroDecoder[T] extends Decoder[T] {

  def props: VerifiableProperties

  // Schema and framing configuration come from the Kafka consumer properties.
  protected val schema = new Schema.Parser().parse(props.getString(Avro.SCHEMA))
  protected val skipBytes = props.getInt(Avro.SKIP_BYTES, 0)

  protected val reader = new GenericDatumReader[GenericRecord](schema)
  protected val decoder = Avro.recordDecoder(reader)

  // Drop a fixed-size prefix (e.g. a magic byte plus schema-registry id) before decoding.
  private def skip(bytes: Array[Byte], size: Int): Array[Byte] = {
    val length = bytes.length
    length - size match {
      case remaining if remaining > 0 => copyOfRange(bytes, size, length)
      case _ => new Array[Byte](0)
    }
  }

  def parse(bytes: Array[Byte]): GenericRecord = {
    val data = if (skipBytes == 0) bytes else skip(bytes, skipBytes)
    decoder(data)
  }
}

class AvroRecordDecoder(val props: VerifiableProperties) extends AvroDecoder[GenericRecord] {
  override def fromBytes(bytes: Array[Byte]): GenericRecord = parse(bytes)
}

class AvroMapDecoder(val props: VerifiableProperties) extends AvroDecoder[Map[String, Any]] {
  override def fromBytes(bytes: Array[Byte]): Map[String, Any] = Avro.toMap(parse(bytes))
}

class AvroJsonDecoder(val props: VerifiableProperties) extends AvroDecoder[String] {
  override def fromBytes(bytes: Array[Byte]): String = Avro.toJson(parse(bytes))
}
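The Avro helper object referenced above (Avro.SCHEMA, Avro.SKIP_BYTES, Avro.recordDecoder, Avro.toMap, Avro.toJson) is not part of this excerpt. A plausible sketch of recordDecoder, assuming it simply wraps a binary decoder around the given reader, might be:

import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.io.DecoderFactory

// Hypothetical reconstruction, not the original implementation: builds a
// function that decodes raw Avro binary into a GenericRecord.
def recordDecoder(reader: GenericDatumReader[GenericRecord]): Array[Byte] => GenericRecord =
  bytes => reader.read(null, DecoderFactory.get.binaryDecoder(bytes, null))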
Example 5: JsonEncodingDecoding
// Set up the package name and import the required classes
package com.giampaolotrapasso.avrosamples.test

import java.io.ByteArrayInputStream

import com.giampaolotrapasso.avrosamples.SchemaRegistry
import com.giampaolotrapasso.avrosamples.events.{Event, MovieChangedV1, MovieChangedV2, MovieChangedV3}
import com.giampaolotrapasso.avrosamples.serializers.{BinarySerializer, DataWithSchemaSerializer, JsonSerializer}
import com.sksamuel.avro4s.{AvroInputStream, FromRecord, RecordFormat, ToRecord}
import org.apache.avro.Schema
import org.apache.avro.file.SeekableByteArrayInput
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.io.DecoderFactory

class JsonEncodingDecoding extends TestSpec {

  val title = "Raiders of lost ark"
  val year = 1986
  val director = "Spielberg"
  val wonOscars = 1

  // Read JSON-encoded data written with oldSchema, resolving it to newSchema.
  def deserialize[A <: Event: ToRecord: FromRecord: RecordFormat](oldSchema: Schema,
                                                                  newSchema: Schema,
                                                                  stream: ByteArrayInputStream) = {
    val gdr = new GenericDatumReader[GenericRecord](oldSchema, newSchema)
    val jsonDecoder = DecoderFactory.get().jsonDecoder(newSchema, stream)
    val record: GenericRecord = gdr.read(null, jsonDecoder)
    val format = RecordFormat[A]
    format.from(record)
  }

  "BinaryEncodingDecodingTest" should "deserialize an added field V1(title, year) to V2(title, year, director)" in {
    val obj = MovieChangedV1(title, year)
    val bytes: Array[Byte] = JsonSerializer.serializeV1(obj)
    val in = new SeekableByteArrayInput(bytes)
    val result = deserialize[MovieChangedV2](SchemaRegistry.movieChanged(1), SchemaRegistry.movieChanged(2), in)
    result should matchPattern {
      case MovieChangedV2(`title`, `year`, "unknown") =>
    }
  }
}
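The "unknown" in the pattern match comes from Avro schema resolution: when V1 data (which has no director field) is read against the V2 reader schema, the missing field is filled from the reader schema's default, so the V2 schema must declare one. A hypothetical V2 schema illustrating this; the field names follow the test, everything else is assumed:

val movieChangedV2Schema =
  """{
    |  "type": "record",
    |  "name": "MovieChangedV2",
    |  "fields": [
    |    { "name": "title",    "type": "string" },
    |    { "name": "year",     "type": "int" },
    |    { "name": "director", "type": "string", "default": "unknown" }
    |  ]
    |}""".stripMargin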