This article collects typical usage examples of the Scala class org.apache.avro.generic.GenericDatumWriter. If you have been wondering what GenericDatumWriter does, how to use it, or where to find working examples, the curated class examples here may help.
Six code examples of the GenericDatumWriter class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
Example 1: Tip
// Package declaration and imported dependencies
package com.alvin.niagara.model

import java.io.ByteArrayOutputStream

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.{DecoderFactory, EncoderFactory}

import scala.io.Source

case class Tip(business_id: String, date: String, likes: Long, text: String, `type`: String, user_id: String)

object TipSerde {

  // Parse the Avro schema shipped as a classpath resource
  val avroSchema = Source.fromInputStream(getClass.getResourceAsStream("/schema/tip.avsc")).mkString
  val schema = new Schema.Parser().parse(avroSchema)

  val reader = new GenericDatumReader[GenericRecord](schema)
  val writer = new GenericDatumWriter[GenericRecord](schema)

  // Encode a Tip as Avro binary
  def serialize(tip: Tip): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val encoder = EncoderFactory.get.binaryEncoder(out, null)

    val avroRecord = new GenericData.Record(schema)
    avroRecord.put("business_id", tip.business_id)
    avroRecord.put("date", tip.date)
    avroRecord.put("likes", tip.likes)
    avroRecord.put("text", tip.text)
    avroRecord.put("type", tip.`type`)
    avroRecord.put("user_id", tip.user_id)

    writer.write(avroRecord, encoder)
    encoder.flush()
    out.close()
    out.toByteArray
  }

  // Decode Avro binary back into a Tip
  def deserialize(bytes: Array[Byte]): Tip = {
    val decoder = DecoderFactory.get.binaryDecoder(bytes, null)
    val record = reader.read(null, decoder)
    Tip(
      record.get("business_id").toString,
      record.get("date").toString,
      record.get("likes").asInstanceOf[Long],
      record.get("text").toString,
      record.get("type").toString,
      record.get("user_id").toString
    )
  }
}
Developer: AlvinCJin, Project: Niagara, Lines of code: 60, Source: Tip.scala
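A quick round trip shows how the two halves of TipSerde fit together. The sketch below is not part of the original project: the field values are invented, and it assumes /schema/tip.avsc is available on the classpath.

import com.alvin.niagara.model.{Tip, TipSerde}

object TipSerdeDemo extends App {
  // Invented sample values, for illustration only
  val tip = Tip("b-001", "2016-05-01", 3L, "Great pastrami", "tip", "u-042")

  val bytes: Array[Byte] = TipSerde.serialize(tip)   // Avro binary
  val restored: Tip = TipSerde.deserialize(bytes)    // back to the case class

  assert(restored == tip)
  println(s"Round trip OK: ${bytes.length} bytes")
}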
Example 2: AvroNodeSerializer
// Package declaration and imported dependencies
package eventgen.launcher.core.avro

import java.io.ByteArrayOutputStream

import eventgen.launcher.core.NodeSerializer
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}
import org.apache.avro.io.EncoderFactory

class AvroNodeSerializer extends NodeSerializer[Schema, AvroNode[_], ByteArrayOutputStream] {

  override def serialize(metadata: Schema, node: AvroNode[_]): ByteArrayOutputStream = {
    val record = node.asInstanceOf[AvroRecord]

    // The writer is created without a schema and receives one afterwards
    val writer = new GenericDatumWriter[GenericRecord]
    writer.setSchema(metadata)

    val outputStream = new ByteArrayOutputStream
    // The trailing `true` asks for pretty-printed JSON output
    val encoder = EncoderFactory.get().jsonEncoder(metadata, outputStream, true)
    writer.write(record.value, encoder)
    encoder.flush()
    outputStream
  }
}
Developer: gpulse, Project: eventgenerator, Lines of code: 23, Source: AvroNodeSerializer.scala
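AvroNode and AvroRecord are types from the eventgen project, so the snippet above is not runnable on its own. The core pattern, though — a schema-less GenericDatumWriter given its schema via setSchema before JSON-encoding a record — can be reproduced standalone. In this sketch the Ping schema is a made-up stand-in:

import java.io.ByteArrayOutputStream

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.EncoderFactory

object JsonEncodeSketch extends App {
  // Made-up minimal record schema; any record schema behaves the same way
  val schema = new Schema.Parser().parse(
    """{"type":"record","name":"Ping","fields":[{"name":"id","type":"long"}]}""")

  val record = new GenericData.Record(schema)
  record.put("id", 1L)

  val writer = new GenericDatumWriter[GenericRecord]
  writer.setSchema(schema)                 // schema supplied after construction

  val out = new ByteArrayOutputStream
  val encoder = EncoderFactory.get().jsonEncoder(schema, out, true)
  writer.write(record, encoder)
  encoder.flush()
  println(out.toString("UTF-8"))           // pretty-printed {"id": 1}
}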
Example 3: AvroUtils
// Package declaration and imported dependencies
package pulse.kafka.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File}

import com.twitter.util.Future
import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.DecoderFactory
import pulse.kafka.extensions.managedByteArrayInputStream
import pulse.kafka.extensions.managedByteArrayOutputStream
import pulse.kafka.extensions.catsStdInstancesForFuture

import scala.concurrent.ExecutionContext.Implicits._

object AvroUtils {

  import pulse.common.syntax._

  // Decode a JSON payload against the schema and re-encode it as an Avro container file
  def jsonToAvroBytes(json: String, schemaFile: File): Future[Array[Byte]] =
    use(new ByteArrayOutputStream()) { output =>
      for {
        s <- loadSchema(schemaFile)
        _ <- convertImpl(json, output, s)
      } yield output.toByteArray
    }

  private def convertImpl(json: String, output: ByteArrayOutputStream, schemaSpec: Schema): Future[GenericDatumReader[GenericRecord]] =
    use(new ByteArrayInputStream(json.getBytes)) { input =>
      for {
        w <- getWriter(output, schemaSpec)
        r <- getReader(input, schemaSpec, w)
      } yield r
    }

  def getReader(input: ByteArrayInputStream, schemaSpec: Schema, w: DataFileWriter[GenericRecord]) = Future.value {
    val reader = new GenericDatumReader[GenericRecord](schemaSpec)
    val datum = reader.read(null, getJsonDecoder(input, schemaSpec))
    w.append(datum)
    w.flush()
    reader
  }

  private def getJsonDecoder(input: ByteArrayInputStream, schema: Schema) =
    DecoderFactory.get.jsonDecoder(schema, new DataInputStream(input))

  private def getWriter(output: ByteArrayOutputStream, schemaSpec: Schema) = {
    Future.value {
      val writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
      writer.create(schemaSpec, output)
    }
  }

  private def loadSchema(schemaFile: File): Future[Schema] =
    Future {
      new Schema.Parser().parse(schemaFile)
    }
}
Developer: gpulse, Project: kafka, Lines of code: 60, Source: AvroUtils.scala
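Calling the utility is straightforward once the pulse helpers are on the classpath. The sketch below is illustrative only: the schema path and JSON payload are placeholders, and since jsonToAvroBytes returns a Twitter Future, the result is extracted with com.twitter.util.Await.

import java.io.File

import com.twitter.util.Await
import pulse.kafka.avro.AvroUtils

object JsonToAvroDemo extends App {
  // Placeholder payload and schema path; the JSON must match the schema
  val json = """{"business_id": "b-001", "likes": 3}"""
  val bytes = Await.result(AvroUtils.jsonToAvroBytes(json, new File("schemas/tip.avsc")))
  println(s"Produced ${bytes.length} bytes of Avro container-file data")
}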
Example 4: AvroUtils
// Package declaration and imported dependencies
package pulse.services.example.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File}

import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.DecoderFactory
import pulse.services.example.extensions._

object AvroUtils {

  // Synchronous variant of the previous example: JSON in, Avro container-file bytes out
  def jsonToAvroBytes(json: String, schemaFile: File) = {
    use(new ByteArrayOutputStream())(output => {
      val schemaSpec = loadSchema(schemaFile)
      use(new ByteArrayInputStream(json.getBytes))(input => {
        val writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
        writer.create(schemaSpec, output)
        val reader = new GenericDatumReader[GenericRecord](schemaSpec)
        val datum = reader.read(null, getJsonDecoder(input, schemaSpec))
        writer.append(datum)
        writer.flush()
      })
      output.toByteArray
    })
  }

  def getJsonDecoder(input: ByteArrayInputStream, schema: Schema) =
    DecoderFactory.get.jsonDecoder(schema, new DataInputStream(input))

  def loadSchema(schemaFile: File) =
    new Schema.Parser().parse(schemaFile)
}
Developer: gpulse, Project: services, Lines of code: 34, Source: AvroUtils.scala
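Because the bytes come out of a DataFileWriter, they form a complete Avro container file whose writer schema travels with the data. Assuming the project's use helper returns its block's result (as the code above implies), the output can be verified with a schema-less DataFileStream; the schema path and payload below are placeholders.

import java.io.{ByteArrayInputStream, File}

import org.apache.avro.file.DataFileStream
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import pulse.services.example.avro.AvroUtils

object ContainerFileRoundTrip extends App {
  val bytes = AvroUtils.jsonToAvroBytes("""{"id": 1}""", new File("schemas/ping.avsc"))

  // A container file embeds its writer schema, so no schema is needed here
  val stream = new DataFileStream[GenericRecord](
    new ByteArrayInputStream(bytes), new GenericDatumReader[GenericRecord]())
  while (stream.hasNext) println(stream.next())
  stream.close()
}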
Example 5: AvroFileWriter
// Package declaration and imported dependencies
package com.landoop.avro

import java.io.{BufferedOutputStream, File, FileOutputStream}

import com.landoop.avro.codec.CodecFactory
import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}

object AvroFileWriter {

  def fastWrite(file: File,
                count: Int,
                parallelization: Int,
                schema: Schema,
                records: IndexedSeq[GenericRecord]) = {
    // 4 MB buffered file output
    val out = new BufferedOutputStream(new FileOutputStream(file), 4 * 1048576)
    val datumWriter = new GenericDatumWriter[GenericRecord](schema)

    val builder = FastDataFileWriterBuilder(datumWriter, out, schema)
      .withCodec(CodecFactory.snappyCodec())
      .withFlushOnEveryBlock(false)
      .withParallelization(parallelization)
    builder.encoderFactory.configureBufferSize(4 * 1048576)
    builder.encoderFactory.configureBlockSize(4 * 1048576)

    val fileWriter = builder.build()
    fileWriter.write(records)
    fileWriter.close()
  }

  def write(file: File,
            count: Int,
            schema: Schema,
            records: Seq[GenericRecord]) = {
    val out = new BufferedOutputStream(new FileOutputStream(file), 4 * 1048576)
    val datumWriter = new GenericDatumWriter[GenericRecord](schema)

    val writer = new DataFileWriter(datumWriter)
      .setCodec(org.apache.avro.file.CodecFactory.snappyCodec())
      .create(schema, out)
    writer.setFlushOnEveryBlock(false)
    records.foreach(writer.append)
    writer.close()
  }
}
Developer: Landoop, Project: fast-avro-write, Lines of code: 51, Source: AvroFileWriter.scala
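A usage sketch, not from the original repository: it generates some records and writes them with both entry points. It assumes the Landoop fast-avro-write artifact and snappy-java are on the classpath; the Reading schema and file names are made up.

import java.io.File

import com.landoop.avro.AvroFileWriter
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericRecord}

object AvroFileWriterDemo extends App {
  // Made-up single-field schema for illustration
  val schema = new Schema.Parser().parse(
    """{"type":"record","name":"Reading","fields":[{"name":"value","type":"double"}]}""")

  val records: IndexedSeq[GenericRecord] = (1 to 1000).map { i =>
    val r = new GenericData.Record(schema)
    r.put("value", i.toDouble)
    r
  }

  // Single-threaded baseline vs. fastWrite, which parallelizes block encoding
  AvroFileWriter.write(new File("readings.avro"), records.size, schema, records)
  AvroFileWriter.fastWrite(new File("readings-fast.avro"), records.size, 4, schema, records)
}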
Example 6: GenericRecordEventJsonConverter
// Package declaration and imported dependencies
package com.pragmasoft.eventaggregator

import java.io.ByteArrayOutputStream

import com.pragmasoft.eventaggregator.model.KafkaAvroEvent
import com.sksamuel.elastic4s.source.Indexable
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}
import org.apache.avro.io.EncoderFactory
import org.joda.time.format.ISODateTimeFormat

object GenericRecordEventJsonConverter {

  case class EventHeaderDescriptor(eventIdPath: Option[String], eventTsPath: Option[String]) {
    import com.pragmasoft.eventaggregator.GenericRecordFieldExtractionSupport._

    def extractEventId[T <: GenericRecord](event: T): Option[String] = eventIdPath.flatMap(event.getField[Any]).map(_.toString)
    def extractEventTs[T <: GenericRecord](event: T): Option[Long] = eventTsPath.flatMap(event.getField[Long])
  }

  // Render an Avro record as JSON using its own schema
  private def asJsonString(event: GenericRecord): String = {
    val out = new ByteArrayOutputStream()
    val jsonEncoder = EncoderFactory.get().jsonEncoder(event.getSchema, out)
    val writer = new GenericDatumWriter[GenericRecord](event.getSchema)

    writer.write(event, jsonEncoder)
    jsonEncoder.flush()
    out.close()
    out.toString
  }

  implicit def kafkaAvroEventIndexable(implicit headerDescriptor: EventHeaderDescriptor): Indexable[KafkaAvroEvent[GenericRecord]] =
    new Indexable[KafkaAvroEvent[GenericRecord]] {
      val timestampFormat = ISODateTimeFormat.dateTime().withZoneUTC

      override def json(event: KafkaAvroEvent[GenericRecord]): String = {
        val timestampJsonAttributeMaybe =
          headerDescriptor.extractEventTs(event.data)
            .map(ts => s""" "@timestamp" : "${timestampFormat.print(ts)}",""")

        s"""{
           | ${timestampJsonAttributeMaybe.getOrElse("")}
           | "location" : { "topic" : "${event.location.topic}", "partition" : ${event.location.partition}, "offset" : ${event.location.offset} },
           | "schemaName" : "${event.schemaName}",
           | "data" : ${asJsonString(event.data)}
           |} """.stripMargin
      }
    }
}
Developer: galarragas, Project: event-aggregator, Lines of code: 52, Source: GenericRecordEventJsonConverter.scala
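KafkaAvroEvent and the getField extension come from the event-aggregator project, so a fully runnable demo is not possible here. A wiring sketch under those assumptions — the header paths "id" and "timestamp" are hypothetical and must exist in the actual Avro schema:

import com.pragmasoft.eventaggregator.GenericRecordEventJsonConverter._
import com.pragmasoft.eventaggregator.model.KafkaAvroEvent
import com.sksamuel.elastic4s.source.Indexable
import org.apache.avro.generic.GenericRecord

object IndexableWiring {
  // Hypothetical record paths; adjust to the schema actually in use
  implicit val headers: EventHeaderDescriptor = EventHeaderDescriptor(Some("id"), Some("timestamp"))

  // Summons the implicit Indexable defined above to render an event as JSON
  def toJson(event: KafkaAvroEvent[GenericRecord]): String =
    implicitly[Indexable[KafkaAvroEvent[GenericRecord]]].json(event)
}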
Note: the org.apache.avro.generic.GenericDatumWriter class examples in this article were collected from open-source projects hosted on platforms such as GitHub. Copyright in the source code remains with the original authors; distribution and use are governed by each project's license. Do not republish without permission.