本文整理汇总了Scala中com.esotericsoftware.kryo.Kryo类的典型用法代码示例。如果您正苦于以下问题:Scala Kryo类的具体用法?Scala Kryo怎么用?Scala Kryo使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Kryo类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。
示例1: Configuration
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop
import java.io.{ ObjectInputStream, ObjectOutputStream }
import com.esotericsoftware.kryo.Kryo
import org.apache.hadoop.conf
import org.apache.hadoop.conf.{ Configuration ⇒ HadoopConfiguration }
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.hammerlab.hadoop.kryo.WritableSerializer
import org.hammerlab.kryo.serializeAs
/**
 * Serializable wrapper around a Hadoop `Configuration` (which is not itself
 * `java.io.Serializable`): delegates the serial form to the conf's own
 * Writable-style `write`/`readFields` methods.
 */
class Configuration(@transient var value: HadoopConfiguration)
extends Serializable {
// Custom serial form: stream out the wrapped conf's key/value pairs.
private def writeObject(out: ObjectOutputStream): Unit = {
value.write(out)
}
// Rebuild from the stream into an empty conf (loadDefaults = false, so no
// default resources are re-read on the deserializing side).
private def readObject(in: ObjectInputStream): Unit = {
value = new HadoopConfiguration(false)
value.readFields(in)
}
}
object Configuration {
// Build a serializable wrapper around a fresh Hadoop Configuration; the
// result type relies on the implicit `wrapConfiguration` below.
def apply(loadDefaults: Boolean = true): Configuration =
new HadoopConfiguration(loadDefaults)
def apply(conf: HadoopConfiguration): Configuration =
new Configuration(conf)
// Implicit conversions so Hadoop confs, wrapped confs, broadcasts thereof,
// and SparkContexts can be used interchangeably where a Configuration is
// expected.
implicit def wrapConfiguration(conf: HadoopConfiguration): Configuration =
apply(conf)
implicit def unwrapConfiguration(conf: Configuration): HadoopConfiguration =
conf.value
implicit def unwrapConfigurationBroadcast(confBroadcast: Broadcast[Configuration]): Configuration =
confBroadcast.value
implicit def sparkContextToHadoopConfiguration(sc: SparkContext): Configuration =
sc.hadoopConfiguration
// Explicit-opt-in syntax: `conf.serializable` as an alternative to the
// implicit conversions above.
implicit class ConfWrapper(val conf: HadoopConfiguration) extends AnyVal {
def serializable: Configuration =
Configuration(conf)
}
// Kryo registrations: raw Hadoop confs use the Writable-based serializer;
// wrapped Configurations are serialized via their Hadoop counterpart.
def register(kryo: Kryo): Unit = {
kryo.register(
classOf[conf.Configuration],
new WritableSerializer[conf.Configuration]
)
kryo.register(
classOf[Configuration],
serializeAs[Configuration, conf.Configuration]
)
}
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:62,代码来源:Configuration.scala
示例2: WritableSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ DataInputStream, DataOutputStream }
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.hadoop.io.Writable
/**
 * Kryo [[Serializer]] for Hadoop [[Writable]]s: delegates the wire format to
 * the Writable's own `write`/`readFields`.
 *
 * NOTE(review): `ctorArgs` is accepted but never used — presumably intended
 * for reflective construction; confirm before relying on it.
 */
class WritableSerializer[T <: Writable](ctorArgs: Any*) extends Serializer[T] {
  override def read(kryo: Kryo, input: Input, clz: Class[T]): T = {
    // `Class.newInstance` is deprecated (Java 9+): it bypasses compile-time
    // exception checking; go through the no-arg constructor explicitly.
    val t = clz.getDeclaredConstructor().newInstance()
    t.readFields(new DataInputStream(input))
    t
  }

  override def write(kryo: Kryo, output: Output, t: T): Unit = {
    t.write(new DataOutputStream(output))
  }
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:20,代码来源:WritableSerializer.scala
示例3: SerializableSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ ObjectInputStream, ObjectOutputStream }
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
/**
 * Kryo [[Serializer]] that falls back to standard Java serialization: values
 * are written with an `ObjectOutputStream` and read back with an
 * `ObjectInputStream` layered directly on Kryo's streams.
 */
case class SerializableSerializer[T <: Serializable]()
  extends Serializer[T] {

  override def read(kryo: Kryo, input: Input, `type`: Class[T]): T = {
    val objectStream = new ObjectInputStream(input)
    objectStream.readObject().asInstanceOf[T]
  }

  override def write(kryo: Kryo, output: Output, t: T): Unit = {
    val objectStream = new ObjectOutputStream(output)
    objectStream.writeObject(t)
  }
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:19,代码来源:SerializableSerializer.scala
示例4: serializeAs
//设置package包名称以及导入依赖的类
package org.hammerlab
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
package object kryo {
  /**
   * Build a [[Serializer]] for `T` by converting to/from a delegate type `U`
   * (whose values must themselves be serializable by the surrounding Kryo).
   *
   * Fix: the two implicit conversion parameters were mojibake (`T ? U`);
   * the function-arrow types are restored.
   */
  def serializeAs[T, U](implicit to: T ⇒ U, from: U ⇒ T): Serializer[T] =
    new Serializer[T] {
      override def read(kryo: Kryo, input: Input, `type`: Class[T]): T =
        from(
          kryo
            .readClassAndObject(input)
            .asInstanceOf[U]
        )

      override def write(kryo: Kryo, output: Output, t: T): Unit =
        kryo.writeClassAndObject(output, to(t))
    }
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:20,代码来源:package.scala
示例5: SerializableSerializerTest
//设置package包名称以及导入依赖的类
package org.hammerlab.hadoop.kryo
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import org.hammerlab.test.Suite
/**
 * Round-trip test for SerializableSerializer: a registration-required Kryo
 * must reject the unregistered Foo, then serialize/deserialize it correctly
 * once registered with the java-serialization-backed serializer.
 */
class SerializableSerializerTest
extends Suite {
test("serde") {
val kryo = new Kryo()
// Force explicit registration so the first write below fails loudly.
kryo.setRegistrationRequired(true)
val baos = new ByteArrayOutputStream()
val output = new Output(baos)
val foo = new Foo
foo.n = 123
foo.s = "abc"
// Unregistered class: Kryo must refuse to write it.
intercept[IllegalArgumentException] {
kryo.writeClassAndObject(output, foo)
}
.getMessage should startWith("Class is not registered: org.hammerlab.hadoop.kryo.Foo")
kryo.register(classOf[Foo], SerializableSerializer[Foo]())
kryo.writeClassAndObject(output, foo)
output.close()
val bytes = baos.toByteArray
// Pins the wire size (Kryo class header + Java-serialization stream);
// presumably brittle across Kryo/JDK versions — revisit if it breaks.
bytes.length should be(93)
val bais = new ByteArrayInputStream(bytes)
val input = new Input(bais)
val after = kryo.readClassAndObject(input).asInstanceOf[Foo]
// Round-trip must preserve both custom-serialized fields.
after.n should be(foo.n)
after.s should be(foo.s)
}
}
/**
 * Mutable test fixture exercising custom Java serialization hooks: only the
 * two fields are persisted, via `writeInt`/`writeUTF`, giving a compact,
 * deterministic serial form.
 */
class Foo extends Serializable {

  var n: Int = 0
  var s: String = ""

  // Custom serial form: the two fields, in declaration order.
  private def writeObject(out: ObjectOutputStream): Unit = {
    out.writeInt(n)
    out.writeUTF(s)
  }

  // Mirror of writeObject: restore the fields in the same order.
  private def readObject(in: ObjectInputStream): Unit = {
    n = in.readInt()
    s = in.readUTF()
  }
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:61,代码来源:SerializableSerializerTest.scala
示例6: BasesSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.bases
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
/**
 * Kryo serializer for [[Bases]]: a variable-length ("optimized positive")
 * count followed by one byte per base.
 *
 * Fix: the lambda arrows had been garbled to `?` by a bad encoding; restored
 * to `⇒`.
 */
class BasesSerializer
  extends Serializer[Bases] {

  override def write(kryo: Kryo, output: Output, bases: Bases): Unit = {
    output.writeInt(bases.bytes.length, true)
    bases.foreach(base ⇒ output.writeByte(base.byte))
  }

  override def read(kryo: Kryo, input: Input, cls: Class[Bases]): Bases = {
    val count: Int = input.readInt(true)
    val bases = Bases.newBuilder
    (0 until count).foreach(_ ⇒ bases += input.readByte())
    bases.result()
  }
}
开发者ID:hammerlab,项目名称:genomic-utils,代码行数:21,代码来源:BasesSerializer.scala
示例7: NCKryoRegistrator
//设置package包名称以及导入依赖的类
package se.kth.climate.fast.netcdf
import org.apache.spark.serializer.KryoRegistrator
import com.esotericsoftware.kryo.{ Kryo, KryoException, Serializer }
import com.esotericsoftware.kryo.io.Output
import com.esotericsoftware.kryo.io.Input
import ucar.nc2.NetcdfFile
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import ucar.nc2.iosp.netcdf3.N3outputStreamWriter;
import java.util.UUID;
class NCKryoRegistrator extends KryoRegistrator {
  /**
   * Registers NetCDF-related serializers with Spark's Kryo instance.
   *
   * Fix: uses an explicit `: Unit =` result type instead of the deprecated
   * procedure syntax.
   */
  override def registerClasses(kryo: Kryo): Unit = {
    println("Registering custom NetCDF serializers");
    // NOTE(review): enables minlog TRACE logging globally — extremely verbose;
    // looks like a debugging leftover. Confirm before shipping.
    com.esotericsoftware.minlog.Log.TRACE();
    kryo.register(classOf[NetcdfFile], new NetcdfFileSerializer());
    kryo.register(classOf[Array[String]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.StringArraySerializer())
    kryo.register(classOf[Array[Int]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.IntArraySerializer())
    //kryo.setRegistrationRequired(true);
  }
}
/**
 * Kryo serializer that round-trips a [[NetcdfFile]] by writing its complete
 * NetCDF-3 byte stream (header + all record data), length-prefixed, into the
 * Kryo output, and re-opening it in memory on read.
 */
class NetcdfFileSerializer extends Serializer[NetcdfFile] {
override def write(kryo: Kryo, output: Output, ncfile: NetcdfFile) {
// Render the file into an in-memory NetCDF-3 stream via N3outputStreamWriter.
val baos = new ByteArrayOutputStream();
val out = new DataOutputStream(baos);
val w = new N3outputStreamWriter(ncfile);
// numrec = length of the unlimited dimension, or 0 if the file has none.
val ulim = ncfile.getUnlimitedDimension();
val numrec = if (ulim == null) 0 else ncfile.getUnlimitedDimension().getLength();
w.writeHeader(out, numrec);
w.writeDataAll(out);
out.flush();
out.close();
val byteArray = baos.toByteArray();
// println("**********BYTES***********");
// println(byteArray.take(100000).mkString);
// Length prefix first, so read() knows exactly how many bytes to consume.
output.writeInt(byteArray.length);
output.write(byteArray);
println(s"******** Wrote ncfile (size=${byteArray.length}) **********");
}
override def read(kryo: Kryo, input: Input, `type`: Class[NetcdfFile]): NetcdfFile = {
val len = input.readInt();
println(s"******** Reading ncfile (size=${len}) **********");
val raw = new Array[Byte](len);
var readBytes = 0;
// Input.read may return fewer bytes than requested; loop until the full
// prefix-declared length has been read. -1 signals premature end-of-stream.
do {
val res = input.read(raw, readBytes, len - readBytes);
if (res > -1) {
readBytes += res;
} else {
throw new KryoException(s"Read only $readBytes bytes when $len bytes were expected!");
}
} while (readBytes < len);
println(s"******** Read ncfile (read=${readBytes}, size=${len}) **********");
// Random UUID as the in-memory file's location/name to avoid collisions.
NetcdfFile.openInMemory(UUID.randomUUID().toString(), raw);
}
}
开发者ID:ClimateFAST,项目名称:FASTSpark,代码行数:61,代码来源:NCKryoRegistrator.scala
示例8: KafkaSparkStreamingRegistrator
//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization
import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator
class KafkaSparkStreamingRegistrator extends KryoRegistrator {
  /**
   * Registers chill-avro serializers for the Avro record types shipped over
   * Spark.
   *
   * Fix: explicit `: Unit =` result type replaces the deprecated procedure
   * syntax.
   */
  override def registerClasses(kryo: Kryo): Unit = {
    // Registers a serializer for any generic Avro records. The kafka-storm-starter project does not yet include
    // examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
    kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
    // Registers a serializer specifically for the, well, specific Avro record `Tweet`
    kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
    ()
  }
}
开发者ID:JohnReedLOL,项目名称:KafkaStormSpark,代码行数:22,代码来源:KafkaSparkStreamingRegistrator.scala
示例9: KryoSerializer
//设置package包名称以及导入依赖的类
package com.flipkart.connekt.commons.serializers
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.pool.{KryoFactory, KryoPool}
import org.objenesis.strategy.StdInstantiatorStrategy
/**
 * Pooled-Kryo implementation of the project's Serializer interface: each call
 * borrows an instance from a soft-referenced pool, so instances are reused
 * across calls but reclaimable under memory pressure.
 */
object KryoSerializer extends Serializer {

  // Factory producing configured Kryo instances for the pool below.
  val factory = new KryoFactory() {
    override def create(): Kryo = {
      val kryo = new Kryo()
      // Objenesis lets Kryo instantiate classes that lack a no-arg constructor.
      kryo.setInstantiatorStrategy(new StdInstantiatorStrategy())
      kryo
    }
  }

  val kryoPool = new KryoPool.Builder(factory).softReferences().build()

  override def serialize(obj: AnyRef): Array[Byte] = {
    val stream = new ByteArrayOutputStream()
    val output = new Output(stream)
    val kryoInstance = kryoPool.borrow()
    try {
      kryoInstance.writeClassAndObject(output, obj)
      output.close()
    } finally {
      // Fix: the original leaked the pooled instance if writing threw;
      // always return it to the pool.
      kryoPool.release(kryoInstance)
    }
    stream.toByteArray
  }

  override def deserialize[T](bytes: Array[Byte])(implicit cTag: reflect.ClassTag[T]): T = {
    val stream = new ByteArrayInputStream(bytes)
    val input = new Input(stream)
    val kryoInstance = kryoPool.borrow()
    val obj =
      try kryoInstance.readClassAndObject(input)
      finally kryoPool.release(kryoInstance)  // same leak fix as serialize
    obj.asInstanceOf[T]
  }
}
开发者ID:ayush03agarwal,项目名称:connekt,代码行数:45,代码来源:KryoSerializer.scala
示例10: KafkaSparkStreamingRegistrator
//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization
import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator
import scala.trace.{Pos, implicitlyFormatable}
class KafkaSparkStreamingRegistrator extends KryoRegistrator {
  /**
   * Registers chill-avro serializers for the Avro record types shipped over
   * Spark.
   *
   * Fix: explicit `: Unit =` result type replaces the deprecated procedure
   * syntax.
   */
  override def registerClasses(kryo: Kryo): Unit = {
    // Registers a serializer for any generic Avro records. The kafka-storm-starter project does not yet include
    // examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
    kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
    // Registers a serializer specifically for the, well, specific Avro record `Tweet`
    kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
    ()
  }
}
开发者ID:JohnReedLOL,项目名称:full-stack-big-data,代码行数:23,代码来源:KafkaSparkStreamingRegistrator.scala
示例11: NTMTrainedModel
//设置package包名称以及导入依赖的类
package ru.ispras.modis.tm.chinesetm
import java.io.{FileInputStream, FileOutputStream}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import org.objenesis.strategy.StdInstantiatorStrategy
import ru.ispras.modis.tm.attribute.{DefaultAttributeType, AttributeType}
import ru.ispras.modis.tm.matrix.{AttributedPhi, Background, Theta}
import ru.ispras.modis.tm.plsa.TrainedModel
/**
 * Trained noise-aware topic model: extends the base TrainedModel with
 * per-attribute noise and background distributions.
 */
class NTMTrainedModel(phi: Map[AttributeType, AttributedPhi],
                      theta: Theta,
                      val noise: Map[AttributeType, Background],
                      val background: Map[AttributeType, Background],
                      perplexity: Double) extends TrainedModel(phi, theta, perplexity) {

  /** Noise distribution for the single default attribute. */
  def getNoise() = {
    require(noise.contains(DefaultAttributeType), "there is no default attribute in collection")
    require(noise.keys.size == 1, "Do not use this method in case of multiattribute collection")
    noise(DefaultAttributeType)
  }

  /**
   * Background distribution for the single default attribute.
   *
   * Bug fix: the original was a copy-paste of getNoise — it validated and
   * returned `noise` instead of `background`.
   */
  def getBackground() = {
    require(background.contains(DefaultAttributeType), "there is no default attribute in collection")
    require(background.keys.size == 1, "Do not use this method in case of multiattribute collection")
    background(DefaultAttributeType)
  }
}
object NTMTrainedModel {
  /** Serializes `model` to `path` with Kryo (Objenesis instantiation). */
  def save(model: NTMTrainedModel, path: String): Unit = {
    val kryo = new Kryo
    kryo.setInstantiatorStrategy(new StdInstantiatorStrategy)
    val output = new Output(new FileOutputStream(path))
    // Fix: the original leaked the stream if writing threw.
    try kryo.writeObject(output, model)
    finally output.close()
  }

  /** Reads a model previously written by [[save]]. */
  def load(path: String): NTMTrainedModel = {
    val kryo = new Kryo
    kryo.setInstantiatorStrategy(new StdInstantiatorStrategy)
    val input = new Input(new FileInputStream(path))
    // Same leak fix as save: always close, even on failure.
    try kryo.readObject(input, classOf[NTMTrainedModel])
    finally input.close()
  }
}
开发者ID:ispras,项目名称:atr4s,代码行数:49,代码来源:NTMTrainedModel.scala
示例12: overlaps
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
// True iff `other` is on the same contig and the two regions share at least
// one locus (each half-check covers one ordering of the two start positions).
def overlaps(other: Region): Boolean =
other.contigName == contigName &&
(overlapsLocus(other.start) || other.overlapsLocus(start))
// Render as "contig:start-end".
override def toString: String = s"$contigName:$start-$end"
}
object Region {
  /**
   * Partial ordering comparing regions by start locus, but only within the
   * same contig; regions on different contigs are incomparable.
   */
  implicit def intraContigPartialOrdering[R <: Region] =
    new PartialOrdering[R] {
      override def tryCompare(x: R, y: R): Option[Int] = {
        if (x.contigName == y.contigName)
          Some(x.start.compare(y.start))
        else
          None
      }
      override def lteq(x: R, y: R): Boolean = {
        x.contigName == y.contigName && x.start <= y.start
      }
    }

  // Fix: RegionImpl takes a single tuple parameter; the original relied on
  // deprecated auto-tupling of the three arguments. Tuples are now explicit.
  def apply(contigName: ContigName, start: Locus, end: Locus): Region =
    RegionImpl((contigName, start, end))

  def apply(contigName: ContigName, interval: Interval): Region =
    RegionImpl((contigName, interval.start, interval.end))

  // Fix: explicit tuple for the same auto-tupling deprecation in Some(...).
  def unapply(region: Region): Option[(ContigName, Locus, Locus)] =
    Some(
      (
        region.contigName,
        region.start,
        region.end
      )
    )

  def register(kryo: Kryo): Unit = {
    kryo.register(classOf[RegionImpl])
  }
}
// Tuple-backed implementation of Region: the three components live in a
// single tuple field so the class can be a value class (AnyVal).
// NOTE(review): extending AnyVal together with Region requires Region to be a
// universal trait — confirm against its definition (outside this view).
private case class RegionImpl(t: (ContigName, Locus, Locus))
extends AnyVal
with Region {
override def contigName: ContigName = t._1
override def start: Locus = t._2
override def end: Locus = t._3
}
开发者ID:hammerlab,项目名称:genomic-reference,代码行数:53,代码来源:Region.scala
示例13: Position
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
/**
 * A single locus on a contig, modeled as a degenerate [[Region]] of length 1:
 * the half-open interval [locus, locus.next).
 */
case class Position(contigName: ContigName, locus: Locus)
extends Region
with HasLocus {
def start = locus
def end = locus.next
override def toString: String = s"$contigName:$locus"
}
object Position {
// Kryo registration for Position and arrays thereof (arrays need their own
// registration entry).
def registerKryo(kryo: Kryo): Unit = {
kryo.register(classOf[Position])
kryo.register(classOf[Array[Position]])
}
// Lexicographic (contig, locus) ordering used by totalOrdering below.
private implicit val tupleOrdering: Ordering[(ContigName, Locus)] = Ordering.Tuple2[ContigName, Locus]
// Total order across contigs: compare contig names first, then loci.
val totalOrdering: Ordering[Position] =
new Ordering[Position] {
override def compare(x: Position, y: Position): Int =
tupleOrdering.compare(
(x.contigName, x.locus),
(y.contigName, y.locus)
)
}
// Partial order: positions on different contigs are incomparable.
// NOTE(review): the Option[KmerLength] result type is presumably an Int
// alias; Option[Int] would state tryCompare's contract more directly —
// confirm the alias definition.
val partialOrdering =
new PartialOrdering[Position] {
override def tryCompare(x: Position, y: Position): Option[KmerLength] =
if (x.contigName == y.contigName)
Some(x.compare(y.locus))
else
None
override def lteq(x: Position, y: Position): Boolean =
x.contigName == y.contigName && x.locus <= y.locus
}
}
开发者ID:hammerlab,项目名称:genomic-reference,代码行数:45,代码来源:Position.scala
示例14: RegistrarI
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.genomics.reference.ContigName.{ Normalization, Factory }
/**
 * Base KryoRegistrator for core reference-genomics types. The ContigName
 * `factory` controls contig-name normalization and is exposed implicitly so
 * the ContigNameSerializer registration picks it up.
 */
abstract class RegistrarI(factory: Factory) extends KryoRegistrator {
implicit val n = factory
override def registerClasses(kryo: Kryo): Unit = {
kryo.register(classOf[Locus])
kryo.register(classOf[ContigName], new ContigNameSerializer)
kryo.register(classOf[Position])
}
}
// Registrar using strict contig-name normalization.
class Registrar extends RegistrarI(Normalization.Strict)
// Lenient variant; also provided as a ready-made singleton instance.
class PermissiveRegistrar extends RegistrarI(Normalization.Lenient)
object PermissiveRegistrar extends PermissiveRegistrar
开发者ID:hammerlab,项目名称:genomic-reference,代码行数:21,代码来源:Registrar.scala
示例15: CustomKryoRegistrator
//设置package包名称以及导入依赖的类
package com.paypal.risk.smunf.util
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import com.paypal.risk.smunf.math.quantile._
import com.paypal.risk.smunf.math.stats._
import com.paypal.risk.smunf.sanity._
import com.paypal.risk.smunf.woe._
/**
 * Registers all Smunf statistics / quantile-estimation / WOE model classes
 * with Spark's Kryo instance.
 */
class CustomKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    // Registration order is preserved from the original, since Kryo assigns
    // ids in registration order.
    val classesToRegister: Seq[Class[_]] = Seq(
      classOf[BinCount],
      classOf[WoeBin],
      classOf[VariableBin],
      classOf[VariableRecord],
      classOf[WoeResult],
      classOf[NumWoeMap],
      classOf[CharWoeMap],
      classOf[WoeMap],
      classOf[SanityRecord],
      classOf[LabelResult],
      classOf[VariableResult],
      classOf[StreamStatsAggregator],
      classOf[TDigestUnit],
      classOf[TDigestEstimator],
      classOf[TDigestTree],
      classOf[AVLTreeIndex],
      classOf[HistogramUnit],
      classOf[StreamingHistogramEstimator],
      classOf[HistogramTree]
    )
    classesToRegister.foreach(cls => kryo.register(cls))
  }
}
开发者ID:yanlzhang8936,项目名称:Smunf,代码行数:34,代码来源:CustomKryoRegistrator.scala
示例16: ContigSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.loci.set
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer ? KryoSerializer }
import com.google.common.collect.{ TreeRangeSet, Range ? JRange }
import JRange.closedOpen
import org.hammerlab.genomics.reference.{ ContigName, Interval, Locus }
// We serialize a LociSet simply by writing out its constituent Contigs.
// We serialize a LociSet simply by writing out its constituent Contigs.
/**
 * Wire format: contig name, range count, then (start, end) locus pairs.
 *
 * Fixes: the `for`-generator and lambda arrows had been garbled to `?` by a
 * bad encoding; and `read`'s foreach (which returns Unit) was pointlessly
 * bound to an unused `val ranges`, now dropped.
 */
class ContigSerializer extends KryoSerializer[Contig] {
  def write(kryo: Kryo, output: Output, obj: Contig) = {
    kryo.writeObject(output, obj.name)
    output.writeInt(obj.numRanges)
    for {
      Interval(start, end) ← obj.ranges
    } {
      output.writeLong(start.locus)
      output.writeLong(end.locus)
    }
  }

  def read(kryo: Kryo, input: Input, klass: Class[Contig]): Contig = {
    val name = kryo.readObject(input, classOf[ContigName])
    val numRanges = input.readInt()
    val treeRangeSet = TreeRangeSet.create[Locus]()
    (0 until numRanges).foreach { _ ⇒
      treeRangeSet.add(
        closedOpen(
          Locus(input.readLong()),
          Locus(input.readLong())
        )
      )
    }
    Contig(name, treeRangeSet)
  }
}
开发者ID:hammerlab,项目名称:genomic-loci,代码行数:40,代码来源:ContigSerializer.scala
示例17: Serializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.loci.set
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer ? KryoSerializer }
// We just serialize the underlying contigs, which contain their names which are the string keys of LociSet.map.
// We just serialize the underlying contigs; each Contig carries its name,
// which is the string key of LociSet.map, so nothing else is needed.
class Serializer extends KryoSerializer[LociSet] {
  def write(kryo: Kryo, output: Output, obj: LociSet) =
    kryo.writeObject(output, obj.contigs)

  def read(kryo: Kryo, input: Input, klass: Class[LociSet]): LociSet =
    LociSet.fromContigs(kryo.readObject(input, classOf[Array[Contig]]))
}
开发者ID:hammerlab,项目名称:genomic-loci,代码行数:17,代码来源:Serializer.scala
示例18: ContigSerializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.loci.map
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer ? KryoSerializer }
import org.hammerlab.genomics.reference.{ ContigName, Locus }
/**
 * Kryo serializer for a LociMap contig: contig name, entry count, then
 * (start, end, value) triples; values go through writeClassAndObject since T
 * is unconstrained.
 */
class ContigSerializer[T] extends KryoSerializer[Contig[T]] {
def write(kryo: Kryo, output: Output, obj: Contig[T]) = {
kryo.writeObject(output, obj.name)
// Entry count; written as a long (read side mirrors with readLong).
output.writeLong(obj.asMap.size)
obj.asMap.foreach {
case (range, value) =>
output.writeLong(range.start.locus)
output.writeLong(range.end.locus)
kryo.writeClassAndObject(output, value)
}
}
def read(kryo: Kryo, input: Input, klass: Class[Contig[T]]): Contig[T] = {
val builder = LociMap.newBuilder[T]
val contig = kryo.readObject(input, classOf[ContigName])
val count = input.readLong()
// Replay the (start, end, value) triples in write order into the builder.
(0L until count).foreach { _ =>
val start = input.readLong()
val end = input.readLong()
// Unavoidable unchecked cast: T is erased; trusted to match what write stored.
val value: T = kryo.readClassAndObject(input).asInstanceOf[T]
builder.put(contig, Locus(start), Locus(end), value)
}
builder.result(contig)
}
}
开发者ID:hammerlab,项目名称:genomic-loci,代码行数:33,代码来源:ContigSerializer.scala
示例19: Serializer
//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.loci.map
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer ? KryoSerializer }
/**
 * Kryo serializer for a LociMap: contig count followed by each Contig[T],
 * delegating per-contig encoding to ContigSerializer.
 */
class Serializer[T] extends KryoSerializer[LociMap[T]] {
def write(kryo: Kryo, output: Output, obj: LociMap[T]) = {
// Count first so read knows how many contigs to expect.
output.writeLong(obj.contigs.size)
obj.contigs.foreach(contig =>
kryo.writeObject(output, contig)
)
}
def read(kryo: Kryo, input: Input, klass: Class[LociMap[T]]): LociMap[T] = {
val count: Long = input.readLong()
// Eagerly materializes each contig in write order.
val contigs = (0L until count).map(i =>
kryo.readObject(input, classOf[Contig[T]])
)
LociMap.fromContigs(contigs)
}
}
开发者ID:hammerlab,项目名称:genomic-loci,代码行数:23,代码来源:Serializer.scala
示例20: SparkSerializer
//设置package包名称以及导入依赖的类
package com.truecar.mleap.spark.benchmark.util
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.twitter.chill.ScalaKryoInstantiator
object SparkSerializer {
  /**
   * Builds a serializer backed by a fresh chill ScalaKryoInstantiator, with
   * registration not required and the current thread's context classloader.
   */
  def apply(): SparkSerializer = {
    val instantiator = new ScalaKryoInstantiator()
    instantiator.setRegistrationRequired(false)
    val kryo = instantiator.newKryo()
    kryo.setClassLoader(Thread.currentThread.getContextClassLoader)
    SparkSerializer(kryo)
  }
}
/**
 * Thin wrapper pairing a configured Kryo instance with typed
 * write/read helpers; class information travels with each value
 * (writeClassAndObject / readClassAndObject).
 */
case class SparkSerializer(kryo: Kryo) {
def write[T](obj: T, output: Output) = {
kryo.writeClassAndObject(output, obj)
}
// Unchecked cast: caller must request the type that was written.
def read[T](input: Input): T = {
kryo.readClassAndObject(input).asInstanceOf[T]
}
}
开发者ID:TrueCar,项目名称:mleap,代码行数:28,代码来源:SparkSerializer.scala
注:本文中的com.esotericsoftware.kryo.Kryo类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论