• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    公众号

Scala KryoRegistrator类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Scala中org.apache.spark.serializer.KryoRegistrator的典型用法代码示例。如果您正苦于以下问题:Scala KryoRegistrator类的具体用法?Scala KryoRegistrator怎么用?Scala KryoRegistrator使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



在下文中一共展示了KryoRegistrator类的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。

示例1: registrationRequired

//设置package包名称以及导入依赖的类
package org.hammerlab.spark.confs

import org.apache.spark.serializer.{ KryoRegistrator, KryoSerializer }
import org.hammerlab.spark.SparkConfBase

/**
 * Mixin for [[SparkConfBase]] that configures Spark to use Kryo serialization.
 *
 * Subclasses/mixers may override the three members below to tune behavior;
 * the conf entries are contributed at construction time via `sparkConf`.
 *
 * NOTE(review): the original page rendered the `⇒`/`→` arrows as `?`
 * (encoding mojibake); they are restored here as ASCII `=>` / `->`.
 */
trait Kryo {
  self: SparkConfBase =>

  /** When true, Kryo throws on serializing an unregistered class. */
  def registrationRequired: Boolean = true

  /** Kryo reference tracking (needed for cyclic/shared object graphs). */
  def referenceTracking: Boolean = false

  /** Optional registrator class; `null` means no registrator is configured. */
  def registrar: Class[_ <: KryoRegistrator] = null

  sparkConf(
    "spark.serializer" -> classOf[KryoSerializer].getCanonicalName,
    "spark.kryo.referenceTracking" -> referenceTracking.toString,
    "spark.kryo.registrationRequired" -> registrationRequired.toString
  )

  // Only set the registrator key when a registrar class was provided;
  // Option(..) maps the null default to None.
  Option(registrar)
    .foreach(
      clz =>
        sparkConf(
          "spark.kryo.registrator" -> clz.getCanonicalName
        )
    )
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:27,代码来源:Kryo.scala


示例2: KryoTest

//设置package包名称以及导入依赖的类
package org.hammerlab.spark.confs

import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.spark.SparkConfBase
import org.hammerlab.test.Suite

/** Verifies that the Kryo mixin's overridden settings land in the SparkConf. */
class KryoTest
  extends Suite {
  test("override registration requirement") {
    val sparkConf = HasSparkConf.conf
    // Each expected key/value pair reflects an override in HasSparkConf.
    val expected =
      Seq(
        "spark.kryo.referenceTracking" -> "true",
        "spark.kryo.registrationRequired" -> "false",
        "spark.kryo.registrator" -> "org.hammerlab.spark.confs.TestRegistrator"
      )
    for ((key, value) <- expected) {
      sparkConf.get(key) should be(value)
    }
  }
}

// Minimal KryoRegistrator used only as a marker: the test above asserts that
// "spark.kryo.registrator" is set to this class's fully-qualified name.
// registerClasses is never actually invoked in the test, hence the
// intentionally unimplemented (???) body.
class TestRegistrator
  extends KryoRegistrator {
  override def registerClasses(kryo: com.esotericsoftware.kryo.Kryo): Unit = ???
}

/**
 * Test fixture: a [[SparkConfBase]] with the [[Kryo]] mixin applied,
 * flipping every overridable default so the overrides are observable.
 */
object HasSparkConf
  extends SparkConfBase
    with Kryo {

  override def registrationRequired: Boolean = false

  override def referenceTracking: Boolean = true

  override def registrar: Class[TestRegistrator] = classOf[TestRegistrator]

  // Materialize the accumulated settings; defs above contribute nothing at
  // object-init time, so placement after them does not change behavior.
  val conf = makeSparkConf
}
开发者ID:hammerlab,项目名称:spark-util,代码行数:30,代码来源:KryoTest.scala


示例3: NCKryoRegistrator

//设置package包名称以及导入依赖的类
package se.kth.climate.fast.netcdf

import org.apache.spark.serializer.KryoRegistrator
import com.esotericsoftware.kryo.{ Kryo, KryoException, Serializer }
import com.esotericsoftware.kryo.io.Output
import com.esotericsoftware.kryo.io.Input
import ucar.nc2.NetcdfFile
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import ucar.nc2.iosp.netcdf3.N3outputStreamWriter;
import java.util.UUID;

/** Kryo registrator wiring custom NetCDF serializers into Spark. */
class NCKryoRegistrator extends KryoRegistrator {
  // Deprecated procedure syntax (`def f(..) { }`) replaced with `: Unit =`.
  override def registerClasses(kryo: Kryo): Unit = {
    println("Registering custom NetCDF serializers")
    // NOTE(review): this enables Kryo/minlog TRACE logging globally on every
    // executor that loads this registrator — very noisy; consider removing
    // once serialization is verified.
    com.esotericsoftware.minlog.Log.TRACE()
    // NetcdfFile is not Kryo-serializable by default, so it gets a custom serializer.
    kryo.register(classOf[NetcdfFile], new NetcdfFileSerializer())
    kryo.register(classOf[Array[String]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.StringArraySerializer())
    kryo.register(classOf[Array[Int]], new com.esotericsoftware.kryo.serializers.DefaultArraySerializers.IntArraySerializer())

    //kryo.setRegistrationRequired(true);
  }
}

/**
 * Kryo serializer for [[NetcdfFile]]: writes the file out as an in-memory
 * NetCDF-3 byte stream (length-prefixed), and reconstructs it on read via
 * `NetcdfFile.openInMemory`.
 */
class NetcdfFileSerializer extends Serializer[NetcdfFile] {

  /** Serializes `ncfile` as an int length followed by the raw NetCDF-3 bytes. */
  override def write(kryo: Kryo, output: Output, ncfile: NetcdfFile): Unit = {
    val baos = new ByteArrayOutputStream()
    val out = new DataOutputStream(baos)
    try {
      val w = new N3outputStreamWriter(ncfile)
      val ulim = ncfile.getUnlimitedDimension()
      // Record count along the unlimited dimension; 0 when there is none.
      val numrec = if (ulim == null) 0 else ulim.getLength()
      w.writeHeader(out, numrec)
      w.writeDataAll(out)
      out.flush()
    } finally {
      // Bug fix: the original never closed the stream on a write failure.
      out.close()
    }
    val byteArray = baos.toByteArray()
    output.writeInt(byteArray.length)
    output.write(byteArray)
    println(s"******** Wrote ncfile (size=${byteArray.length}) **********")
  }

  /** Reads the length-prefixed byte array back and reopens it as an in-memory file. */
  override def read(kryo: Kryo, input: Input, `type`: Class[NetcdfFile]): NetcdfFile = {
    val len = input.readInt()
    println(s"******** Reading ncfile (size=${len}) **********")
    val raw = new Array[Byte](len)
    var readBytes = 0
    // Input.read may return fewer bytes than requested; loop until the buffer
    // is full, failing loudly on premature end-of-stream (res == -1).
    do {
      val res = input.read(raw, readBytes, len - readBytes)
      if (res > -1) {
        readBytes += res
      } else {
        throw new KryoException(s"Read only $readBytes bytes when $len bytes were expected!")
      }
    } while (readBytes < len)
    println(s"******** Read ncfile (read=${readBytes}, size=${len}) **********")
    // Random UUID serves as the in-memory file's name to avoid collisions.
    NetcdfFile.openInMemory(UUID.randomUUID().toString(), raw)
  }
}
开发者ID:ClimateFAST,项目名称:FASTSpark,代码行数:61,代码来源:NCKryoRegistrator.scala


示例4: KafkaSparkStreamingRegistrator

//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization

import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator


/** Registers chill-avro serializers for Avro records with Kryo for Spark Streaming. */
class KafkaSparkStreamingRegistrator extends KryoRegistrator {

  // Deprecated procedure syntax replaced with an explicit `: Unit =`.
  override def registerClasses(kryo: Kryo): Unit = {
    // Registers a serializer for any generic Avro records.  The kafka-storm-starter project does not yet include
    // examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
    kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
    // Registers a serializer specifically for the, well, specific Avro record `Tweet`
    kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
    ()
  }

}
开发者ID:JohnReedLOL,项目名称:KafkaStormSpark,代码行数:22,代码来源:KafkaSparkStreamingRegistrator.scala


示例5: KafkaSparkStreamingRegistrator

//设置package包名称以及导入依赖的类
package com.miguno.kafkastorm.spark.serialization

import com.esotericsoftware.kryo.Kryo
import com.miguno.avro.Tweet
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.spark.serializer.KryoRegistrator
import scala.trace.{Pos, implicitlyFormatable}


/** Registers chill-avro serializers for Avro records with Kryo for Spark Streaming. */
class KafkaSparkStreamingRegistrator extends KryoRegistrator {

  // Deprecated procedure syntax replaced with an explicit `: Unit =`.
  override def registerClasses(kryo: Kryo): Unit = {
    // Registers a serializer for any generic Avro records.  The kafka-storm-starter project does not yet include
    // examples that work on generic Avro records, but we keep this registration for the convenience of our readers.
    kryo.register(classOf[GenericRecord], AvroSerializer.GenericRecordSerializer[GenericRecord]())
    // Registers a serializer specifically for the, well, specific Avro record `Tweet`
    kryo.register(classOf[Tweet], AvroSerializer.SpecificRecordSerializer[Tweet])
    ()
  }

}
开发者ID:JohnReedLOL,项目名称:full-stack-big-data,代码行数:23,代码来源:KafkaSparkStreamingRegistrator.scala


示例6: RegistrarI

//设置package包名称以及导入依赖的类
package org.hammerlab.genomics.reference

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.genomics.reference.ContigName.{ Normalization, Factory }

// Base Kryo registrator for genomic-reference types. The ContigName.Factory
// passed in controls contig-name normalization; it is exposed as a public
// implicit so code constructed during registration (e.g. ContigNameSerializer
// usage) can pick it up. Registration order is part of Kryo's class-ID
// assignment — do not reorder the register calls.
abstract class RegistrarI(factory: Factory) extends KryoRegistrator {
  // Public implicit Factory; kept under its original (public) name `n`.
  implicit val n = factory
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[Locus])
    // ContigName needs a custom serializer (presumably factory-aware — see
    // ContigNameSerializer); Locus/Position use Kryo defaults.
    kryo.register(classOf[ContigName], new ContigNameSerializer)
    kryo.register(classOf[Position])
  }
}


// Registrator using strict contig-name normalization.
class Registrar extends RegistrarI(Normalization.Strict)
// Registrator using lenient ("permissive") contig-name normalization.
class PermissiveRegistrar extends RegistrarI(Normalization.Lenient)

// Singleton convenience instance of the permissive registrator.
object PermissiveRegistrar extends PermissiveRegistrar 
开发者ID:hammerlab,项目名称:genomic-reference,代码行数:21,代码来源:Registrar.scala


示例7: CustomKryoRegistrator

//设置package包名称以及导入依赖的类
package com.paypal.risk.smunf.util

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator

import com.paypal.risk.smunf.math.quantile._
import com.paypal.risk.smunf.math.stats._
import com.paypal.risk.smunf.sanity._
import com.paypal.risk.smunf.woe._

/**
 * Registers all Smunf quantile/stats/sanity/WOE classes with Kryo.
 *
 * The sequence below preserves the exact registration order of the original
 * implementation, since Kryo assigns class IDs in registration order.
 */
class CustomKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    val registeredClasses: Seq[Class[_]] =
      Seq(
        classOf[BinCount],
        classOf[WoeBin],
        classOf[VariableBin],
        classOf[VariableRecord],
        classOf[WoeResult],
        classOf[NumWoeMap],
        classOf[CharWoeMap],
        classOf[WoeMap],
        classOf[SanityRecord],
        classOf[LabelResult],
        classOf[VariableResult],
        classOf[StreamStatsAggregator],
        classOf[TDigestUnit],
        classOf[TDigestEstimator],
        classOf[TDigestTree],
        classOf[AVLTreeIndex],
        classOf[HistogramUnit],
        classOf[StreamingHistogramEstimator],
        classOf[HistogramTree]
      )
    registeredClasses.foreach(clazz => kryo.register(clazz))
  }
}
开发者ID:yanlzhang8936,项目名称:Smunf,代码行数:34,代码来源:CustomKryoRegistrator.scala


示例8: SparkKryoRegistrator

//设置package包名称以及导入依赖的类
package hr.fer.ztel.thesis.spark

import breeze.linalg.{SparseVector => BreezeSparseVector, DenseVector=>BreezeDenseVector}
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator

/**
 * Kryo registrator for the thesis Spark job: registers the vector/matrix
 * representations (plain arrays, Maps, and Breeze vectors) used by the job.
 */
class SparkKryoRegistrator extends KryoRegistrator {

  // Deprecated procedure syntax and `kryo : Kryo` spacing fixed.
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[Array[Int]]) // bool vector
    kryo.register(classOf[Map[Int, Double]]) // vector
    kryo.register(classOf[Map[(Int, Int), Double]]) // matrix
    kryo.register(classOf[BreezeDenseVector[Double]])
    kryo.register(classOf[BreezeSparseVector[Double]])
    kryo.register(classOf[Array[Double]])
  }

}
开发者ID:fpopic,项目名称:master_thesis,代码行数:19,代码来源:SparkKryoRegistrator.scala



注:本文中的org.apache.spark.serializer.KryoRegistrator类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Scala HashingTF类代码示例发布时间:2022-05-23
下一篇:
Scala TopicAndPartition类代码示例发布时间:2022-05-23
热门推荐
热门话题
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2213 极客世界.|Sitemap