Scala ObjectInputStream Class Code Examples


This article collects typical usage examples of the java.io.ObjectInputStream class in Scala. If you are wondering what ObjectInputStream is for in Scala, how to use it, or what real-world code that uses it looks like, the curated class code examples below should help.



Listed below are 20 code examples of the ObjectInputStream class, sorted by popularity by default.
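
Before the collected examples, here is a minimal self-contained sketch of the pattern all of them build on: write a value with ObjectOutputStream, then read it back with ObjectInputStream. The Greeting class and RoundTripDemo object are hypothetical placeholders, not taken from any of the projects below; any Serializable value works the same way (Scala case classes extend Serializable automatically).

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

// Hypothetical payload; any Serializable value can be substituted.
case class Greeting(text: String)

object RoundTripDemo extends App {
  // Serialize the value into an in-memory byte array.
  val baos = new ByteArrayOutputStream()
  val oos = new ObjectOutputStream(baos)
  oos.writeObject(Greeting("hello"))
  oos.close() // flush before reading the bytes back

  // Deserialize from the byte array and recover the static type with a cast.
  val ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
  val back = ois.readObject().asInstanceOf[Greeting]
  ois.close()

  assert(back == Greeting("hello"))
}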

Example 1: encode

// Package declaration and imports for the required classes
package game_of_life.util

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

trait Codec {

  def encode(obj: AnyRef): Array[Byte]

  def decode[A >: Null](array: Array[Byte]): A
}

trait SerializationCodec extends Codec {

  def encode(obj: AnyRef): Array[Byte] = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    try {
      oos writeObject obj
    } finally {
      oos close ()
    }
    baos.toByteArray
  }

  def decode[A >: Null](array: Array[Byte]): A = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(array))
    try {
      (ois readObject ()).asInstanceOf[A]
    } finally {
      ois close ()
    }
  }
}

object SerializationCodec extends SerializationCodec 
Author: christian-schlichtherle, Project: akka-game-of-life, Lines: 36, Source: Codec.scala
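
A hypothetical usage sketch of the codec above: encode any Serializable reference value to bytes via the SerializationCodec companion object, then decode it back. Vector is used purely as an example payload; the `A >: Null` bound on decode admits any reference type.

// Hypothetical usage of the SerializationCodec defined above:
val bytes = SerializationCodec.encode(Vector(1, 2, 3))
val back  = SerializationCodec.decode[Vector[Int]](bytes)
assert(back == Vector(1, 2, 3))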


Example 2: Configuration

// Package declaration and imports for the required classes
package org.hammerlab.hadoop

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import org.apache.hadoop.conf
import org.apache.hadoop.conf.{ Configuration => HadoopConfiguration }
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.hammerlab.hadoop.kryo.WritableSerializer
import org.hammerlab.kryo.serializeAs

class Configuration(@transient var value: HadoopConfiguration)
  extends Serializable {
  private def writeObject(out: ObjectOutputStream): Unit = {
    value.write(out)
  }

  private def readObject(in: ObjectInputStream): Unit = {
    value = new HadoopConfiguration(false)
    value.readFields(in)
  }
}

object Configuration {

  def apply(loadDefaults: Boolean = true): Configuration =
    new HadoopConfiguration(loadDefaults)

  def apply(conf: HadoopConfiguration): Configuration =
    new Configuration(conf)

  implicit def wrapConfiguration(conf: HadoopConfiguration): Configuration =
    apply(conf)

  implicit def unwrapConfiguration(conf: Configuration): HadoopConfiguration =
    conf.value

  implicit def unwrapConfigurationBroadcast(confBroadcast: Broadcast[Configuration]): Configuration =
    confBroadcast.value

  implicit def sparkContextToHadoopConfiguration(sc: SparkContext): Configuration =
    sc.hadoopConfiguration

  implicit class ConfWrapper(val conf: HadoopConfiguration) extends AnyVal {
    def serializable: Configuration =
      Configuration(conf)
  }

  def register(kryo: Kryo): Unit = {
    kryo.register(
      classOf[conf.Configuration],
      new WritableSerializer[conf.Configuration]
    )

    kryo.register(
      classOf[Configuration],
      serializeAs[Configuration, conf.Configuration]
    )
  }
} 
Author: hammerlab, Project: spark-util, Lines: 62, Source: Configuration.scala


Example 3: SerializableSerializer

// Package declaration and imports for the required classes
package org.hammerlab.hadoop.kryo

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }

case class SerializableSerializer[T <: Serializable]()
  extends Serializer[T] {
  override def read(kryo: Kryo, input: Input, `type`: Class[T]): T =
    new ObjectInputStream(input)
      .readObject()
      .asInstanceOf[T]

  override def write(kryo: Kryo, output: Output, t: T): Unit =
    new ObjectOutputStream(output)
      .writeObject(t)
} 
Author: hammerlab, Project: spark-util, Lines: 19, Source: SerializableSerializer.scala


Example 4: SerializableSerializerTest

// Package declaration and imports for the required classes
package org.hammerlab.hadoop.kryo

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import org.hammerlab.test.Suite

class SerializableSerializerTest
  extends Suite {
  test("serde") {
    val kryo = new Kryo()
    kryo.setRegistrationRequired(true)
    val baos = new ByteArrayOutputStream()
    val output = new Output(baos)

    val foo = new Foo
    foo.n = 123
    foo.s = "abc"

    intercept[IllegalArgumentException] {
      kryo.writeClassAndObject(output, foo)
    }
    .getMessage should startWith("Class is not registered: org.hammerlab.hadoop.kryo.Foo")

    kryo.register(classOf[Foo], SerializableSerializer[Foo]())

    kryo.writeClassAndObject(output, foo)

    output.close()

    val bytes = baos.toByteArray
    bytes.length should be(93)

    val bais = new ByteArrayInputStream(bytes)

    val input = new Input(bais)
    val after = kryo.readClassAndObject(input).asInstanceOf[Foo]

    after.n should be(foo.n)
    after.s should be(foo.s)
  }
}

class Foo
  extends Serializable {

  var n = 0
  var s = ""

  private def writeObject(out: ObjectOutputStream): Unit = {
    out.writeInt(n)
    out.writeUTF(s)
  }

  private def readObject(in: ObjectInputStream): Unit = {
    n = in.readInt()
    s = in.readUTF()
  }
} 
Author: hammerlab, Project: spark-util, Lines: 61, Source: SerializableSerializerTest.scala


Example 5: OnDiskHeap

// Package declaration and imports for the required classes
package offheap

import java.io.ObjectInputStream
import java.nio.channels.Channels
import java.io.RandomAccessFile
import java.io.ObjectOutputStream

class OnDiskHeap[T <: Node[T]](filename : String) extends Heap[T] {
  val file = new RandomAccessFile(filename, "rw")
  
  def append(node : T, reserve : Int = 1) : Long = {
    file.seek(file.length())
    val pointer = file.getFilePointer
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
    var current = file.getFilePointer()
    file.seek(file.length())
    // Pad the record with zero bytes until it occupies at least `reserve` bytes,
    // so it can later be overwritten in place. (The original compared the absolute
    // file position against `reserve`, which stops padding altogether once the
    // file grows past that size.)
    while(current - pointer < reserve) {
      file.writeByte(0)
      current += 1
    }
    pointer
  }
  
  def write(pointer : Long, node : T) : Unit = {
    file.seek(pointer)
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
  }
  
  def read(pointer : Long) : T = {
    file.seek(pointer)
    val ois = new ObjectInputStream(Channels.newInputStream(file.getChannel))
    val value = ois.readObject
    value.asInstanceOf[T]
  }
  
  def commit() = {
    file.getFD.sync()
  }
} 
Author: utwente-fmt, Project: lazy-persistent-trie, Lines: 44, Source: OnDiskHeap.scala


Example 6: PacketKeySerializer

// Package declaration and imports for the required classes
package edu.uw.at.iroberts.wirefugue.kafka.serdes

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.util

import org.apache.kafka.common.serialization._


class PacketKeySerializer extends Serializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def serialize(topic: String, data: PacketKey): Array[Byte] = {
    val bs = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bs)
    oos.writeObject(data)
    oos.close() // flush the ObjectOutputStream's internal block buffer so the byte array is complete

    bs.toByteArray
  }
}

class PacketKeyDeserializer extends Deserializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def deserialize(topic: String, data: Array[Byte]): PacketKey = {
    val bs = new ByteArrayInputStream(data)
    val ois = new ObjectInputStream(bs)

    ois.readObject().asInstanceOf[PacketKey]
  }
}

class PacketKeySerde extends Serde[PacketKey] {
  val serializer = new PacketKeySerializer
  val deserializer = new PacketKeyDeserializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    serializer.configure(configs, isKey)
    deserializer.configure(configs, isKey)
  }
  def close(): Unit = {
    serializer.close()
    deserializer.close()
  }
} 
Author: robertson-tech, Project: wirefugue, Lines: 47, Source: PacketKeySerializer.scala


Example 7: SeqDeserialiser

// Package declaration and imports for the required classes
package serialisation

import java.io.{ByteArrayInputStream, ObjectInputStream}
import java.util

import org.apache.kafka.common.serialization.Deserializer

import scala.collection.mutable

class SeqDeserialiser[ELEMENT] extends Deserializer[Seq[ELEMENT]] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
  }

  override def deserialize(topic: String, data: Array[Byte]): Seq[ELEMENT] = {
    val byteStream = new ByteArrayInputStream(data)
    val objectStream = new ObjectInputStream(byteStream)
    // Use a growable buffer; the original built a mutable.Seq() and discarded
    // the result of `:+`, so it always returned an empty sequence.
    val result = mutable.ArrayBuffer[ELEMENT]()
    // Caveat: available() is not a reliable end-of-stream test for an
    // ObjectInputStream in general; it works here only because the source
    // is an in-memory byte array.
    while (objectStream.available() > 0) {
      result += objectStream.readObject().asInstanceOf[ELEMENT]
    }
    objectStream.close()
    result
  }

  override def close(): Unit = {
  }
} 
Author: benwheeler, Project: kafka-streams-poc, Lines: 29, Source: SeqDeserialiser.scala


Example 8: JavaDecoder

// Package declaration and imports for the required classes
package knot.data.serialization.j

import java.io.{ByteArrayInputStream, InputStream, ObjectInputStream}

import knot.data.serialization.{Decoder, Deserializer, DeserializerFactory}

import scala.reflect.runtime.universe

class JavaDecoder(in: InputStream) extends ObjectInputStream(in) with Decoder

class JavaDeserializerFactory extends DeserializerFactory[JavaDecoder] {

  override def get[T: universe.TypeTag](): Deserializer[JavaDecoder, T] = {
    new Deserializer[JavaDecoder, T] {
      override def deserialize(bytes: Array[Byte]): T = {
        val jd = new JavaDecoder(new ByteArrayInputStream(bytes))
        try {
          deserialize(jd)
        } finally {
          jd.close()
        }
      }

      override def deserialize(decoder: JavaDecoder): T = {
        decoder.readObject().asInstanceOf[T]
      }
    }
  }

  override def register[T: universe.TypeTag](): Unit = {
  }

  override def register[T: universe.TypeTag](deser: Deserializer[JavaDecoder, T]): Unit = {
  }
} 
Author: defvar, Project: knot, Lines: 37, Source: JavaDeserializerFactory.scala


Example 9: Test10

// Package declaration and imports for the required classes
package chapter09

import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}
import scala.collection.mutable.ArrayBuffer

object Test10 extends App {

  case class Person(val name: String) extends Serializable {
    val friends = new ArrayBuffer[Person]

    def addFriend(p: Person) {
      friends += p
    }

    def isFriendOf(p: Person): Boolean = {
      friends.contains(p)
    }
  }

  val tom = Person("tom")
  val jerry = Person("jerry")
  val johnny = Person("johnny")

  tom.addFriend(johnny)
  jerry.addFriend(johnny)

  val persons = Array(tom, jerry, johnny)

  val out = new ObjectOutputStream(new FileOutputStream("src/Chapter09/10.obj"))
  out.writeObject(persons)
  out.close()

  val in = new ObjectInputStream(new FileInputStream("src/Chapter09/10.obj"))
  val Array(_tom,_jerry,_johnny) = in.readObject().asInstanceOf[Array[Person]]

  assert(_tom isFriendOf _johnny)
  assert(_jerry isFriendOf _johnny)
  // assert(_tom isFriendOf _jerry)
} 
Author: johnnyqian, Project: scala-for-the-impatient, Lines: 40, Source: 10.scala


Example 10: Serialisation

// Package declaration and imports for the required classes
package root.core.serialisation

import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}

import scala.util.control.NonFatal


object Serialisation extends App {

  val fileName = "tempData.ser"

  val obj = Object
  val map = Map("key" -> obj)

  // deserialize an object that has a non-serializable field

  serializeObject(obj)
  val desObj = deserializeObj
  assert(desObj.get eq obj, "deserialized obj should be the same")


//  serializeObject(map)
//  val desMap = deserializeObj
//  assert(desMap.get.asInstanceOf[map.type]("key") == map, "deserialized map should be the same")

  private def deserializeObj: Option[AnyRef] = {
    try {
      val fis = new FileInputStream(fileName)
      val ois = new ObjectInputStream(fis)
      Some(ois.readObject())
    } catch { case NonFatal(e) =>
        println(s"Deserialization fail $e")
        None
    }
  }

  private def serializeObject(obj: AnyRef) {
    try {
      val fos = new FileOutputStream(fileName)
      val oos = new ObjectOutputStream(fos)
      oos.writeObject(obj)
      oos.close()
    } catch { case NonFatal(e) =>
        println(s"Serialization fail $e")
    }
  }

}

object Object extends Serializable {
  @transient private val logger = new Logger()
}

class Logger {
  def log() = println("log")
} 
Author: RicoGit, Project: scala-core, Lines: 57, Source: Serialisation.scala


Example 11: SerializationTest

// Package declaration and imports for the required classes
package wvlet.log

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import wvlet.log.io.IOUtil

object SerializationTest {

  trait A extends LogSupport {
    info("new A")
    def hello = info("hello")
  }
}


class SerializationTest extends Spec {

  import SerializationTest._

  "Logger" should {
    "serializable" in {
      val a = new A {}
      val b = new ByteArrayOutputStream()
      IOUtil.withResource(new ObjectOutputStream(b)) {out =>
        out.writeObject(a)
      }
      val ser = b.toByteArray
      IOUtil.withResource(new ObjectInputStream(new ByteArrayInputStream(ser))) { in =>
        info("deserialization")
        val a = in.readObject().asInstanceOf[A]
        a.hello
      }
    }
  }
} 
Author: wvlet, Project: log, Lines: 36, Source: SerializationTest.scala


Example 12: bytesToValue

// Package declaration and imports for the required classes
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.io.{Serializable => JavaSerializable}

import scala.collection.mutable
import scala.reflect.ClassTag
import scala.reflect._

package object hedgehog {
  private[hedgehog] def bytesToValue[T <: JavaSerializable: ClassTag](bytes: Array[Byte]): T = {
    if (classTag[T].runtimeClass == classOf[String]) {
      new String(bytes).asInstanceOf[T]
    } else {
      val in = new ByteArrayInputStream(bytes)
      try {
        new ObjectInputStream(in).readObject.asInstanceOf[T]
      } finally {
        in.close()
      }
    }
  }

  private[hedgehog] def valueToBytes[T <: JavaSerializable: ClassTag](value: T): Array[Byte] =
    value match {
      case str: String => str.getBytes
      case _ =>
        val out = new ByteArrayOutputStream()
        try {
          new ObjectOutputStream(out).writeObject(value)
          out.toByteArray
        } finally {
          out.close()
        }
    }

  private[hedgehog] object Timer {
    private val times = new mutable.ArrayBuffer[Long]

    def time[T](func: => T): T = {
      val st = System.currentTimeMillis
      try {
        func
      } finally {
        times += (System.currentTimeMillis - st)
      }
    }

    def printStats(): Unit = {
      if (times.nonEmpty) {
        val mean = times.sum.toDouble / times.size.toDouble
        val std = math.sqrt(times.map(t => math.pow(t - mean, 2)).sum / times.size)
        println(s"max: ${times.max}, min: ${times.min}, avg: $mean, std: $std")
      } else {
        println("No timings!")
      }
    }

    def average: Double = times.sum.toDouble / times.size.toDouble

    def reset(): Unit = times.clear()
  }
} 
Author: aluketa, Project: hedgehog, Lines: 62, Source: package.scala


Example 13: serializeRoundTrip

// Package declaration and imports for the required classes
package deaktator.pops

import java.io.{ObjectInputStream, ByteArrayInputStream, ObjectOutputStream, ByteArrayOutputStream}


trait SerializabilityTest {
  def serializeRoundTrip[A](a: A): A = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    oos.writeObject(a)
    oos.close() // close (and flush) the output stream before reading the serialized bytes back
    val bais = new ByteArrayInputStream(baos.toByteArray)
    val ois = new ObjectInputStream(bais)
    val newA: A = ois.readObject().asInstanceOf[A]
    ois.close()
    newA
  }
} 
Author: deaktator, Project: pops, Lines: 19, Source: SerializabilityTest.scala


Example 14: EmbeddedMap

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.expressions.UnsafeMapData
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedMapType])
case class EmbeddedMap(elements: Map[Any, Any]) extends Serializable {
  override def hashCode(): Int = 1

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedMap => that.elements == this.elements
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedMapType extends UserDefinedType[EmbeddedMap] {

  override def sqlType: DataType = MapType(StringType, StringType)

  override def serialize(obj: EmbeddedMap): Any = {
    ArrayBasedMapData(obj.elements.keySet.map{ elem =>
      val outKey = new ByteArrayOutputStream()
      val osKey = new ObjectOutputStream(outKey)
      osKey.writeObject(elem)
      UTF8String.fromBytes(outKey.toByteArray)
    }.toArray,
      obj.elements.values.map{ elem =>
        val outValue = new ByteArrayOutputStream()
        val osValue = new ObjectOutputStream(outValue)
        osValue.writeObject(elem)
        UTF8String.fromBytes(outValue.toByteArray)
      }.toArray)
  }

  override def deserialize(datum: Any): EmbeddedMap = {
    datum match {
      case values: UnsafeMapData =>
        new EmbeddedMap(values.keyArray().toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        }.zip(values.valueArray().toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        }).toMap)
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedMap] = classOf[EmbeddedMap]
}

object EmbeddedMapType extends EmbeddedMapType 
Author: orientechnologies, Project: spark-orientdb, Lines: 61, Source: EmbeddedMapType.scala


Example 15: EmbeddedSet

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedSetType])
case class EmbeddedSet(elements: Array[Any]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedSet => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedSetType extends UserDefinedType[EmbeddedSet] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: EmbeddedSet): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): EmbeddedSet = {
    datum match {
      case values: ArrayData =>
        new EmbeddedSet(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedSet] = classOf[EmbeddedSet]
}

object EmbeddedSetType extends EmbeddedSetType 
Author: orientechnologies, Project: spark-orientdb, Lines: 60, Source: EmbeddedSetType.scala


Example 16: LinkSet

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import com.orientechnologies.orient.core.record.ORecord
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[LinkSetType])
case class LinkSet(elements: Array[_ <: ORecord]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: LinkSet => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class LinkSetType extends UserDefinedType[LinkSet] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: LinkSet): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): LinkSet = {
    datum match {
      case values: ArrayData =>
        new LinkSet(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().asInstanceOf[ORecord]
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[LinkSet] = classOf[LinkSet]
}

object LinkSetType extends LinkSetType 
Author: orientechnologies, Project: spark-orientdb, Lines: 61, Source: LinkSetType.scala


Example 17: LinkMap

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import com.orientechnologies.orient.core.record.ORecord
import org.apache.spark.sql.catalyst.expressions.UnsafeMapData
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[LinkMapType])
case class LinkMap(elements: Map[String, _ <: ORecord]) extends Serializable {
  override def hashCode(): Int = 1

  override def equals(other: scala.Any): Boolean = other match {
    case that: LinkMap => that.elements == this.elements
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class LinkMapType extends UserDefinedType[LinkMap] {

  override def sqlType: DataType = MapType(StringType, StringType)

  override def serialize(obj: LinkMap): Any = {
    ArrayBasedMapData(obj.elements.keySet.map{ elem =>
      val outKey = new ByteArrayOutputStream()
      val osKey = new ObjectOutputStream(outKey)
      osKey.writeObject(elem)
      UTF8String.fromBytes(outKey.toByteArray)
    }.toArray,
      obj.elements.values.map{ elem =>
        val outValue = new ByteArrayOutputStream()
        val osValue = new ObjectOutputStream(outValue)
        osValue.writeObject(elem)
        UTF8String.fromBytes(outValue.toByteArray)
      }.toArray)
  }

  override def deserialize(datum: Any): LinkMap = {
    datum match {
      case values: UnsafeMapData =>
        new LinkMap(values.keyArray().toArray[UTF8String](StringType).map { elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().toString
        }.zip(values.valueArray().toArray[UTF8String](StringType).map { elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().asInstanceOf[ORecord]
        }).toMap)
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[LinkMap] = classOf[LinkMap]
}

object LinkMapType extends LinkMapType 
Author: orientechnologies, Project: spark-orientdb, Lines: 62, Source: LinkMapType.scala


Example 18: EmbeddedList

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedListType])
case class EmbeddedList(elements: Array[Any]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedList => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedListType extends UserDefinedType[EmbeddedList] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: EmbeddedList): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): EmbeddedList = {
    datum match {
      case values: ArrayData =>
        new EmbeddedList(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedList] = classOf[EmbeddedList]
}

object EmbeddedListType extends EmbeddedListType 
Author: orientechnologies, Project: spark-orientdb, Lines: 60, Source: EmbeddedListType.scala


Example 19: LinkList

// Package declaration and imports for the required classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import com.orientechnologies.orient.core.record.ORecord
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[LinkListType])
case class LinkList(elements: Array[_ <: ORecord]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: LinkList => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class LinkListType extends UserDefinedType[LinkList] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: LinkList): Any = {
    new GenericArrayData(obj.elements.map{ elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): LinkList = {
    datum match {
      case values: ArrayData =>
        new LinkList(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().asInstanceOf[ORecord]
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[LinkList] = classOf[LinkList]
}

object LinkListType extends LinkListType 
Author: orientechnologies, Project: spark-orientdb, Lines: 61, Source: LinkListType.scala


Example 20: Configuration

// Package declaration and imports for the required classes
package com.outr.jefe.runner

import java.io.{File, FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}

import com.outr.jefe.repo._

import scala.collection.mutable.ListBuffer

case class Configuration(dependency: VersionedDependency,
                         mainClass: String,
                         args: Array[String] = Array.empty,
                         vmArgs: Array[String] = Array.empty,
                         workingDirectory: File = new File("."),
                         showDialogIfPossible: Boolean = true,
                         repositories: Repositories = Repositories(),
                         newProcess: Boolean = false)

object Configuration {
  def load(file: File): Configuration = {
    val ois = new ObjectInputStream(new GZIPInputStream(new FileInputStream(file)))
    try {
      ois.readObject().asInstanceOf[Configuration]
    } finally {
      ois.close()
    }
  }

  def save(configuration: Configuration, file: File): Unit = {
    Option(file.getParentFile).foreach(_.mkdirs())
    val oos = new ObjectOutputStream(new GZIPOutputStream(new FileOutputStream(file)))
    try {
      oos.writeObject(configuration)
    } finally {
      oos.flush()
      oos.close()
    }
  }
}

case class Repositories(list: List[Repository] = List(Ivy2.Local, Maven.Repo1, Sonatype.Releases, Sonatype.Snapshots)) {
  def withMaven(name: String, url: String): Repositories = copy(list ::: List(MavenRepository(name, url)))
} 
Author: outr, Project: jefe, Lines: 44, Source: Configuration.scala
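
A hypothetical round trip through the gzipped configuration file above. The file path and the pre-existing `config: Configuration` value are assumptions for illustration; constructing a real Configuration requires the project's VersionedDependency, which is omitted here.

// Hypothetical usage; assumes an existing `config: Configuration` instance.
val file = new java.io.File("conf/app.config.gz")
Configuration.save(config, file)      // gzip + Java serialization
val loaded = Configuration.load(file) // gunzip + readObject
assert(loaded.mainClass == config.mainClass)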



Note: The java.io.ObjectInputStream class examples in this article were collected from open-source projects hosted on GitHub, MSDocs, and similar source-code and documentation platforms. Copyright of each snippet remains with its original author; consult the corresponding project's license before distributing or reusing the code. Please do not republish without permission.

