Scala ByteArrayOutputStream Class Code Examples


This article collects typical usage examples of java.io.ByteArrayOutputStream in Scala. If you are wondering what the ByteArrayOutputStream class is for, how to use it in Scala, or simply want working examples, the curated class examples below may help.



The following presents 20 code examples of the ByteArrayOutputStream class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
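Before diving into the examples, here is a minimal, self-contained sketch of the core pattern almost all of them share: write bytes into an in-memory, growable buffer, then pull them back out with toByteArray or toString. This snippet is illustrative only; the object name ByteArrayOutputStreamBasics is made up and does not come from any of the projects below.

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets

object ByteArrayOutputStreamBasics extends App {
  // ByteArrayOutputStream keeps everything in memory and grows as needed
  val out = new ByteArrayOutputStream()
  out.write("hello ".getBytes(StandardCharsets.UTF_8))
  out.write("world".getBytes(StandardCharsets.UTF_8))

  // Extract the accumulated bytes, or decode them directly as a String
  val bytes: Array[Byte] = out.toByteArray
  println(bytes.length)                              // 11
  println(out.toString(StandardCharsets.UTF_8.name)) // hello world

  // close() is a no-op for ByteArrayOutputStream, but keeps the code
  // uniform with other OutputStream implementations
  out.close()
}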

Example 1: MQProducer

// Set the package name and import the required classes
package mq

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

import com.fasterxml.jackson.databind.ObjectMapper
import twitter4j.Status
import com.rabbitmq.client.{Channel, ConnectionFactory}



object MQProducer {

  private val EXCHANGE_TWITTER = "twitter_exchange"

  def initiateRabbitMQProducer: Channel = {
    val factory = new ConnectionFactory
    factory.setHost("localhost")
    val connection = factory.newConnection
    val channel = connection.createChannel()
    channel.exchangeDeclare(EXCHANGE_TWITTER, "topic")
    channel
  }

  def produce(channel: Channel, topic: String, status: Status): Unit = {
    println(status.getText)
    channel.basicPublish(EXCHANGE_TWITTER, topic, null, serialise(status))
  }


  def serialise(value: Any): Array[Byte] = {
    val stream: ByteArrayOutputStream = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(stream)
    oos.writeObject(value)
    oos.close()
    stream.toByteArray
  }

} 
Developer: neruti-developers, Project: neruti-demo-backend, Lines: 39, Source: MQProducer.scala
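The serialise helper in Example 1 has a natural counterpart on the consuming side. The following is only a sketch (MQConsumerSketch is a hypothetical name, not part of the neruti-demo-backend project), showing how the published bytes could be turned back into an object with ObjectInputStream:

import java.io.{ByteArrayInputStream, ObjectInputStream}

object MQConsumerSketch {
  // Mirror of MQProducer.serialise: rebuild the original object from raw bytes
  def deserialise[T](bytes: Array[Byte]): T = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(bytes))
    try ois.readObject().asInstanceOf[T]
    finally ois.close()
  }
}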


Example 2: Tip

// Set the package name and import the required classes
package com.alvin.niagara.model

import java.io.ByteArrayOutputStream
import java.util

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.{DecoderFactory, EncoderFactory}

import scala.collection.JavaConversions._
import scala.io.Source


case class Tip(business_id: String, date: String, likes: Long, text: String, `type`: String, user_id: String)


object TipSerde {

  val avroSchema = Source.fromInputStream(getClass.getResourceAsStream("/schema/tip.avsc")).mkString
  val schema = new Schema.Parser().parse(avroSchema)

  val reader = new GenericDatumReader[GenericRecord](schema)
  val writer = new GenericDatumWriter[GenericRecord](schema)

  def serialize(tip: Tip): Array[Byte] = {

    val out = new ByteArrayOutputStream()
    val encoder = EncoderFactory.get.binaryEncoder(out, null)

    val avroRecord = new GenericData.Record(schema)
    avroRecord.put("business_id", tip.business_id)
    avroRecord.put("date", tip.date)
    avroRecord.put("likes", tip.likes)
    avroRecord.put("text", tip.text)
    avroRecord.put("type", tip.`type`)
    avroRecord.put("user_id", tip.user_id)

    writer.write(avroRecord, encoder)
    encoder.flush()
    out.close()
    out.toByteArray

  }

  def deserialize(bytes: Array[Byte]): Tip = {

    val decoder = DecoderFactory.get.binaryDecoder(bytes, null)
    val record = reader.read(null, decoder)

    Tip(
      record.get("business_id").toString,
      record.get("date").toString,
      record.get("likes").asInstanceOf[Long],
      record.get("text").toString,
      record.get("type").toString,
      record.get("user_id").toString
    )
  }
} 
Developer: AlvinCJin, Project: Niagara, Lines: 60, Source: Tip.scala


Example 3: XML

// Set the package name and import the required classes
package org.cg.scala.dhc.util

import java.io.{ByteArrayOutputStream, File, FileInputStream}

import scala.util.matching.Regex
import scala.xml.Elem
import scala.xml.factory.XMLLoader
import javax.xml.parsers.SAXParser

object XML extends XMLLoader[Elem] {
  override def parser: SAXParser = {
    val f = javax.xml.parsers.SAXParserFactory.newInstance()
    f.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
    f.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    f.newSAXParser()
  }
}

object FileUtil {

  def recursiveListFileInfos(baseDir: String, regex: String): Array[FileInfo] =
    recursiveListFiles(baseDir, regex).map(f => new FileInfo(f.getName, f.getCanonicalPath, f.lastModified()))

  def recursiveListFiles(baseDir: String, regex: String): Array[File] = recursiveListFiles(new File(baseDir), new Regex(regex))

  def recursiveListFiles(f: File, r: Regex): Array[File] = {
    val these = f.listFiles
    if (these != null) {
      val good = these.filter(f => r.findFirstIn(f.getName).isDefined)
      good ++ these.filter(_.isDirectory).flatMap(recursiveListFiles(_, r))
    }
    else
      new Array[File](0)
  }

  def fileToString(file: File) = {
    val inStream = new FileInputStream(file)
    val outStream = new ByteArrayOutputStream
    try {
      var reading = true
      while (reading) {
        inStream.read() match {
          case -1 => reading = false
          case c => outStream.write(c)
        }
      }
      outStream.flush()
    }
    finally {
      inStream.close()
    }
    new String(outStream.toByteArray())
  }

} 
Developer: curiosag, Project: datahubchecker, Lines: 56, Source: FileUtil.scala


Example 4: Codec

// Set the package name and import the required classes
package at.hazm.quebic

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}

import scala.annotation.tailrec

sealed abstract class Codec(val id:Byte, val name:String) extends Type {
  def encode(buffer:Array[Byte]):Array[Byte]

  def decode(buffer:Array[Byte]):Array[Byte]
}

object Codec {
  val values:Seq[Codec] = Seq(PLAIN, GZIP)
  private[this] val valuesMap = values.groupBy(_.id).mapValues(_.head)

  def valueOf(id:Byte):Codec = valuesMap(id)

  case object PLAIN extends Codec(0, "plain") {
    def encode(buffer:Array[Byte]):Array[Byte] = buffer

    def decode(buffer:Array[Byte]):Array[Byte] = buffer
  }

  case object GZIP extends Codec(1, "gzip") {
    def encode(buffer:Array[Byte]):Array[Byte] = {
      val baos = new ByteArrayOutputStream()
      val out = new GZIPOutputStream(baos)
      out.write(buffer)
      out.finish()
      baos.toByteArray
    }

    def decode(buffer:Array[Byte]):Array[Byte] = {
      val in = new GZIPInputStream(new ByteArrayInputStream(buffer))
      val out = new ByteArrayOutputStream()
      _copy(in, out, new Array[Byte](2014))
      out.close()
      out.toByteArray
    }
  }

  @tailrec
  private[Codec] def _copy(in:InputStream, out:OutputStream, buffer:Array[Byte]):Unit = {
    val len = in.read(buffer)
    if(len > 0) {
      out.write(buffer, 0, len)
      _copy(in, out, buffer)
    }
  }

} 
Developer: torao, Project: quebic, Lines: 55, Source: Codec.scala
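A quick usage note for Example 4: PLAIN is the identity codec, while GZIP compresses on encode and inflates on decode, so both should round-trip any byte array unchanged. The check below is a hypothetical sketch (CodecRoundTrip is a made-up name) assuming the Codec object above is on the classpath:

import at.hazm.quebic.Codec

object CodecRoundTrip extends App {
  val payload = ("some repetitive payload " * 100).getBytes("UTF-8")

  // PLAIN returns the input untouched
  assert(Codec.PLAIN.decode(Codec.PLAIN.encode(payload)).sameElements(payload))

  // GZIP should shrink repetitive data and restore it exactly
  val compressed = Codec.GZIP.encode(payload)
  assert(compressed.length < payload.length)
  assert(Codec.GZIP.decode(compressed).sameElements(payload))
}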


Example 5: AvroNodeSerializer

// Set the package name and import the required classes
package eventgen.launcher.core.avro

import java.io.ByteArrayOutputStream

import eventgen.launcher.core.NodeSerializer
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}
import org.apache.avro.io.EncoderFactory


class AvroNodeSerializer extends NodeSerializer[Schema, AvroNode[_], ByteArrayOutputStream] {
  override def serialize(metadata: Schema, node: AvroNode[_]): ByteArrayOutputStream = {
    val record = node.asInstanceOf[AvroRecord]
    val writer = new GenericDatumWriter[GenericRecord]
    writer.setSchema(metadata)
    val outputStream = new ByteArrayOutputStream
    val encoder = EncoderFactory.get().jsonEncoder(metadata, outputStream, true)
    writer.write(record.value, encoder)
    encoder.flush()
    outputStream
  }
} 
Developer: gpulse, Project: eventgenerator, Lines: 23, Source: AvroNodeSerializer.scala


Example 6: encode

// Set the package name and import the required classes
package game_of_life.util

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

trait Codec {

  def encode(obj: AnyRef): Array[Byte]

  def decode[A >: Null](array: Array[Byte]): A
}

trait SerializationCodec extends Codec {

  def encode(obj: AnyRef): Array[Byte] = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    try {
      oos writeObject obj
    } finally {
      oos close ()
    }
    baos.toByteArray
  }

  def decode[A >: Null](array: Array[Byte]): A = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(array))
    try {
      (ois readObject ()).asInstanceOf[A]
    } finally {
      ois close ()
    }
  }
}

object SerializationCodec extends SerializationCodec 
Developer: christian-schlichtherle, Project: akka-game-of-life, Lines: 36, Source: Codec.scala


Example 7: ByteCodeExecutor

// Set the package name and import the required classes
package org.arnoldc

import java.io.{PrintStream, ByteArrayOutputStream}

class ByteCodeExecutor extends ClassLoader {
  val originalOutputStream = System.out

  def getOutput(bytecode: Array[Byte], className: String): String = {

    val outputRedirectionStream = new ByteArrayOutputStream()

    System.setOut(new PrintStream(outputRedirectionStream))

    invokeMainMethod(bytecode, className)
    System.setOut(originalOutputStream)
    outputRedirectionStream.toString
  }

  def invokeMainMethod(bytecode: Array[Byte], className: String) = {
    val template = new ByteCodeExecutor()
    val testClass = template.defineClass(className, bytecode, 0, bytecode.length)
    val testInstance = testClass.newInstance().asInstanceOf[ {def main(test: Array[String])}]
    testInstance.main(null)
  }
} 
Developer: derlorenz, Project: VongSprech, Lines: 26, Source: ByteCodeExecutor.scala


Example 8: SerializableSerializerTest

// Set the package name and import the required classes
package org.hammerlab.hadoop.kryo

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import org.hammerlab.test.Suite

class SerializableSerializerTest
  extends Suite {
  test("serde") {
    val kryo = new Kryo()
    kryo.setRegistrationRequired(true)
    val baos = new ByteArrayOutputStream()
    val output = new Output(baos)

    val foo = new Foo
    foo.n = 123
    foo.s = "abc"

    intercept[IllegalArgumentException] {
      kryo.writeClassAndObject(output, foo)
    }
    .getMessage should startWith("Class is not registered: org.hammerlab.hadoop.kryo.Foo")

    kryo.register(classOf[Foo], SerializableSerializer[Foo]())

    kryo.writeClassAndObject(output, foo)

    output.close()

    val bytes = baos.toByteArray
    bytes.length should be(93)

    val bais = new ByteArrayInputStream(bytes)

    val input = new Input(bais)
    val after = kryo.readClassAndObject(input).asInstanceOf[Foo]

    after.n should be(foo.n)
    after.s should be(foo.s)
  }
}

class Foo
  extends Serializable {

  var n = 0
  var s = ""

  private def writeObject(out: ObjectOutputStream): Unit = {
    out.writeInt(n)
    out.writeUTF(s)
  }

  private def readObject(in: ObjectInputStream): Unit = {
    n = in.readInt()
    s = in.readUTF()
  }
} 
Developer: hammerlab, Project: spark-util, Lines: 61, Source: SerializableSerializerTest.scala


Example 9: streamToBytes

// Set the package name and import the required classes
package akka.persistence

import java.io.{ ByteArrayOutputStream, InputStream }

package object serialization {
  
  def streamToBytes(inputStream: InputStream): Array[Byte] = {
    val len = 16384
    val buf = Array.ofDim[Byte](len)
    val out = new ByteArrayOutputStream

    @scala.annotation.tailrec
    def copy(): Array[Byte] = {
      val n = inputStream.read(buf, 0, len)
      if (n != -1) { out.write(buf, 0, n); copy() } else out.toByteArray
    }

    copy()
  }
} 
Developer: love1314sea, Project: akka-2.3.16, Lines: 21, Source: package.scala


Example 10: PacketKeySerializer

// Set the package name and import the required classes
package edu.uw.at.iroberts.wirefugue.kafka.serdes

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.util

import org.apache.kafka.common.serialization._


class PacketKeySerializer extends Serializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def serialize(topic: String, data: PacketKey): Array[Byte] = {
    val bs = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bs)
    oos.writeObject(data)
    // close() flushes ObjectOutputStream's internal buffer so all bytes reach bs
    oos.close()

    bs.toByteArray
  }
}

class PacketKeyDeserializer extends Deserializer[PacketKey] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()

  override def deserialize(topic: String, data: Array[Byte]): PacketKey = {
    val bs = new ByteArrayInputStream(data)
    val ois = new ObjectInputStream(bs)

    ois.readObject().asInstanceOf[PacketKey]
  }
}

class PacketKeySerde extends Serde[PacketKey] {
  val serializer = new PacketKeySerializer
  val deserializer = new PacketKeyDeserializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    serializer.configure(configs, isKey)
    deserializer.configure(configs, isKey)
  }
  def close(): Unit = {
    serializer.close()
    deserializer.close()
  }
} 
Developer: robertson-tech, Project: wirefugue, Lines: 47, Source: PacketKeySerializer.scala


Example 11: Barcodes

// Set the package name and import the required classes
package controllers

import play.api.mvc.{Action, Controller}

class Barcodes extends Controller {
  val ImageResolution = 144

  // Action that renders an EAN-13 barcode for the given EAN as a PNG image
  def barcode(ean: Long) = Action {
      import java.lang.IllegalArgumentException

      val MimeType = "image/png"
      try {
        val imageData = ean13BarCode(ean, MimeType)
        Ok(imageData).as(MimeType)
      }
      catch {
        case e: IllegalArgumentException =>
          BadRequest("Couldn't generate bar code. Error: " + e.getMessage)
      }
  }

  def ean13BarCode(ean: Long, mimeType: String): Array[Byte] = {
    import java.io.ByteArrayOutputStream
    import java.awt.image.BufferedImage
    import org.krysalis.barcode4j.output.bitmap.BitmapCanvasProvider
    import org.krysalis.barcode4j.impl.upcean.EAN13Bean

    val output: ByteArrayOutputStream = new ByteArrayOutputStream
    val canvas: BitmapCanvasProvider = new BitmapCanvasProvider(
      output, mimeType, ImageResolution, BufferedImage.TYPE_BYTE_BINARY, false, 0)

    val barcode = new EAN13Bean()
    barcode.generateBarcode(canvas, String valueOf ean)
    canvas.finish()

    output.toByteArray
  }
} 
Developer: makeajourney, Project: play-products-example, Lines: 40, Source: Barcodes.scala


Example 12: BarcoderTest

// Set the package name and import the required classes
package kokellab.utils.misc

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import com.google.zxing.BarcodeFormat
import org.scalacheck.Gen
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks}
import org.scalatest.{Matchers, PropSpec}

class BarcoderTest extends PropSpec with GeneratorDrivenPropertyChecks with Matchers {

		def fakeEncodeDecode(text: String, barcodeFormat: BarcodeFormat, dimensions: (Int, Int), imageFormat: String): String =
			if (text.isEmpty) text else encodeDecode(text.toUpperCase, barcodeFormat, dimensions, imageFormat)

		def genBoundedList[T](maxSize: Int, gen: Gen[T]): Gen[List[T]] =
			Gen.choose(0, maxSize) flatMap (size => Gen.listOfN(size, gen))

		def genBoundedString(maxSize: Int, gen: Gen[Char]): Gen[String] =
			Gen.choose(0, maxSize) flatMap (size => Gen.listOfN(size, gen) map (_.mkString))

		def encodeDecode(text: String, codeFormat: BarcodeFormat, dimensions: (Int, Int), imageFormat: String): String = {
			val barcoder = new Barcoder(codeFormat, imageFormat, dimensions._1, dimensions._2)
			val os = new ByteArrayOutputStream()
			barcoder.encode(text, os)
			val is = new ByteArrayInputStream(os.toByteArray)
			barcoder.decode(is)
		}

		val imageFormatGen = Gen.oneOf("png", "jpg", "gif")
		def test(barcodeFormat: BarcodeFormat, dimensionsGen: Gen[(Int, Int)], stringGen: Gen[String]) = {
			property(s"Decoding an encoded string should yield the original string for ${barcodeFormat.name} codes") {
				forAll(imageFormatGen, stringGen, dimensionsGen) { (imageFormat: String, text: String, dimensions: (Int, Int)) =>
					fakeEncodeDecode(text, barcodeFormat, dimensions, imageFormat) should equal (text.toUpperCase)
				}
			}
		}

		val rectangularGen: Gen[(Int, Int)] = for {
			width <- Gen.choose(20, 100)
			height <- Gen.choose(20, 100)
		} yield (width, height)

		val squareGen: Gen[(Int, Int)] = for {
			size <- Gen.choose(20, 100)
		} yield (size, size)

		val code39And93Gen: Gen[String] = genBoundedString(48, Gen.frequency((36, Gen.alphaNumChar), (7, Gen.oneOf('-', '.', '$', '/', '+', '%', ' '))))
		test(BarcodeFormat.CODE_39, rectangularGen, code39And93Gen)
		test(BarcodeFormat.CODE_93, rectangularGen, code39And93Gen)

		// TODO this fails due to https://github.com/zxing/zxing/issues/716
		// there's nothing I can do now
//		test(BarcodeFormat.CODE_128, rectangularGen, genBoundedString(48, Gen.choose[Char](0x20, 127)))

		// TODO QR codes break; also not my fault
//		test(BarcodeFormat.QR_CODE, squareGen, genBoundedString(4296, Gen.frequency((36, Gen.alphaNumChar), (8, Gen.oneOf('-', '.', '$', '/', '+', '%', ' ', ':')))))

} 
Developer: kokellab, Project: kl-common-scala, Lines: 59, Source: BarcoderTest.scala


Example 13: HandlerSpec

// Set the package name and import the required classes
import java.io.{ByteArrayOutputStream, ByteArrayInputStream}

import org.scalatest._

class HandlerSpec extends FreeSpec with Matchers {

  "derived class should delegate and write result to output stream" - {
    val input = new ByteArrayInputStream("""{"input": 3}""".getBytes("UTF-8"))
    val output = new ByteArrayOutputStream()
    new MockHandler().handler(input, output)
    output.toString("UTF-8") shouldBe """{"output":6}"""
  }

}

class MockHandler extends Handler(MockHandler.double)

object MockHandler {
  case class TestInput(input: Int)
  case class TestOutput(output: Int)
  def double(input: TestInput): TestOutput = TestOutput(input.input * 2)
} 
Developer: yamitzky, Project: Scala-Lambda-Apex-Kuromoji, Lines: 23, Source: HandlerSpec.scala


Example 14: SeqSerialiser

// Set the package name and import the required classes
package serialisation

import java.io.{ByteArrayOutputStream, ObjectOutputStream}
import java.util

import org.apache.kafka.common.serialization.Serializer

class SeqSerialiser[ELEMENT] extends Serializer[Seq[ELEMENT]] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
  }

  override def serialize(topic: String, data: Seq[ELEMENT]): Array[Byte] = {
    val byteStream = new ByteArrayOutputStream()
    val objectStream = new ObjectOutputStream(byteStream)
    data.foreach(objectStream.writeObject(_))
    objectStream.close()
    byteStream.toByteArray
  }

  override def close(): Unit = {
  }
} 
Developer: benwheeler, Project: kafka-streams-poc, Lines: 24, Source: SeqSerialiser.scala


Example 15: IntPrinterSpec

// Set the package name and import the required classes
import java.io.ByteArrayOutputStream

import org.scalatest.{FlatSpec, Matchers}




class IntPrinterSpec extends FlatSpec with Matchers {
  "IntPrinter" should "print string using while loop" in {
    val stream = new ByteArrayOutputStream()
    val typeIntPrinter = new IntPrinter(stream)
    typeIntPrinter.printSequenceUsingWhileLoop(Seq(1, 2, 3))
    stream.toString should equal ("123")
  }

  "IntPrinter" should "print string using foreach & lambda" in {
    val stream = new ByteArrayOutputStream()
    val typeIntPrinter = new IntPrinter(stream)
    typeIntPrinter.printSequenceUsingFunctionLiteralInForeach(Seq(1, 2, 3))
    stream.toString should equal ("123")
  }

  "IntPrinter" should "print string using foreach withoutLambda" in {
    val stream = new ByteArrayOutputStream()
    val typeIntPrinter = new IntPrinter(stream)
    typeIntPrinter.printSequenceUsingPartiallyAppliedFunctionInForeach(Seq(1, 2, 3))
    stream.toString should equal ("123")
  }

  "IntPrinter" should "print string using for loop" in {
    val stream = new ByteArrayOutputStream()
    val typeIntPrinter = new IntPrinter(stream)
    typeIntPrinter.printSequenceUsingForLoop(Seq(1, 2, 3))
    stream.toString should equal ("123")
  }
} 
Developer: szymonlyszkowski, Project: learning-scala, Lines: 37, Source: IntPrinterSpec.scala


Example 16: Avro

// Set the package name and import the required classes
package com.lukecycon.avro

import java.io.ByteArrayOutputStream
import org.apache.avro.io.EncoderFactory
import org.apache.avro.file.BZip2Codec
import java.nio.ByteBuffer
import org.apache.avro.io.DecoderFactory

object Avro {
  def schemaFor[T: AvroFormat] = implicitly[AvroFormat[T]].schema

  def write[T: AvroFormat](thing: T, compress: Boolean = false): Array[Byte] = {
    val out = new ByteArrayOutputStream
    val encoder = EncoderFactory.get.binaryEncoder(out, null)

    implicitly[AvroFormat[T]].writeValue(thing, encoder)

    encoder.flush()

    if (compress) {
      new BZip2Codec().compress(ByteBuffer.wrap(out.toByteArray)).array
    } else {
      out.toByteArray
    }
  }

  def writeHex[T: AvroFormat](thing: T): String =
    byteArrayToHexString(write(thing))

  def read[T: AvroFormat](bytes: Array[Byte],
                          compressed: Boolean = false): Either[String, T] = {
    val byts = if (compressed) {
      new BZip2Codec().decompress(ByteBuffer.wrap(bytes)).array
    } else {
      bytes
    }

    val decoder = DecoderFactory.get.binaryDecoder(byts, null)

    implicitly[AvroFormat[T]].decodeValue(Nil, decoder)
  }

  def readHex[T: AvroFormat](hex: String): Either[String, T] =
    read(
        hex
          .replace(" ", "")
          .grouped(2)
          .map(Integer.parseInt(_, 16).toByte)
          .toArray)

  private def byteArrayToHexString(bb: Array[Byte]): String =
    bb.map("%02X" format _).mkString.grouped(2).mkString(" ")
} 
Developer: themattchan, Project: Skaro, Lines: 54, Source: Avro.scala


Example 17: MockStream

// Set the package name and import the required classes
package ppl.delite.framework.analysis

import java.io.{PrintWriter, ByteArrayOutputStream}
import scala.virtualization.lms.internal._
import ppl.delite.framework.DeliteApplication
import scala.virtualization.lms.util.OverloadHack

class MockStream extends ByteArrayOutputStream { 
   override def flush() {}
   override def close() {}
   def print(line: String) {}
}

//TODO: a lot of this is deprecated with the new LMS traversal framework
trait TraversalAnalysis extends GenericFatCodegen with OverloadHack {
  val IR: Expressions with Effects with FatExpressions
  import IR._
  implicit val mockStream: PrintWriter = new PrintWriter(new MockStream())
  val className: String
  var _result: Option[Any] = None

  def traverseNode(sym: Sym[Any], a: Def[Any]) = withStream(mockStream)(emitNode(sym, a))
  def traverseBlock(b: Block[Any])(implicit o: Overloaded1) = withStream(mockStream)(emitBlock(b))
  def traverse[A:Manifest,B:Manifest](f: Exp[A] => Exp[B]) = { emitSource(f, className, mockStream); result }
  def emitValDef(sym: Sym[Any], rhs: String) {}
  def emitAssignment(lhs: String, rhs: String) {}
  def result: Option[Any] = _result
  
  def emitSource[A : Manifest](args: List[Sym[_]], body: Block[A], className: String, stream: PrintWriter): List[(Sym[Any], Any)] = {
    traverseBlock(body)
    Nil
  }
} 
Developer: leratojeffrey, Project: OptiSDR-Compiler, Lines: 34, Source: TraversalAnalysis.scala


Example 18: AvroUtils

// Set the package name and import the required classes
package pulse.kafka.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File}

import com.twitter.util.Future
import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.DecoderFactory
import pulse.kafka.extensions.managedByteArrayInputStream
import pulse.kafka.extensions.managedByteArrayOutputStream
import pulse.kafka.extensions.catsStdInstancesForFuture
import scala.concurrent.ExecutionContext.Implicits._

object AvroUtils {

  import pulse.common.syntax._


  def jsonToAvroBytes(json: String, schemaFile: File): Future[Array[Byte]] =
    use(new ByteArrayOutputStream()) { output =>
      for {
        s <- loadSchema(schemaFile)
        _ <- convertImpl(json, output, s)
      } yield output.toByteArray
    }

  private def convertImpl(json: String, output: ByteArrayOutputStream, schemaSpec: Schema): Future[GenericDatumReader[GenericRecord]] =
    use(new ByteArrayInputStream(json.getBytes)) { input =>
      for {
        w <- getWriter(output, schemaSpec)
        r <- getReader(input, schemaSpec, w)
      } yield r
    }

  def getReader(input: ByteArrayInputStream, schemaSpec: Schema, w: DataFileWriter[GenericRecord]) = Future.value {
    val reader = new GenericDatumReader[GenericRecord](schemaSpec)
    val datum = reader.read(null, getJsonDecoder(input, schemaSpec))
    w.append(datum)
    w.flush()
    reader
  }

  private def getJsonDecoder(input: ByteArrayInputStream, schema: Schema) =
    DecoderFactory.get.jsonDecoder(schema, new DataInputStream(input))

  private def getWriter(output: ByteArrayOutputStream, schemaSpec: Schema) = {
    Future.value {
      val writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
      writer.create(schemaSpec, output)
    }
  }

  private def loadSchema(schemaFile: File): Future[Schema] =
    Future {
      new Schema.Parser().parse(schemaFile)
    }
} 
Developer: gpulse, Project: kafka, Lines: 60, Source: AvroUtils.scala


Example 19: managedByteArrayOutputStream

// Set the package name and import the required classes
package pulse.kafka.extensions

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import pulse.common.Managed
import pulse.kafka.client.producer.EnvelopeProducer

trait Resources {
  implicit def managedByteArrayOutputStream = new Managed[ByteArrayOutputStream] {
    override def close(instance: ByteArrayOutputStream): Unit = instance.close()
  }

  implicit def managedEnvelopeProducer = new Managed[EnvelopeProducer] {
    override def close(instance: EnvelopeProducer): Unit = instance.close()
  }

  implicit def managedByteArrayInputStream = new Managed[ByteArrayInputStream] {
    override def close(instance: ByteArrayInputStream): Unit = instance.close()
  }
} 
Developer: gpulse, Project: kafka, Lines: 21, Source: Resources.scala


Example 20: ConsoleProgressBarTest

// Set the package name and import the required classes
package hu.ssh.progressbar.console

import java.io.{ByteArrayOutputStream, PrintStream}

import com.google.common.base.{Splitter, Strings}
import com.google.common.collect.Iterables
import org.scalatest.{FlatSpec, Matchers}


class ConsoleProgressBarTest extends FlatSpec with Matchers {
  "ProgressBar" should "output as expected" in {
    val outputstream = new ByteArrayOutputStream
    try {
      val progressBar = ConsoleProgressBar.on(new PrintStream(outputstream)).withFormat(":percent")
      progressBar.tick(0)
      assert(getLastOutput(outputstream.toString) == "  0.00")
      progressBar.tick(25)
      assert(getLastOutput(outputstream.toString) == " 25.00")
      progressBar.tick(30)
      assert(getLastOutput(outputstream.toString) == " 55.00")
      progressBar.tick(44)
      assert(getLastOutput(outputstream.toString) == " 99.00")
      progressBar.tickOne()
      assert(getLastOutput(outputstream.toString) == "100.00")
    } finally outputstream.close()
  }

  private def getLastOutput(string: String): String = {
    if (Strings.isNullOrEmpty(string)) return string
    val outputs = Splitter.on(ConsoleProgressBar.CARRIAGE_RETURN).omitEmptyStrings.split(string)
    Iterables.getLast(outputs)
  }
} 
Developer: arguslab, Project: Argus-SAF, Lines: 34, Source: ConsoleProgressBarTest.scala



Note: The java.io.ByteArrayOutputStream examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please refer to each project's license before redistributing or using the code. Do not reproduce this article without permission.

