Scala BufferedOutputStream Class Code Examples


This article collects typical usage examples of java.io.BufferedOutputStream in Scala. If you are wondering how the BufferedOutputStream class is used in Scala, or are looking for concrete examples of it in real code, the curated snippets below should help.



A total of 16 code examples of the BufferedOutputStream class are shown below, ordered by popularity by default.
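Before the collected examples, here is a minimal standalone sketch of the pattern most of them share: wrap a FileOutputStream in a BufferedOutputStream, write a byte array, and close the stream in a finally block so the buffer is flushed. The object name and file name are illustrative only.

import java.io.{BufferedOutputStream, FileOutputStream}

object BufferedWriteSketch extends App {
  val bytes: Array[Byte] = "hello, buffered world".getBytes("UTF-8")

  // Buffer writes in memory before they reach the underlying file stream
  val out = new BufferedOutputStream(new FileOutputStream("example.bin"))
  try {
    out.write(bytes) // a single write of the whole array
    out.flush()      // push any buffered bytes to the file
  } finally {
    out.close()      // close() also flushes, but the explicit flush() makes the intent clear
  }
}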

Example 1: Frontend

// Package declaration and imported dependencies
package compiler
import java.io.{BufferedOutputStream, FileOutputStream}

object Frontend extends App {
  if (args.length != 2) {
    println(
      """
        |Wrong number of arguments:
        |Arguments should be <sourceFile.pas> <outputClassName>
      """.stripMargin)
    System.exit(1)
  }
  val filePath = args(0)
  val className = args(1)

  Compiler.compileFile(filePath, className) match {
    case Left(errors) => println("Compilation errors:"); errors.foreach(println)
    case Right(bytes) =>
      val outputStream = new BufferedOutputStream(new FileOutputStream(s"$className.class"))
      outputStream.write(bytes)
      outputStream.close()
  }
} 
Author: darthorimar, Project: pascalJvm, Lines: 24, Source: Frontend.scala


Example 2: CodeGeneratorTest

// Package declaration and imported dependencies
import java.io.{BufferedOutputStream, FileOutputStream}

import compiler.codeGenerator.CodeGenerator
import compiler.lexer.Lexer
import compiler.parser.Parser
import org.scalatest._


class CodeGeneratorTest extends FlatSpec with Matchers {

  private def tokenizeAndParse(code: String) = {
    val tokens = Lexer(code).right.get
    Parser(tokens)
  }

  "Code generator" should "work fine" in {
    val code =
      """
        |program;
        |  var i,b: Integer;
        |begin
        | for i := 100 downto 1 do
        |  begin
        |    b :=i + 10;
        |  end;
        |end.""".stripMargin

    tokenizeAndParse(code) match {
      case Left(errors) => println(errors)
      case Right(ast) =>
        val bytes = CodeGenerator(ast, "Main")

        val bos = new BufferedOutputStream(new FileOutputStream("Main.class"))
        bos.write(bytes)
        bos.close()
    }

  }


} 
Author: darthorimar, Project: pascalJvm, Lines: 43, Source: CodeGeneratorTest.scala


Example 3: store

// Package declaration and imported dependencies
package ru.fediq.scrapingkit.backend

import java.io.{BufferedOutputStream, FileOutputStream, PrintWriter}

import ru.fediq.scrapingkit.scraper.ScrapedEntity
import ru.fediq.scrapingkit.util.Utilities

import scala.concurrent.Future

trait FeedExporter extends AutoCloseable {
  def store[T <: ScrapedEntity](entity: T): Future[_]

  override def close() = {
    // Do nothing
  }
}

class NoOpFeedExporter extends FeedExporter {
  override def store[T <: ScrapedEntity](entity: T) = {
    Future.successful(())
  }
}

class JsonLinesFeedExporter(
  path: String
) extends FeedExporter {
  val writer = new PrintWriter(new BufferedOutputStream(new FileOutputStream(path, true)))

  implicit val dispatcher = Utilities.singleDaemonDispatcher("feed-exporter")

  override def store[T <: ScrapedEntity](entity: T) = Future {
    writer.println(entity.dump)
  }

  override def close() = {
    writer.close()
  }
} 
Author: fediq, Project: scraping-kit, Lines: 39, Source: FeedExporter.scala


Example 4: write

// Package declaration and imported dependencies
package applicant.nlp

import java.io.{BufferedOutputStream, File, FileOutputStream, InputStream}
import java.nio.charset.Charset
import java.util.Collections

import opennlp.tools.namefind.{NameFinderME, NameSampleDataStream, NameSampleTypeFilter, TokenNameFinderFactory,
    TokenNameFinderModel}
import opennlp.tools.util.{PlainTextByLineStream, TrainingParameters}


    def write(file: File) {
        if (model != null) {
            // Write out model
            var modelOut: BufferedOutputStream = null
            try {
                modelOut = new BufferedOutputStream(new FileOutputStream(file))
                model.serialize(modelOut)
            }
            finally {
                if (modelOut != null) {
                    modelOut.close()
                }
            }
        }
    }
} 
Author: dataworks, Project: internship-2016, Lines: 28, Source: EntityModel.scala


Example 5: unzip

// Package declaration and imported dependencies
import java.io.{BufferedOutputStream, FileOutputStream}
import java.util.zip._


  def unzip(path: String): Unit = {
    val files = zipFile.entries()
    while(files.hasMoreElements) {
      val zipData = files.nextElement()
      val data: Array[Byte] = new Array[Byte](1024)
      val stream = zipFile.getInputStream(zipData)
      val output = new BufferedOutputStream(new FileOutputStream(path + zipData.getName))
      Stream
        .continually(stream.read(data))
        .takeWhile(-1 !=)
        .foreach(read => output.write(data, 0, read))
      output.close()
    }
  }
} 
Author: Hnatekmar, Project: jdf2gtfs, Lines: 20, Source: Unzipper.scala


Example 6: AvroFileWriter

// Package declaration and imported dependencies
package com.landoop.avro

import java.io.{BufferedOutputStream, File, FileOutputStream}

import com.landoop.avro.codec.CodecFactory
import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.GenericRecord

object AvroFileWriter {
  def fastWrite(file: File,
                count: Int,
                parallelization: Int,
                schema: Schema,
                records: IndexedSeq[GenericRecord]) = {
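    // Use a 4 MiB buffer between the Avro writer and the file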
    val out = new BufferedOutputStream(new FileOutputStream(file), 4 * 1048576)

    import org.apache.avro.generic.GenericDatumWriter
    val datumWriter = new GenericDatumWriter[GenericRecord](schema)
    val builder = FastDataFileWriterBuilder(datumWriter, out, schema)
      .withCodec(CodecFactory.snappyCodec())
      .withFlushOnEveryBlock(false)
      .withParallelization(parallelization)

    builder.encoderFactory.configureBufferSize(4 * 1048576)
    builder.encoderFactory.configureBlockSize(4 * 1048576)

    val fileWriter = builder.build()
    fileWriter.write(records)
    fileWriter.close()
  }

  def write(file: File,
            count: Int,
            schema: Schema,
            records: Seq[GenericRecord]) = {
    val out = new BufferedOutputStream(new FileOutputStream(file), 4 * 1048576)
    
    import org.apache.avro.generic.GenericDatumWriter
    val datumWriter = new GenericDatumWriter[GenericRecord](schema)
    val writer = new DataFileWriter(datumWriter)
      .setCodec(org.apache.avro.file.CodecFactory.snappyCodec())
      .create(schema, out)

    writer.setFlushOnEveryBlock(false)

    records.foreach(writer.append)
    writer.close()
  }
} 
Author: Landoop, Project: fast-avro-write, Lines: 51, Source: AvroFileWriter.scala


Example 7: FileAssistant

// Package declaration and imported dependencies
package com.github.cuzfrog.utils

import java.io.FileInputStream
import java.io.BufferedInputStream
import java.io.FileOutputStream
import java.io.BufferedOutputStream

private[cuzfrog] object FileAssistant {
  def bytesFromFile(path: String): Array[Byte] = {
    val bis = new BufferedInputStream(new FileInputStream(path))
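    // Read the file byte by byte until end of stream (-1) and collect the bytes into an array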
    val byteArray = Stream.continually(bis.read).takeWhile(_ != -1).map(_.toByte).toArray
    bis.close()
    byteArray
  }

  def bytesToFile(path: String, data: Array[Byte]): Unit = {
    val bos = new BufferedOutputStream(new FileOutputStream(path))
    bos.write(data)
    bos.close()
  }

  
  def pathParse(path: String): (String, String, String) = {
    val PathParser = """(.*[\\/])(.*)(\.[\d\w]*)""".r
    path match {
      case PathParser(p, fn, s) => (p, fn, s)
      case _ => throw new IllegalArgumentException("Bad file path:"+path)
    }
  }
} 
Author: cuzfrog, Project: excela, Lines: 31, Source: FileAssistant.scala


Example 8: FileAssistant

// Package declaration and imported dependencies
package com.github.cuzfrog.utils

import java.io.{BufferedInputStream, BufferedOutputStream, FileInputStream, FileOutputStream}

private[cuzfrog] object FileAssistant {
  def bytesFromFile(path: String): Array[Byte] = {
    val bis = new BufferedInputStream(new FileInputStream(path))
    val byteArray = Stream.continually(bis.read).takeWhile(_ != -1).map(_.toByte).toArray
    bis.close()
    byteArray
  }

  def bytesToFile(path: String, data: Array[Byte]): Unit = {
    val bos = new BufferedOutputStream(new FileOutputStream(path))
    bos.write(data)
    bos.close()
  }

  
  def pathParse(path: String): (String, String, String) = {
    val PathParser = """(.*[\\/])(.*)(\.[\d\w]*)""".r
    path match {
      case PathParser(p, fn, s) => (p, fn, s)
      case _ => throw new IllegalArgumentException("Bad file path:"+path)
    }
  }
} 
Author: cuzfrog, Project: excela, Lines: 28, Source: FileAssistant.scala


Example 9: obfuscate

// Package declaration and imported dependencies
package com.github.cuzfrog.utils

import java.io.{BufferedInputStream, BufferedOutputStream, FileInputStream, FileOutputStream}
import java.security.InvalidKeyException
import java.util.Random
import javax.crypto.{BadPaddingException, IllegalBlockSizeException}


  @deprecated("Drop this functionality", "0.2.0")
  def obfuscate(path: String, keys: List[Array[Byte]]): Array[Byte] = {

    val bis = new BufferedInputStream(new FileInputStream(path))
    val byteArray = Stream.continually(bis.read).takeWhile(_ != -1).map(_.toByte).toArray
    bis.close() //read the file
    val (didSucceed, decryptedString) = try {
        (true, decrypt(byteArray, keys))
      } catch {
        case e @ (_: InvalidKeyException | _: IllegalBlockSizeException) => (false, null)
      }
    val unencrypted = if (didSucceed) decryptedString //return the unencrypted data
    else {
      val encrypted = encrypt(byteArray, keys(new Random(System.currentTimeMillis()).nextInt(keys.size)))
      val bos = new BufferedOutputStream(new FileOutputStream(path))
      bos.write(encrypted)
      bos.close() //obfuscate the file/path
      byteArray //return the unencrypted data
    }
    unencrypted
  }

  private def decrypt(encrypted: Array[Byte], keys: List[Array[Byte]]): Array[Byte] = {
    keys.foreach {
      key =>
        try {
          return EncryptTool.decrypt(encrypted, key)
        } catch {
          case _: InvalidKeyException | _: BadPaddingException => //try every key.
        }
    }
    throw new InvalidKeyException("All keys have been tried, decrypt failed.")
  }

  private def encrypt(input: Array[Byte], key: Array[Byte]): Array[Byte] = try {
    EncryptTool.encrypt(input, key)
  } catch {
    case e: Throwable => throw new AssertionError("Encrypt failed, cause:" + e.toString + "|" + e.getMessage)
  }
} 
Author: cuzfrog, Project: maila, Lines: 49, Source: ObfuscateTool.scala


Example 10: UnzipUtility

// Package declaration and imported dependencies
package it.milczarek.gpwquoter.file

import java.io.{BufferedOutputStream, File, FileInputStream, FileOutputStream}
import java.util.zip.ZipInputStream


object UnzipUtility {

  val bufferSize = 4096

  def unzip(zipFilePath: File, destDirectory: String) {
    def extractFile(zipIn: ZipInputStream, filePath: String) {
      val bos = new BufferedOutputStream(new FileOutputStream(filePath))
      val bytesIn = new Array[Byte](bufferSize)
      var read = zipIn.read(bytesIn)
      while (read != -1) {
        bos.write(bytesIn, 0, read)
        read = zipIn.read(bytesIn)
      }
      bos.close()
    }

    val destDir = new File(destDirectory)
    if (!destDir.exists()) destDir.mkdir()

    val zipIn = new ZipInputStream(new FileInputStream(zipFilePath))

    var entry = zipIn.getNextEntry
    while (entry != null) {
      val filePath = destDirectory + File.separator + entry.getName
      if (!entry.isDirectory) {
        extractFile(zipIn, filePath)
      } else {
        val dir = new File(filePath)
        dir.mkdir()
      }
      zipIn.closeEntry()
      entry = zipIn.getNextEntry
    }
    zipIn.close()
  }
} 
Author: milczarekIT, Project: gpw-quoter, Lines: 43, Source: UnzipUtility.scala


Example 11: write

// Package declaration and imported dependencies
package fr.cnrs.liris.accio.core.report

import java.io.{BufferedOutputStream, FileOutputStream, PrintStream}
import java.nio.file.Path

import fr.cnrs.liris.accio.core.thrift
import fr.cnrs.liris.util.io.FileUtils


  def write(reports: Seq[thrift.Report], path: Path): Unit = {
    val reportStats = new ReportStatistics(reports)
    FileUtils.safeDelete(path)
    val out = new PrintStream(new BufferedOutputStream(new FileOutputStream(path.toFile)))
    try {
      print(reportStats, out)
    } finally {
      out.close()
    }
  }
} 
Author: privamov, Project: alp, Lines: 21, Source: ReportCreator.scala


Example 12: Dump

// Package declaration and imported dependencies
package org.monarchinitiative.clique

import java.io.File

import org.backuity.clist._
import com.bigdata.rdf.sail.BigdataSail
import com.bigdata.rdf.sail.BigdataSailRepository
import java.util.Properties
import java.io.FileReader
import java.io.FileOutputStream
import org.openrdf.query.QueryLanguage
import java.io.BufferedOutputStream
import org.openrdf.rio.turtle.TurtleWriter


object Dump extends Command(description = "Dump Blazegraph database to a Turtle RDF file.") with Common {

  var file = arg[File](description = "File name for RDF output.")

  override def run(): Unit = {
    val blazegraphProperties = new Properties()
    blazegraphProperties.load(new FileReader(properties))
    val sail = new BigdataSail(blazegraphProperties)
    val repository = new BigdataSailRepository(sail)
    repository.initialize()
    val blazegraph = repository.getUnisolatedConnection
    val triplesQuery = blazegraph.prepareGraphQuery(QueryLanguage.SPARQL, "CONSTRUCT WHERE { ?s ?p ?o . }")
    val triplesOutput = new BufferedOutputStream(new FileOutputStream(file))
    triplesQuery.evaluate(new TurtleWriter(triplesOutput))
    triplesOutput.close()
    blazegraph.close()
  }

} 
Author: balhoff, Project: clique-merge, Lines: 35, Source: Dump.scala


Example 13: RichFile

// Package declaration and imported dependencies
import java.io.BufferedOutputStream
import scala.io.Source
import java.io.File
import java.io.FileOutputStream
import scala.io.Codec
package object utils {

	
	implicit class RichFile(file: File) {
		def read() = Source.fromFile(file)(Codec.UTF8).mkString

		def write(data: String) {
			val fos = new BufferedOutputStream(new FileOutputStream(file))
			try {
				fos.write(data.getBytes("UTF-8"))
			} finally {
				fos.close
			}
		}
	}
} 
Author: SnipyJulmy, Project: MSE-AdvProg, Lines: 22, Source: package.scala


Example 14: Sentiment

// Package declaration and imported dependencies
package uk.co.pollett.flink.newsreader.nlp.classify

import opennlp.tools.doccat.{DoccatModel, DocumentCategorizerME, DocumentSampleStream}
import opennlp.tools.util.PlainTextByLineStream

class Sentiment {
  def train(): Unit = {
    val dataIn = getClass.getResourceAsStream("/sentimentdatatext")
    val lineStream = new PlainTextByLineStream(dataIn, "UTF-8")
    val sampleStream = new DocumentSampleStream(lineStream)
    // Specifies the minimum number of times a feature must be seen
    val cutoff = 2
    val trainingIterations = 30

    val model = DocumentCategorizerME.train("en", sampleStream)

    import java.io.BufferedOutputStream
    import java.io.FileOutputStream
    val modelOut = new BufferedOutputStream(new FileOutputStream("/tmp/output.bin"))
    model.serialize(modelOut)
    modelOut.close()
    println("model out")
    Thread.sleep(30000)
  }

  def categorize(text: List[String]): String = {
    def modelIn = getClass.getResourceAsStream("/en-sentiment.bin")
    def model = new DoccatModel(modelIn)
    def categorizer = new DocumentCategorizerME(model)

    def outcomes = categorizer.categorize(text.toArray)
    def category = categorizer.getBestCategory(outcomes)

    category
  }
} 
Author: pollett, Project: flink-newsreader, Lines: 36, Source: Sentiment.scala


Example 15: initFileDirectory

// Package declaration and imported dependencies
package spark.jobserver.io

import java.io.{BufferedOutputStream, File, FileOutputStream}

import org.joda.time.DateTime
import org.slf4j.LoggerFactory

trait FileCasher {

  val rootDir: String
  val rootDirFile: File

  private val logger = LoggerFactory.getLogger(getClass)

  def initFileDirectory(): Unit = {
    if (!rootDirFile.exists()) {
      if (!rootDirFile.mkdirs()) {
        throw new RuntimeException("Could not create directory " + rootDir)
      }
    }
  }

  def createBinaryName(appName: String, binaryType: BinaryType, uploadTime: DateTime): String = {
    appName + "-" + uploadTime.toString("yyyyMMdd_hhmmss_SSS") + s".${binaryType.extension}"
  }

  // Cache the jar file into local file system.
  protected def cacheBinary(appName: String,
                          binaryType: BinaryType,
                          uploadTime: DateTime,
                          binBytes: Array[Byte]) {
    val outFile =
      new File(rootDir, createBinaryName(appName, binaryType, uploadTime))
    val bos = new BufferedOutputStream(new FileOutputStream(outFile))
    try {
      logger.debug("Writing {} bytes to file {}", binBytes.length, outFile.getPath)
      bos.write(binBytes)
      bos.flush()
    } finally {
      bos.close()
    }
  }

} 
Author: nvijayap, Project: ijs, Lines: 45, Source: FileCasher.scala


Example 16: TransactionLog

// Package declaration and imported dependencies
package com.mchange.sc.v1.sbtethereum.repository

import java.io.{BufferedOutputStream,File,FileOutputStream,OutputStreamWriter,PrintWriter}
import java.util.Date
import java.text.SimpleDateFormat

import com.mchange.sc.v2.lang.borrow

import com.mchange.sc.v2.failable._

import com.mchange.sc.v1.consuela._
import com.mchange.sc.v1.consuela.ethereum.{EthHash,EthTransaction}

import scala.io.Codec

object TransactionLog {
  private val TimestampPattern = "yyyy-MM-dd'T'HH-mm-ssZ"

  lazy val File = Directory.map( dir => new java.io.File(dir, "transaction-log") )

  case class Entry( timestamp : Date, txn : EthTransaction.Signed, transactionHash : EthHash ) {
    override def toString() = {
      val ( ttype, payloadKey, payload ) = txn match {
        case m  : EthTransaction.Signed.Message          => ("Message", "data", m.data)
        case cc : EthTransaction.Signed.ContractCreation => ("ContractCreation", "init", cc.init)
      }
      val df = new SimpleDateFormat(TimestampPattern)
      val ts = df.format( timestamp )
      val first  = s"${ts}:type=${ttype},nonce=${txn.nonce.widen},gasPrice=${txn.gasPrice.widen},gasLimit=${txn.gasLimit.widen},value=${txn.value.widen},"
      val middle = if ( payload.length > 0 ) s"${payloadKey}=${payload.hex}," else ""
      val last   = s"v=${txn.v.widen},r=${txn.r.widen},s=${txn.s.widen},transactionHash=${transactionHash.bytes.hex}"
      first + middle + last
    }
  }

  def logTransaction( transaction : EthTransaction.Signed, transactionHash : EthHash ) : Unit = {
    File.flatMap { file =>
      Failable {
        val entry = TransactionLog.Entry( new Date(), transaction, transactionHash ) 
        borrow( new PrintWriter( new OutputStreamWriter( new BufferedOutputStream( new FileOutputStream( file, true ) ), Codec.UTF8.charSet ) ) )( _.println( entry ) )
      }
    }.get // Unit or vomit Exception
  }
} 
Author: swaldman, Project: sbt-ethereum, Lines: 45, Source: TransactionLog.scala



Note: The java.io.BufferedOutputStream examples in this article are compiled from source code and documentation platforms such as GitHub and MSDocs, and the snippets are drawn from open-source projects contributed by their authors. Copyright remains with the original authors; redistribution and use should follow each project's license. Do not republish without permission.

