本文整理汇总了Scala中java.nio.channels.Channels类的典型用法代码示例。如果您正苦于以下问题:Scala Channels类的具体用法?Scala Channels怎么用?Scala Channels使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Channels类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。
示例1: OnDiskHeap
//设置package包名称以及导入依赖的类
package offheap
import java.io.ObjectInputStream
import java.nio.channels.Channels
import java.io.RandomAccessFile
import java.io.ObjectOutputStream
class OnDiskHeap[T <: Node[T]](filename: String) extends Heap[T] {

  // Backing store; opened read-write so one handle serves append, write and read.
  val file = new RandomAccessFile(filename, "rw")

  /** Serializes `node` at the end of the file and returns its offset.
    *
    * At least `reserve` bytes are claimed for the record: if the serialized
    * form is shorter, the tail is zero-padded so a later in-place `write`
    * of a larger node at the same offset still fits.
    *
    * @param node    the node to serialize
    * @param reserve minimum number of bytes to reserve for this record
    * @return the file offset at which the node starts
    */
  def append(node: T, reserve: Int = 1): Long = {
    file.seek(file.length())
    val pointer = file.getFilePointer
    // Deliberately flushed but NOT closed: closing the ObjectOutputStream
    // would close the underlying channel and the RandomAccessFile with it.
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
    // Zero-pad until the record occupies at least `reserve` bytes.
    // BUG FIX: the original compared the absolute file position against
    // `reserve`, so padding only ever happened for records written near the
    // start of the file; the record size (position - pointer) is what must
    // be compared.
    var written = file.getFilePointer - pointer
    while (written < reserve) {
      file.writeByte(0)
      written += 1
    }
    pointer
  }

  /** Overwrites the node stored at `pointer` in place. The caller must have
    * reserved enough room (via `append`'s `reserve`) for the new serialized
    * form, otherwise the following record is clobbered.
    */
  def write(pointer: Long, node: T): Unit = {
    file.seek(pointer)
    val oos = new ObjectOutputStream(Channels.newOutputStream(file.getChannel))
    oos.writeObject(node)
    oos.flush()
  }

  /** Deserializes and returns the node stored at `pointer`. */
  def read(pointer: Long): T = {
    file.seek(pointer)
    val ois = new ObjectInputStream(Channels.newInputStream(file.getChannel))
    ois.readObject.asInstanceOf[T]
  }

  /** Forces all buffered writes down to the storage device. */
  def commit() = {
    file.getFD.sync()
  }
}
开发者ID:utwente-fmt,项目名称:lazy-persistent-trie,代码行数:44,代码来源:OnDiskHeap.scala
示例2: MALImage
//设置package包名称以及导入依赖的类
package me.abarrow.ScalaSubNet.mal
import java.io.File
import java.io.FileOutputStream
import java.net.URL
import org.jsoup.Jsoup
import org.jsoup.parser.Parser
import java.nio.channels.Channels
object MALImage {

  /** Downloads the main cover image of the MAL anime page for `animeID`
    * into `imagePath`.
    *
    * @param animeID   MAL anime identifier appended to the page URL prefix
    * @param imagePath destination file for the downloaded image
    * @return true when an image was found and saved, false when the page
    *         has no `img.ac` element
    */
  def saveMainImage(animeID: Int, imagePath: File): Boolean = {
    val doc = Jsoup.parse(new URL(MALURLs.MAL_ANIME_PAGE_PREFIX + animeID.toString()), 60000)
    val mainImage = doc.select("img.ac").first()
    if (mainImage == null) {
      false
    } else {
      val imgSrc = mainImage.attr("src")
      val rbc = Channels.newChannel(new URL(imgSrc).openStream())
      val fos = new FileOutputStream(imagePath)
      try {
        // BUG FIX: a single transferFrom call is not guaranteed to drain a
        // network stream (it may return after fewer bytes than requested),
        // so loop until the source reports EOF (a zero-byte transfer from a
        // blocking channel).
        val channel = fos.getChannel()
        var position = 0L
        var count = channel.transferFrom(rbc, position, Long.MaxValue)
        while (count > 0) {
          position += count
          count = channel.transferFrom(rbc, position, Long.MaxValue - position)
        }
      } finally {
        fos.close()
        rbc.close()
      }
      true
    }
  }
}
开发者ID:Abarrowman,项目名称:ScalaSubNet,代码行数:29,代码来源:MALImage.scala
示例3: FileManager
//设置package包名称以及导入依赖的类
package slide
import java.io.{File, FileOutputStream}
import java.net.{URL, URLConnection}
import java.nio.channels.{Channels, ReadableByteChannel}
class FileManager {

  // Path of the file currently being downloaded; set just before the
  // background download thread starts.
  var currentFile: String = ""
  // Number of downloads that have completed successfully.
  var numberOfDownloads: Int = 0

  /** Downloads `dlsite` to `path` on a background thread.
    *
    * Does nothing when the URL is unreachable. `onDownloadStart` runs on the
    * calling thread before the transfer begins; `onDownloadFinished` runs on
    * the worker thread after it succeeds.
    *
    * @param dlsite URL to download from
    * @param path   destination file path
    */
  def downloadFile(dlsite: String, path: String): Unit = {
    val url: URL = new URL(dlsite)
    val file: File = new File(path)
    if (isConnected(url)) {
      currentFile = path
      onDownloadStart()
      new Thread(new Runnable {
        override def run(): Unit = {
          try {
            val rbc: ReadableByteChannel = Channels.newChannel(url.openStream())
            try {
              val fos: FileOutputStream = new FileOutputStream(file)
              try {
                // BUG FIX: a single transferFrom call may stop before the
                // stream is exhausted; loop until EOF (zero-byte transfer).
                val channel = fos.getChannel
                var position = 0L
                var count = channel.transferFrom(rbc, position, Long.MaxValue)
                while (count > 0) {
                  position += count
                  count = channel.transferFrom(rbc, position, Long.MaxValue - position)
                }
              } finally {
                fos.close() // was leaked when transferFrom threw
              }
            } finally {
              rbc.close() // was never closed on any path
            }
            numberOfDownloads += 1
            onDownloadFinished()
          } catch {
            case e: Exception =>
              println("Error: Could not download ADB, please run as Administrator")
          }
        }
      }).start()
    }
  }

  /** Returns true when a connection to `site` can be opened and its content
    * fetched within a 5-second connect timeout. */
  def isConnected(site: URL): Boolean = {
    try {
      // test connection
      val conn: URLConnection = site.openConnection()
      conn.setConnectTimeout(5000)
      conn.getContent
      true
    } catch {
      case e: Exception => false
    }
  }

  /** Hook invoked just before a download starts; override to observe. */
  def onDownloadStart(): Unit = {}

  /** Hook invoked after a download completes successfully; override to observe. */
  def onDownloadFinished(): Unit = {}
}
开发者ID:murizaky,项目名称:dasdasd,代码行数:60,代码来源:FileManager.scala
示例4: MNISTData
//设置package包名称以及导入依赖的类
package com.github.log0ymxm.mapper.examples.mnist
import java.io.FileOutputStream
import java.net.URL
import java.nio.channels.Channels
import java.nio.file.{ Files, Paths }
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
object MNISTData {

  /** Ensures the MNIST data file for `name` exists under `path`, downloading
    * it from S3 when missing, then loads it as an RDD of dense vectors (one
    * comma-separated row per line).
    *
    * @param sc   active SparkContext used to read the text file
    * @param path directory in which the data file is cached
    * @param name dataset name used in both the local filename and the S3 URL
    * @return RDD of parsed rows as breeze dense vectors of doubles
    */
  def fetchMnist(sc: SparkContext, path: String, name: String): RDD[breeze.linalg.DenseVector[Double]] = {
    val location = s"$path/$name-mnist-dense-with-labels.data"
    if (!Files.exists(Paths.get(location))) {
      val url = s"http://mnist-data.s3.amazonaws.com/$name-mnist-dense-with-labels.data"
      val channel = Channels.newChannel(new URL(url).openStream())
      try {
        val fos = new FileOutputStream(location)
        try {
          // BUG FIX: one transferFrom call is not guaranteed to drain a
          // network stream; loop until EOF (zero-byte transfer).
          var position = 0L
          var count = fos.getChannel.transferFrom(channel, position, Long.MaxValue)
          while (count > 0) {
            position += count
            count = fos.getChannel.transferFrom(channel, position, Long.MaxValue - position)
          }
        } finally {
          fos.close() // was leaked before
        }
      } finally {
        channel.close() // was leaked before
      }
    }
    sc.textFile(location).map(row => breeze.linalg.DenseVector(row.split(",").map(_.toDouble)))
  }
}
开发者ID:log0ymxm,项目名称:spark-mapper,代码行数:25,代码来源:MNISTData.scala
注:本文中的java.nio.channels.Channels类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论