This article collects typical usage examples of the Scala class org.jblas.DoubleMatrix. If you have been wondering what exactly DoubleMatrix is for, how to use it, or what real-world usage looks like, the curated class examples gathered here should help.
The following presents 4 code examples of the DoubleMatrix class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
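Before looking at the examples, here is a minimal quick-start sketch of the parts of the jblas API that the snippets below rely on: building a DoubleMatrix column vector from a plain array, taking a dot product, and computing the Euclidean norm. The vector values are made up purely for illustration.

import org.jblas.DoubleMatrix

object DoubleMatrixQuickStart {
  def main(args: Array[String]): Unit = {
    // Column vectors built from plain Scala arrays (values are illustrative)
    val a = new DoubleMatrix(Array(1.0, 2.0, 3.0))
    val b = new DoubleMatrix(Array(4.0, 5.0, 6.0))

    val dot  = a.dot(b)     // inner product: 1*4 + 2*5 + 3*6 = 32.0
    val norm = a.norm2()    // Euclidean length: sqrt(1 + 4 + 9)
    println(s"dot = $dot, norm2 = $norm")
  }
}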
Example 1: Util
// Set the package name and import the required classes
package com.sparksample

import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.jblas.DoubleMatrix

object Util {
  val PATH = "../.."
  val spConfig = (new SparkConf).setMaster("local").setAppName("SparkApp")
  var sc = new SparkContext(spConfig)

  def getMovieData(): RDD[String] = {
    val movie_data = sc.textFile(PATH + "/data/ml-100k/u.item")
    movie_data
  }

  def getUserData(): RDD[String] = {
    val user_data = sc.textFile(PATH + "/data/ml-100k/u.data")
    user_data
  }

  def getDate(): String = {
    val today = Calendar.getInstance().getTime()
    // Build a formatter with the desired date pattern
    val formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss")
    // Format today's date into a folder-name-friendly string
    val folderName = formatter.format(today)
    folderName
  }

  // Cosine similarity between two vectors: dot product divided by the product of their norms
  def cosineSimilarity(vec1: DoubleMatrix, vec2: DoubleMatrix): Double = {
    vec1.dot(vec2) / (vec1.norm2() * vec2.norm2())
  }

  // Average precision at K (APK) ranking metric
  def avgPrecisionK(actual: Seq[Int], predicted: Seq[Int], k: Int): Double = {
    val predK = predicted.take(k)
    var score = 0.0
    var numHits = 0.0
    for ((p, i) <- predK.zipWithIndex) {
      if (actual.contains(p)) {
        numHits += 1.0
        score += numHits / (i.toDouble + 1.0)
      }
    }
    if (actual.isEmpty) {
      1.0
    } else {
      score / scala.math.min(actual.size, k).toDouble
    }
  }
}
Author: PacktPublishing | Project: Machine-Learning-with-Spark-Second-Edition | Lines of code: 49 | Source file: Util.scala
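As a purely illustrative follow-up (the vectors and ID lists below are invented, not taken from the MovieLens data used in the project), the two helpers in Util could be exercised like this. Note that touching the Util object also initializes its local SparkContext.

import org.jblas.DoubleMatrix
import com.sparksample.Util

object UtilDemo {
  def main(args: Array[String]): Unit = {
    // Two made-up item-factor vectors
    val itemA = new DoubleMatrix(Array(0.2, 0.5, 0.1))
    val itemB = new DoubleMatrix(Array(0.3, 0.4, 0.0))
    println(Util.cosineSimilarity(itemA, itemB))      // a value in [-1, 1]

    // Ranking quality of the top-3 predicted IDs against the actual IDs
    val actual    = Seq(10, 20, 30)
    val predicted = Seq(20, 40, 10, 50)
    println(Util.avgPrecisionK(actual, predicted, 3)) // average precision at K = 3
  }
}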
Example 2: UserItemPredictionCommand
// Set the package name and import the required classes
package com.advancedspark.serving.prediction

import com.netflix.hystrix.HystrixCommand
import com.netflix.hystrix.HystrixCommandGroupKey
import org.jblas.DoubleMatrix

import scala.util.parsing.json._
import com.netflix.dyno.jedis._
import collection.JavaConverters._
import scala.collection.immutable.List

class UserItemPredictionCommand(
    dynoClient: DynoJedisClient, namespace: String, version: String, userId: String, itemId: String)
  extends HystrixCommand[Double](HystrixCommandGroupKey.Factory.asKey("UserItemPrediction")) {

  @throws(classOf[java.io.IOException])
  def get(url: String) = scala.io.Source.fromURL(url).mkString

  def run(): Double = {
    try {
      // Fetch the latent factor vectors for the user and item from the Dyno/Jedis store
      val userFactors = dynoClient.get(s"${namespace}:${version}:user-factors:${userId}").split(",").map(_.toDouble)
      val itemFactors = dynoClient.get(s"${namespace}:${version}:item-factors:${itemId}").split(",").map(_.toDouble)

      val userFactorsMatrix = new DoubleMatrix(userFactors)
      val itemFactorsMatrix = new DoubleMatrix(itemFactors)

      // The predicted rating is the dot product of the two factor vectors
      userFactorsMatrix.dot(itemFactorsMatrix)
    } catch {
      case e: Throwable => {
        System.out.println(e)
        throw e
      }
    }
  }

  override def getFallback(): Double = {
    System.out.println("UserItemPrediction Source is Down! Fallback!!")
    0.0
  }
}
Author: frankiegu | Project: serve.ml | Lines of code: 46 | Source file: UserItemPredictionCommand.scala
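Because the class extends HystrixCommand, callers do not invoke run() directly: they go through the standard Hystrix entry points, and execute() returns the fallback value of 0.0 whenever run() throws. The sketch below is only an assumption about how the command might be driven; the dynoClient, namespace, version, and IDs are placeholders, not values from the project.

// Minimal usage sketch; assume dynoClient: DynoJedisClient is already configured
val command = new UserItemPredictionCommand(dynoClient, "prod", "v1", "21619", "10006")

// execute() blocks until run() returns, or yields getFallback()'s 0.0 if the factor store is unreachable
val predictedRating: Double = command.execute()
println(s"predicted rating = $predictedRating")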
Example 3: ContextRecoMatrices
// Set the package name and import the required classes
package processing

import java.io.{BufferedReader, FileReader, PrintWriter}

import org.jblas.DoubleMatrix

object ContextRecoMatrices {

  // Read an array of matrices from a text file: the first line is the matrix count,
  // then each matrix is a "rows,columns,length" header line followed by a line of comma-separated values
  def load(file: String): Array[DoubleMatrix] = {
    val reader: BufferedReader = new BufferedReader(new FileReader(file))
    val d: Int = Integer.parseInt(reader.readLine())
    val res = new Array[DoubleMatrix](d)

    def readMatrix: DoubleMatrix = {
      val dimension: Array[Int] = reader.readLine().split(",").map(_.toInt)
      new DoubleMatrix(dimension(0), dimension(1), reader.readLine().split(",").map(_.toDouble): _*)
    }

    for (i <- 0 until d) {
      res(i) = readMatrix
    }
    res
  }

  // Write the matrices in the same text format that load expects
  def save(file: String, m: Array[DoubleMatrix]): Unit = {
    val f = new PrintWriter(file)
    f.println(m.length)
    for (i <- 0 until m.length) {
      f.println(s"${m(i).rows},${m(i).columns},${m(i).length}")
      m(i).data.foreach { x => f.print(x); f.print(",") }
      f.println()
    }
    f.close()
  }
}
Author: srihari | Project: recommendr | Lines of code: 35 | Source file: ContextRecoMatrices.scala
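A small round trip through the two methods above, with a hypothetical file name and made-up matrix values:

import org.jblas.DoubleMatrix
import processing.ContextRecoMatrices

object ContextRecoMatricesDemo {
  def main(args: Array[String]): Unit = {
    // Two small matrices with illustrative values (data is given in column-major order)
    val original = Array(
      new DoubleMatrix(2, 2, 1.0, 2.0, 3.0, 4.0),
      new DoubleMatrix(1, 3, 5.0, 6.0, 7.0)
    )

    // Write them out and read them back using the text format defined above
    ContextRecoMatrices.save("matrices.txt", original)
    val restored = ContextRecoMatrices.load("matrices.txt")

    println(restored(0)) // should print the same 2x2 matrix that was saved
  }
}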
Example 4: UserProductRecoModel
// Set the package name and import the required classes
package processing

import java.io.File

import controllers.Global
import org.apache.spark.mllib.recommendation.{Rating, MatrixFactorizationModel}
import org.apache.spark.rdd.RDD
import org.jblas.DoubleMatrix

class UserProductRecoModel(val weightFactor: Array[Double], rank: Int,
                           userFeatures: RDD[(Int, Array[Double])],
                           productFeatures: RDD[(Int, Array[Double])])
  extends MatrixFactorizationModel(rank, userFeatures, productFeatures) {

  override def recommendProducts(user: Int, num: Int): Array[Rating] = {
    recommend(userFeatures.lookup(user).head, productFeatures, num)
      .map(t => Rating(user, t._1, t._2))
  }

  // Score every candidate product with a weighted dot product against the user's feature vector
  // and keep the num highest-scoring products
  private def recommend(
      recommendToFeatures: Array[Double],
      recommendableFeatures: RDD[(Int, Array[Double])],
      num: Int): Array[(Int, Double)] = {
    val recommendToVector = new DoubleMatrix(recommendToFeatures)
    val scored = recommendableFeatures.map { case (id, features) =>
      (id, recommendToVector.dot(new DoubleMatrix(features).mul(new DoubleMatrix(weightFactor))))
    }
    scored.top(num)(Ordering.by(_._2))
  }

  def withWeightFactor(weightFactor: Array[Double]): UserProductRecoModel = {
    new UserProductRecoModel(weightFactor, this.rank, this.userFeatures, this.productFeatures)
  }
}

object UserProductRecoModel {
  def apply(model: MatrixFactorizationModel): UserProductRecoModel = {
    // Load persisted per-feature weights if available, otherwise default every weight to 1.0
    val weightFactor: Array[Double] =
      if (new File("model/featureWeightFactors").exists) {
        Global.ctx.textFile("model/featureWeightFactors").map(_.toDouble).collect()
      } else {
        Array.fill(model.rank)(1.0)
      }
    new UserProductRecoModel(weightFactor, model.rank, model.userFeatures, model.productFeatures)
  }
}
Author: srihari | Project: recommendr | Lines of code: 44 | Source file: UserProductRecoModel.scala
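One hypothetical way the wrapper above might be used after training a standard MLlib ALS model; the ratings RDD, the ALS parameters, and the weight values are assumptions for illustration, not part of the original project. Note that apply will read model/featureWeightFactors through Global.ctx if that file exists.

import org.apache.spark.mllib.recommendation.ALS
import processing.UserProductRecoModel

// Assume ratings: RDD[Rating] already exists; train a plain factorization model first
val mfModel = ALS.train(ratings, 10, 10, 0.01) // rank = 10, 10 iterations, lambda = 0.01

// Wrap it and override the per-feature weights (here: emphasize the first latent feature)
val weighted = UserProductRecoModel(mfModel)
  .withWeightFactor(Array(2.0) ++ Array.fill(9)(1.0))

// Top-5 recommendations for user 42, scored with the weighted dot product
weighted.recommendProducts(42, 5).foreach(println)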
Note: The org.jblas.DoubleMatrix class examples in this article were compiled from source code and documentation hosted on GitHub/MSDocs and similar platforms, and the snippets were selected from open-source projects contributed by their authors. Copyright of the source code remains with the original authors; consult the corresponding project's License before redistributing or using it. Do not reproduce this article without permission.