本文整理汇总了Scala中scala.collection.immutable.ListMap类的典型用法代码示例。如果您正苦于以下问题:Scala ListMap类的具体用法?Scala ListMap怎么用?Scala ListMap使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了ListMap类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。
示例1: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.spark_ml_serving.clustering
import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {

  /** The underlying mllib KMeansModel, extracted via runtime reflection from the
    * private `parentModel` field of the ml wrapper (it has no public accessor). */
  lazy val parent: OldKMeansModel = {
    val classMirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val fieldSymbol = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    classMirror.reflect(sparkTransformer).reflectField(fieldSymbol).get.asInstanceOf[OldKMeansModel]
  }

  /** Predicts a cluster id for every row of the features column and appends the
    * result as the prediction column; returns the data unchanged when the
    * features column is missing.
    * NOTE(review): assumes each cell of the features column is an Array[Double] — confirm with callers. */
  override def transform(localData: LocalData): LocalData =
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(featuresColumn) =>
        val predictions = featuresColumn.data
          .map(raw => Vectors.dense(raw.asInstanceOf[Array[Double]]))
          .map(parent.predict)
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
      case None =>
        localData
    }
}
object LocalKMeansModel extends LocalModel[KMeansModel] {

  /** Rebuilds a KMeansModel from serialized metadata and cluster-center data.
    *
    * `data` maps stringified center indices to maps holding a "values" list;
    * centers are restored in numeric index order. Both the mllib and ml model
    * constructors are private, so they are invoked through reflection. */
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Restore the cluster centers in index order ("0", "1", ... sorted numerically).
    val orderedCenters = ListMap(data.toSeq.sortBy { case (idx, _) => idx.toInt }: _*)
    val centers = orderedCenters.map { case (_, raw) =>
      val centerData = raw.asInstanceOf[Map[String, Any]]
      Vectors.dense(centerData("values").asInstanceOf[List[Double]].to[Array])
    }

    // The mllib KMeansModel constructor is not public; open it up via reflection.
    val mllibCtor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    mllibCtor.setAccessible(true)
    val mllibModel = mllibCtor.newInstance(centers.toArray)

    // Same for the ml wrapper's (uid, parentModel) constructor.
    val mlCtor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    mlCtor.setAccessible(true)
    val model = mlCtor
      .newInstance(metadata.uid, mllibModel)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
    // `set` returns this.type, so the params can be applied as one chain.
    model
      .set(model.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
      .set(model.initMode, metadata.paramMap("initMode").asInstanceOf[String])
      .set(model.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
      .set(model.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
      .set(model.seed, metadata.paramMap("seed").toString.toLong)
      .set(model.tol, metadata.paramMap("tol").asInstanceOf[Double])
  }

  /** Implicit bridge so a KMeansModel can be used where a LocalTransformer is expected. */
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] =
    new LocalKMeansModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:spark-ml-serving,代码行数:60,代码来源:LocalKMeansModel.scala
示例2: apply
//设置package包名称以及导入依赖的类
package pl.ekodo.json.files
import pl.ekodo.json.model._
import scala.collection.immutable.ListMap
/** Renders `cc` as Scala `case class` source text, one field per line,
  * with fields listed in alphabetical order. */
def apply(cc: CaseClass): String = {
  val renderedFields = ListMap(cc.fields.toSeq.sortBy(_._1): _*)
    .map { case (fieldName, fieldType) => s"  $fieldName: ${print(fieldType)}" }
    .mkString(",\n")
  s"case class ${cc.name}(\n$renderedFields\n)\n"
}
/** Maps a ScalaType AST node to its Scala source representation
  * (recursing into Option/List element types). */
private def print(tpe: ScalaType): String = tpe match {
  case cc: CaseClass      => cc.name
  case opt: OptionalType  => s"Option[${print(opt.scalaType)}]"
  case seq: SeqType       => s"List[${print(seq.scalaType)}]"
  case AnyType            => "Any"
  case BigDecimalType     => "BigDecimal"
  case BooleanType        => "Boolean"
  case DoubleType         => "Double"
  case IntType            => "Int"
  case LongType           => "Long"
  case StringType         => "String"
}
}
开发者ID:marcindb,项目名称:json-to-case-class,代码行数:31,代码来源:ScalaTypePrinter.scala
示例3: CustomBundle
//设置package包名称以及导入依赖的类
package barstools.tapeout.transforms
import chisel3._
import scala.collection.immutable.ListMap
// A chisel3 Record whose fields are supplied as explicit (name, element) pairs.
// ListMap preserves the declaration order of the fields.
class CustomBundle[T <: Data](elts: (String, T)*) extends Record {
// Field name -> clone of the element (each element is cloned via chiselCloneType).
val elements = ListMap(elts map { case (field, elt) => field -> elt.chiselCloneType }: _*)
// Look up an element by its field name.
def apply(elt: String): T = elements(elt)
// Convenience lookup by integer index (keys are stringified).
def apply(elt: Int): T = elements(elt.toString)
// Required by Record: rebuild an identical bundle from the current elements.
override def cloneType = (new CustomBundle(elements.toList: _*)).asInstanceOf[this.type]
}
// A Record indexed by Int rather than by String; indices are stringified for
// the `elements` map because Record keys must be Strings.
class CustomIndexedBundle[T <: Data](elts: (Int, T)*) extends Record {
// Must be String, Data
val elements = ListMap(elts map { case (field, elt) => field.toString -> elt.chiselCloneType }: _*)
// TODO: Make an equivalent to the below work publicly (or only on subclasses?)
// Same contents as `elements` but with the original Int keys, used by cloneType.
def indexedElements = ListMap(elts map { case (field, elt) => field -> elt.chiselCloneType }: _*)
// Look up an element by its integer index.
def apply(elt: Int): T = elements(elt.toString)
// Required by Record: rebuild an identical bundle from the indexed elements.
override def cloneType = (new CustomIndexedBundle(indexedElements.toList: _*)).asInstanceOf[this.type]
}
object CustomIndexedBundle {
  /** Bundle with the same element template `gen` at every index in `idxs`. */
  def apply[T <: Data](gen: T, idxs: Seq[Int]) = new CustomIndexedBundle(idxs.map(i => i -> gen): _*)

  /** Bundle from a sequence of elements, indexed by position.
    * Unlike Vec, the elements may have different types/widths. */
  def apply[T <: Data](gen: Seq[T]) = new CustomIndexedBundle(gen.zipWithIndex.map(_.swap): _*)
}
开发者ID:ucb-bar,项目名称:barstools,代码行数:27,代码来源:ProgrammaticBundle.scala
示例4: CoercedScalaResultMarshaller
//设置package包名称以及导入依赖的类
package sangria.marshalling
import scala.collection.immutable.ListMap
/** Marshals already-coerced Scala values: scalars pass through untouched,
  * maps become insertion-ordered ListMaps, and `null` is represented as None. */
class CoercedScalaResultMarshaller extends RawResultMarshaller {
  type Node = Any
  type MapBuilder = ArrayMapBuilder[Node]

  // Raw values are already coerced, so no conversion is needed.
  override def rawScalarNode(rawValue: Any) = rawValue

  def arrayNode(values: Vector[Node]) = values
  def optionalArrayNodeValue(value: Option[Node]) = value

  /** Stores `value` under `key`; optional values are wrapped in Option
    * (None stays None, anything else becomes Some). */
  def addMapNodeElem(builder: MapBuilder, key: String, value: Node, optional: Boolean) = {
    val stored =
      if (!optional) value
      else if (value.isInstanceOf[None.type]) None
      else Some(value)
    builder.add(key, stored)
  }

  def emptyMapNode(keys: Seq[String]) = new ArrayMapBuilder[Node](keys)

  // ListMap keeps the key order the caller supplied.
  def mapNode(keyValues: Seq[(String, Node)]) = ListMap(keyValues: _*)
  def mapNode(builder: MapBuilder) = builder.toListMap

  def nullNode = None

  // Rendering simply stringifies the node; there is no real serialization here.
  def renderCompact(node: Any) = s"$node"
  def renderPretty(node: Any) = s"$node"
}
object CoercedScalaResultMarshaller {
// Shared default instance.
val default = new CoercedScalaResultMarshaller
}
开发者ID:sangria-graphql,项目名称:sangria-marshalling-api,代码行数:40,代码来源:CoercedScalaResultMarshaller.scala
示例5: MovieAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object MovieAgesChart {
  /** Draws a bar chart of movie counts per movie age, in ascending age order. */
  def main(args: Array[String]) {
    val movieData = Util.getMovieData()
    val movieAges = Util.getMovieAges(movieData)
    // ListMap keeps the age buckets sorted for display.
    val sortedAges = ListMap(movieAges.toSeq.sortBy(_._1): _*)
    //0 -> 65, 1 -> 286, 2 -> 355, 3 -> 219, 4 -> 214, 5 -> 126
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    for ((age, count) <- sortedAges) dataset.addValue(count, "Movies", age)
    ChartFactories.BarChart(dataset).show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:21,代码来源:MovieAgesChart.scala
示例6: CountByRatingChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
object CountByRatingChart {
  /** Bar chart of how many ratings were given per rating value (1-5). */
  def main(args: Array[String]) {
    // Rating value is field 2 of each tab-separated u.data line.
    val ratingCounts = Util.sc.textFile("../../data/ml-100k/u.data")
      .map(line => line.split("\t"))
      .map(fields => fields(2).toInt)
      .countByValue()
    val orderedCounts = ListMap(ratingCounts.toSeq.sortBy(_._1): _*)
    val dataset = new org.jfree.data.category.DefaultCategoryDataset
    orderedCounts.foreach { case (rating, count) => dataset.addValue(count, "Rating Values", rating) }
    val chart = ChartFactories.BarChart(dataset)
    // Shrink and rotate category labels so they all stay readable.
    val labelFont = new Font("Dialog", Font.PLAIN, 5)
    chart.peer.getCategoryPlot.getDomainAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(labelFont)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:31,代码来源:CountByRatingChart.scala
示例7: UserRatingsChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserRatingsChart {
  /** Histograms the number of ratings submitted per user into buckets of
    * width `step` over [0, max] and renders the result as a bar chart. */
  def main(args: Array[String]) {
    val rating_data_raw = Util.sc.textFile("../../data/ml-100k/u.data")
    val rating_data = rating_data_raw.map(line => line.split("\t"))
    // (userId, rating) pairs grouped by user; only the per-user count matters.
    val user_ratings_grouped = rating_data.map(
      fields => (fields(0).toInt, fields(2).toInt)).groupByKey()
    val user_ratings_byuser = user_ratings_grouped.map(v => (v._1, v._2.size))
    val input = user_ratings_byuser.map(v => v._2).collect()
    val max = 500
    val bins = 200
    val step = max / bins
    // Bucket counters keyed by bucket lower bound, all initialised to zero.
    var mx = Map(0 -> 0)
    for (i <- step until (max + step) by step) {
      mx += (i -> 0)
    }
    for (i <- 0 until input.length) {
      for (j <- 0 until (max + step) by step) {
        // BUG FIX: the lower-bound test previously read `ages(i) >= j`, mixing
        // the user-age array (loaded from u.user, different length) into the
        // ratings-per-user histogram — wrong counts and a potential
        // ArrayIndexOutOfBoundsException. Both bounds must test input(i).
        if (input(i) >= j && input(i) < (j + step)) {
          mx = mx + (j -> (mx(j) + 1))
        }
      }
    }
    val mx_sorted = ListMap(mx.toSeq.sortBy(_._1): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    mx_sorted.foreach { case (k, v) => ds.addValue(v, "Ratings", k) }
    val chart = ChartFactories.BarChart(ds)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:49,代码来源:UserRatingsChart.scala
示例8: UserAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserAgesChart {
  /** Histograms user ages into `bins` equal-width buckets over [0, max]
    * and renders the result as a bar chart. */
  def main(args: Array[String]) {
    val user_data = Util.getUserData()
    val user_fields = user_data.map(l => l.split("\\|"))
    // Age is field 1 of each pipe-separated u.user line.
    val ages = user_fields.map(x => x(1).toInt).collect()
    println(ages.getClass.getName)
    val max = 80
    val bins = 16
    // Was hard-coded `(80/bins).toInt`; derive from `max` so the two stay in
    // sync, and drop the no-op .toInt (Int / Int is already Int).
    val step = max / bins
    // Bucket counters keyed by bucket lower bound, all initialised to zero.
    var mx = Map(0 -> 0)
    for (i <- step until (max + step) by step) {
      mx += (i -> 0)
    }
    for (i <- 0 until ages.length) {
      for (j <- 0 until (max + step) by step) {
        if (ages(i) >= j && ages(i) < (j + step)) {
          mx = mx + (j -> (mx(j) + 1))
        }
      }
    }
    val mx_sorted = ListMap(mx.toSeq.sortBy(_._1): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    mx_sorted.foreach { case (k, v) => ds.addValue(v, "UserAges", k) }
    val chart = ChartFactories.BarChart(ds)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:42,代码来源:UserAgesChart.scala
示例9: UserOccupationChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
object UserOccupationChart {
  /** Bar chart of how many users hold each occupation, ascending by count. */
  def main(args: Array[String]) {
    val user_data = Util.getUserData()
    val user_fields = user_data.map(l => l.split("\\|"))
    // Occupation is field 3 of each pipe-separated u.user line.
    val count_by_occupation = user_fields.map(fields => (fields(3), 1)).
      reduceByKey((x, y) => x + y).collect()
    // BUG FIX: printing the Array directly produced "[Lscala.Tuple2;@...",
    // not its contents.
    println(count_by_occupation.mkString("[", ", ", "]"))
    val sorted = ListMap(count_by_occupation.toSeq.sortBy(_._2): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    // BUG FIX: the series label said "UserAges" — copy-paste from the ages chart.
    sorted.foreach { case (k, v) => ds.addValue(v, "User Occupations", k) }
    val chart = ChartFactories.BarChart(ds)
    // Shrink and rotate category labels so all occupations stay readable.
    val font = new Font("Dialog", Font.PLAIN, 5)
    chart.peer.getCategoryPlot.getDomainAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(font)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:32,代码来源:UserOccupationChart.scala
示例10: UserAgesChart
//设置package包名称以及导入依赖的类
package org.sparksamples
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object UserAgesChart {
  /** DataFrame variant: histograms the `age` column into `bins` equal-width
    * buckets over [0, max] and renders the result as a bar chart. */
  def main(args: Array[String]) {
    val userDataFrame = Util.getUserFieldDataFrame()
    val ages_array = userDataFrame.select("age").collect()
    val max = 80
    val bins = 16
    // Was hard-coded `(80/bins).toInt`; derive from `max` so the two stay in
    // sync, and drop the no-op .toInt (Int / Int is already Int).
    val step = max / bins
    // Bucket counters keyed by bucket lower bound, all initialised to zero.
    var mx = Map(0 -> 0)
    for (i <- step until (max + step) by step) {
      mx += (i -> 0)
    }
    for (x <- 0 until ages_array.length) {
      val age = Integer.parseInt(ages_array(x)(0).toString)
      for (j <- 0 until (max + step) by step) {
        if (age >= j && age < (j + step)) {
          mx = mx + (j -> (mx(j) + 1))
        }
      }
    }
    val mx_sorted = ListMap(mx.toSeq.sortBy(_._1): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    mx_sorted.foreach { case (k, v) => ds.addValue(v, "UserAges", k) }
    val chart = ChartFactories.BarChart(ds)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:41,代码来源:UserAgesChart.scala
示例11: PlotLogData
//设置package包名称以及导入依赖的类
package org.sparksamples
//import org.sparksamples.Util
//import _root_.scalax.chart.ChartFactories
import java.awt.Font
import org.jfree.chart.axis.CategoryLabelPositions
import scala.collection.immutable.ListMap
import scalax.chart.module.ChartFactories
object PlotLogData {
  /** Histograms the log of the last field of each record into 10 buckets and
    * renders the result as a bar chart. */
  def main(args: Array[String]) {
    val records = Util.getRecords()._1
    val records_x = records.map(r => Math.log(r(r.length - 1).toDouble))
    var records_int = new Array[Int](records_x.collect().length)
    print(records_x.first())
    val records_collect = records_x.collect()
    for (i <- 0 until records_collect.length) {
      records_int(i) = records_collect(i).toInt
    }
    val max = records_int.max.toFloat
    val bins = 10
    val step = max / bins
    // BUG FIX: bucket keys were initialised from a Float range but looked up
    // with keys from a Double range (`0.0 until (max+step) by step` widens the
    // Float step to Double). Whenever `step` was not exactly representable the
    // stringified keys differed ("1.7" vs "1.7000000476837158") and
    // mx(j.toString) threw NoSuchElementException. Deriving both the
    // initialisation and the lookup from a single list of bucket bounds makes
    // the keys consistent by construction. (Also removed the unused `min`
    // values and the unused `var i = 0`.)
    val bucketStarts = for (b <- 0 to bins) yield b * step
    var mx = Map.empty[String, Int]
    for (lo <- bucketStarts) {
      mx += (lo.toString -> 0)
    }
    for (i <- 0 until records_collect.length) {
      for (lo <- bucketStarts) {
        if (records_int(i) >= lo && records_int(i) < lo + step) {
          mx = mx + (lo.toString -> (mx(lo.toString) + 1))
        }
      }
    }
    // Sort buckets numerically (keys are stringified floats).
    val mx_sorted = ListMap(mx.toSeq.sortBy(_._1.toFloat): _*)
    val ds = new org.jfree.data.category.DefaultCategoryDataset
    mx_sorted.foreach { case (k, v) => ds.addValue(v, "", k) }
    val chart = ChartFactories.BarChart(ds)
    // Shrink and rotate category labels so the bucket bounds stay readable.
    val font = new Font("Dialog", Font.PLAIN, 4)
    chart.peer.getCategoryPlot.getDomainAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_90)
    chart.peer.getCategoryPlot.getDomainAxis.setLabelFont(font)
    chart.show()
    Util.sc.stop()
  }
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:62,代码来源:PlotLogData.scala
示例12: UseCaseBuilderSpec
//设置package包名称以及导入依赖的类
package org.cddcore.enginecomponents
import org.cddcore.utilities.{CddSpec, HierarchyBuilder, NullLifeCycle}
import scala.collection.immutable.ListMap
// Spec for HierarchyBuilder[UseCase, EngineComponent]: verifies how children
// accumulate, how addNewParent nests use cases, and how `depth` tracks nesting.
class UseCaseBuilderSpec extends CddSpec {
// Builds a UseCase with title `s` and the given children.
// NOTE(review): definedInSourceCodeAt(1) looks call-site sensitive (source
// location capture); use-case equality in the assertions below presumably
// ignores or normalises it — confirm against UseCase's equals.
def uc(s: String, ec: EngineComponent[Int, String]*) = UseCase[Int, String](s, ec.toList, None, DefinedInSourceCodeAt.definedInSourceCodeAt(1), ListMap(), List())
val useCase1 = uc("useCase1")
val useCase2 = uc("useCase2")
val useCase3 = uc("useCase3")
val useCase4 = uc("useCase4")
import Scenario._
// No lifecycle side effects are wanted in these tests.
implicit def nullLifeCycle[C] = new NullLifeCycle[C]
// `x produces y` builds a Scenario via the implicits imported from Scenario.
val s1 = 1 produces "result"
val s2 = 2 produces "result"
val s3 = 3 produces "result"
type UC = UseCase[Int, String]
type Child = EngineComponent[Int, String]
"A UseCaseBuilder with no operations" should "have the passed in use case and depth 0" in {
val holder1 = new HierarchyBuilder[UC, Child](useCase1)
holder1.holder shouldBe useCase1
holder1.depth shouldBe 0
}
"A UseCaseBuilder addChild method with depth 0" should "add children to the use case and not mess with depth" in {
val holder1 = new HierarchyBuilder[UC, Child](useCase1)
val holder2 = holder1.addChild(s1).addChild(s2).addChild(s3)
// Children are prepended, hence the reversed order in the expectation.
holder2.holder shouldBe useCase1.copy(components = List(s3, s2, s1))
holder2.depth shouldBe 0
}
"A UseCaseBuilder addNewParent method " should "nest children with new usecases increasing depth" in {
val holder1 = new HierarchyBuilder[UC, Child](useCase1)
val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3)
holder2.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase3")))
holder2.depth shouldBe 2
}
it should "allow scenarios to be added to current use case" in {
val holder1 = new HierarchyBuilder[UC, Child](useCase1)
val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3).addChild(s1).addChild(s2).addChild(s3)
holder2.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase3", s3, s2, s1)))
holder2.depth shouldBe 2
}
it should "allow scenarios to be added to current use case, then a pop and another use case added" in {
val holder1 = new HierarchyBuilder[UC, Child](useCase1)
val holder2 = holder1.addNewParent(useCase2).addNewParent(useCase3).addChild(s1).popParent
holder2.depth shouldBe 1
// After popping, a new parent attaches beside (not inside) the popped use case.
val holder3 = holder2.addNewParent(useCase4).addChild(s2).addChild(s3)
holder3.holder shouldBe uc("useCase1", uc("useCase2", uc("useCase4", s3, s2), uc("useCase3", s1)))
holder3.depth shouldBe 2
}
}
开发者ID:phil-rice,项目名称:CddCore2,代码行数:61,代码来源:UseCaseBuilderSpec.scala
示例13: StoreWordsCountsOrderedActor
//设置package包名称以及导入依赖的类
package actors
import akka.actor.Actor
import akka.event.Logging
import dataTire.file.WordsCountFile
import enteties.WordsCount
import utils.FileUtiles
import scala.collection.immutable.ListMap
/** Actor that persists word counts, ordered by descending count, to a
  * `<original-file>.wordsCounter` file. */
class StoreWordsCountsOrderedActor extends Actor {
  val log = Logging(context.system, this)

  def receive = {
    case countWords: WordsCount =>
      log.info(s"Executing actor StoreWordsCountsOrderedActor")
      // Negate the count so sortBy yields descending order; ListMap keeps it.
      val ordered: ListMap[String, Integer] =
        ListMap(countWords.data.toList.sortBy { case (_, count) => -count }: _*)
      FileUtiles.writeToFile(
        s"${countWords.fileFullPath}.wordsCounter",
        WordsCountFile.storeWordsCount(ordered))
    case any =>
      log.error(s"Handle not found for the actor: StoreWordsCountsOrderedActor, data: $any")
  }
}
开发者ID:RoyShmuli,项目名称:Grym-exercise-akka,代码行数:25,代码来源:StoreWordsCountsOrderedActor.scala
示例14: GlobalConst
//设置package包名称以及导入依赖的类
package global
import scala.collection.immutable.ListMap
/** Immutable bag of editor-wide configuration constants. */
class GlobalConst(val NAME: String,
                  val MIN_FONT_SIZE: Int,
                  val MAX_FONT_SIZE: Int,
                  val DEFAULT_THEME: String,
                  val DEFAULT_FONT_STYLE: String,
                  val DEFAULT_FONT_SIZE: Int,
                  val DEFAULT_TAB_SIZE: Int,
                  val DEFAULT_MAX_FILE_SIZE: Int,
                  val AVAILABLE_SYNTAX: Map[String, String])

object GlobalConst {

  /** Supported syntax-highlighting languages (display name -> lexer id),
    * in display order — ListMap preserves insertion order. */
  val syntaxLanguages = ListMap(
    Seq(
      "Bash"       -> "bash",
      "C"          -> "c",
      "C++"        -> "cpp",
      "Clojure"    -> "clojure",
      "DOSBatch"   -> "dosbatch",
      "Groovy"     -> "groovy",
      "Java"       -> "java",
      "Javascript" -> "javascript",
      "JFlex"      -> "jflex",
      "JSON"       -> "json",
      "Lua"        -> "lua",
      "Properties" -> "properties",
      "Python"     -> "python",
      "Ruby"       -> "ruby",
      "Scala"      -> "scala",
      "SQL"        -> "sql",
      "TAL"        -> "tal",
      "XHTML"      -> "xhtml",
      "XML"        -> "xml",
      "XPath"      -> "xpath"
    ): _*
  )

  /** Factory with the application's default values; override any subset by name. */
  def apply(
      NAME: String = "LithePad v0.0.1.1 ",
      MIN_FONT_SIZE: Int = 8,
      MAX_FONT_SIZE: Int = 185,
      DEFAULT_THEME: String = "Monokai",
      DEFAULT_FONT_STYLE: String = "Monospaced",
      DEFAULT_FONT_SIZE: Int = 16,
      DEFAULT_TAB_SIZE: Int = 2,
      DEFAULT_MAX_FILE_SIZE: Int = 10000,
      AVAILABLE_SYNTAX: Map[String, String] = syntaxLanguages): GlobalConst =
    new GlobalConst(
      NAME,
      MIN_FONT_SIZE,
      MAX_FONT_SIZE,
      DEFAULT_THEME,
      DEFAULT_FONT_STYLE,
      DEFAULT_FONT_SIZE,
      DEFAULT_TAB_SIZE,
      DEFAULT_MAX_FILE_SIZE,
      AVAILABLE_SYNTAX)
}
开发者ID:billpcs,项目名称:lithepad,代码行数:59,代码来源:GlobalConst.scala
示例15: LocalKMeansModel
//设置package包名称以及导入依赖的类
package io.hydrosphere.mist.api.ml.clustering
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}
import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe
// Local (Spark-context-free) wrapper around an ml KMeansModel: predicts
// cluster ids for an in-memory feature column.
class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {
// The underlying mllib KMeansModel, extracted by reflection from the private
// `parentModel` field of the ml wrapper (it has no public accessor).
lazy val parent: OldKMeansModel = {
val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
val parentTerm = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
mirror.reflect(sparkTransformer).reflectField(parentTerm).get.asInstanceOf[OldKMeansModel]
}
// Appends a prediction column computed from the features column; returns the
// data unchanged when the features column is absent.
// NOTE(review): assumes each cell of the features column is an Array[Double] — confirm with callers.
override def transform(localData: LocalData): LocalData = {
localData.column(sparkTransformer.getFeaturesCol) match {
case Some(column) =>
val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data.map(f => Vectors.dense(f.asInstanceOf[Array[Double]])).map { vector =>
parent.predict(vector)
})
localData.withColumn(newColumn)
case None => localData
}
}
}
// Deserializes a KMeansModel from metadata + raw cluster-center data.
object LocalKMeansModel extends LocalModel[KMeansModel] {
// `data` maps stringified center indices to maps holding a "values" list of
// doubles; both the mllib and ml constructors are private, so reflection is
// used to invoke them.
override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
// Restore centers in numeric index order ("0", "1", ... sorted as ints).
val sorted = ListMap(data.toSeq.sortBy { case (key: String, _: Any) => key.toInt}: _*)
val centers = sorted map {
case (_: String, value: Any) =>
val center = value.asInstanceOf[Map[String, Any]]
Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
}
// mllib KMeansModel(Array[Vector]) constructor is non-public; open it up.
val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
parentConstructor.setAccessible(true)
val mlk = parentConstructor.newInstance(centers.toArray)
// Same for the ml wrapper's (uid, parentModel) constructor.
val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
constructor.setAccessible(true)
var inst = constructor
.newInstance(metadata.uid, mlk)
.setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
.setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
// Re-apply the persisted params; numeric params arrive as Number, so they
// are narrowed explicitly.
inst = inst.set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
inst = inst.set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
inst = inst.set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
inst = inst.set(inst.seed, metadata.paramMap("seed").toString.toLong)
inst = inst.set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
inst
}
// Implicit bridge so a KMeansModel can be used where a LocalTransformer is expected.
override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:mist,代码行数:60,代码来源:LocalKMeansModel.scala
示例16: PostgresVersion
//设置package包名称以及导入依赖的类
package org.pgscala.embedded
import scala.collection.immutable.ListMap
/** A PostgreSQL version triple with lexicographic ordering.
  * The constructor is private: obtain instances via the named vals
  * in the companion (e.g. `PostgresVersion.\`9.6\``). */
case class PostgresVersion private (major: Int, minor: Int, patch: Int) extends Ordered[PostgresVersion] {
  require(major > 0, "Major version must be positive")
  require(minor >= 0, "Minor version cannot be negative")
  require(patch >= 0, "Patch version cannot be negative")

  /** Renders as "major.minor.patch". */
  override def toString: String = s"$major.$minor.$patch"

  /** Compares by major, then minor, then patch. */
  override def compare(that: PostgresVersion): Int = {
    val byMajor = major compare that.major
    if (byMajor != 0) byMajor
    else {
      val byMinor = minor compare that.minor
      if (byMinor != 0) byMinor
      else patch compare that.patch
    }
  }
}

object PostgresVersion {
  // Latest available patch release per supported minor line.
  val `9.6.3` = PostgresVersion(9, 6, 3)
  val `9.5.7` = PostgresVersion(9, 5, 7)
  val `9.4.12` = PostgresVersion(9, 4, 12)
  val `9.3.17` = PostgresVersion(9, 3, 17)
  val `9.2.21` = PostgresVersion(9, 2, 21)
  val `9.1.24` = PostgresVersion(9, 1, 24)
  val `9.0.23` = PostgresVersion(9, 0, 23)

  // Minor-line aliases — prefer these over hardcoding a patch version.
  val `9.6` = `9.6.3`
  val `9.5` = `9.5.7`
  val `9.4` = `9.4.12`
  val `9.3` = `9.3.17`
  val `9.2` = `9.2.21`
  val `9.1` = `9.1.24`
  val `9.0` = `9.0.23`

  /** "major.minor" string -> latest version of that line, newest first
    * (ListMap preserves the ordering). */
  val minorVersions: Map[String, PostgresVersion] = ListMap(
    Seq(`9.6`, `9.5`, `9.4`, `9.3`, `9.2`, `9.1`, `9.0`)
      .map(v => s"${v.major}.${v.minor}" -> v): _*
  )

  /** All supported versions, newest first. */
  val values: IndexedSeq[PostgresVersion] = minorVersions.values.toIndexedSeq
}
开发者ID:oradian,项目名称:pgscala-embedded,代码行数:48,代码来源:PostgresVersion.scala
示例17: EnglishParserController
//设置package包名称以及导入依赖的类
package controllers
import javax.inject._
import dao.HistoryDAO
import models.{History, Transfer}
import net.ruippeixotog.scalascraper.browser.JsoupBrowser
import net.ruippeixotog.scalascraper.dsl.DSL._
import net.ruippeixotog.scalascraper.dsl.DSL.Extract._
import net.ruippeixotog.scalascraper.dsl.DSL.Parse._
import net.ruippeixotog.scalascraper.model.Element
import play.api.data.Form
import play.api.data.Forms._
import play.api.mvc._
import scala.collection.immutable.ListMap
import scala.io._
@Singleton
class EnglishParserController @Inject()(historyDAO: HistoryDAO) extends Controller {

  // Only lowercase words of at least four letters are counted.
  val wordRegexp = "^[a-z]{4,}$"

  /** Fetches the submitted URL, counts qualifying words appearing more than
    * three times, records the visit in history, and renders the counts. */
  def enParser = Action { implicit request =>
    val bindForm = transferForm.bindFromRequest
    // BUG FIX: the original evaluated Ok("url is not valid") / Ok("please enter
    // the url") as free-standing expressions and fell through, so the error
    // responses were discarded and parsing proceeded on invalid input. Each
    // branch must be the Action's result, hence the if/else chain.
    if (bindForm.hasErrors) {
      Ok("url is not valid")
    } else {
      val url = bindForm.data.getOrElse("url", "")
      if (url.isEmpty) {
        Ok("please enter the url")
      } else {
        val html = Source.fromURL(url).mkString
        val browser = JsoupBrowser()
        val doc = browser.parseString(html)
        val title = (doc >> texts("title")).mkString("/")
        // word -> occurrence count over whitespace-split tokens.
        val words = html.split(" ").filter(p => p.matches(wordRegexp))
          .groupBy(f => f).map {
            case (k, v) => (k, v.length)
          }
        // Keep only words seen more than 3 times, ordered by descending count.
        val filterWords = ListMap(words.toSeq.sortWith(_._2 > _._2): _*).filter(p => p._2 > 3)
        val history: History = History(new java.sql.Timestamp((new java.util.Date).getTime), title, url)
        historyDAO.insert(history)
        //Ok(views.html.parser(title, filterWords))
        Ok(views.html.parser(filterWords))
      }
    }
  }

  val transferForm = Form(
    mapping(
      "title" -> nonEmptyText,
      "url" -> nonEmptyText
    )(Transfer.apply)(Transfer.unapply)
  )
}
开发者ID:tanan,项目名称:log-collector,代码行数:59,代码来源:EnglishParserController.scala
示例18: values
//设置package包名称以及导入依赖的类
package com.github.pcejrowski.grasca
import java.time.Instant
import com.github.pcejrowski.grasca.model.render.{RenderResult, RenderedValues}
import org.json4s._
import org.json4s.native.JsonMethods._
import scala.collection.immutable.ListMap
import scalaj.http._
/** Queries the Graphite render endpoint for `target` over [from, until]
  * (defaulting to the last DAY) and parses the JSON response; None when the
  * body does not decode to a list of RenderResult. */
def values(target: String, from: Instant = Instant.now.minus(DAY), until: Instant = Instant.now): Option[RenderedValues] = {
  val response = Http(s"$renderEndpoint")
    .param("target", target)
    .param("from", from.getEpochSecond.toString)
    .param("until", until.getEpochSecond.toString)
    .param("format", "json")
    .param("noNullPoints", "true")
    .asString
  parse(response.body).extractOpt[List[RenderResult]].map(transform)
}
// Converts raw render results into target -> (timestamp -> value) maps.
// Datapoints arrive as [value, epochSeconds] pairs; per-target entries are
// ordered by descending timestamp (sortBy + reverse) and materialised into a
// ListMap (via the toListMap extension) to retain that order; targets are
// sorted by name.
// NOTE(review): the partial match `case List(value, Some(epochTime))` throws a
// MatchError on any datapoint lacking a timestamp — presumably impossible with
// noNullPoints=true in the request; confirm against the Graphite render API.
private def transform(renderResults: List[RenderResult]): RenderedValues = {
renderResults
.map(entry => {
val values: ListMap[Instant, Option[Long]] = entry
.datapoints
.map { case List(value, Some(epochTime)) => Instant.ofEpochSecond(epochTime) -> value }
.sortBy(_._1)
.reverse
.toListMap
entry.target -> values
})
.sortBy(_._1)
.toListMap
}
}
object RenderAPI {
// Factory; port defaults to plain HTTP (80).
def apply(host: String, port: Int = 80) = new RenderAPI(host, port)
}
开发者ID:pcejrowski,项目名称:grasca,代码行数:46,代码来源:RenderAPI.scala
示例19: TransactionHelpers
//设置package包名称以及导入依赖的类
package utils
import models.{Months, Transaction, Transactions}
import scala.collection.immutable.ListMap
import scala.math.BigDecimal.RoundingMode
object TransactionHelpers {

  /** Groups non-top-up transactions by (month, year), most recent month first. */
  def groupTransactionsByMonth(transactions: Transactions): Seq[((Int, Int), Seq[Transaction])] = {
    val transactionsWithoutTopUps = transactions.transactions.filterNot(_.isLoad)
    // Sort by month, then (stable sort) by year, both descending.
    val groupedAndSortedByMonth = transactionsWithoutTopUps.groupBy(a => (a.created.getMonthOfYear, a.created.getYear)).toSeq.sortWith(_._1._1 > _._1._1).sortWith(_._1._2 > _._1._2)
    groupedAndSortedByMonth
  }

  /** Computes the "round up to the next pound" total per month.
    * Whole-pound transactions contribute a flat £1 round-up.
    * `accessToken` is kept for the (currently disabled) Monzo feed post below. */
  def calculateRoundupsByMonth(accountId: String, transactions: Transactions)(accessToken: String): Map[String, BigDecimal] = {
    // CONSISTENCY FIX: this previously duplicated the filter/group/sort logic
    // of groupTransactionsByMonth inline; reuse the helper instead.
    val byMonth = ListMap(groupTransactionsByMonth(transactions): _*)
    byMonth.map {
      month =>
        val roundUps = month._2.map { eachMonth =>
          // Amounts are in pence; convert to pounds and round away from zero.
          val roundedValue = (eachMonth.amount / 100 setScale(0, RoundingMode.UP)).abs
          val rawValue = (eachMonth.amount / 100).abs
          val roundUp = roundedValue - rawValue
          if (roundUp.equals(BigDecimal(0))) BigDecimal(1) // £1 roundups when transaction amount is whole number
          else roundUp
        }.foldLeft(BigDecimal(0))(_ + _)
        val formData = Map(
          "account_id" -> Seq(accountId),
          "type" -> Seq("basic"),
          "url" -> Seq(""),
          "params[title]" -> Seq(s"Round Up for ${Months(month._1._1)} ${month._1._2} - £$roundUps"),
          "params[body]" -> Seq(s"Round ups for ${Months(month._1._1)} ${month._1._2} - £$roundUps"),
          "params[image_url]" -> Seq("https://scontent-lht6-1.xx.fbcdn.net/v/t1.0-9/15871922_10212040156182063_1392533991348799017_n.jpg?oh=4669484d186b91d9b07911255a8d09d3&oe=5940244F")
        )
        //ws.url("https://api.monzo.com/feed").withHeaders(("Authorization", s"Bearer $accessToken")).post(formData)
        (s"${Months(month._1._1)} ${month._1._2}", roundUps)
    }
  }
}
开发者ID:adamtrousdale,项目名称:monzo-roundup,代码行数:48,代码来源:TransactionHelpers.scala
示例20: Statistics
//设置package包名称以及导入依赖的类
package controllers
import javax.inject.Inject
import com.mohiva.play.silhouette.api.{Environment, Silhouette}
import com.mohiva.play.silhouette.impl.authenticators.SessionAuthenticator
import models._
import models.daos._
import play.api.i18n.{Messages, MessagesApi}
import play.api.libs.json.Json
import play.api.mvc.{Action, AnyContent}
import scala.collection.immutable.ListMap
import scala.concurrent.ExecutionContext.Implicits._
/** Controller exposing aggregate statistics, both as a rendered page and as
  * per-category JSON endpoints. All actions require authentication. */
class Statistics @Inject()(statisticsDAO: StatisticsDAO,
                           organisationDAO: OrganisationDAO,
                           val messagesApi: MessagesApi,
                           val env: Environment[User, SessionAuthenticator])
  extends Silhouette[User, SessionAuthenticator] {

  /** Renders the statistics overview page with one section per category. */
  def statistics: Action[AnyContent] = SecuredAction.async { implicit request =>
    // PERFORMANCE: start all five queries before the for-comprehension so they
    // run concurrently; binding the DAO calls inside the `for` would sequence
    // them, making each query wait for the previous one to finish.
    val fridayF = statisticsDAO.friday
    val saturdayF = statisticsDAO.saturday
    val sortingF = statisticsDAO.sorting
    val sellingF = statisticsDAO.selling
    val organisationsF = organisationDAO.all
    for {
      friday <- fridayF
      saturday <- saturdayF
      sorting <- sortingF
      selling <- sellingF
      organisations <- organisationsF
    } yield {
      // ListMap preserves the section order on the rendered page.
      val statistics = ListMap(
        Messages("friday") -> fill(organisations, friday),
        Messages("saturday") -> fill(organisations, saturday),
        Messages("sorting") -> fill(organisations, sorting),
        Messages("selling") -> fill(organisations, selling)
      )
      Ok(views.html.statistics(statistics, request.identity))
    }
  }

  // Ensures every organisation appears in the result, defaulting missing counts to 0.
  private def fill(organisations: Seq[Organisation], statistic: Map[String, Int]): Map[String, Int] =
    organisations.map(organisation => (organisation.name, statistic.getOrElse(organisation.name, 0))).toMap

  def friday: Action[AnyContent] = SecuredAction.async { implicit request =>
    statisticsDAO.friday.map(statistics => Ok(Json.toJson(statistics)))
  }

  def saturday: Action[AnyContent] = SecuredAction.async { implicit request =>
    statisticsDAO.saturday.map(statistics => Ok(Json.toJson(statistics)))
  }

  def sorting: Action[AnyContent] = SecuredAction.async { implicit request =>
    statisticsDAO.sorting.map(statistics => Ok(Json.toJson(statistics)))
  }

  def selling: Action[AnyContent] = SecuredAction.async { implicit request =>
    statisticsDAO.selling.map(statistics => Ok(Json.toJson(statistics)))
  }
}
开发者ID:wjglerum,项目名称:bamboesmanager,代码行数:59,代码来源:Statistics.scala
注:本文中的scala.collection.immutable.ListMap类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论