Scala KMeansModel Class Code Examples


This article collects typical usage examples of the org.apache.spark.ml.clustering.KMeansModel class in Scala. If you are wondering how KMeansModel is used in practice, the selected class examples below may help.



Two code examples of the KMeansModel class are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the site recommend better Scala code examples.
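
For reference, here is a minimal sketch of how such a KMeansModel is typically produced with Spark ML before it reaches the serving code in the examples below; the toy data, column names, k, and seed are illustrative only.

import org.apache.spark.ml.clustering.{KMeans, KMeansModel}
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("kmeans-demo").getOrCreate()
import spark.implicits._

// Two well-separated toy clusters in 2D.
val training = Seq(
  Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
  Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 9.1)
).map(Tuple1.apply).toDF("features")

val kmeans = new KMeans()
  .setK(2)
  .setSeed(1L)
  .setFeaturesCol("features")
  .setPredictionCol("prediction")

val model: KMeansModel = kmeans.fit(training)
model.clusterCenters.foreach(println)      // the centers later serialized and served
model.transform(training).show()           // adds the "prediction" column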

Example 1: LocalKMeansModel

// Package declaration and imports of the required classes
package io.hydrosphere.spark_ml_serving.clustering

import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}

import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe

class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {
  // Unwrap the underlying mllib KMeansModel: the parentModel field of ml.KMeansModel
  // is not part of the public API, so it is read via runtime reflection.
  lazy val parent: OldKMeansModel = {
    val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val parentTerm = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    mirror.reflect(sparkTransformer).reflectField(parentTerm).get.asInstanceOf[OldKMeansModel]
  }

  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        // For every row of the features column, build a dense mllib vector and
        // assign it to the nearest cluster center with the unwrapped model.
        val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data.map(f => Vectors.dense(f.asInstanceOf[Array[Double]])).map { vector =>
          parent.predict(vector)
        })
        localData.withColumn(newColumn)
      case None => localData
    }
  }
}

object LocalKMeansModel extends LocalModel[KMeansModel] {
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    // Cluster centers arrive keyed by their string index; sort them numerically
    // so the centers array is rebuilt in the original order.
    val sorted = ListMap(data.toSeq.sortBy { case (key: String, _: Any) => key.toInt }: _*)
    val centers = sorted map {
      case (_: String, value: Any) =>
        val center = value.asInstanceOf[Map[String, Any]]
        Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
    }
    // Rebuild the mllib KMeansModel from the centers, then wrap it in an ml.KMeansModel;
    // both are instantiated through their declared constructors, made accessible via reflection.
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)

    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    var inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    // Restore the remaining training parameters from the persisted metadata.
    inst = inst.set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
    inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
    inst = inst.set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
    inst = inst.set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
    inst = inst.set(inst.seed, metadata.paramMap("seed").toString.toLong)
    inst = inst.set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
    inst
  }
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
} 
Developer: Hydrospheredata, Project: spark-ml-serving, Lines of code: 60, Source file: LocalKMeansModel.scala
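
A hypothetical usage sketch of the class above: `sparkModel` stands in for an already fitted org.apache.spark.ml.clustering.KMeansModel (for example the `model` from the training sketch earlier) and is an assumption, not part of the original snippet.

import org.apache.spark.mllib.linalg.Vectors

// `sparkModel` is an assumed, already-fitted ml.KMeansModel.
val local = new LocalKMeansModel(sparkModel)

// `parent` is the mllib KMeansModel unwrapped via reflection in the class above;
// its predict(Vector) assigns a single point to the nearest cluster center,
// with no SparkSession required at serving time.
val cluster: Int = local.parent.predict(Vectors.dense(0.05, 0.05))
println(s"assigned to cluster $cluster")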


Example 2: LocalKMeansModel (the same implementation, vendored into the mist project under a different package)

// Package declaration and imports of the required classes
package io.hydrosphere.mist.api.ml.clustering

import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.clustering.KMeansModel
import org.apache.spark.mllib.clustering.{KMeansModel => OldKMeansModel}
import org.apache.spark.mllib.clustering.{KMeansModel => MLlibKMeans}
import org.apache.spark.mllib.linalg.{Vectors, Vector => MLlibVec}

import scala.collection.immutable.ListMap
import scala.reflect.runtime.universe

class LocalKMeansModel(override val sparkTransformer: KMeansModel) extends LocalTransformer[KMeansModel] {
  lazy val parent: OldKMeansModel = {
    val mirror = universe.runtimeMirror(sparkTransformer.getClass.getClassLoader)
    val parentTerm = universe.typeOf[KMeansModel].decl(universe.TermName("parentModel")).asTerm
    mirror.reflect(sparkTransformer).reflectField(parentTerm).get.asInstanceOf[OldKMeansModel]
  }

  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        val newColumn = LocalDataColumn(sparkTransformer.getPredictionCol, column.data.map(f => Vectors.dense(f.asInstanceOf[Array[Double]])).map { vector =>
          parent.predict(vector)
        })
        localData.withColumn(newColumn)
      case None => localData
    }
  }
}

object LocalKMeansModel extends LocalModel[KMeansModel] {
  override def load(metadata: Metadata, data: Map[String, Any]): KMeansModel = {
    val sorted = ListMap(data.toSeq.sortBy { case (key: String, _: Any) => key.toInt}: _*)
    val centers = sorted map {
      case (_: String, value: Any) =>
        val center = value.asInstanceOf[Map[String, Any]]
        Vectors.dense(center("values").asInstanceOf[List[Double]].to[Array])
    }
    val parentConstructor = classOf[MLlibKMeans].getDeclaredConstructor(classOf[Array[MLlibVec]])
    parentConstructor.setAccessible(true)
    val mlk = parentConstructor.newInstance(centers.toArray)

    val constructor = classOf[KMeansModel].getDeclaredConstructor(classOf[String], classOf[MLlibKMeans])
    constructor.setAccessible(true)
    var inst = constructor
      .newInstance(metadata.uid, mlk)
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    inst = inst.set(inst.k, metadata.paramMap("k").asInstanceOf[Number].intValue())
    inst = inst.set(inst.initMode, metadata.paramMap("initMode").asInstanceOf[String])
    inst = inst.set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
    inst = inst.set(inst.initSteps, metadata.paramMap("initSteps").asInstanceOf[Number].intValue())
    inst = inst.set(inst.seed, metadata.paramMap("seed").toString.toLong)
    inst = inst.set(inst.tol, metadata.paramMap("tol").asInstanceOf[Double])
    inst
  }
  override implicit def getTransformer(transformer: KMeansModel): LocalTransformer[KMeansModel] = new LocalKMeansModel(transformer)
} 
Developer: Hydrospheredata, Project: mist, Lines of code: 60, Source file: LocalKMeansModel.scala
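
Both examples rely on the same pattern to reach the non-public parentModel field. A generic sketch of that pattern follows; `readField` is a hypothetical helper name, and the whole thing only mirrors the reflection code already shown above.

import scala.reflect.ClassTag
import scala.reflect.runtime.{universe => ru}

// Read a (possibly non-public) field of `instance` by name via Scala runtime reflection.
def readField[T : ru.TypeTag : ClassTag, R](instance: T, fieldName: String): R = {
  val mirror = ru.runtimeMirror(instance.getClass.getClassLoader)
  val field  = ru.typeOf[T].decl(ru.TermName(fieldName)).asTerm
  mirror.reflect(instance).reflectField(field).get.asInstanceOf[R]
}

// e.g. the body of `parent` above could be written as:
// readField[KMeansModel, OldKMeansModel](sparkTransformer, "parentModel")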



Note: The org.apache.spark.ml.clustering.KMeansModel class examples in this article were compiled from source code and documentation hosted on platforms such as GitHub/MSDocs. The snippets were selected from open-source projects and their copyright remains with the original authors; consult the corresponding project's license before redistributing or reusing them. Do not reproduce without permission.

