本文整理汇总了Scala中org.apache.spark.ml.regression.DecisionTreeRegressionModel类的典型用法代码示例。如果您正苦于以下问题:Scala DecisionTreeRegressionModel类的具体用法?Scala DecisionTreeRegressionModel怎么用?Scala DecisionTreeRegressionModel使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DecisionTreeRegressionModel类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。
示例1: LocalDecisionTreeRegressionModel
//Package declaration and imported dependencies
package io.hydrosphere.spark_ml_serving.regression
import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.regression.DecisionTreeRegressionModel
import org.apache.spark.ml.tree.Node
class LocalDecisionTreeRegressionModel(override val sparkTransformer: DecisionTreeRegressionModel) extends LocalTransformer[DecisionTreeRegressionModel] {

  /** Produces the prediction column by applying the wrapped tree model's
    * `predict(Vector)` to every row of the features column.
    *
    * The method is invoked reflectively with `setAccessible(true)` because it
    * is not publicly callable in this Spark version. Rows are expected to be
    * `Array[Double]` feature vectors. When the features column is absent the
    * input data is returned unchanged.
    */
  override def transform(localData: LocalData): LocalData = {
    val predictMethod = classOf[DecisionTreeRegressionModel].getMethod("predict", classOf[Vector])
    predictMethod.setAccessible(true)
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(featuresColumn) =>
        val predictions = featuresColumn.data
          .map(raw => Vectors.dense(raw.asInstanceOf[Array[Double]]))
          .map(vec => predictMethod.invoke(sparkTransformer, vec).asInstanceOf[Double])
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predictions))
      case None =>
        localData
    }
  }
}
object LocalDecisionTreeRegressionModel extends LocalModel[DecisionTreeRegressionModel] {

  /** Deserializes a [[DecisionTreeRegressionModel]] from stored metadata and node data. */
  override def load(metadata: Metadata, data: Map[String, Any]): DecisionTreeRegressionModel =
    createTree(metadata, data)

  /** Rebuilds one tree model via its non-public `(uid, rootNode, numFeatures)`
    * constructor, then restores column names and the training parameters
    * recorded in `metadata.paramMap`.
    */
  def createTree(metadata: Metadata, data: Map[String, Any]): DecisionTreeRegressionModel = {
    val constructor = classOf[DecisionTreeRegressionModel]
      .getDeclaredConstructor(classOf[String], classOf[Node], classOf[Int])
    constructor.setAccessible(true)

    val rootNode = DataUtils.createNode(0, metadata, data)
    val model = constructor.newInstance(
      metadata.uid,
      rootNode,
      metadata.numFeatures.get.asInstanceOf[java.lang.Integer]
    )

    val params = metadata.paramMap
    // Column setters mutate the instance; the parameter sets below return it.
    model
      .setFeaturesCol(params("featuresCol").asInstanceOf[String])
      .setPredictionCol(params("predictionCol").asInstanceOf[String])
    model
      .set(model.seed, params("seed").toString.toLong)
      .set(model.cacheNodeIds, params("cacheNodeIds").toString.toBoolean)
      .set(model.maxDepth, params("maxDepth").toString.toInt)
      .set(model.labelCol, params("labelCol").toString)
      .set(model.minInfoGain, params("minInfoGain").toString.toDouble)
      .set(model.checkpointInterval, params("checkpointInterval").toString.toInt)
      .set(model.minInstancesPerNode, params("minInstancesPerNode").toString.toInt)
      .set(model.maxMemoryInMB, params("maxMemoryInMB").toString.toInt)
      .set(model.maxBins, params("maxBins").toString.toInt)
      .set(model.impurity, params("impurity").toString)
  }

  override implicit def getTransformer(transformer: DecisionTreeRegressionModel): LocalTransformer[DecisionTreeRegressionModel] =
    new LocalDecisionTreeRegressionModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:spark-ml-serving,代码行数:54,代码来源:LocalDecisionTreeRegressionModel.scala
示例2: LocalRandomForestRegressionModel
//Package declaration and imported dependencies
package io.hydrosphere.spark_ml_serving.regression
import io.hydrosphere.spark_ml_serving._
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.regression.{DecisionTreeRegressionModel, RandomForestRegressionModel}
class LocalRandomForestRegressionModel(override val sparkTransformer: RandomForestRegressionModel) extends LocalTransformer[RandomForestRegressionModel] {

  /** Produces the prediction column by reflectively invoking the ensemble's
    * `predict(Vector)` on every row of the features column (rows are expected
    * to be `Array[Double]`). Returns the input unchanged when the features
    * column is missing.
    */
  override def transform(localData: LocalData): LocalData = {
    val cls = classOf[RandomForestRegressionModel]
    val predict = cls.getMethod("predict", classOf[Vector])
    // FIX: without setAccessible the reflective invoke below fails with
    // IllegalAccessException when `predict` is not public in this Spark
    // version. The sibling LocalDecisionTreeRegressionModel already does this.
    predict.setAccessible(true)
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        val predictionCol = LocalDataColumn(
          sparkTransformer.getPredictionCol,
          column.data
            .map(f => Vectors.dense(f.asInstanceOf[Array[Double]]))
            .map(vector => predict.invoke(sparkTransformer, vector).asInstanceOf[Double])
        )
        localData.withColumn(predictionCol)
      case None => localData
    }
  }
}
object LocalRandomForestRegressionModel extends LocalModel[RandomForestRegressionModel] {

  /** Rebuilds the forest: each entry of the serialized `treesMetadata` map is
    * turned back into a DecisionTreeRegressionModel, then the ensemble is
    * created through its non-public `(uid, trees, numFeatures)` constructor
    * and its recorded parameters are restored.
    */
  override def load(metadata: Metadata, data: Map[String, Any]): RandomForestRegressionModel = {
    val perTreeMetadata = metadata.paramMap("treesMetadata").asInstanceOf[Map[String, Any]]
    val trees = perTreeMetadata.map { case (treeKey, rawTreeMeta) =>
      val treeMetadata = rawTreeMeta.asInstanceOf[Map[String, Any]]("metadata").asInstanceOf[Metadata]
      LocalDecisionTreeRegressionModel.createTree(
        treeMetadata,
        data(treeKey).asInstanceOf[Map[String, Any]]
      )
    }

    val constructor = classOf[RandomForestRegressionModel]
      .getDeclaredConstructor(classOf[String], classOf[Array[DecisionTreeRegressionModel]], classOf[Int])
    constructor.setAccessible(true)

    val model = constructor
      .newInstance(
        metadata.uid,
        trees.to[Array],
        metadata.numFeatures.get.asInstanceOf[java.lang.Integer]
      )
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    model
      .set(model.seed, metadata.paramMap("seed").toString.toLong)
      .set(model.subsamplingRate, metadata.paramMap("subsamplingRate").toString.toDouble)
      .set(model.impurity, metadata.paramMap("impurity").toString)
  }

  override implicit def getTransformer(transformer: RandomForestRegressionModel): LocalTransformer[RandomForestRegressionModel] =
    new LocalRandomForestRegressionModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:spark-ml-serving,代码行数:53,代码来源:LocalRandomForestRegressionModel.scala
示例3: LocalDecisionTreeRegressionModel
//Package declaration and imported dependencies
package io.hydrosphere.mist.api.ml.regression
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.regression.DecisionTreeRegressionModel
import org.apache.spark.ml.tree.Node
class LocalDecisionTreeRegressionModel(override val sparkTransformer: DecisionTreeRegressionModel) extends LocalTransformer[DecisionTreeRegressionModel] {

  /** Computes the prediction column from the features column by calling the
    * tree model's `predict(Vector)` reflectively (`setAccessible(true)` since
    * it is not publicly callable in this Spark version). Each row must be an
    * `Array[Double]`. If the features column is absent, input is returned as-is.
    */
  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getFeaturesCol) match {
      case None =>
        localData
      case Some(featuresColumn) =>
        val predictMethod = classOf[DecisionTreeRegressionModel].getMethod("predict", classOf[Vector])
        predictMethod.setAccessible(true)
        val predicted = for (raw <- featuresColumn.data) yield {
          val vec = Vectors.dense(raw.asInstanceOf[Array[Double]])
          predictMethod.invoke(sparkTransformer, vec).asInstanceOf[Double]
        }
        localData.withColumn(LocalDataColumn(sparkTransformer.getPredictionCol, predicted))
    }
  }
}
object LocalDecisionTreeRegressionModel extends LocalModel[DecisionTreeRegressionModel] {

  /** Deserializes a [[DecisionTreeRegressionModel]] from stored metadata and node data. */
  override def load(metadata: Metadata, data: Map[String, Any]): DecisionTreeRegressionModel =
    createTree(metadata, data)

  /** Instantiates a tree through its non-public `(uid, rootNode, numFeatures)`
    * constructor and restores column names plus the training parameters held
    * in `metadata.paramMap`.
    */
  def createTree(metadata: Metadata, data: Map[String, Any]): DecisionTreeRegressionModel = {
    val treeCtor = classOf[DecisionTreeRegressionModel]
      .getDeclaredConstructor(classOf[String], classOf[Node], classOf[Int])
    treeCtor.setAccessible(true)

    val tree = treeCtor.newInstance(
      metadata.uid,
      DataUtils.createNode(0, metadata, data),
      metadata.numFeatures.get.asInstanceOf[java.lang.Integer]
    )

    val paramMap = metadata.paramMap
    // The column setters mutate `tree` in place; the `.set` chain returns it.
    tree
      .setFeaturesCol(paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(paramMap("predictionCol").asInstanceOf[String])
    tree
      .set(tree.seed, paramMap("seed").toString.toLong)
      .set(tree.cacheNodeIds, paramMap("cacheNodeIds").toString.toBoolean)
      .set(tree.maxDepth, paramMap("maxDepth").toString.toInt)
      .set(tree.labelCol, paramMap("labelCol").toString)
      .set(tree.minInfoGain, paramMap("minInfoGain").toString.toDouble)
      .set(tree.checkpointInterval, paramMap("checkpointInterval").toString.toInt)
      .set(tree.minInstancesPerNode, paramMap("minInstancesPerNode").toString.toInt)
      .set(tree.maxMemoryInMB, paramMap("maxMemoryInMB").toString.toInt)
      .set(tree.maxBins, paramMap("maxBins").toString.toInt)
      .set(tree.impurity, paramMap("impurity").toString)
  }

  override implicit def getTransformer(transformer: DecisionTreeRegressionModel): LocalTransformer[DecisionTreeRegressionModel] =
    new LocalDecisionTreeRegressionModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:mist,代码行数:54,代码来源:LocalDecisionTreeRegressionModel.scala
示例4: LocalRandomForestRegressionModel
//Package declaration and imported dependencies
package io.hydrosphere.mist.api.ml.regression
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.regression.{DecisionTreeRegressionModel, RandomForestRegressionModel}
class LocalRandomForestRegressionModel(override val sparkTransformer: RandomForestRegressionModel) extends LocalTransformer[RandomForestRegressionModel] {

  /** Produces the prediction column by reflectively invoking the ensemble's
    * `predict(Vector)` on every row of the features column (rows are expected
    * to be `Array[Double]`). Returns the input unchanged when the features
    * column is missing.
    */
  override def transform(localData: LocalData): LocalData = {
    val cls = classOf[RandomForestRegressionModel]
    val predict = cls.getMethod("predict", classOf[Vector])
    // FIX: without setAccessible the reflective invoke below fails with
    // IllegalAccessException when `predict` is not public in this Spark
    // version. The sibling LocalDecisionTreeRegressionModel already does this.
    predict.setAccessible(true)
    localData.column(sparkTransformer.getFeaturesCol) match {
      case Some(column) =>
        val predictionCol = LocalDataColumn(
          sparkTransformer.getPredictionCol,
          column.data
            .map(f => Vectors.dense(f.asInstanceOf[Array[Double]]))
            .map(vector => predict.invoke(sparkTransformer, vector).asInstanceOf[Double])
        )
        localData.withColumn(predictionCol)
      case None => localData
    }
  }
}
object LocalRandomForestRegressionModel extends LocalModel[RandomForestRegressionModel] {

  /** Rebuilds the forest from serialized state: each `treesMetadata` entry is
    * converted back to a DecisionTreeRegressionModel, the ensemble is created
    * through its non-public `(uid, trees, numFeatures)` constructor, and the
    * recorded parameters are reapplied.
    */
  override def load(metadata: Metadata, data: Map[String, Any]): RandomForestRegressionModel = {
    val serializedTrees = metadata.paramMap("treesMetadata").asInstanceOf[Map[String, Any]]
    val rebuiltTrees = serializedTrees.map { case (key, rawMeta) =>
      val treeMeta = rawMeta.asInstanceOf[Map[String, Any]]("metadata").asInstanceOf[Metadata]
      LocalDecisionTreeRegressionModel.createTree(treeMeta, data(key).asInstanceOf[Map[String, Any]])
    }

    val forestCtor = classOf[RandomForestRegressionModel]
      .getDeclaredConstructor(classOf[String], classOf[Array[DecisionTreeRegressionModel]], classOf[Int])
    forestCtor.setAccessible(true)

    val forest = forestCtor
      .newInstance(
        metadata.uid,
        rebuiltTrees.to[Array],
        metadata.numFeatures.get.asInstanceOf[java.lang.Integer]
      )
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])

    forest
      .set(forest.seed, metadata.paramMap("seed").toString.toLong)
      .set(forest.subsamplingRate, metadata.paramMap("subsamplingRate").toString.toDouble)
      .set(forest.impurity, metadata.paramMap("impurity").toString)
  }

  override implicit def getTransformer(transformer: RandomForestRegressionModel): LocalTransformer[RandomForestRegressionModel] =
    new LocalRandomForestRegressionModel(transformer)
}
开发者ID:Hydrospheredata,项目名称:mist,代码行数:53,代码来源:LocalRandomForestRegressionModel.scala
注:本文中的org.apache.spark.ml.regression.DecisionTreeRegressionModel类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论