This article collects typical usage examples of the Scala class org.apache.spark.sql.types. If you are wondering what the types class is for, or how to use it in Scala, the curated class examples below may help.
Two code examples of the types class are presented, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
Example 1: convertSparkTypeToPigTypeCode
// Set up the package name and import the required classes
package com.paypal.risk.madmen20.util

import org.apache.spark.sql.types
import org.apache.spark.sql.types.DataType

object PigUtils {

  /** Maps a Spark SQL data type to the corresponding Pig type code. */
  def convertSparkTypeToPigTypeCode(typ: DataType): Int = {
    typ match {
      case _: types.NullType      => 1
      case _: types.BooleanType   => 5
      case _: types.ByteType      => 6
      case _: types.IntegerType   => 10
      case _: types.LongType      => 15
      case _: types.FloatType     => 20
      case _: types.DoubleType    => 25
      case _: types.TimestampType => 30
      case _: types.BinaryType    => 50
      case _: types.StringType    => 55
      case _: types.DecimalType   => 70
      case _: types.MapType       => 100
      case _: types.StructType    => 110
      case _: types.ShortType     => 10 // Pig has no short type; widen to int
      case _                      => 0  // Unknown type
    }
  }
}
Author: yanlzhang8936 | Project: madmen20 | Lines: 28 | Source: PigUtils.scala
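To see the mapping in action, here is a minimal usage sketch: it builds a small Spark schema and runs each field's data type through convertSparkTypeToPigTypeCode. The schema and field names are hypothetical, chosen only for illustration.

import org.apache.spark.sql.types._
import com.paypal.risk.madmen20.util.PigUtils

// Hypothetical schema, for illustration only
val schema = StructType(Seq(
  StructField("id", LongType),
  StructField("name", StringType),
  StructField("score", DoubleType)
))

// Pair each field name with its Pig type code
val pigCodes = schema.fields.map(f => f.name -> PigUtils.convertSparkTypeToPigTypeCode(f.dataType))
// pigCodes: Array((id,15), (name,55), (score,25))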
Example 2: toSpark
// Set up the package name and import the required classes
package org.apache.spark.ml.mleap.converter

import com.truecar.mleap.core.linalg.Vector
import com.truecar.mleap.runtime.types.StructType
import com.truecar.mleap.spark.{MleapSparkSupport, SparkLeapFrame}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, types}
import MleapSparkSupport._

trait LeapFrameToSpark[T] {
  def toSpark(t: T)(implicit sqlContext: SQLContext): DataFrame
}

case class LeapFrameToSparkWrapper[T: LeapFrameToSpark](t: T) {
  def toSpark(implicit sqlContext: SQLContext): DataFrame = {
    implicitly[LeapFrameToSpark[T]].toSpark(t)
  }
}

object LeapFrameToSpark {
  implicit object SparkLeapFrameToSpark extends LeapFrameToSpark[SparkLeapFrame] {
    override def toSpark(t: SparkLeapFrame)
                        (implicit sqlContext: SQLContext): DataFrame = {
      // Fields present in the MLeap schema but not yet in the Spark schema
      val outputNames = t.schema.fields.map(_.name).toSet -- t.sparkSchema.fields.map(_.name).toSet
      val outputs = outputNames.map {
        name => (t.schema(name), t.schema.indexOf(name))
      }.toArray.sortBy(_._2)
      val (outputFields, outputIndices) = outputs.unzip

      // Convert the MLeap-only fields to a Spark schema and append them
      val outputMleapSchema = StructTypeToSpark(StructType(outputFields)).toSpark
      val outputSchema = types.StructType(t.sparkSchema.fields ++ outputMleapSchema.fields)

      // Merge the original Spark values with the MLeap values, converting vectors
      val rows = t.dataset.rdd.map {
        case (mleapRow, sparkValues) =>
          val mleapData = outputIndices.map {
            index =>
              mleapRow.get(index) match {
                case value: Vector => value.toSpark
                case value => value
              }
          }
          Row(sparkValues ++ mleapData: _*)
      }

      sqlContext.createDataFrame(rows, outputSchema)
    }
  }
}
Author: TrueCar | Project: mleap | Lines: 49 | Source: LeapFrameToSpark.scala
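The wrapper gives any type with a LeapFrameToSpark instance a convenient toSpark method. Below is a minimal, hypothetical usage sketch: the frame parameter stands in for a SparkLeapFrame produced elsewhere by MLeap's Spark support, and convert is a made-up helper name, not part of the library.

import com.truecar.mleap.spark.SparkLeapFrame
import org.apache.spark.ml.mleap.converter.LeapFrameToSparkWrapper
import org.apache.spark.sql.{DataFrame, SQLContext}

// Hypothetical helper: converts an MLeap leap frame to a Spark DataFrame.
// The implicit SparkLeapFrameToSpark instance is resolved from the
// LeapFrameToSpark companion object.
def convert(frame: SparkLeapFrame)(implicit sqlContext: SQLContext): DataFrame = {
  LeapFrameToSparkWrapper(frame).toSpark
}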
Note: The org.apache.spark.sql.types examples in this article were compiled from open-source projects hosted on platforms such as GitHub and MSDocs. Copyright of each code snippet belongs to its original author; consult the corresponding project's license before redistributing or using the code. Do not reproduce this article without permission.