Scala ArrayData Class Code Examples


This article collects typical usage examples of the Scala class org.apache.spark.sql.catalyst.util.ArrayData. If you are wondering what the ArrayData class is for, how to use it, or want to see it in real code, the curated examples below may help.



Five code examples of the ArrayData class are shown below, ordered by popularity.

Example 1: TextualUtil

// Package declaration and imported dependency classes
package edu.utah.cs.simba.util

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, BindReferences, Expression}
import org.apache.spark.sql.catalyst.util.ArrayData

import scala.collection.mutable


object TextualUtil {
  // Returns true when the Jaccard similarity of the two token arrays is at least `sim`.
  def simFilter(leftText: ArrayData, rightText: ArrayData, sim: Double): Boolean = {
    val l = leftText.numElements()
    val r = rightText.numElements()
    // Length-ratio pruning: if one array is too short relative to the other,
    // the similarity threshold can never be met.
    if (sim * l > r || sim * r > l) return false
    var sameText = 0
    val data = mutable.Set[String]()
    var i = 0
    // Collect the distinct tokens of the left array.
    while (i < l) {
      data.add(leftText.getUTF8String(i).toString)
      i += 1
    }
    var j = 0
    // Count right-side tokens already seen on the left (the intersection);
    // unseen tokens grow `data` into the union of both sides.
    while (j < r) {
      val tmp_str = rightText.getUTF8String(j).toString
      if (data.contains(tmp_str)) sameText += 1
      else data.add(tmp_str)
      j += 1
    }
    // Jaccard similarity = |intersection| / |union| (dividing by 1.0 forces floating point).
    if (sameText / 1.0 / data.size >= sim) return true
    false
  }

  // Binds `expression` against `schema`, evaluates it on `input`, and returns the
  // resulting array column value as Catalyst ArrayData.
  def getText(expression: Expression, schema: Seq[Attribute], input: InternalRow): ArrayData = {
    BindReferences.bindReference(expression, schema).eval(input).asInstanceOf[ArrayData]
  }
}
Developer: zdccc, Project: SimbaExpand, Lines: 37, Source: TextualUtil.scala
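
Below is a minimal usage sketch, not part of the original project, showing how simFilter might be called directly with in-memory Catalyst arrays; the object name TextualUtilExample and the sample token values are illustrative assumptions.

// Usage sketch (assumed example, not from SimbaExpand)
import edu.utah.cs.simba.util.TextualUtil
import org.apache.spark.sql.catalyst.util.GenericArrayData
import org.apache.spark.unsafe.types.UTF8String

object TextualUtilExample {
  def main(args: Array[String]): Unit = {
    // Token arrays as Catalyst ArrayData holding UTF8String elements.
    val left  = new GenericArrayData(Seq("spark", "scala", "sql").map(UTF8String.fromString))
    val right = new GenericArrayData(Seq("spark", "scala", "jvm").map(UTF8String.fromString))

    // The two token sets share 2 of 4 distinct tokens, so their Jaccard similarity is 0.5.
    println(TextualUtil.simFilter(left, right, 0.4)) // true
    println(TextualUtil.simFilter(left, right, 0.6)) // false
  }
}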


Example 2: EmbeddedSet

// Package declaration and imported dependency classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedSetType])
case class EmbeddedSet(elements: Array[Any]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedSet => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedSetType extends UserDefinedType[EmbeddedSet] {

  override def sqlType: DataType = ArrayType(StringType)

  // Serializes each element with Java serialization and stores the resulting bytes
  // as UTF8String entries of a Catalyst GenericArrayData.
  override def serialize(obj: EmbeddedSet): Any = {
    new GenericArrayData(obj.elements.map { elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  // Reverses serialize: reads each UTF8String's bytes back through Java deserialization
  // and rebuilds the EmbeddedSet.
  override def deserialize(datum: Any): EmbeddedSet = {
    datum match {
      case values: ArrayData =>
        new EmbeddedSet(values.toArray[UTF8String](StringType).map { elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedSet] = classOf[EmbeddedSet]
}

object EmbeddedSetType extends EmbeddedSetType 
Developer: orientechnologies, Project: spark-orientdb, Lines: 60, Source: EmbeddedSetType.scala
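
As a quick check of the symmetry between serialize and deserialize, here is a small round-trip sketch; it is not part of the original project, and the object name EmbeddedSetExample and the sample values are assumptions.

// Round-trip sketch (assumed example, not from spark-orientdb)
import org.apache.spark.orientdb.udts.{EmbeddedSet, EmbeddedSetType}

object EmbeddedSetExample {
  def main(args: Array[String]): Unit = {
    val original = EmbeddedSet(Array[Any]("a", 1, null))
    val udt = new EmbeddedSetType

    // serialize Java-serializes each element into bytes wrapped as UTF8String.
    val catalystValue = udt.serialize(original)
    // deserialize reads those bytes back and rebuilds the EmbeddedSet.
    val restored = udt.deserialize(catalystValue)

    println(restored.elements.mkString(", ")) // a, 1, null
    println(restored == original)             // true: equals compares the element arrays
  }
}

Note that every element must be java.io.Serializable for this round trip to work, since serialize relies on ObjectOutputStream.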


Example 3: LinkSet

// Package declaration and imported dependency classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import com.orientechnologies.orient.core.record.ORecord
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[LinkSetType])
case class LinkSet(elements: Array[_ <: ORecord]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: LinkSet => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class LinkSetType extends UserDefinedType[LinkSet] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: LinkSet): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): LinkSet = {
    datum match {
      case values: ArrayData =>
        new LinkSet(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().asInstanceOf[ORecord]
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[LinkSet] = classOf[LinkSet]
}

object LinkSetType extends LinkSetType 
Developer: orientechnologies, Project: spark-orientdb, Lines: 61, Source: LinkSetType.scala
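
Because LinkSetType extends UserDefinedType, it is itself a Spark DataType and can be declared as a column type in a schema. The following sketch is not from the original project; the column name "links" and the ODocument class name "Person" are illustrative assumptions (ODocument is OrientDB's common concrete ORecord implementation).

// Schema-declaration sketch (assumed example, not from spark-orientdb)
import com.orientechnologies.orient.core.record.impl.ODocument
import org.apache.spark.orientdb.udts.{LinkSet, LinkSetType}
import org.apache.spark.sql.types.{StructField, StructType}

object LinkSetSchemaExample {
  // A UserDefinedType is a DataType, so it can be used directly as a column type.
  val schema: StructType = StructType(Seq(StructField("links", new LinkSetType, nullable = true)))

  // A sample column value: a LinkSet wrapping OrientDB documents.
  val sample: LinkSet = LinkSet(Array(new ODocument("Person"), new ODocument("Person")))
}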


Example 4: EmbeddedList

// Package declaration and imported dependency classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[EmbeddedListType])
case class EmbeddedList(elements: Array[Any]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: EmbeddedList => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class EmbeddedListType extends UserDefinedType[EmbeddedList] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: EmbeddedList): Any = {
    new GenericArrayData(obj.elements.map{elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): EmbeddedList = {
    datum match {
      case values: ArrayData =>
        new EmbeddedList(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject()
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[EmbeddedList] = classOf[EmbeddedList]
}

object EmbeddedListType extends EmbeddedListType 
Developer: orientechnologies, Project: spark-orientdb, Lines: 60, Source: EmbeddedListType.scala
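
A short sketch, not part of the original project (names and values are illustrative), of the equality semantics: equals delegates to Array.sameElements, so two EmbeddedList values compare equal only when they hold the same elements in the same order.

// Equality-semantics sketch (assumed example, not from spark-orientdb)
import org.apache.spark.orientdb.udts.EmbeddedList

object EmbeddedListExample {
  def main(args: Array[String]): Unit = {
    val a = EmbeddedList(Array[Any]("x", 1))
    val b = EmbeddedList(Array[Any]("x", 1))
    val c = EmbeddedList(Array[Any](1, "x"))

    println(a == b) // true: same elements in the same order
    println(a == c) // false: sameElements is order-sensitive
  }
}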


Example 5: LinkList

// Package declaration and imported dependency classes
package org.apache.spark.orientdb.udts

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import com.orientechnologies.orient.core.record.ORecord
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

@SQLUserDefinedType(udt = classOf[LinkListType])
case class LinkList(elements: Array[_ <: ORecord]) extends Serializable {
  override def hashCode(): Int = {
    var hashCode = 1
    val i = elements.iterator
    while (i.hasNext) {
      val obj = i.next()

      val elemValue = if (obj == null) 0 else obj.hashCode()
      hashCode = 31 * hashCode + elemValue
    }
    hashCode
  }

  override def equals(other: scala.Any): Boolean = other match {
    case that: LinkList => that.elements.sameElements(this.elements)
    case _ => false
  }

  override def toString: String = elements.mkString(", ")
}

class LinkListType extends UserDefinedType[LinkList] {

  override def sqlType: DataType = ArrayType(StringType)

  override def serialize(obj: LinkList): Any = {
    new GenericArrayData(obj.elements.map{ elem =>
      val out = new ByteArrayOutputStream()
      val os = new ObjectOutputStream(out)
      os.writeObject(elem)
      UTF8String.fromBytes(out.toByteArray)
    })
  }

  override def deserialize(datum: Any): LinkList = {
    datum match {
      case values: ArrayData =>
        new LinkList(values.toArray[UTF8String](StringType).map{ elem =>
          val in = new ByteArrayInputStream(elem.getBytes)
          val is = new ObjectInputStream(in)
          is.readObject().asInstanceOf[ORecord]
        })
      case other => sys.error(s"Cannot deserialize $other")
    }
  }

  override def userClass: Class[LinkList] = classOf[LinkList]
}

object LinkListType extends LinkListType 
Developer: orientechnologies, Project: spark-orientdb, Lines: 61, Source: LinkListType.scala



Note: The org.apache.spark.sql.catalyst.util.ArrayData class examples in this article were collected from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution or use should follow the license of the corresponding project. Please do not republish without permission.

