• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    公众号

Scala JDBCOptions类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Scala中org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions的典型用法代码示例。如果您正苦于以下问题:Scala JDBCOptions类的具体用法?Scala JDBCOptions怎么用?Scala JDBCOptions使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



在下文中一共展示了JDBCOptions类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Scala代码示例。

示例1: DatabaseWriterActivity

//设置package包名称以及导入依赖的类
package yumi.pipeline.activities

import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import yumi.pipeline.{Activity, Parameters, SessionData, YumiContext}

class DatabaseWriterActivity(val parameters: Parameters) extends Activity {
  // Destination settings supplied by the pipeline configuration.
  val url = parameters.getString("url")
  val table = parameters.getString("table")
  val as = parameters.getString("as")
  val mode = parameters.getString("mode")
  val jdbcDriverClass = parameters.getString("driver")

  // User-supplied JDBC properties, with every value normalised to a String.
  val properties = parameters
    .getAs[Map[String, Any]]("properties")
    .map { case (key, value) => key -> value.toString }

  // First two colon-separated segments of the url, e.g.
  // "jdbc:postgresql://host/db" -> "jdbc:postgresql".
  val format = url.split(":").take(2).mkString(":")

  override protected[this] def onInvoke(sessionData: SessionData)
                                       (implicit yumiContext: YumiContext): SessionData = {
    import yumiContext._

    // Merge the configured properties with the mandatory JDBC options;
    // the mandatory entries win on key collision.
    val completeOptions = properties ++ Map(
      JDBCOptions.JDBC_TABLE_NAME -> as,
      JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass,
      "url" -> url
    )

    // Write the named Spark view out through the configured JDBC sink.
    dataFrameWriter.write(
      dataFrame = sparkSession.table(table),
      format = format,
      mode = mode,
      options = completeOptions,
      path = url
    )

    sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:41,代码来源:DatabaseWriterActivity.scala


示例2: DatabaseReaderActivity

//设置package包名称以及导入依赖的类
package yumi.pipeline.activities

import java.util.Properties

import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import yumi.pipeline.{Activity, Parameters, SessionData, YumiContext}

class DatabaseReaderActivity(val parameters: Parameters) extends Activity {
  // Source settings supplied by the pipeline configuration.
  val url = parameters.getString("url")
  val table = parameters.getString("table")
  val asView = parameters.getString("as")
  val jdbcDriverClass = parameters.getString("driver")

  // User-supplied JDBC properties, with every value normalised to a String.
  val properties = parameters
    .getAs[Map[String, Any]]("properties")
    .map { case (key, value) => key -> value.toString }

  override protected[this] def onInvoke(sessionData: SessionData)
                                       (implicit yumiContext: YumiContext): SessionData = {
    import yumiContext._

    // Add the mandatory driver class to the configured options.
    val allOptions = properties + (JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass)

    // Copy the options into the java.util.Properties form Spark's jdbc reader expects.
    val completeProperties = new Properties()
    allOptions.foreach { case (key, value) =>
      completeProperties.setProperty(key, value)
    }

    // Read the JDBC table and expose it to later activities as a temp view.
    sparkSession
      .read
      .jdbc(url = url, table = table, properties = completeProperties)
      .createTempView(asView)

    sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:42,代码来源:DatabaseReaderActivity.scala


示例3: DatabaseReaderActivitySpec

//设置package包名称以及导入依赖的类
package test.yumi.pipeline.activities

import java.util.Properties

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.mockito.Mockito.{verify, when}
import test.yumi.pipeline.MockSessionSpec
import yumi.YumiMap
import yumi.pipeline.activities.DatabaseReaderActivity

class DatabaseReaderActivitySpec extends MockSessionSpec {

  it should "load sql server table as spark view" in new MockSessionScope {

    // Arrange: the configuration the activity will read.
    val url = "url"
    val table = "table_name"
    val as = "as"
    val driver = "com.mysql.Driver"
    val properties = Map("batchSize" -> "2000")

    // Properties expected to reach the JDBC reader: the configured
    // options plus the mandatory driver class entry.
    val expectedProperties = new Properties()
    properties.foreach { case (key, value) =>
      expectedProperties.setProperty(key, value)
    }
    expectedProperties.put(JDBCOptions.JDBC_DRIVER_CLASS, driver)

    val parameters = createParameters()
      .add("url", url)
      .add("table", table)
      .add("driver", driver)
      .add("as", as)
      .add("properties", properties)
      .build()
    val sessionData = YumiMap()
    val dataFrame = mock[DataFrame]

    when(dataFrameReader.jdbc(any[String], any[String], any[Properties])).thenReturn(dataFrame)

    // Act
    val activity = new DatabaseReaderActivity(parameters)
    val resultSessionData = activity.invoke(sessionData)

    // Assert: the reader was invoked with exactly the derived properties
    // and the result was registered under the requested view name.
    verify(sparkSession).read
    verify(dataFrameReader).jdbc(url, table, expectedProperties)
    verify(dataFrame).createTempView(as)

    resultSessionData === sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:54,代码来源:DatabaseReaderActivitySpec.scala


示例4: DatabaseWriterActivitySpec

//设置package包名称以及导入依赖的类
package test.yumi.pipeline.activities

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.mockito.Mockito.{verify, when}
import test.yumi.pipeline.MockSessionSpec
import yumi.YumiMap
import yumi.pipeline.activities.DatabaseWriterActivity

class DatabaseWriterActivitySpec extends MockSessionSpec {

  it should "write to database table from spark view" in new MockSessionScope {

    // Arrange: the configuration the activity will read.
    val url = "jdbc:postgresql://yumi.rds.amazonaws.com:5432/yumi_postgresql?user=yumi&password=somepassword"
    val table = "table_name"
    val as = "as"
    val mode = "append"
    val jdbcDriverClass = "org.postgresql.Driver"
    val properties = Map("batchSize" -> "2000")

    // Options expected to reach the writer: the configured options plus
    // the mandatory table-name, driver-class and url entries.
    val completeOptions: Map[String, String] = properties ++ Map(
      JDBCOptions.JDBC_TABLE_NAME -> as,
      JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass,
      "url" -> url
    )

    val parameters = createParameters()
      .add("url", url)
      .add("table", table)
      .add("as", as)
      .add("properties", properties)
      .add("mode", mode)
      .add("driver", jdbcDriverClass)
      .build()
    val sessionData = YumiMap()
    val dataFrame = mock[DataFrame]

    when(sparkSession.table(table)).thenReturn(dataFrame)

    // Act
    val activity = new DatabaseWriterActivity(parameters)
    val resultSessionData = activity.invoke(sessionData)

    // Assert: the view's data frame was handed to the writer with the
    // format derived from the url ("jdbc:postgresql") and the merged options.
    verify(dataFrameWriter).write(dataFrame, "jdbc:postgresql", mode, url, completeOptions)

    resultSessionData === sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:50,代码来源:DatabaseWriterActivitySpec.scala



注:本文中的org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Scala Dimension类代码示例发布时间:2022-05-23
下一篇:
Scala Akka类代码示例发布时间:2022-05-23
热门推荐
热门话题
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap