当前位置: 首页>>代码示例>>Scala>>正文


Scala JDBCOptions类代码示例

本文整理汇总了Scala中org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions的典型用法代码示例。如果您正苦于以下问题:Scala JDBCOptions类的具体用法?Scala JDBCOptions怎么用?Scala JDBCOptions使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了JDBCOptions类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。

示例1: DatabaseWriterActivity

//设置package包名称以及导入依赖的类
package yumi.pipeline.activities

import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import yumi.pipeline.{Activity, Parameters, SessionData, YumiContext}

// Pipeline activity that writes a registered Spark table/view out to a JDBC
// database. Configuration is pulled from `parameters`:
//   url        - JDBC connection string; its scheme prefix (e.g. "jdbc:postgresql")
//                is also used as the Spark write format
//   table      - name of the Spark table/view supplying the rows
//   as         - destination table name in the target database
//   mode       - Spark save mode (e.g. "append", "overwrite")
//   driver     - fully-qualified JDBC driver class name
//   properties - extra JDBC options; values are stringified
class DatabaseWriterActivity(val parameters: Parameters) extends Activity {
  val url = parameters.getString("url")
  val table = parameters.getString("table")
  val as = parameters.getString("as")
  val mode = parameters.getString("mode")
  val jdbcDriverClass = parameters.getString("driver")
  // JDBC options are string-typed, so normalize every property value.
  val properties = parameters
    .getAs[Map[String, Any]]("properties")
    .map { case (key, value) => key -> value.toString }

  // First two colon-separated segments of the URL, e.g. "jdbc:postgresql".
  val format = url.split(":").take(2).mkString(":")

  override protected[this] def onInvoke(sessionData: SessionData)
                                       (implicit yumiContext: YumiContext): SessionData = {
    import yumiContext._

    // Caller-supplied properties merged with the mandatory JDBC entries.
    val completeOptions = properties ++ Map(
      JDBCOptions.JDBC_TABLE_NAME -> as,
      JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass,
      "url" -> url
    )

    val sourceFrame = sparkSession.table(table)
    dataFrameWriter.write(
      dataFrame = sourceFrame,
      format = format,
      mode = mode,
      options = completeOptions,
      path = url
    )

    sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:41,代码来源:DatabaseWriterActivity.scala

示例2: DatabaseReaderActivity

//设置package包名称以及导入依赖的类
package yumi.pipeline.activities

import java.util.Properties

import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import yumi.pipeline.{Activity, Parameters, SessionData, YumiContext}

// Pipeline activity that reads a database table over JDBC and registers the
// result as a Spark temporary view. Configuration is pulled from `parameters`:
//   url        - JDBC connection string
//   table      - database table to read
//   as         - name under which the result is registered as a temp view
//   driver     - fully-qualified JDBC driver class name
//   properties - extra JDBC options; values are stringified
class DatabaseReaderActivity(val parameters: Parameters) extends Activity {
  val url = parameters.getString("url")
  val table = parameters.getString("table")
  val asView = parameters.getString("as")
  val jdbcDriverClass = parameters.getString("driver")
  // JDBC options are string-typed, so normalize every property value.
  val properties = parameters
    .getAs[Map[String, Any]]("properties")
    .map { case (key, value) => key -> value.toString }

  override protected[this] def onInvoke(sessionData: SessionData)
                                       (implicit yumiContext: YumiContext): SessionData = {
    import yumiContext._

    // Caller-supplied properties plus the mandatory driver-class entry.
    val completeOptions = properties +
      (JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass)

    // DataFrameReader.jdbc expects java.util.Properties, so copy the map over.
    val completeProperties = new Properties()
    completeOptions.foreach { case (key, value) =>
      completeProperties.setProperty(key, value)
    }

    sparkSession
      .read
      .jdbc(url = url, table = table, properties = completeProperties)
      .createTempView(asView)

    sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:42,代码来源:DatabaseReaderActivity.scala

示例3: DatabaseReaderActivitySpec

//设置package包名称以及导入依赖的类
package test.yumi.pipeline.activities

import java.util.Properties

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.mockito.Mockito.{verify, when}
import test.yumi.pipeline.MockSessionSpec
import yumi.YumiMap
import yumi.pipeline.activities.DatabaseReaderActivity

// Verifies that DatabaseReaderActivity reads a JDBC table with the expected
// connection properties and registers the result as a temp view.
class DatabaseReaderActivitySpec extends MockSessionSpec {

  it should "load sql server table as spark view" in new MockSessionScope {

    // Arrange
    val url = "url"
    val table = "table_name"
    val as = "as"
    val driver = "com.mysql.Driver"
    val properties = Map("batchSize" -> "2000")

    // The java.util.Properties the activity is expected to hand to the reader:
    // user properties plus the driver-class entry.
    val expectedProperties = new Properties()
    for ((key, value) <- properties) {
      expectedProperties.setProperty(key, value)
    }
    expectedProperties.put(JDBCOptions.JDBC_DRIVER_CLASS, driver)

    val parameters = createParameters()
      .add("url", url)
      .add("table", table)
      .add("driver", driver)
      .add("as", as)
      .add("properties", properties)
      .build()
    val sessionData = YumiMap()
    val dataFrame = mock[DataFrame]

    when(dataFrameReader.jdbc(any[String], any[String], any[Properties])).thenReturn(dataFrame)

    // Act
    val activity = new DatabaseReaderActivity(parameters)
    val resultSessionData = activity.invoke(sessionData)

    // Assert
    verify(sparkSession).read
    verify(dataFrameReader).jdbc(url, table, expectedProperties)
    verify(dataFrame).createTempView(as)

    resultSessionData === sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:54,代码来源:DatabaseReaderActivitySpec.scala

示例4: DatabaseWriterActivitySpec

//设置package包名称以及导入依赖的类
package test.yumi.pipeline.activities

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.mockito.Mockito.{verify, when}
import test.yumi.pipeline.MockSessionSpec
import yumi.YumiMap
import yumi.pipeline.activities.DatabaseWriterActivity

// Verifies that DatabaseWriterActivity writes the source Spark table through
// the data frame writer with the format derived from the URL and the merged
// JDBC options.
class DatabaseWriterActivitySpec extends MockSessionSpec {

  it should "write to database table from spark view" in new MockSessionScope {

    // Arrange
    val url = "jdbc:postgresql://yumi.rds.amazonaws.com:5432/yumi_postgresql?user=yumi&password=somepassword"
    val table = "table_name"
    val as = "as"
    val mode = "append"
    val jdbcDriverClass = "org.postgresql.Driver"
    val properties = Map("batchSize" -> "2000")

    // Options the activity is expected to assemble: user properties plus the
    // mandatory table name, driver class, and url entries.
    val completeOptions: Map[String, String] = properties ++ Map(
      JDBCOptions.JDBC_TABLE_NAME -> as,
      JDBCOptions.JDBC_DRIVER_CLASS -> jdbcDriverClass,
      "url" -> url
    )

    val parameters = createParameters()
      .add("url", url)
      .add("table", table)
      .add("as", as)
      .add("properties", properties)
      .add("mode", mode)
      .add("driver", jdbcDriverClass)
      .build()
    val sessionData = YumiMap()
    val dataFrame = mock[DataFrame]

    when(sparkSession.table(table)).thenReturn(dataFrame)

    // Act
    val activity = new DatabaseWriterActivity(parameters)
    val resultSessionData = activity.invoke(sessionData)

    // Assert: format is the "jdbc:postgresql" prefix of the URL.
    verify(dataFrameWriter).write(dataFrame, "jdbc:postgresql", mode, url, completeOptions)

    resultSessionData === sessionData
  }
}
开发者ID:coderdiaries,项目名称:yumi,代码行数:50,代码来源:DatabaseWriterActivitySpec.scala


注:本文中的org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。