Spark提供了一个JdbcRDD来处理对JDBC数据源的连接,但它十分鸡肋:这个RDD只能执行查询,不能进行增删改,而且SQL语句必须包含两个 `?` 占位符作为查询范围的上下界,因此很少使用。

PS:这个案例需要结合上一个案例中创建的数据库表(emp 表)一起使用。

import java.sql.{Date, DriverManager, ResultSet}
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates reading rows from MySQL via Spark's read-only `JdbcRDD`.
 *
 * `JdbcRDD` requires the SQL to contain exactly two `?` placeholders;
 * it fills them with numeric lower/upper bounds (per-partition sub-ranges
 * of [0, 10000] here) to split the query across partitions.
 *
 * Uses an explicit `main` instead of `extends App` to avoid the
 * DelayedInit initialization-order pitfalls of the App trait.
 */
object JDBCDemo {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("test").setMaster("local")
    val sc = new SparkContext(conf)

    // The two '?' placeholders are mandatory: JdbcRDD substitutes the
    // partition's lower/upper bound into them at runtime.
    val sql = "select * from emp where deptno>? and deptno <? "
    val jdbcurl = "jdbc:mysql://localhost/db2?useUnicode=true&characterEncoding=utf8"
    val user = "root"
    val password = "123456"

    // Connection factory: JdbcRDD invokes it once per partition and closes
    // the connection itself when the partition finishes.
    val conn = () => {
      // Loading the driver class registers it with DriverManager;
      // Class#newInstance is deprecated (Java 9+) and was never needed here.
      Class.forName("com.mysql.jdbc.Driver")
      DriverManager.getConnection(jdbcurl, user, password)
    }

    // Map each ResultSet row to a tuple of the four columns we care about.
    val showInfo: JdbcRDD[(Int, String, Int, Date)] =
      new JdbcRDD(sc, conn, sql, 0, 10000, 1, (res: ResultSet) => {
        val empno: Int = res.getInt("empno")
        val ename: String = res.getString("ename")
        val mgr: Int = res.getInt("mgr")
        val hiredate: Date = res.getDate("hiredate")
        (empno, ename, mgr, hiredate)
      })

    try {
      showInfo.foreach(println)
    } finally {
      // Release the SparkContext even if the job above fails.
      sc.stop()
    }
  }
}
Logo

腾讯云面向开发者汇聚海量精品云计算使用和开发经验,营造开放的云计算技术生态圈。

更多推荐