import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Demonstrates reading from and writing to MySQL through Spark SQL's JDBC data
 * source, using both the generic `format("jdbc")` API and the dedicated
 * `read.jdbc` / `write.jdbc` helpers.
 *
 * NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x class;
 * Connector/J 8.x renamed it to `com.mysql.cj.jdbc.Driver` — confirm which
 * driver jar is on the classpath.
 */
object JDBC {

  // Shared connection settings, hoisted so the literals appear only once.
  private val Url      = "jdbc:mysql://hadoop001:3306/spark-sql"
  private val Driver   = "com.mysql.jdbc.Driver"
  private val User     = "root"
  private val Password = "rootroot"

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .config(new SparkConf().setMaster("local[*]").setAppName("mysql-jdbc"))
      .getOrCreate()

    // 1) Generic load via individual option(...) calls.
    spark.read.format("jdbc")
      .option("url", Url)
      .option("driver", Driver)
      .option("user", User)
      .option("password", Password)
      .option("dbtable", "user")
      .load()
      .show()

    // 2) Generic load via an options map. (Credentials could alternatively be
    //    embedded in the URL: jdbc:mysql://hadoop001:3306/spark-sql?user=root&password=rootroot)
    val dataFrame: DataFrame = spark.read.format("jdbc")
      .options(Map(
        "url"      -> Url,
        "driver"   -> Driver,
        "user"     -> User,
        "password" -> Password,
        "dbtable"  -> "user"))
      .load()

    // 3) Read through the dedicated jdbc(...) helper with a Properties object.
    val readProps = new Properties()
    readProps.setProperty("user", User)
    readProps.setProperty("password", Password)
    spark.read.jdbc(Url, "user1", readProps).show()

    // 4) Generic write. The useUnicode/characterEncoding URL parameters set the
    //    connection charset to avoid garbled (mojibake) text on insert.
    dataFrame.write.format("jdbc")
      .options(Map(
        "url"      -> s"$Url?user=$User&password=$Password&useUnicode=true&characterEncoding=utf8",
        "driver"   -> Driver,
        "user"     -> User,
        "password" -> Password,
        "dbtable"  -> "user1"))
      .mode(SaveMode.Append)
      .save()

    // 5) Write through the dedicated jdbc(...) helper.
    //    Appending into a table that has a primary key may fail on duplicate keys.
    val writeProps = new Properties()
    writeProps.setProperty("user", User)
    writeProps.setProperty("password", Password)
    dataFrame.write.mode(SaveMode.Append).jdbc(Url, "user1", writeProps)

    spark.close()
  }
}
// Note: appending to a table that has a primary key may cause the insert to fail (duplicate-key errors).