import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}
import java.util.Properties

/**
  * Created by mi on 17-4-11.
  */
// One row of the target table; a case class lets Spark infer the schema
case class ResultSet(name: String, info: String, summary: String)

object MysqlOpt {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MysqlOpt").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // Database connection and target table
    val url = "jdbc:mysql://localhost:3306/baidubaike?useUnicode=true&characterEncoding=UTF-8"
    val table = "baike_pages"

    // Method 1 for writing to MySQL: build a DataFrame and save it through the JDBC writer
    val list = List(
      ResultSet("name1", "title1", "summary1"),
      ResultSet("name2", "title2", "summary2"),
      ResultSet("name3", "title3", "summary3"),
      ResultSet("name4", "title4", "summary4")
    )
    val jdbcDF = sqlContext.createDataFrame(list)
    jdbcDF.take(20).foreach(println)
    // jdbcDF.rdd.saveAsTextFile("/home/mi/coding/coding/Scala/spark-hbase/output")

    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "123456")

    // SaveMode.Overwrite drops and recreates the table; SaveMode.Append only inserts rows
    // jdbcDF.write.mode(SaveMode.Overwrite).jdbc(url, table, prop)
    jdbcDF.write.mode(SaveMode.Append).jdbc(url, table, prop)

    sc.stop()
  }
}
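
To check that the rows actually landed in MySQL, the same connection settings can be reused to read the table back. Below is a minimal sketch, not part of the original post, assuming the same local MySQL instance, database, and credentials as above; it uses the Spark 1.x `sqlContext.read.jdbc` API, which mirrors the `write.jdbc` call used in the writer.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import java.util.Properties

object MysqlReadCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MysqlReadCheck").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Same connection info as the writer above (assumed, not taken from the original post)
    val url = "jdbc:mysql://localhost:3306/baidubaike?useUnicode=true&characterEncoding=UTF-8"
    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "123456")

    // read.jdbc loads the table as a DataFrame; the schema comes from MySQL metadata
    val jdbcDF = sqlContext.read.jdbc(url, "baike_pages", prop)
    jdbcDF.show()

    sc.stop()
  }
}

One caveat on the writer: with SaveMode.Append, every run inserts the same four sample rows again, so repeated runs produce duplicates. SaveMode.Overwrite avoids that, but it drops and recreates the table, discarding any existing rows and indexes.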