1. Use Spark to read information from a table in MySQL
BUILD.SBT file
Name: = "Spark-hbase" Version: = "1.0" scalaversion: = "2.11.8" librarydependencies ++= Seq ( "Org.apache.spark" percent " Spark-core "%" 2.1.0 ", " MySQL "%" Mysql-connector-java "%" 5.1.31 ", " Org.apache.spark "percent" spark-sql "%" 2.1.0 ")
Mysql.scala file
Import Org.apache.spark. {sparkconf, Sparkcontext}import org.apache.spark.sql. {sqlcontext, Savemode}import java.util.properties/** * Created by Mi on 17-4-11. */case class resultset (name:string, info:string, summary:string) object mysqlopt {def main (args:array[string]): Unit = {val conf = new sparkconf (). Setappname ("WordCount"). Setmaster ("local") VA L SC = new Sparkcontext (conf) val sqlcontext = new SqlContext (SC) Import Sqlcontext.implicits._//define database and table information VA L URL = "Jdbc:mysql://localhost:3306/baidubaike?useunicode=true&characterencoding=utf-8" val table = "Baike_pages "//Read MySQL Method 1 val reader = SqlContext.read.format (" jdbc ") reader.option (" url ", url) reader.option (" DBTable ", Table) reader.option ("Driver", "Com.mysql.jdbc.Driver") reader.option ("User", "root") reader.option ("Password", " XXX ") Val df = Reader.load () df.show ()//Read MySQL Method 2//Val jdbcdf = SqlContext.read.format (" JdbC "). Options (//Map (" url "," jdbc:mysql://localhost:3306/baidubaike?useunicode=true&characterencoding= "). UTF-8 ",//" DBTable "," (select Name,info,summary from baike_pages) as Some_alias ",//" Driver ", "Com.mysql.jdbc.Driver",//"user", "root",////"Partitioncolumn", "day_id",//"low Erbound "0",//"Upperbound", "+",////"Numpartitions", "2",//"Fetchsize"- > "+",//"password", "XXX")). Load ()//Jdbcdf.show ()}}
Output
2. Use Spark to write information to a table in MySQL
Import Org.apache.spark. {sparkconf, Sparkcontext}import org.apache.spark.sql. {sqlcontext, Savemode}import java.util.properties/** * Created by Mi on 17-4-11. */case class resultset (name:string, info:string, summary:string) object mysqlopt {def main (args:array[string]): Unit = {val conf = new sparkconf (). Setappname ("WordCount"). Setmaster ("local") VA L SC = new Sparkcontext (conf) val sqlcontext = new SqlContext (SC) Import Sqlcontext.implicits._//define database and table information VA L URL = "Jdbc:mysql://localhost:3306/baidubaike?useunicode=true&characterencoding=utf-8" val table = "Baike_pages "//method of writing MySQL 1 val list = list (ResultSet (" Name 1 "," Title 1 "," Introduction 1 "), ResultSet (" Name 2 "," Title 2 "," Introduction 2 "), Resul Tset ("Name 3", "Title 3", "Introduction 3"), ResultSet ("Name 4", "Title 4", "Introduction 4")) Val jdbcdf = sqlcontext.createdataframe (list) JD Bcdf.collect (). Take (). foreach (println)//JdbcDF.rdd.saveAsTextFile ("/home/mi/coding/coding/scala/sparK-hbase/output ") Val prop = new Properties () prop.setproperty (" User "," root ") Prop.setproperty (" Password "," 12345 6 ")//jdbcdf.write.mode (savemode.overwrite). JDBC (URL," Baike_pages ", prop) JdbcDF.write.mode (savemode.append). jdbc (URL, "Baike_pages", prop)}}
Spark learning notes — reading from and writing to MySQL