package stuspark.com;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

import scala.Tuple2;

/**
 * Spark 1.x example: loads an Oracle table through the JDBC data source,
 * registers it as a temporary SQL table, runs a query against it, and then
 * performs a word count over every cell value of the result.
 *
 * <p>NOTE(review): connection details (host, user, password, table NUMB) are
 * hard-coded tutorial values — externalize them before any real use.
 */
public class JavaSparkJdbcSql {

    public static void main(String[] args) throws IOException {
        System.out.println(" Begin ");

        SparkConf sparkConf = new SparkConf()
                .setAppName("JavaSparkJdbcSql")
                .setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(sparkConf);
        SQLContext sqlContext = new SQLContext(sc);

        // Set database connection parameters for the Oracle JDBC source.
        Map<String, String> dbConOption = new HashMap<String, String>();
        dbConOption.put("url", "jdbc:oracle:thin:@127.0.0.1:1521:ORCL");
        dbConOption.put("user", "xho");
        dbConOption.put("password", "sys");
        dbConOption.put("driver", "oracle.jdbc.driver.OracleDriver");
        dbConOption.put("dbtable", "NUMB");

        DataFrameReader dfRead = sqlContext.read().format("jdbc").options(dbConOption);
        DataFrame df = dfRead.load();

        // Register as a table so SQL statements can run against the RDD.
        df.registerTempTable("LK");
        DataFrame df2 = sqlContext.sql("SELECT * from LK");
        df2.show();
        /* Sample output:
         * +---+---+-----+----+
         * |one|two|three|four|
         * +---+---+-----+----+
         * |  a|  b|    c|   d|
         * |  a|  a|    b|   b|
         * |  c|  c|    a|   d|
         * |  a|  a|    c|   s|
         * |  m|  s|    b|   j|
         * |  a|  l|    o|   k|
         * +---+---+-----+----+
         */

        // Collect the first column as a local list (kept from the original
        // tutorial; the value is not used further).
        List<String> list = df2.toJavaRDD()
                .map(new Function<Row, String>() {
                    public String call(Row row) {
                        return row.getString(0);
                    }
                })
                .collect();

        // Flatten every cell of every row into one RDD of words.
        JavaRDD<String> words = df2.toJavaRDD()
                .flatMap(new FlatMapFunction<Row, String>() {
                    public Iterable<String> call(Row row) {
                        List<String> ll = new ArrayList<String>();
                        for (int i = 0; i < row.length(); i++) {
                            ll.add(row.getString(i));
                        }
                        return ll;
                    }
                });

        // mapToPair: emit each word with an initial count of 1.
        JavaPairRDD<String, Integer> ones = words.mapToPair(
                new PairFunction<String, String, Integer>() {
                    public Tuple2<String, Integer> call(String s) {
                        return new Tuple2<String, Integer>(s, 1);
                    }
                });

        // reduceByKey: aggregate by key, summing the per-word counts.
        JavaPairRDD<String, Integer> counts = ones.reduceByKey(
                new Function2<Integer, Integer, Integer>() {
                    public Integer call(Integer i1, Integer i2) {
                        return i1 + i2;
                    }
                });

        // collect returns the aggregated result as a local list.
        List<Tuple2<String, Integer>> output = counts.collect();
        for (Tuple2<?, ?> tuple : output) {
            System.out.println(tuple._1() + ":" + tuple._2());
        }
        /* Sample output:
         * d:2 s:2 a:7 k:1 b:4 o:1 j:1 l:1 m:1 c:4
         */

        sc.stop();
        System.out.println("End");
    }
}
Study notes — using Java Spark with JDBC to query Oracle