/**
 * Entry point: configures Spark for Elasticsearch and reads documents back
 * from the "spark/docs" index (the write step is left commented out).
 *
 * NOTE(review): ES node address and port are hard-coded — confirm they match
 * the target cluster before running.
 */
def main(args: Array[String]): Unit = {
  val sparkConf = new SparkConf().setAppName("DecisionTree1").setMaster("local[2]")
  sparkConf.set("es.index.auto.create", "true") // create the index automatically if missing
  sparkConf.set("es.nodes", "10.3.162.202")     // Elasticsearch node to connect to
  sparkConf.set("es.port", "9200")              // default ES HTTP port
  val sc = new SparkContext(sparkConf)
  // write2es(sc)  // uncomment to populate the "spark/docs" index first
  read4es(sc)
}

/**
 * Writes two sample documents (plain Scala Maps) to the Elasticsearch
 * index/type "spark/docs" via the elasticsearch-spark connector.
 *
 * @param sc active SparkContext configured with es.* settings
 */
def write2es(sc: SparkContext): Unit = {
  val numbers  = Map("one" -> 1, "two" -> 2, "three" -> 3)
  val airports = Map("OTP" -> "Otopeni", "SFO" -> "San Fran")
  val rdd = sc.makeRDD(Seq(numbers, airports))
  EsSpark.saveToEs(rdd, "spark/docs")
  println("--------------------End-----------------")
}

/**
 * Reads all documents from "spark/docs" as an RDD of (documentId, fieldMap)
 * pairs and prints each document id together with its field names and values.
 *
 * @param sc active SparkContext configured with es.* settings
 */
def read4es(sc: SparkContext): Unit = {
  val rdd = EsSpark.esRDD(sc, "spark/docs")
  rdd.foreach { case (key, value) =>
    println("------------------key: " + key)
    // value is the document body: iterate over its (field, fieldValue) entries
    for ((key1, value1) <- value) {
      println("------------------key1: " + key1)
      println("------------------value1: " + value1)
    }
  }
}
Required dependency JAR for this example: elasticsearch-spark_2.10-2.1.0.jar
This example demonstrates how Spark reads data from and writes data to Elasticsearch.