import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object RDD2DataFrameByReflectionScala {

  // The case class fields define the DataFrame schema via reflection
  case class Person(name: String, age: Int)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()          // Create the SparkConf object
    conf.setAppName("My Top Spark App") // Set the application name shown on the monitoring page
    conf.setMaster("local")             // Run locally
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._       // Brings the toDF() conversion into scope

    // Read the text file and map each line onto the Person fields (String, Int)
    val people = sc.textFile("Peoples.txt")
      .map(_.split(","))
      .map(p => Person(p(1), p(2).trim.toInt))
      .toDF()
    people.registerTempTable("People")

    val teenagers = sqlContext.sql(
      "SELECT name, age FROM People WHERE age >= 6 AND age <= 19")

    // Running map against a DataFrame returns an RDD[Row];
    // a field can be accessed by position ...
    teenagers.map(t => "Name: " + t(0)).collect().foreach(println)
    // ... or by field name:
    teenagers.map(t => "Name: " + t.getAs[String]("name")).collect().foreach(println)
  }
}
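For reference, the p(1)/p(2) indexing above implies that column 0 of Peoples.txt holds some other field (for example an id) that the code skips. The original does not show the file's contents, so the input below is only an assumed illustration of a layout (id,name,age) that would satisfy the code:

    1,Michael,9
    2,Andy,30
    3,Justin,19

With that assumed input, both print statements would emit the rows whose age falls in the 6 to 19 range:

    Name: Michael
    Name: Justin

Note that this listing targets the Spark 1.x API: in Spark 2.x, SQLContext is superseded by SparkSession, and registerTempTable by createOrReplaceTempView.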