標籤:des blog java os 資料 io for cti
直接上代碼吧。注意:執行前要先啟動 Hive JDBC 服務:`hive --service hiveserver`
package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Demo: connects to HiveServer (v1) over JDBC, (re)creates a table, loads data
 * from HDFS into it, and runs a few sample queries, printing results to stdout.
 *
 * <p>Requires the Hive JDBC service to be running:
 * {@code hive --service hiveserver} (HiveServer1, port 10000).
 */
public class HiveDemo {

    static {
        // Register the HiveServer1 JDBC driver once at class-load time.
        try {
            Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            // Fail loudly instead of swallowing the error and limping on
            // with an unusable DriverManager.
            throw new ExceptionInInitializerError(e);
        }
    }

    public static void main(String[] args) throws Exception {
        String tableName = "u1_data";
        // try-with-resources guarantees the connection and statement are
        // closed even when one of the statements fails part-way through.
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive://hadoop:10000/default", "", "");
             Statement st = conn.createStatement()) {

            // DDL/DML produce no result set, so use execute(), not executeQuery()
            // (executeQuery must throw when no ResultSet is returned).
            st.execute("drop table " + tableName);

            // NOTE: the field delimiter must be wrapped in plain ASCII single
            // quotes ('\t'); the original used typographic quotes, which Hive
            // rejects as a parse error.
            st.execute("create table " + tableName + "("
                    + "userid int,"
                    + "movieid int,"
                    + "rating int,"
                    + "city string,"
                    + "viewTime string"
                    + ")"
                    + "row format delimited "
                    + "fields terminated by '\t' "
                    + "stored as textfile");

            // List all tables.
            String sql = "show tables";
            System.out.println("running:" + sql);
            ResultSet rs = st.executeQuery(sql);
            if (rs.next()) {
                System.out.println(rs.getString(1));
            }

            // Describe the table: one row per column (name, type).
            sql = "describe " + tableName;
            System.out.println("running:" + sql);
            rs = st.executeQuery(sql);
            while (rs.next()) {
                System.out.println(rs.getString(1) + "\t" + rs.getString(2));
            }

            // Load data from HDFS into the table (ASCII quotes here too).
            String filePath = "hdfs://hadoop:9000/input";
            sql = "load data inpath '" + filePath + "' overwrite into table " + tableName;
            System.out.println("running:" + sql);
            st.execute(sql);

            // Sample a few rows; columns 3 and 4 are rating and city.
            sql = "select * from " + tableName + " limit 5";
            System.out.println("running:" + sql);
            rs = st.executeQuery(sql);
            while (rs.next()) {
                System.out.println(rs.getString(3) + "\t" + rs.getString(4));
            }

            // Total row count.
            sql = "select count(*) from " + tableName;
            System.out.println("running:" + sql);
            rs = st.executeQuery(sql);
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }

            rs.close();
        }
    }
}