Read the Hive file and import the data into HBase


package cn.tansun.bd.hbase;

import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import cn.tansun.bd.utils.JdbcUtils;

/**
 * @author E-mail:
 * @version Created: July 5, 2016, 7:57:17
 */
public class HiveMysql2HBaseMR {

    @SuppressWarnings("deprecation")
    public static void main(String[] args) {
        getDatas();
        /*
         * if (args.length != 3) {
         *     System.err.println("Usage: HiveMysql2HBaseMR <table_name> <data_input_path>
         */
        // The remainder of the class, including getDatas() and the MapReduce
        // job setup, was truncated in the original post.
    }
}
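
The post breaks off before the mapper and job setup, so the class above cannot be reconstructed in full. The imports it declares do, however, imply the standard HFile bulk-load pattern: a mapper emitting ImmutableBytesWritable/KeyValue pairs and a driver configured through HFileOutputFormat2. The following is a minimal sketch of that pattern against the 0.98-era HBase API the imports suggest, not the author's original code; the class name HiveFileBulkLoadSketch, the "cf"/"col1" column mapping, and the \001 delimiter are illustrative assumptions (the real mapping in the post is fetched from MySQL via JdbcUtils).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HiveFileBulkLoadSketch {

    // Turns one line of a Hive text file into a KeyValue. "cf"/"col1" are
    // placeholders; the original post derives the family/qualifier mapping
    // from MySQL metadata via JdbcUtils.
    static class HiveToHFileMapper
            extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // \001 is Hive's default field delimiter for text-format tables.
            String[] fields = value.toString().split("\001");
            byte[] rowKey = Bytes.toBytes(fields[0]);
            context.write(new ImmutableBytesWritable(rowKey),
                    new KeyValue(rowKey, Bytes.toBytes("cf"),
                            Bytes.toBytes("col1"), Bytes.toBytes(fields[1])));
        }
    }

    @SuppressWarnings("deprecation")
    public static void main(String[] args) throws Exception {
        if (args.length != 3) {
            System.err.println("Usage: HiveFileBulkLoadSketch <table_name> <data_input_path> <hfile_output_path>");
            System.exit(2);
        }
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "hive file to hbase hfiles");
        job.setJarByClass(HiveFileBulkLoadSketch.class);
        job.setMapperClass(HiveToHFileMapper.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(KeyValue.class);
        job.setInputFormatClass(TextInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[1]));
        FileOutputFormat.setOutputPath(job, new Path(args[2]));

        // Configures the reducer, total-order partitioner and output format
        // so the job writes HFiles aligned with the table's region boundaries.
        HTable table = new HTable(conf, args[0]);
        HFileOutputFormat2.configureIncrementalLoad(job, table);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

After the job completes, the HFiles under <hfile_output_path> can be moved into the table with HBase's completebulkload tool (org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles).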

  

The JdbcUtils class:

package cn.tansun.bd.utils;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

/**
 * @author zgl
 * @version Created: June 23, 2016, 4:25:03
 */
public class JdbcUtils {

    public JdbcUtils() {
    }

    public static String PATH = "jdbc.properties";
    public static Properties prop;
    public static String url = null;
    public static String username = null;
    public static String password = null;
    public static Connection conn;
    public static Statement stmt;
    public static ResultSet rs;
    public static String fileName = null;

    static {
        try {
            InputStream inputStream = JdbcUtils.class.getClassLoader().getResourceAsStream(PATH);
            prop = new Properties();
            prop.load(inputStream);
            url = prop.getProperty("jdbc.url");
            username = prop.getProperty("jdbc.username");
            password = prop.getProperty("jdbc.password");
            if (inputStream != null) {
                inputStream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void closeConnection(Connection conn) {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Run a query and return each row as a column-name-to-value map.
     *
     * @param sql
     * @return
     */
    @SuppressWarnings("rawtypes")
    public static List<Map> selectDatas(String sql) {
        List<Map> listDatas = new ArrayList<Map>();
        try {
            conn = DriverManager.getConnection(url, username, password);
            conn.setAutoCommit(false);
            stmt = conn.prepareStatement("load data local infile '' "
                    + "into table loadtest fields terminated by ','");
            StringBuilder sb = new StringBuilder();
            InputStream is = new ByteArrayInputStream(sb.toString().getBytes());
            // MySQL Connector/J specific: feed LOAD DATA LOCAL INFILE from a stream.
            ((com.mysql.jdbc.Statement) stmt).setLocalInfileInputStream(is);
            rs = stmt.executeQuery(sql);
            if (rs != null) {
                ResultSetMetaData metaData = rs.getMetaData();
                int count = metaData.getColumnCount();
                Map<String, Object> map = null;
                while (rs.next()) {
                    map = new HashMap<String, Object>();
                    for (int i = 1; i < count + 1; i++) {
                        map.put(metaData.getColumnName(i), rs.getObject(i));
                    }
                    listDatas.add(map);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return listDatas;
    }

    /**
     * Run a query and return all column values flattened into one list.
     *
     * @param sql
     * @return
     */
    public static List<String> getStrMap(String sql) {
        List<String> strList = new ArrayList<String>();
        try {
            conn = DriverManager.getConnection(url, username, password);
            conn.setAutoCommit(false);
            stmt = conn.prepareStatement("load data local infile '' "
                    + "into table loadtest fields terminated by ','");
            StringBuilder sb = new StringBuilder();
            InputStream is = new ByteArrayInputStream(sb.toString().getBytes());
            ((com.mysql.jdbc.Statement) stmt).setLocalInfileInputStream(is);
            rs = stmt.executeQuery(sql);
            if (rs != null) {
                ResultSetMetaData metaData = rs.getMetaData();
                int count = metaData.getColumnCount();
                while (rs.next()) {
                    for (int i = 1; i < count + 1; i++) {
                        String str1 = metaData.getColumnName(i);
                        String str2 = (String) rs.getObject(i);
                        strList.add(str2);
                    }
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return strList;
    }

    public static String tableName = null;
    public static String rowkey = null;
    public static String columnFamily = null;
    public static String columnName = null;
    private static String rows = null;
    public static String sql = null;
    public static String sql2 = null;

    public static void main(String[] args) {
        // This statement was partially garbled in the original post; the
        // reconstruction below uses "?" where the table_id value was lost.
        sql2 = "SELECT GROUP_CONCAT(DISTINCT aq.column_hive_index, ',', aq.column_name ORDER BY "
                + "aq.column_hive_index SEPARATOR ',') AS column_names FROM archive_hqualifier aq "
                + "WHERE aq.table_id = ? GROUP BY aq.column_name ORDER BY aq.column_hive_index";
        sql = "SELECT DISTINCT s.tablename, ar.rowkey, af.column_family, "
                + "aq.column_name FROM archive_htable s, archive_hrowkey ar, archive_hfamily af, "
                + "archive_hqualifier aq "
                + "WHERE s.rowkey_id = ar.rowkey_id AND ar.family_id = af.family_id "
                + "AND af.qualifier_id = aq.qualifier_id;";
        String datas = null;
        List<String> strList = getStrMap(sql);
        String substring = null;
        for (int i = 0; i < strList.size(); i++) {
            datas = strList.get(i);
            // datas = strList.get(i).substring(0, strList.get(i).length() - 1);
            System.out.print(datas);
        }
    }
}
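JdbcUtils loads its connection settings from a jdbc.properties file on the classpath. The key names (jdbc.url, jdbc.username, jdbc.password) come straight from the static initializer above; the values below are illustrative placeholders, not from the original post:

# jdbc.properties (on the classpath)
# Placeholder values; point these at the MySQL instance holding the
# archive_htable / archive_hrowkey / archive_hfamily / archive_hqualifier tables.
jdbc.url=jdbc:mysql://localhost:3306/archive
jdbc.username=root
jdbc.password=secret

Note that the cast to com.mysql.jdbc.Statement and the setLocalInfileInputStream(...) call are Connector/J specifics, so this utility class only works with the MySQL driver.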

  
