Because of business needs, a batch of MySQL data needs to be imported into HBase; the data is first exported from MySQL to HDFS.
Versions: Hadoop CDH 4.5, HBase 0.94.6
1. Entity class
YQBean is my entity class; modify it according to your needs. The entity class must implement Writable and DBWritable.
2. Mr Implementation
Importjava.io.IOException;ImportJava.util.Iterator;Importorg.apache.hadoop.conf.Configuration;ImportOrg.apache.hadoop.fs.Path;Importorg.apache.hadoop.io.LongWritable;ImportOrg.apache.hadoop.io.Text;ImportOrg.apache.hadoop.mapreduce.Job;ImportOrg.apache.hadoop.mapreduce.Mapper;ImportOrg.apache.hadoop.mapreduce.Reducer;Importorg.apache.hadoop.mapreduce.lib.db.DBConfiguration;ImportOrg.apache.hadoop.mapreduce.lib.db.DBInputFormat;ImportOrg.apache.hadoop.mapreduce.lib.output.FileOutputFormat;/** * @author * @versioncreation Time: 2:09:22 AM * Class Description*/ Public classAccessData { Public Static classDataaccessmapextendsMapper<longwritable,yqbean,text,text>{@Overrideprotected voidmap (longwritable key, Yqbean value,context Context)throwsIOException, interruptedexception {System.out.println (value.tostring ()); Context.write (NewText (),NewText (value.tostring ())); } } Public Static classDataaccessreducerextendsReducer<text,text,text,text>{ protected voidReduce (Text key, iterable<text>values, context context)throwsIOException, interruptedexception { for(Iterator<text> ITR =values.iterator (); Itr.hasnext ();) {Context.write (Key, Itr.next ()); } } } Public Static voidMain (string[] args)throwsException {Configuration conf=NewConfiguration (); //the JDBC driver for MySQLdbconfiguration.configuredb (conf,"Com.mysql.jdbc.Driver", "Jdbc:mysql://ip:3306/tablename?useunicode=true&characterencoding=utf8", "username" , "passwd"); Job Job=NewJob (conf, "Test MySQL connection"); Job.setjarbyclass (AccessData.class); Job.setmapperclass (Dataaccessmap.class); Job.setreducerclass (dataaccessreducer.class); Job.setoutputkeyclass (Text.class); Job.setoutputvalueclass (Text.class); Job.setinputformatclass (Dbinputformat.class); Fileoutputformat.setoutputpath (Job,NewPath ("Hdfs://ip:9000/hdfsfile")); //column names in the corresponding database (Entity class fields)string[] Fields = {"id", "title", "Price", "author", "Quantity", "description", "category_id", "Imgurl"}; 
Dbinputformat.setinput (Job, Yqbean.class, "tablename", "SQL statement", "title", fields); System.exit (Job.waitforcompletion (true)? 0:1); }}