HBase's seamless integration with Hadoop makes it very convenient to use MapReduce for distributed computation over HBase data. This article introduces the key points of MapReduce development against HBase. It assumes you already have some understanding of Hadoop MapReduce; if you are new to Hadoop MapReduce programming, refer to the article "First MapReduce Application" to establish the basic concepts.
1. Java code
Package hbase; import Java. io. ioexception; import Org. apache. hadoop. conf. configuration; import Org. apache. hadoop. FS. path; import Org. apache. hadoop. hbase. hbaseconfiguration; import Org. apache. hadoop. hbase. hcolumndescriptor; import Org. apache. hadoop. hbase. htabledescriptor; import Org. apache. hadoop. hbase. client. hbaseadmin; import Org. apache. hadoop. hbase. client. put; import Org. apache. hadoop. hbase. mapreduce. tabl Eoutputformat; import Org. apache. hadoop. hbase. mapreduce. tablereducer; import Org. apache. hadoop. hbase. util. bytes; import Org. apache. hadoop. io. intwritable; import Org. apache. hadoop. io. longwritable; import Org. apache. hadoop. io. nullwritable; import Org. apache. hadoop. io. text; import Org. apache. hadoop. mapreduce. job; import Org. apache. hadoop. mapreduce. mapper; import Org. apache. hadoop. mapreduce. lib. input. fileinput Format; import Org. apache. hadoop. mapreduce. lib. input. textinputformat; public class wordcounthbase {public static class map extends mapper <longwritable, text, text, intwritable> {private intwritable I = new intwritable (1); public void map (longwritable key, text value, context) throws ioexception, interruptedexception {string s [] = value. tostring (). trim (). split (""); // separate each line with a space for (Str Ing M: S) {context. write (new text (M), I) ;}} public static class reduce extends tablereducer <text, intwritable, nullwritable> {public void reduce (Text key, iterable <intwritable> values, context) throws ioexception, interruptedexception {int sum = 0; For (intwritable I: values) {sum + = I. get ();} Put put = new put (bytes. tobytes (key. tostring (); // put instance, each word stores a row of put. add (bytes. toby TES ("content"), bytes. tobytes ("count"), bytes. tobytes (string. valueof (SUM); // The column family is content, the column is count, and the column value is the number context. write (nullwritable. 
get (), put) ;}} public static void createhbasetable (string tablename) throws ioexception {htabledescriptor htd = new htabledescriptor (tablename); hcolumndescriptor Col = new hcolumndescriptor ("content "); htd. addfamily (COL); configuration conf = hbaseconfiguration. cr Eate (); Conf. set ("hbase. zookeeper. quorum "," libin2 "); hbaseadmin admin = new hbaseadmin (CONF); If (Admin. tableexists (tablename) {system. out. println ("table exists, trying to recreate table ...... "); Admin. disabletable (tablename); Admin. deletetable (tablename);} system. out. println ("create new table:" + tablename); Admin. createtable (htd);} public static void main (string [] ARGs) throws ioexception, Interruptedexception, classnotfoundexception {string tablename = "wordcount"; configuration conf = new configuration (); Conf. set (tableoutputformat. output_table, tablename); createhbasetable (tablename); string input = ARGs [0]; job = new job (Conf, "wordcount table with" + input); job. setjarbyclass (wordcounthbase. class); job. setnumreducetasks (3); job. setmapperclass (map. class); job. setreducercia SS (reduce. class); job. setmapoutputkeyclass (text. class); job. setmapoutputvalueclass (intwritable. class); job. setinputformatclass (textinputformat. class); job. setoutputformatclass (tableoutputformat. class); fileinputformat. addinputpath (job, new path (input); system. exit (job. waitforcompletion (true )? 0: 1 );}}
2. Package the Java code into a jar file
If two jar files are needed at the same time, separate them on the classpath with a ":" (the Unix classpath separator).