First, let's write the Java version of the program. Later we will compare it with the Clojure implementation and use it to introduce Clojure macros.
The entry-point class:
Package JVM. storm. starter; import JVM. storm. starter. wordcount. splitsentence; import JVM. storm. starter. wordcount. wordcount; import JVM. storm. starter. wordcount. wordcountspout; import backtype. storm. config; import backtype. storm. stormsubmitter; import backtype. storm. generated. alreadyaliveexception; import backtype. storm. generated. invalidtopologyexception; import backtype. storm. topology. inputdeclarer; import backtype. storm. topology. topologybuilder; import backtype. storm. tuple. fields;/*** @ author guiqiangl E-mail: larry.lv.word@gmail.com * @ version Creation Time: 04:40:26 */public class wordcountstart {public static void main (string [] ARGs) {topologybuilder builder = new topologybuilder (); builder. setspout ("1", new wordcountspout (""), 5); // The transmitter inputdeclarer fieldsgrouping = builder. setbolt ("2", new splitsentence (), 5); fieldsgrouping. fieldsgrouping ("1", new fields ("word"); builder. setbolt ("3", new wordcount (), 5 ). fieldsgrouping ("2", new fields ("word"); config conf = new config (); Conf. setdebug (false); // Local Mode // localcluster cluster = new localcluster (); // cluster. submittopology ("rolling-demo", Conf, builder. createtopology (); // remotely start Conf. setnumworkers (20); Conf. setmaxspoutpending (5000); try {stormsubmitter. submittopology ("rolling-demo", Conf, builder. createtopology ();} catch (alreadyaliveexception e) {e. printstacktrace ();} catch (invalidtopologyexception e) {e. printstacktrace ();} // end // cluster. killtopology ("rolling-demo"); // cluster. shutdown ();}}
The spout (sentence emitter):
package jvm.storm.starter.wordcount;

import java.util.Map;
import java.util.Random;
import org.apache.log4j.Logger;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichSpout;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

/**
 * Spout that emits one randomly chosen word per second on the "word" field.
 * Tuples are emitted unanchored (no message id), so ack/fail are no-ops.
 *
 * @author guiqiangl E-mail: larry.lv.word@gmail.com
 * @version Creation Time: 04:41:34
 */
public class WordCountSpout implements IRichSpout {

    private static final long serialVersionUID = -620768344883063619L;

    public static Logger log = Logger.getLogger(WordCountSpout.class);

    /** Fixed pool of words to emit. */
    private static final String[] WORDS = {"Nathan", "Mike", "Jackson", "Golda", "bertels"};

    SpoutOutputCollector _collector;

    // Created once instead of on every nextTuple() call.
    private final Random rand = new Random();

    /** The argument is currently unused; kept for caller compatibility. */
    public WordCountSpout(String name) {
    }

    public void open(@SuppressWarnings("rawtypes") Map conf, TopologyContext context,
            SpoutOutputCollector collector) {
        _collector = collector;
    }

    public void close() {
    }

    /**
     * Emits one random word, then sleeps 1s to throttle the emit rate.
     */
    public void nextTuple() {
        String word = WORDS[rand.nextInt(WORDS.length)];
        _collector.emit(new Values(word));
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the worker can shut down cleanly.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    public void ack(Object msgId) {
    }

    public void fail(Object msgId) {
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }

    @Override
    public boolean isDistributed() {
        return false;
    }
}
Word Splitting:
package jvm.storm.starter.wordcount;

import java.util.Map;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

/**
 * Bolt that splits an incoming sentence (field 0) on single spaces and
 * emits each word, anchored to the input tuple for reliability.
 *
 * @author guiqiangl E-mail: larry.lv.word@gmail.com
 * @version Creation Time: 04:48:29
 */
public class SplitSentence implements IRichBolt {

    // NOTE(review): the original literal was garbled in the source; any
    // stable value works as long as it does not change between versions.
    private static final long serialVersionUID = 1L;

    OutputCollector _collector;

    public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context,
            OutputCollector collector) {
        _collector = collector;
    }

    public void execute(Tuple tuple) {
        String sentence = tuple.getString(0);
        for (String word : sentence.split(" ")) {
            // Anchored emit: downstream failures cause the spout tuple to be replayed.
            _collector.emit(tuple, new Values(word));
            // _collector.emit(new Values(word)); // unanchored alternative
        }
        _collector.ack(tuple);
    }

    public void cleanup() {
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}
Count:
package jvm.storm.starter.wordcount;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;

/**
 * Bolt that keeps a running count per word and, on every tuple, dumps the
 * whole counter map to stdout and appends it to a file.
 *
 * @author guiqiangl E-mail: larry.lv.word@gmail.com
 * @version Creation Time: 04:56:13
 */
public class WordCount implements IRichBolt {

    private static final long serialVersionUID = -6706714875516091987L;

    /** Running count per word; single-threaded per bolt task, so no locking needed. */
    public Map<String, Integer> counterMap = new HashMap<String, Integer>();

    OutputCollector _collector;

    BufferedWriter output = null;

    public void prepare(@SuppressWarnings("rawtypes") Map conf, TopologyContext context,
            OutputCollector collector) {
        _collector = collector;
        try {
            // Append mode: counts survive across worker restarts in the log file.
            output = new BufferedWriter(new FileWriter("/home/hadoop/desktop/wordcount.txt", true));
        } catch (IOException e) {
            // Writer could not be opened; leave output null and degrade to stdout only.
            e.printStackTrace();
        }
    }

    public void execute(Tuple tuple) {
        String word = tuple.getString(0);
        Integer count = counterMap.get(word);
        if (count == null) {
            count = 0;
        }
        count++;
        counterMap.put(word, count);

        // Dump the full map so the file/console always shows the latest totals.
        for (Map.Entry<String, Integer> entry : counterMap.entrySet()) {
            String line = entry.getKey() + ":" + entry.getValue() + " ";
            System.out.print(line);
            if (output != null) {
                try {
                    output.write(line);
                    output.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        System.out.println();
        _collector.ack(tuple);
    }

    /** Close the file writer when the bolt shuts down (was leaked before). */
    public void cleanup() {
        if (output != null) {
            try {
                output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}