Map class:
1 PackageLYC.YUSHAO.HADOOP.MR.WC;2 3 Importjava.io.IOException;4 5 Importorg.apache.hadoop.io.LongWritable;6 ImportOrg.apache.hadoop.io.Text;7 ImportOrg.apache.hadoop.mapreduce.Mapper;8 9 Public classWcmapperextendsmapper<longwritable, text, text, longwritable> {Ten One @Override A protected voidmap (longwritable key, Text value, context context) - throwsIOException, interruptedexception { - //first, receive the data the //Accept Data -String line =value.tostring (); - //to split - //Split +string[] Words = Line.split (""); - //to cycle + //Loop A for(String w:words) { at //Send - //Send -Context.write (NewText (W),NewLongwritable (1)); - } - - } in -}
Reduce class:
1 PackageLYC.YUSHAO.HADOOP.MR.WC;2 3 Importjava.io.IOException;4 5 Importorg.apache.hadoop.io.LongWritable;6 ImportOrg.apache.hadoop.io.Text;7 ImportOrg.apache.hadoop.mapreduce.Reducer;8 9 Public classWcreducerextendsReducer<text, Longwritable, Text, longwritable> {Ten One @Override A protected voidReduce (Text key, iterable<longwritable>values, -Context context)throwsIOException, interruptedexception { - //Define a counter the //Define a counter - LongCounter = 0; - - //receiving data Loops + //Accept data and loop - for(longwritable i:values) { + //sum ACounter + =i.get (); at - } - //Send -Context.write (Key,Newlongwritable (counter)); - - } in -}
WordCount class:
1 PackageLYC.YUSHAO.HADOOP.MR.WC;2 3 Importorg.apache.hadoop.conf.Configuration;4 ImportOrg.apache.hadoop.fs.Path;5 Importorg.apache.hadoop.io.LongWritable;6 ImportOrg.apache.hadoop.io.Text;7 ImportOrg.apache.hadoop.mapreduce.Job;8 ImportOrg.apache.hadoop.mapreduce.lib.input.FileInputFormat;9 ImportOrg.apache.hadoop.mapreduce.lib.output.FileOutputFormat;Ten One Public classWordCount { A - Public Static voidMain (string[] args)throwsException { -Configuration conf =NewConfiguration (); the //abstract MapReduce into a single job -Job Job =job.getinstance (conf); - - //Notice!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +Job.setjarbyclass (WordCount.class); - + //assemble a custom class A at //Set Mapper ' s properties -Job.setmapperclass (Wcmapper.class); - -Job.setmapoutputkeyclass (Text.class); - -Job.setmapoutputvalueclass (longwritable.class); in - //reading HDFs Data toFileinputformat.setinputpaths (Job,NewPath ("/words.txt")); + - //Set Reducer ' s properties theJob.setreducerclass (Wcreducer.class); * //output to HDFs inside $Job.setoutputkeyclass (Text.class);Panax NotoginsengJob.setoutputvalueclass (longwritable.class); -Fileoutputformat.setoutputpath (Job,NewPath ("/wcout111")); the + //Some methods of invoking the job to commit A //submit,but This isn't good the //job.submit (); +Job.waitforcompletion (true); - } $ $}
Right-click the project name
Export
JAR File
Finish
On the command line:
hadoop jar /root/mrs.jar
Success!
The first program of Hadoop Big Data--wordcount