package org.lukey.hadoop.classifyBayes;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;

/**
 * Naive-Bayes classification step: reads the per-class word frequency files
 * produced by an earlier job and re-emits each (word, count) pair into a
 * per-category output file via {@link MultipleOutputs}.
 *
 * <p>Input lines are expected as {@code category<TAB>word<TAB>count}
 * (TODO confirm against the job that writes /user/hadoop/mid/wordsFrequence).
 * The total word count is read from {@code totalwords.txt} (format
 * {@code totalwords:<n>}) and published to the job configuration so the
 * reducer can compute probabilities.
 */
public class Probability {

    /** Shared handle for category-named side outputs; created in Mapper.setup(). */
    private static MultipleOutputs<Text, IntWritable> mos;

    /**
     * Driver: loads the global word total into the job configuration,
     * then configures and submits the probability job.
     *
     * @param args unused (paths are currently hard-coded)
     * @throws Exception on HDFS or job-submission failure
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Read the total number of words and publish it in the Configuration
        // so the reducer can pick it up via context.getConfiguration().
        String totalWordsPath = "/user/hadoop/output/totalwords.txt";
        FileSystem fs = FileSystem.get(URI.create(totalWordsPath), conf);
        // try-with-resources: the original leaked both the stream and reader.
        try (FSDataInputStream inputStream = fs.open(new Path(totalWordsPath));
                BufferedReader buffer = new BufferedReader(new InputStreamReader(inputStream))) {
            String strLine = buffer.readLine();
            // Guard against an empty file (readLine() returns null).
            if (strLine != null) {
                String[] temp = strLine.split(":");
                if (temp.length == 2) {
                    // temp[0] is the key name, e.g. "totalwords"
                    conf.setInt(temp[0], Integer.parseInt(temp[1]));
                }
            }
        }

        Job job = Job.getInstance(conf, "file count");
        job.setJarByClass(Probability.class);
        job.setMapperClass(WordsOfClassCountMapper.class);
        job.setReducerClass(WordsOfClassCountReducer.class);

        String input = "/user/hadoop/mid/wordsFrequence";
        String output = "/user/hadoop/output/probability/";
        FileInputFormat.addInputPath(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // BUGFIX: map and reduce emit different value types. Setting only
        // setOutputValueClass(IntWritable) made Hadoop reject the reducer's
        // DoubleWritable with "wrong value class" — the "reduce cannot write"
        // failure. Declare the map and reduce output types separately.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Mapper: splits each {@code category<TAB>word<TAB>count} line and writes
     * (word, count) into a side output named after the category. Malformed
     * lines (field count != 3) are silently skipped, as in the original.
     */
    static class WordsOfClassCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        private static IntWritable number = new IntWritable();

        @Override
        protected void setup(Mapper<LongWritable, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            mos = new MultipleOutputs<Text, IntWritable>(context);
        }

        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // BUGFIX: original split on "\ T" (literal space-T); fields are
            // tab-separated, so split on "\t".
            String[] temp = value.toString().split("\t");
            if (temp.length == 3) {
                // temp[0] is the folder (category) name.
                String dirName = temp[0];
                value.set(temp[1]);
                number.set(Integer.parseInt(temp[2]));
                mos.write(value, number, dirName);
            }
        }

        @Override
        protected void cleanup(Mapper<LongWritable, Text, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // Must close MultipleOutputs or the side files are never flushed.
            mos.close();
        }
    }

    /**
     * Reducer: intended to turn per-word counts into probabilities using the
     * "totalwords" value published by the driver. The aggregation itself is
     * still stubbed out (the original left the summing code commented away),
     * so each key is currently written with the placeholder value 3.0.
     */
    static class WordsOfClassCountReducer extends Reducer<Text, IntWritable, Text, DoubleWritable> {

        // Placeholder result carried over from the original code.
        private final DoubleWritable result = new DoubleWritable(3);

        // BUGFIX: the original read "totalwords" from a fresh
        // `new Configuration()`, which only loads the cluster defaults and
        // can never see the value the driver stored on the *job*
        // configuration. It must come from context.getConfiguration().
        private int total = 1;

        @Override
        protected void setup(Reducer<Text, IntWritable, Text, DoubleWritable>.Context context)
                throws IOException, InterruptedException {
            total = context.getConfiguration().getInt("totalwords", 1);
        }

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values,
                Reducer<Text, IntWritable, Text, DoubleWritable>.Context context)
                throws IOException, InterruptedException {
            // TODO(review): the per-key sum (and division by `total`) is still
            // commented out upstream; restore it to emit real probabilities:
            //   double sum = 0;
            //   for (IntWritable value : values) { sum += value.get(); }
            //   result.set(sum / total);
            context.write(key, result);
        }
    }
}
Known issue: with MultipleOutputs configured, the reduce phase cannot write its output (the job's output value class is declared as IntWritable while the reducer emits DoubleWritable).