I. Requirement
Count the number of users from each province who visit the Web site, based on the information recorded in the access logs.
II. Program code
package org.apache.hadoop.studyhdfs.mapreduce;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.jboss.netty.util.internal.StringUtil;
Import Org.jboss.netty.util.internal.StringUtil; public class Provincecountmapreduce extends configured implements Tool {//1.MAP * * <KEYIN,VALUEIN,KEYOUT,VALUEOUT&G
T */public static class Wordcountmapper extends Mapper<longwritable,text,intwritable,intwritable>{private intwritable Mapoutputkey =new intwritable () Private intwritable Mapoutputvalue
Intwritable (1); @Override public void Map (longwritable key, Text value, Context context) throws IOException, interruptedexception {//get
Linevalue String linevalue =value.tostring ();
Split string[] STRs =linevalue.split ("T");
Line blank String url=strs[1];
String Provinceidvalue =strs[23]; Guolv if (Strs.length < 30 | | | Stringutils.isblank (provinceidvalue) | |
Stringutils.isblank (URL)) {return;
int Provinceid =integer.max_value;
try {provinceid=integer.valueof (provinceidvalue);} catch (Exception e) {return;} if (Provinceid = = Integer.max_value) {
Return
} mapoutputkey.set (Provinceid);
Context.write (Mapoutputkey, Mapoutputvalue); }//2.reduce public static class Wordcountreduce extends Reducer<intwritable,intwritable,intwritable,intwritable >{private intwritable Outputvalue =new intwritable () @Override public void reduce (intwritable key, iterable< IntwriTable> Values,context context) throws IOException, interruptedexception {//to does int sum = 0; for (intwritable Value:va
lues) {sum +=value.get ();} outputvalue.set (sum);
Context.write (key, Outputvalue); } public int run (string[] args) throws exception{//1.get Configuration Configuration conf =super.getconf ();//2.create
Job Job Job =job.getinstance (conf, This.getclass (). Getsimplename ());
Job.setjarbyclass (Provincecountmapreduce.class);
3.set Job//3.1 Set input path InputPath =new path (args[0));
Fileinputformat.addinputpath (Job, InputPath);
3.2 Set Mapper Job.setmapperclass (Wordcountmapper.class);
Job.setmapoutputkeyclass (Intwritable.class);
Job.setmapoutputvalueclass (Intwritable.class);
3.3 Set reduce job.setreducerclass (Wordcountreduce.class);
Job.setoutputkeyclass (Intwritable.class);
Job.setoutputvalueclass (Intwritable.class);
3.4 Set Input path OutputPath =new path (args[1));
Fileoutputformat.setoutputpath (Job, OutputPath); 4.submmit Boolean issuccess =job.waitforcompLetion (TRUE);
return issuccess?0:1; public static void Main (string[] args) throws Exception {args =new string[]{"hdfs://hadoop-senior02.beifeng.com:8020/i
nput/2015082818 "," hdfs://hadoop-senior02.beifeng.com:8020/output15/"};
Configuration conf =new Configuration ();
Conf.set ("Mapreduce.map.output.compress", "true");
int Status=toolrunner.run (conf, new Provincecountmapreduce (), args);
System.exit (status); }
}
III. Run result
1) View the output: bin/hdfs dfs -text /output15/par*
2) Result:
1 3527
2 1672
3 511
4 325
5 776
6 661
7 95
8 80
9 183
10 93
11 135
12 289
13 264
14 374
15 163
16 419
17 306
18 272
19 226
20 2861
21 124
22 38
23 96
24 100
25 20
26 157
27 49
28 21
29 85
30 42
32 173
The above is a short introduction to Java code that counts a Web site's visits by users from different provinces. We hope it is helpful to you, and we thank you for your support of the site!