MapReduce: implementing a multi-table join

Map
package hadoop3;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Duobiaomap extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        // Skip the header line of either input table
        if (line.contains("factoryname") || line.contains("addressID")) {
            return;
        }
        String[] str = line.split("\t");
        String flag;
        if (str[0].length() == 1) {
            // Address table record (addressID \t addressname): key on the addressID
            flag = "2";
            context.write(new Text(str[0]), new Text(flag + "+" + str[1]));
        } else if (str[0].length() > 1) {
            // Factory table record (factoryname \t addressID): key on the addressID
            flag = "1";
            context.write(new Text(str[1]), new Text(flag + "+" + str[0]));
        }
    }
}
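Both tables go through this one mapper, which tells them apart by the length of the first tab-separated field: a single character means the line comes from the address table, anything longer means it comes from the factory table. Either way the record is re-keyed on the addressID, and the value is tagged with "1" (factory) or "2" (address). Below is a minimal stand-alone sketch of that tagging step; the two sample lines and the class name TagSketch are only illustrative, since the post does not show the contents of the input files.

public class TagSketch {
    public static void main(String[] args) {
        // Hypothetical records; the real contents of /luo/duobiao.txt and
        // /luo/duobiao2.txt are not shown in the post.
        String factoryLine = "Beijing Red Star\t1";   // factoryname \t addressID
        String addressLine = "1\tBeijing";            // addressID \t addressname

        for (String line : new String[] { factoryLine, addressLine }) {
            String[] str = line.split("\t");
            if (str[0].length() == 1) {
                // address record: key = addressID, value tagged with "2"
                System.out.println(str[0] + " -> 2+" + str[1]);
            } else {
                // factory record: key = addressID, value tagged with "1"
                System.out.println(str[1] + " -> 1+" + str[0]);
            }
        }
        // prints:
        //   1 -> 1+Beijing Red Star
        //   1 -> 2+Beijing
    }
}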
Reduce
package hadoop3;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class Duobiaoreduce extends Reducer<Text, Text, Text, Text> {
    private static int num = 0;

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // Write the header row once, before the first output record
        if (num == 0) {
            context.write(new Text("Factory"), new Text("Address"));
            num++;
        }
        Iterator<Text> itr = values.iterator();
        String[] factory = new String[100];
        int factorynum = 0;
        String[] address = new String[100];
        int addressnum = 0;
        // Split the tagged values into factory names ("1") and address names ("2")
        while (itr.hasNext()) {
            String[] str1 = itr.next().toString().split("\\+");
            if (str1[0].compareTo("1") == 0) {
                factory[factorynum] = str1[1];
                factorynum++;
            } else if (str1[0].compareTo("2") == 0) {
                address[addressnum] = str1[1];
                addressnum++;
            }
        }
        // Join: emit every (factory, address) pair that shares this addressID
        if (factorynum != 0 && addressnum != 0) {
            for (int i = 0; i < addressnum; i++) {
                for (int j = 0; j < factorynum; j++) {
                    context.write(new Text(factory[j]), new Text(address[i]));
                }
            }
        }
    }
}
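Because Hadoop groups map outputs by key, every value sharing an addressID arrives in a single reduce() call, so the join reduces to a cartesian product of the two tagged lists (note the loops run to factorynum and addressnum rather than the full array length, so unused slots are never emitted). Below is a small stand-alone sketch of that pairing step; the sample values and the class name JoinSketch are only illustrative.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class JoinSketch {
    public static void main(String[] args) {
        // Hypothetical values delivered to reduce() for one key (addressID "1");
        // Hadoop does not guarantee their order.
        List<String> values = Arrays.asList("1+Beijing Red Star", "2+Beijing", "1+Beijing Rising");

        List<String> factory = new ArrayList<>();
        List<String> address = new ArrayList<>();
        for (String v : values) {
            String[] str1 = v.split("\\+");
            if (str1[0].equals("1")) {
                factory.add(str1[1]);        // factory name
            } else if (str1[0].equals("2")) {
                address.add(str1[1]);        // address name
            }
        }
        // Emit every (factory, address) combination, exactly as the reducer does
        for (String a : address) {
            for (String f : factory) {
                System.out.println(f + "\t" + a);
            }
        }
        // prints:
        //   Beijing Red Star    Beijing
        //   Beijing Rising      Beijing
    }
}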
Main
package hadoop3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Duobiao extends Configured implements Tool {
    public static void main(String[] args) throws Exception {
        ToolRunner.run(new Duobiao(), args);
    }

    @Override
    public int run(String[] arg0) throws Exception {
        Configuration conf = getConf();
        Job job = new Job(conf);
        job.setJarByClass(getClass());

        // Remove any previous output directory so the job can be rerun
        FileSystem fs = FileSystem.get(conf);
        fs.delete(new Path("/outfile1105"), true);

        // Both input tables are fed to the same mapper
        FileInputFormat.addInputPath(job, new Path("/luo/duobiao.txt"));
        FileInputFormat.addInputPath(job, new Path("/luo/duobiao2.txt"));
        FileOutputFormat.setOutputPath(job, new Path("/outfile1105"));

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setMapperClass(Duobiaomap.class);
        job.setReducerClass(Duobiaoreduce.class);

        job.waitForCompletion(true);
        return 0;
    }
}
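Assuming the three classes are packaged into a jar (the name duobiao.jar here is only illustrative), the job can be submitted with: hadoop jar duobiao.jar hadoop3.Duobiao, and the joined result read back with: hadoop fs -cat /outfile1105/part-r-00000.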