Map class
package hadoop3;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Danbiaomap extends Mapper<LongWritable, Text, Text, Text> {
    String childName;
    String parentName;
    String flag;

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] str = value.toString().split("\t");
        // Skip the header line ("child  parent").
        if (str[0].compareTo("child") != 0) {
            childName = str[0];
            parentName = str[1];
            // Left table: key on the parent, so this child can later be
            // joined as a grandchild of the parent's own parent.
            flag = "1";
            context.write(new Text(parentName),
                    new Text(flag + "+" + childName + "+" + parentName));
            // Right table: key on the child, so this parent can later be
            // joined as a grandparent of the child's own children.
            flag = "2";
            context.write(new Text(childName),
                    new Text(flag + "+" + childName + "+" + parentName));
        }
    }
}
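To make the two emitted record types concrete, here is a small trace. The sample input below is an assumption (the original post does not show the contents of danbiao.txt); the file is tab-separated with a header line:

child	parent
Tom	Lucy
Lucy	Mary

For "Tom	Lucy" the mapper emits (Lucy, "1+Tom+Lucy") as the left-table record and (Tom, "2+Tom+Lucy") as the right-table record; for "Lucy	Mary" it emits (Mary, "1+Lucy+Mary") and (Lucy, "2+Lucy+Mary"). Both record types for the person Lucy now share the key Lucy, which is what lets the reducer join the table with itself.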
Reduce class
package hadoop3;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class Danbiaoreduce extends Reducer<Text, Text, Text, Text> {
    private int num = 0;

    @Override
    protected void reduce(Text key, Iterable<Text> value, Context context)
            throws IOException, InterruptedException {
        // Write the header row once per reducer.
        if (num == 0) {
            context.write(new Text("grandchild"), new Text("grandparent"));
            num++;
        }
        Iterator<Text> itr = value.iterator();
        int grandchildNum = 0;
        String[] grandchild = new String[100];
        int grandparentNum = 0;
        String[] grandparent = new String[100];
        while (itr.hasNext()) {
            String[] record = itr.next().toString().split("\\+");
            if (record[0].compareTo("1") == 0) {
                // Left-table record: record[1] is a child of the key,
                // i.e. a grandchild candidate.
                grandchild[grandchildNum] = record[1];
                grandchildNum++;
            } else if (record[0].compareTo("2") == 0) {
                // Right-table record: record[2] is a parent of the key,
                // i.e. a grandparent candidate.
                grandparent[grandparentNum] = record[2];
                grandparentNum++;
            }
        }
        // Cross-join the two lists: every grandchild pairs with every grandparent.
        if (grandchildNum != 0 && grandparentNum != 0) {
            for (int i = 0; i < grandparentNum; i++) {
                for (int j = 0; j < grandchildNum; j++) {
                    context.write(new Text(grandchild[j]), new Text(grandparent[i]));
                }
            }
        }
    }
}
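Continuing the trace from the mapper section: for the key Lucy the reducer receives "1+Tom+Lucy" (flag 1, so Tom goes into the grandchild list) and "2+Lucy+Mary" (flag 2, so Mary goes into the grandparent list). The cross-join then writes the pair (Tom, Mary): Tom is a child of Lucy and Lucy is a child of Mary, so Mary is Tom's grandparent. A key whose value list carries only one of the two flags produces no output, which is what the grandchildNum != 0 && grandparentNum != 0 guard enforces.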
Main class
package hadoop3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Danbiao extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Danbiao(), args));
    }

    @Override
    public int run(String[] arg0) throws Exception {
        Configuration conf = getConf();
        Job job = Job.getInstance(conf);
        job.setJarByClass(getClass());

        // Remove the output directory if it already exists.
        FileSystem fs = FileSystem.get(conf);
        fs.delete(new Path("/outfile1104"), true);

        FileInputFormat.addInputPath(job, new Path("/luo/danbiao.txt"));
        FileOutputFormat.setOutputPath(job, new Path("/outfile1104"));
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setMapperClass(Danbiaomap.class);
        job.setReducerClass(Danbiaoreduce.class);
        return job.waitForCompletion(true) ? 0 : 1;
    }
}
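Assuming the three classes are packaged into a jar (the jar name below is hypothetical; the input and output paths are hard-coded in run()), the job can be launched and its result inspected roughly like this:

hadoop jar danbiao.jar hadoop3.Danbiao
hadoop fs -cat /outfile1104/part-r-00000

part-r-00000 is the default output file name when the job runs with a single reducer.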
MapReduce: implementing a single-table join (self-join)