1. Project name: STjoin (single-table join)
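The program joins a single child-parent table with itself: matching the parent column of one copy against the child column of the other yields grandchild-grandparent pairs. The mapper emits each record twice, once keyed by parent with flag 1 (the "left table", carrying the child) and once keyed by child with flag 2 (the "right table", carrying the parent), so for each person the reducer sees both that person's children and that person's parents; their Cartesian product is exactly the grandchild-grandparent pairs. With a hypothetical input file (names are illustrative; the first line is a header, each following line is "child parent"):

child parent
Tom Lucy
Tom Jack
Lucy Mary
Lucy Ben

the job would output:

grandchild grandparent
Tom Mary
Tom Ben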
package com.stjoin;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class STjoin {

    public static int time = 0;

    // The map splits each input line into child and parent, then emits the
    // pair twice: once keyed by parent (the left table) and once keyed by
    // child (the right table). Each value must carry a flag marking which
    // table it belongs to.
    public static class Map extends Mapper<Object, Text, Text, Text> {
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String childname;
            String parentname;
            String relationtype;
            String line = value.toString();
            System.out.println("mapper...............");
            int i = 0;
            while (line.charAt(i) != ' ') {
                i++;
            }
            String[] values = {line.substring(0, i), line.substring(i + 1)};
            System.out.println("child:" + values[0] + " parent:" + values[1]);
            // Skip the header line: compareTo returns 0 only when the first
            // field is the literal string "child"
            if (values[0].compareTo("child") != 0) {
                childname = values[0];
                parentname = values[1];
                // Left table: key = parent, flag = 1
                relationtype = "1";
                context.write(new Text(values[1]),
                        new Text(relationtype + "+" + childname + "+" + parentname));
                System.out.println("key:" + values[1] + " value: "
                        + relationtype + "+" + childname + "+" + parentname);
                // Right table: key = child, flag = 2
                relationtype = "2";
                context.write(new Text(values[0]),
                        new Text(relationtype + "+" + childname + "+" + parentname));
                System.out.println("key:" + values[0] + " value: "
                        + relationtype + "+" + childname + "+" + parentname);
            }
        }
    }

    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            System.out.println("reduce.....................");
            System.out.println("key:" + key + " values:" + values);
            // Write the header row once
            if (time == 0) {
                context.write(new Text("grandchild"), new Text("grandparent"));
                time++;
            }
            int grandchildnum = 0;
            String grandchild[] = new String[10];
            int grandparentnum = 0;
            String grandparent[] = new String[10];
            Iterator<Text> ite = values.iterator();
            while (ite.hasNext()) {
                String record = ite.next().toString();
                System.out.println("record: " + record);
                int len = record.length();
                int i = 2;
                if (len == 0) continue;
                char relationtype = record.charAt(0);
                String childname = "";
                String parentname = "";
                // Extract the child field from the value
                while (record.charAt(i) != '+') {
                    childname = childname + record.charAt(i);
                    i++;
                }
                System.out.println("childname: " + childname);
                i = i + 1;
                // Extract the parent field from the value
                while (i < len) {
                    parentname = parentname + record.charAt(i);
                    i++;
                }
                System.out.println("parentname: " + parentname);
                // Left table: the child goes into the grandchild array
                if (relationtype == '1') {
                    grandchild[grandchildnum] = childname;
                    grandchildnum++;
                }
                // Right table: the parent goes into the grandparent array
                else {
                    grandparent[grandparentnum] = parentname;
                    grandparentnum++;
                }
            }
            // Cartesian product of the grandchild and grandparent arrays
            if (grandparentnum != 0 && grandchildnum != 0) {
                for (int m = 0; m < grandchildnum; m++) {
                    for (int n = 0; n < grandparentnum; n++) {
                        context.write(new Text(grandchild[m]), new Text(grandparent[n]));
                        System.out.println("grandchild: " + grandchild[m]
                                + " grandparent: " + grandparent[n]);
                    }
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String otherArgs[] = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: stjoin <in> <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "single table join");
        job.setJarByClass(STjoin.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
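The character-by-character parsing in the reducer is easiest to follow in isolation. Below is a minimal standalone sketch of that loop; the class name and sample value are hypothetical, not part of the original job:

// Hypothetical standalone demo of the reducer's value-parsing loop.
public class RecordParseDemo {
    public static void main(String[] args) {
        // Sample value as the mapper would emit it: flag + child + parent
        String record = "1+Tom+Lucy";
        char relationtype = record.charAt(0);   // '1' = left table, '2' = right table
        int i = 2;                              // skip the flag and the first '+'
        String childname = "";
        while (record.charAt(i) != '+') {       // copy characters up to the next '+'
            childname = childname + record.charAt(i);
            i++;
        }
        String parentname = record.substring(i + 1); // the remainder is the parent
        System.out.println(relationtype + " " + childname + " " + parentname);
        // prints: 1 Tom Lucy
    }
}

Two caveats worth noting in the full version: the static time counter writes the header once per reducer JVM rather than once per job, so multiple reducers would each emit a header line, and the fixed-size arrays of 10 cap how many children or parents a single key can collect.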
Version 2 (simplified):
package com.stjoin;

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class STjoin {

    public static int time = 0;

    public static class Map extends Mapper<Object, Text, Text, Text> {
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String relationtype;
            String line = value.toString();
            System.out.println("mapper...............");
            int i = 0;
            // Splitting method 1: walk character by character to find the delimiter
            /*
            while (line.charAt(i) != ' ') {
                i++;
            }
            String[] values = {line.substring(0, i), line.substring(i + 1)};
            */
            // Splitting method 2: use a StringTokenizer to pull out child and parent
            String[] values = new String[10];
            StringTokenizer itr = new StringTokenizer(line);
            while (itr.hasMoreTokens()) {
                values[i] = itr.nextToken();
                i = i + 1;
            }
            System.out.println("child:" + values[0] + " parent:" + values[1]);
            // Skip the header line: compareTo returns 0 only for the literal "child"
            if (values[0].compareTo("child") != 0) {
                // Left table: key = parent, value = flag 1 + child
                relationtype = "1";
                context.write(new Text(values[1]), new Text(relationtype + "+" + values[0]));
                System.out.println("key:" + values[1] + " value: " + relationtype + "+" + values[0]);
                // Right table: key = child, value = flag 2 + parent
                relationtype = "2";
                context.write(new Text(values[0]), new Text(relationtype + "+" + values[1]));
                System.out.println("key:" + values[0] + " value: " + relationtype + "+" + values[1]);
            }
        }
    }

    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            System.out.println("reduce.....................");
            System.out.println("key:" + key + " values:" + values);
            if (time == 0) {
                context.write(new Text("grandchild"), new Text("grandparent"));
                time++;
            }
            int grandchildnum = 0;
            String grandchild[] = new String[10];
            int grandparentnum = 0;
            String grandparent[] = new String[10];
            String name;
            // Iteration method 1: an explicit iterator
            // Iterator<Text> ite = values.iterator();
            // while (ite.hasNext()) {
            // Iteration method 2: a for-each loop
            for (Text val : values) {
                // String record = ite.next().toString();
                String record = val.toString();
                System.out.println("record: " + record);
                int i = 2;
                char relationtype = record.charAt(0);
                name = record.substring(i);
                System.out.println("name: " + name);
                if (relationtype == '1') {
                    grandchild[grandchildnum] = name;
                    grandchildnum++;
                } else {
                    grandparent[grandparentnum] = name;
                    grandparentnum++;
                }
            }
            // (Parsing method 3 would be the character-by-character charAt()
            // walk used in the full version above.)
            if (grandparentnum != 0 && grandchildnum != 0) {
                for (int m = 0; m < grandchildnum; m++) {
                    for (int n = 0; n < grandparentnum; n++) {
                        context.write(new Text(grandchild[m]), new Text(grandparent[n]));
                        System.out.println("grandchild: " + grandchild[m]
                                + " grandparent: " + grandparent[n]);
                    }
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String otherArgs[] = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: stjoin <in> <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "single table join");
        job.setJarByClass(STjoin.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
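Both splitting styles shown above (charAt() scanning and StringTokenizer) can also be replaced with String.split, the more common idiom today. A minimal standalone sketch of the same map-side emission under that assumption; the class name and sample line are hypothetical, not from the original post:

// Hypothetical demo of the map-side emission using String.split.
public class SplitMapDemo {
    public static void main(String[] args) {
        String line = "Tom Lucy";              // sample input line: child parent
        String[] fields = line.split("\\s+");  // split on whitespace
        if (fields.length == 2 && !"child".equals(fields[0])) {
            // left table: key = parent, value = "1+" + child
            System.out.println(fields[1] + "\t1+" + fields[0]);
            // right table: key = child, value = "2+" + parent
            System.out.println(fields[0] + "\t2+" + fields[1]);
        }
        // prints:
        // Lucy    1+Tom
        // Tom     2+Lucy
    }
}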
Original post: http://www.cnblogs.com/yangyquin/p/5021182.html