
Hadoop in Action: Multi-Table Join

Published: 2013-07-18   Author: cywhoyi   Source: reposted

This example implements a table join in MapReduce. The idea is to treat the child-parent data as two logical tables: the mapper emits every record twice, once keyed by the parent and tagged as the left table, and once keyed by the child and tagged as the right table. The reducer then joins the two sides on their shared key and outputs grandchild-grandparent pairs.
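For concreteness, here is a minimal sketch of the data the job expects and produces, assuming a space-delimited child-parent table whose first line is the header "child parent" (the names below are illustrative):

    child parent
    Tom Lucy
    Tom Jack
    Lucy Mary
    Lucy Ben
    Jack Alice
    Jack Jesse

From this input the reducer emits one tab-separated line per grandchild-grandparent pair (line order follows the reducer's key ordering):

    grandchild  grandparent
    Tom         Alice
    Tom         Jesse
    Tom         Mary
    Tom         Ben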

package org.apache.hadoop.examples;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class STjoin {
	// Incremented by the first reduce() call so the header row is written only
	// once; reliable only when the job runs with a single reduce task.
	public static int time = 0;

	public static class Map extends Mapper<Object, Text, Text, Text> {

		@Override
		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			// Each input line is "child parent"; split on the first space.
			String line = value.toString();
			int i = 0;
			while (line.charAt(i) != ' ') {
				i++;
			}
			String[] values = { line.substring(0, i), line.substring(i + 1) };

			// Skip the header line of the input table.
			if (values[0].compareTo("child") != 0) {
				String childname = values[0];
				String parentname = values[1];

				// Left table: keyed by the parent, tagged "1".
				context.write(new Text(parentname), new Text("1+"
						+ childname + "+" + parentname));

				// Right table: keyed by the child, tagged "2".
				context.write(new Text(childname), new Text("2+"
						+ childname + "+" + parentname));
			}
		}
	}

	public static class IntSumReducer extends Reducer<Text, Text, Text, Text> {

		// Note: values must be Iterable<Text> to match the mapper's output
		// type; with any other element type this method would not override
		// reduce() and the job would fall back to the identity reducer.
		@Override
		public void reduce(Text key, Iterable<Text> values,
				Context context) throws IOException, InterruptedException {
			// Write the header row once, from the first reduce() call.
			if (time == 0) {
				context.write(new Text("grandchild"), new Text("grandparent"));
				time++;
			}
			
			int grandchildnum = 0;
			String[] grandchild = new String[10];
			int grandparentnum = 0;
			String[] grandparent = new String[10];

			// Each value has the form "type+childname+parentname".
			for (Text val : values) {
				String record = val.toString();
				int len = record.length();
				if (len == 0)
					continue;
				char type = record.charAt(0);

				// childname sits between the two '+' separators.
				int i = 2;
				StringBuilder childname = new StringBuilder();
				while (record.charAt(i) != '+') {
					childname.append(record.charAt(i));
					i++;
				}
				i = i + 1;

				// parentname is everything after the second '+'.
				StringBuilder parentname = new StringBuilder();
				while (i < len) {
					parentname.append(record.charAt(i));
					i++;
				}

				// Left-table records contribute grandchildren; right-table
				// records contribute grandparents.
				if (type == '1') {
					grandchild[grandchildnum] = childname.toString();
					grandchildnum++;
				} else {
					grandparent[grandparentnum] = parentname.toString();
					grandparentnum++;
				}
			}
			// Cross product: pair every grandchild with every grandparent.
			if (grandparentnum != 0 && grandchildnum != 0) {
				for (int i = 0; i < grandchildnum; i++) {
					for (int j = 0; j < grandparentnum; j++) {
						context.write(new Text(grandchild[i]), new Text(
								grandparent[j]));
					}
				}
			}
		}
	}

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		String[] otherArgs = new GenericOptionsParser(conf, args)
				.getRemainingArgs();
		if (otherArgs.length != 2) {
			System.err.println("Usage: STjoin <in> <out>");
			System.exit(2);
		}
		Job job = new Job(conf, "STjoin");
		job.setJarByClass(STjoin.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(IntSumReducer.class);
		// A single reduce task keeps the static header counter reliable.
		job.setNumReduceTasks(1);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
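To try the job, the class would typically be packaged into a jar and submitted through the hadoop launcher. A minimal sketch, where the jar name and HDFS paths are illustrative:

    hadoop fs -put child_parent.txt input/
    hadoop jar stjoin.jar org.apache.hadoop.examples.STjoin input output
    hadoop fs -cat output/part-r-00000

Two limitations to keep in mind: the fixed-size String[10] buffers throw an ArrayIndexOutOfBoundsException if any person appears with more than ten children or ten parents, and the static time counter only suppresses duplicate header rows within a single reducer JVM, which is why the driver pins the job to one reduce task.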
