Reposted from http://a123159521.iteye.com/blog/1226924
package org.frame.base.hbase.hadoop;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class WordCount {

  /**
   * TokenizerMapper extends Mapper.
   *
   * [One input file produces one map task; two files produce two map tasks.]
   * map() reads the input, splits it on " \t\n\r\f", and emits a
   * word ==> one key/value pair for every token.
   *
   * Type parameters:
   *   Object      input key type
   *   Text        input value type
   *   Text        output key type
   *   IntWritable output value type
   *
   * Writable tells the Hadoop framework how to serialize and deserialize
   * objects of a type. WritableComparable adds a compareTo() method on top
   * of Writable, so the framework also knows how to sort such objects
   * (a sketch of that contract follows the mapper class below).
   *
   * @author yangchunlong.tw
   */
  public static class TokenizerMapper
       extends Mapper<Object, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      StringTokenizer itr = new StringTokenizer(value.toString());
      while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        context.write(word, one);
      }
    }
  }
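
  /*
   * Illustration (not part of the original post): a minimal sketch of the
   * Writable/WritableComparable contract described above. Hadoop calls
   * write()/readFields() to serialize/deserialize a key and compareTo()
   * to sort keys during the shuffle. Fully qualified names are used so no
   * extra imports are needed; a production key would also override
   * hashCode() and equals() so partitioning works correctly.
   */
  public static class WordKey
      implements org.apache.hadoop.io.WritableComparable<WordKey> {
    private String word = "";

    public void write(java.io.DataOutput out) throws IOException {
      out.writeUTF(word);                      // serialize
    }

    public void readFields(java.io.DataInput in) throws IOException {
      word = in.readUTF();                     // deserialize
    }

    public int compareTo(WordKey other) {
      return word.compareTo(other.word);       // sort order for the shuffle
    }
  }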

  /**
   * IntSumReducer extends Reducer.
   *
   * [No matter how many map tasks there are, there is a single reduce pass
   * that aggregates the results.]
   * reduce() loops over all of a key's map output and sums up the
   * word ==> one key/value pairs.
   *
   * The key here is the word set by the Mapper; reduce() is called once
   * per key.
   *
   * When the loop finishes, what has been written to the context is the
   * final result. (A worked example follows this class.)
   *
   * @author yangchunlong.tw
   */
  public static class IntSumReducer
       extends Reducer<Text,IntWritable,Text,IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce(Text key, Iterable<IntWritable> values,
                       Context context
                       ) throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable val : values) {
        sum += val.get();
      }
      result.set(sum);
      context.write(key, result);
    }
  }
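
  /*
   * Worked example (illustration, not from the original post): for the
   * input line "hello world hello" the mapper emits
   *   (hello,1) (world,1) (hello,1)
   * the shuffle groups the values by key, so the reducer receives
   *   hello -> [1, 1]   world -> [1]
   * and writes
   *   hello 2
   *   world 1
   */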

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    // An input path and an output path are required.
    if (otherArgs.length != 2) {
      System.err.println("Usage: wordcount <in> <out>");
      System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);          // main class
    job.setMapperClass(TokenizerMapper.class);   // mapper
    job.setCombinerClass(IntSumReducer.class);   // combiner: pre-aggregates map output locally
    job.setReducerClass(IntSumReducer.class);    // reducer
    job.setOutputKeyClass(Text.class);           // key class of the job output
    job.setOutputValueClass(IntWritable.class);  // value class of the job output
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));    // input path
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));  // output path
    System.exit(job.waitForCompletion(true) ? 0 : 1);  // wait for completion, then exit
  }
}
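
To try the job, package the class into a jar and submit it with the hadoop launcher. The jar name and HDFS paths below are placeholders, not from the original post:

  hadoop jar wordcount.jar org.frame.base.hbase.hadoop.WordCount /user/demo/input /user/demo/output
  hadoop fs -cat /user/demo/output/part-r-00000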