Hadoop 통계 파일에서 어떤 단어가 나오는 횟수

3759 단어
txt 파일 내용은 다음과 같습니다.
what is you name?
my name is zhang san.
통계 요구 사항: txt 파일에서 'is'가 나오는 횟수는?
 
코드는 다음과 같습니다.
PerWordMapper
package com.hadoop.wordcount;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class PerWordMapper extends Mapper<Object, Text, Text, IntWritable> {

	/** Reusable output key; holds the matched word. */
	public Text keyText = new Text();
	/** Constant count of 1 emitted for every match. */
	public IntWritable intValue = new IntWritable(1);

	/**
	 * Splits one input line on whitespace and emits ("is", 1) for each
	 * occurrence of the word "is"; all other tokens are ignored.
	 *
	 * @param key     byte offset of the line in the input split (unused)
	 * @param value   one line of input text
	 * @param context Hadoop context used to emit key/value pairs
	 * @throws IOException          if emitting the pair fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void map(Object key, Text value,
			Context context)
			throws IOException, InterruptedException {
		String str = value.toString();
		StringTokenizer to = new StringTokenizer(str);
		while (to.hasMoreTokens()) {
			String t = to.nextToken();
			if (t.equals("is")) {
				// Reuse the Writable via set() instead of allocating a new
				// Text per match — the original did `keyText = new Text(t)`.
				keyText.set(t);
				context.write(keyText, intValue);
			}
		}
	}
}

 
PerWordReducer
package com.hadoop.wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class PerWordReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

	/** Reusable output value; holds the total count for the current key. */
	public IntWritable intValue = new IntWritable(0);

	/**
	 * Sums all partial counts for a word and emits (word, total).
	 *
	 * @param key     the word being counted
	 * @param value   all counts emitted for this word by the mappers/combiners
	 * @param context Hadoop context used to emit the aggregated pair
	 * @throws IOException          if emitting the pair fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<IntWritable> value,
			Context context)
			throws IOException, InterruptedException {
		// BUG FIX: the original looped with `while (value.iterator().hasNext())`
		// and read `value.iterator().next()` — each call creates a FRESH
		// iterator, so against any standard Iterable the loop never advances
		// and spins forever. Iterate exactly once with for-each instead.
		int sum = 0;
		for (IntWritable count : value) {
			sum += count.get();
		}
		intValue.set(sum);
		context.write(key, intValue);
	}

}

PerWordCount
package com.hadoop.wordcount;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.hadoop.mapreducer.MapperClass;
import com.hadoop.mapreducer.ReducerClass;
import com.hadoop.mapreducer.WordCount;

public class PerWordCount {
	/**
	 * Driver: configures and submits the "word count" job that counts
	 * occurrences of the word "is" in the input path.
	 *
	 * Usage: wordcount &lt;in&gt; &lt;out&gt;
	 * Exits 2 on bad arguments, 0 on job success, 1 on job failure.
	 *
	 * @param args input path and output path (after generic-option parsing)
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		Configuration conf = new Configuration();
	    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
	    System.out.println("otherArgs.length:"+otherArgs.length);
	    if (otherArgs.length != 2) {
	      System.err.println("Usage: wordcount <in> <out>");
	      System.exit(2);
	    }
	    // Job.getInstance(...) replaces the deprecated new Job(Configuration, String).
	    Job job = Job.getInstance(conf, "word count");
	    job.setJarByClass(PerWordCount.class);
	    job.setMapperClass(PerWordMapper.class);
	    // Summation is associative and commutative, so the reducer is safely
	    // reused as a combiner to shrink map-side output.
	    job.setCombinerClass(PerWordReducer.class);
	    job.setReducerClass(PerWordReducer.class);
	    job.setOutputKeyClass(Text.class);
	    job.setOutputValueClass(IntWritable.class);
	    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
	    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
	    System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}

좋은 웹페이지 즐겨찾기