// Java tutorial: Hadoop MapReduce WordCount example
package mapReduceBasics;

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Classic Hadoop MapReduce "word count" job: counts occurrences of each
 * whitespace-separated token across the input files and writes
 * {@code word<TAB>count} pairs to the output directory.
 *
 * <p>Usage: {@code WordCount <input path> <output path>} (plus any generic
 * Hadoop options, which are stripped by {@link GenericOptionsParser}).
 *
 * @author hduser
 */
public class WordCount {

    /**
     * Map phase: splits each input line into whitespace-delimited tokens and
     * emits {@code (token, 1)} for every token.
     */
    public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {

        // Writables are reused across calls to avoid allocating one object per
        // token — a standard Hadoop idiom; the framework copies on write().
        private static final IntWritable CONSTANT_ONE = new IntWritable(1);
        private final Text inputWord = new Text();

        /**
         * Emits one {@code (word, 1)} pair per token in the line.
         *
         * <p>Note: the original version logged every line and every token to
         * stderr; that is removed here — per-record println in the map loop
         * dominates runtime and floods task logs on non-trivial input.
         */
        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer tokens = new StringTokenizer(value.toString());
            while (tokens.hasMoreTokens()) {
                inputWord.set(tokens.nextToken());
                context.write(inputWord, CONSTANT_ONE);
            }
        }
    }

    /**
     * Reduce phase: sums all counts for a word. Also safe to use as a
     * combiner, since integer addition is associative and commutative.
     */
    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        // Reused output writable (same allocation-avoidance idiom as the mapper).
        private final IntWritable result = new IntWritable();

        /**
         * Writes {@code (word, sum of partial counts)}.
         */
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    /**
     * Configures and submits the job, then exits with 0 on success, 1 on
     * job failure, or 2 on bad usage.
     *
     * @param args generic Hadoop options followed by input and output paths
     * @throws Exception if job setup or submission fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        GenericOptionsParser parser = new GenericOptionsParser(conf, args);
        Configuration jobConf = parser.getConfiguration();
        String[] otherArgs = parser.getRemainingArgs();

        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when paths are missing.
        if (otherArgs.length < 2) {
            System.err.println("Usage: WordCount <input path> <output path>");
            System.exit(2);
        }

        // Job.getInstance replaces the deprecated new Job(conf, name) ctor.
        Job job = Job.getInstance(jobConf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        Path inputPath = new Path(otherArgs[0]);
        Path outputPath = new Path(otherArgs[1]);
        FileInputFormat.addInputPath(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        // Hadoop refuses to run if the output directory already exists, so
        // remove any stale one. Use the parsed configuration (jobConf) — the
        // original used the pre-parse conf, ignoring -fs/-D overrides.
        FileSystem fs = FileSystem.get(jobConf);
        fs.delete(outputPath, true);

        // job.setNumReduceTasks(3);
        System.err.println("Input Format - " + job.getInputFormatClass());
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}