Java examples for Big Data: Hadoop
Set Hadoop MapReduce job input and output paths
import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; public class CountShare { enum Counter {/*w ww . j a v a2 s. c om*/ TIMESKIP, IPSKIP, URLSKIP, SERSKIP, } /** * * Map * */ public static class countShareMapper extends Mapper<Object,Text,Text,IntWritable> { public final static IntWritable ONE = new IntWritable(1); public Text word = new Text(); @Override public void setup(Context context) { } @Override public void map(Object key,Text value,Context context) throws IOException, InterruptedException{ //deal with (value.toString()); // word.set(log.getIp()); // context.write(word, ONE); //context.getCounter(Counter.IPSKIP).increment(1); } @Override public void cleanup(Context context) { } } /** * * Reduce */ public static class countShareReducer extends Reducer<Text,IntWritable,Text,IntWritable> { int sum = 0; @Override public void setup(Context context) throws IOException, InterruptedException { } public void reduce(Text key,Iterable<IntWritable> values,Context context) throws IOException, InterruptedException { Iterator<IntWritable> ite = values.iterator(); sum = 0; while(ite.hasNext()) { sum++; ite.next(); } context.write(key, new IntWritable(sum)); } @Override public void cleanup(Context context) throws IOException, InterruptedException { } } @SuppressWarnings("deprecation") public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException { Path input = new Path("hdfs://192.168.32.131/in/yourApp/"); Path output = new Path("hdfs://192.168.32.131/out/yourApp/countShare/"); Configuration conf = new Configuration(); Job job = 
new Job(conf,"countShare"); job.setJarByClass(CountShare.class); job.setMapperClass(countShareMapper.class); job.setReducerClass(countShareReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); FileInputFormat.addInputPath(job, input); FileOutputFormat.setOutputPath(job, output); System.exit(job.waitForCompletion(true)?0:1); } }