List of usage examples for org.apache.hadoop.mapreduce Job setOutputValueClass
public void setOutputValueClass(Class<?> theClass) throws IllegalStateException
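setOutputValueClass declares the value type of the job's final output records (the reducer's output value, or the mapper's in a map-only job); it throws IllegalStateException if the job has already been submitted, and in the drivers below it is always paired with setOutputKeyClass. Before the collected examples, here is a minimal self-contained sketch of the typical call site; MyMapper and MyReducer are hypothetical user classes emitting Text/IntWritable pairs, not part of Hadoop:

// Minimal sketch: declaring the final output key/value types for a job.
// MyMapper and MyReducer are hypothetical placeholders.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class OutputValueClassExample {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "output-value-class example");
        job.setJarByClass(OutputValueClassExample.class);
        job.setMapperClass(MyMapper.class);   // hypothetical
        job.setReducerClass(MyReducer.class); // hypothetical
        job.setOutputKeyClass(Text.class);
        // Must match the value type MyReducer actually writes to its Context.
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}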
From source file:com.flytxt.yesbank.processor.HdfsToHbaseEngine.java
License:Open Source License
public static void main(String[] args) throws Exception {
    if (args.length < 1) {
        System.out.println("Hdfs to Hbase Engine requires the model Id as Input ...");
        System.exit(1);
    }
    String modelId = args[0];
    DBConnection dbConnection = DBConnection.getInstance();
    dbConnection.loadDbProperties();
    dbConnection.initializeDataBaseConnection();
    String hfdsInputLoc = dbConnection.getHdfsInputDirectory(modelId);
    if (hfdsInputLoc != null) {
        Configuration conf = new Configuration();
        conf.set("test", args[0]);
        Job job = new Job(conf); // deprecated; Job.getInstance(conf) is preferred
        job.setJarByClass(HdfsToHbaseEngine.class);
        job.setMapperClass(HdfsEngineMapper.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(hfdsInputLoc));
        job.setOutputFormatClass(NullOutputFormat.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    } else {
        System.out.println("Hdfs input location does not exist. Unable to process the request.");
        System.exit(0);
    }
}
From source file:com.flytxt.yesbank.test.ModelProcessor.java
License:Open Source License
public static void main(String[] args) throws Exception {
    String modelId = args[0];
    ModelProcessor modelProcessor = new ModelProcessor();
    modelProcessor.loadDbProperties();
    modelProcessor.initializeDataBaseConnection();
    String hfdsInputLoc = modelProcessor.getHdfsInputDirectory(modelId);
    if (hfdsInputLoc != null) {
        Configuration conf = new Configuration();
        conf.set("test", args[0]);
        Job job = new Job(conf); // deprecated; Job.getInstance(conf) is preferred
        job.setJarByClass(ModelProcessor.class);
        job.setMapperClass(HdfsProcessMapper.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(hfdsInputLoc));
        job.setOutputFormatClass(NullOutputFormat.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    } else {
        System.out.println("Hdfs input location does not exist. Unable to process the request.");
        System.exit(0);
    }
}
From source file:com.frdeso.app.Sleepy.java
License:Apache License
/**
 * Performs integer summation of all the values for each key.
 */
@Override
public int run(String[] args) throws Exception {
    if (args.length != 3) {
        System.err.println("Usage: sleepy <in> <out> <sleep time in seconds>");
        return 0;
    }
    Configuration conf = getConf();
    conf.set("mapSleepTime", args[2]);
    @SuppressWarnings("deprecation")
    Job job = new Job(conf, "joba");
    job.setJarByClass(Sleepy.class);
    job.setMapperClass(SleepyMapper.class);
    job.setCombinerClass(Reducer.class); // identity combiner
    job.setReducerClass(Reducer.class);  // identity reducer
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    Path outputpath = new Path(args[1]);
    FileOutputFormat.setOutputPath(job, outputpath);
    boolean result = job.waitForCompletion(true);
    return (result ? 0 : 1);
}
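Since run(String[]) overrides Tool.run here (Sleepy presumably extends Configured and implements Tool, given the getConf() call), the usual entry point is ToolRunner. The source's actual main method is not shown on this page; a plausible sketch under that assumption:

// Hypothetical driver for the Tool above; ToolRunner also parses generic Hadoop options.
public static void main(String[] args) throws Exception {
    int exitCode = org.apache.hadoop.util.ToolRunner.run(
            new org.apache.hadoop.conf.Configuration(), new Sleepy(), args);
    System.exit(exitCode);
}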
From source file:com.github.libsml.commons.util.HadoopUtils.java
License:Apache License
/**
 * Create a map-only Hadoop Job out of the passed in parameters. Does not set the
 * Job name.
 *
 * @see #getCustomJobName(String, JobContext, Class, Class)
 */
public static Job prepareJob(Path inputPath, Path outputPath, Class<? extends InputFormat> inputFormat,
        Class<? extends Mapper> mapper, Class<? extends Writable> mapperKey,
        Class<? extends Writable> mapperValue, Class<? extends OutputFormat> outputFormat,
        Configuration conf) throws IOException {
    Job job = Job.getInstance(conf);
    Configuration jobConf = job.getConfiguration();
    if (mapper.equals(Mapper.class)) {
        throw new IllegalStateException("Can't figure out the user class jar file from mapper/reducer");
    }
    job.setJarByClass(mapper);
    job.setInputFormatClass(inputFormat);
    jobConf.set("mapred.input.dir", inputPath.toString());
    job.setMapperClass(mapper);
    job.setMapOutputKeyClass(mapperKey);
    job.setMapOutputValueClass(mapperValue);
    // In a map-only job the map output types are also the job's final output types.
    job.setOutputKeyClass(mapperKey);
    job.setOutputValueClass(mapperValue);
    jobConf.setBoolean("mapred.compress.map.output", true);
    job.setNumReduceTasks(0);
    job.setOutputFormatClass(outputFormat);
    jobConf.set("mapred.output.dir", outputPath.toString());
    return job;
}
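A hedged sketch of a call site for this map-only helper; the paths, MyMapper, and the chosen format classes are placeholders, not part of HadoopUtils (assumes the usual org.apache.hadoop.mapreduce.lib imports):

// Hypothetical usage of the map-only prepareJob helper above.
Job job = HadoopUtils.prepareJob(
        new Path("/data/in"), new Path("/data/out"),
        TextInputFormat.class,
        MyMapper.class,                 // hypothetical mapper
        Text.class, IntWritable.class,  // map (and hence job) output types
        TextOutputFormat.class,
        new Configuration());
job.waitForCompletion(true);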
From source file:com.github.libsml.commons.util.HadoopUtils.java
License:Apache License
/**
 * Prepare an Avro map/reduce Job from the passed-in parameters. Key and value type
 * arguments may be given either as Avro Schema instances or as Writable classes;
 * input and output formats are inferred from the schemas when not set explicitly.
 */
public static Job prepareAvroJob(String inputPaths, String outputPath,
        Class<? extends InputFormat> inputFormat, Object inputKey, Object inputValue,
        Class<? extends Mapper> mapper, Object mapperKey, Object mapperValue,
        Class<? extends Reducer> combiner, Class<? extends Reducer> reducer,
        Object outputKey, Object outputValue, Class<? extends OutputFormat> outputFormat,
        Configuration conf, boolean overwrite, boolean isCompress) throws IOException {
    Job job = Job.getInstance(conf);
    Configuration jobConf = job.getConfiguration();
    if (inputKey instanceof Schema) {
        if (inputValue instanceof Schema) {
            inputFormat = inputFormat == null ? AvroKeyValueInputFormat.class : inputFormat;
        }
        inputFormat = inputFormat == null ? AvroKeyInputFormat.class : inputFormat;
    }
    if (inputFormat != null) {
        job.setInputFormatClass(inputFormat);
    }
    if (inputKey instanceof Schema) {
        AvroJob.setInputKeySchema(job, (Schema) inputKey);
    }
    if (inputValue instanceof Schema) {
        AvroJob.setInputValueSchema(job, (Schema) inputValue);
    }
    if (outputKey instanceof Schema) {
        if (outputValue instanceof Schema) {
            outputFormat = outputFormat == null ? AvroKeyValueOutputFormat.class : outputFormat;
        }
        outputFormat = outputFormat == null ? AvroKeyOutputFormat.class : outputFormat;
    }
    if (outputFormat != null) {
        job.setOutputFormatClass(outputFormat);
    }
    if (outputKey instanceof Schema) {
        AvroJob.setOutputKeySchema(job, (Schema) outputKey);
    } else if (outputKey instanceof Class) {
        job.setOutputKeyClass((Class) outputKey);
    }
    if (outputValue instanceof Schema) {
        AvroJob.setOutputValueSchema(job, (Schema) outputValue);
    } else if (outputValue instanceof Class) {
        job.setOutputValueClass((Class) outputValue);
    }
    if (reducer == null) {
        job.setNumReduceTasks(0);
        if (mapperKey instanceof Schema) {
            AvroJob.setMapOutputKeySchema(job, (Schema) mapperKey);
        } else if (mapperKey instanceof Class) {
            job.setOutputKeyClass((Class) mapperKey);
        }
        if (mapperValue instanceof Schema) {
            AvroJob.setOutputValueSchema(job, (Schema) mapperValue);
        } else if (mapperValue instanceof Class) {
            job.setOutputValueClass((Class) mapperValue);
        }
        job.setJarByClass(mapper);
    } else if (reducer.equals(Reducer.class)) {
        if (mapper.equals(Mapper.class)) {
            throw new IllegalStateException("Can't figure out the user class jar file from mapper/reducer");
        }
        job.setJarByClass(mapper);
    } else {
        job.setJarByClass(reducer);
    }
    FileInputFormat.setInputPaths(job, inputPaths);
    FileOutputFormat.setOutputPath(job, new Path(outputPath));
    if (isCompress) {
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, DeflateCodec.class);
    }
    job.setMapperClass(mapper);
    if (mapperKey instanceof Schema) {
        AvroJob.setMapOutputKeySchema(job, (Schema) mapperKey);
    } else if (mapperKey instanceof Class) {
        job.setMapOutputKeyClass((Class) mapperKey);
    }
    if (mapperValue instanceof Schema) {
        AvroJob.setMapOutputValueSchema(job, (Schema) mapperValue);
    } else if (mapperValue instanceof Class) {
        job.setMapOutputValueClass((Class) mapperValue);
    }
    if (reducer != null) {
        job.setReducerClass(reducer);
    }
    if (combiner != null) {
        job.setCombinerClass(combiner);
    }
    if (overwrite) {
        HadoopUtils.delete(jobConf, new Path(outputPath));
    }
    return job;
}
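A hedged sketch of one way to call this variant, mixing an Avro input schema with Writable map/job output types; MyRecord (a generated Avro specific record), MyAvroMapper, and MyReducer are hypothetical placeholders:

// Hypothetical usage of the Object-typed prepareAvroJob helper above.
Job job = HadoopUtils.prepareAvroJob(
        "/data/in",                    // inputPaths
        "/data/out",                   // outputPath
        null,                          // inputFormat: inferred as AvroKeyInputFormat from the key schema
        MyRecord.getClassSchema(),     // inputKey (Schema)
        null,                          // inputValue
        MyAvroMapper.class,            // mapper (hypothetical)
        Text.class,                    // mapperKey (Class, not Schema)
        IntWritable.class,             // mapperValue
        null,                          // combiner
        MyReducer.class,               // reducer (hypothetical)
        Text.class,                    // outputKey
        IntWritable.class,             // outputValue
        TextOutputFormat.class,        // outputFormat
        new Configuration(),
        true,                          // overwrite: delete any existing output first
        false);                        // isCompress
job.waitForCompletion(true);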
From source file:com.github.libsml.commons.util.HadoopUtils.java
License:Apache License
public static Job prepareAvroJob(String inputPaths, Path outputPath, Schema inputKeySchema,
        Class<? extends Mapper> mapper, Class<? extends Writable> mapperKey,
        Class<? extends Writable> mapperValue, Class<? extends Reducer> combiner,
        Class<? extends Reducer> reducer, Schema outputKeySchema,
        Class<? extends Writable> outputValue, Configuration conf, boolean overwrite) throws IOException {
    Job job = Job.getInstance(conf);
    Configuration jobConf = job.getConfiguration();
    if (reducer.equals(Reducer.class)) {
        if (mapper.equals(Mapper.class)) {
            throw new IllegalStateException("Can't figure out the user class jar file from mapper/reducer");
        }
        job.setJarByClass(mapper);
    } else {
        job.setJarByClass(reducer);
    }
    FileInputFormat.setInputPaths(job, inputPaths);
    FileOutputFormat.setOutputPath(job, outputPath);
    FileOutputFormat.setCompressOutput(job, true);
    FileOutputFormat.setOutputCompressorClass(job, DeflateCodec.class);
    job.setInputFormatClass(AvroKeyInputFormat.class);
    AvroJob.setInputKeySchema(job, inputKeySchema);
    job.setMapperClass(mapper);
    if (mapperKey != null) {
        job.setMapOutputKeyClass(mapperKey);
    }
    if (mapperValue != null) {
        job.setMapOutputValueClass(mapperValue);
    }
    if (combiner != null) {
        job.setCombinerClass(combiner);
    }
    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    job.setReducerClass(reducer);
    AvroJob.setOutputKeySchema(job, outputKeySchema);
    job.setOutputValueClass(outputValue);
    if (overwrite) {
        HadoopUtils.delete(jobConf, outputPath);
    }
    return job;
}
From source file:com.github.libsml.commons.util.HadoopUtils.java
License:Apache License
public static Job prepareAvroJob(String inputPaths, Path outputPath, Schema inputKeySchema,
        Class<? extends Mapper> mapper, Class<? extends Writable> mapperKey,
        Class<? extends Writable> mapperValue, Class<? extends Reducer> combiner,
        Class<? extends Reducer> reducer, Class<? extends Writable> outputKey,
        Class<? extends Writable> outputValue, Configuration conf, boolean overwrite) throws IOException {
    Job job = Job.getInstance(conf);
    Configuration jobConf = job.getConfiguration();
    if (reducer.equals(Reducer.class)) {
        if (mapper.equals(Mapper.class)) {
            throw new IllegalStateException("Can't figure out the user class jar file from mapper/reducer");
        }
        job.setJarByClass(mapper);
    } else {
        job.setJarByClass(reducer);
    }
    FileInputFormat.setInputPaths(job, inputPaths);
    FileOutputFormat.setOutputPath(job, outputPath);
    // Unlike the previous overload, this variant leaves output compression disabled.
    job.setInputFormatClass(AvroKeyInputFormat.class);
    AvroJob.setInputKeySchema(job, inputKeySchema);
    job.setMapperClass(mapper);
    if (mapperKey != null) {
        job.setMapOutputKeyClass(mapperKey);
    }
    if (mapperValue != null) {
        job.setMapOutputValueClass(mapperValue);
    }
    if (combiner != null) {
        job.setCombinerClass(combiner);
    }
    job.setReducerClass(reducer);
    job.setOutputKeyClass(outputKey);
    job.setOutputValueClass(outputValue);
    if (overwrite) {
        HadoopUtils.delete(jobConf, outputPath);
    }
    return job;
}
From source file:com.github.libsml.commons.util.HadoopUtils.java
License:Apache License
public static Job prepareJob(String inputPath, String outputPath, Class<? extends InputFormat> inputFormat,
        Class<? extends Mapper> mapper, Class<? extends Writable> mapperKey,
        Class<? extends Writable> mapperValue, Class<? extends Reducer> reducer,
        Class<? extends Writable> reducerKey, Class<? extends Writable> reducerValue,
        Class<? extends OutputFormat> outputFormat, Configuration conf) throws IOException {
    Job job = Job.getInstance(conf);
    Configuration jobConf = job.getConfiguration();
    if (reducer.equals(Reducer.class)) {
        if (mapper.equals(Mapper.class)) {
            throw new IllegalStateException("Can't figure out the user class jar file from mapper/reducer");
        }
        job.setJarByClass(mapper);
    } else {
        job.setJarByClass(reducer);
    }
    job.setInputFormatClass(inputFormat);
    jobConf.set("mapred.input.dir", inputPath);
    job.setMapperClass(mapper);
    if (mapperKey != null) {
        job.setMapOutputKeyClass(mapperKey);
    }
    if (mapperValue != null) {
        job.setMapOutputValueClass(mapperValue);
    }
    jobConf.setBoolean("mapred.compress.map.output", true);
    job.setReducerClass(reducer);
    job.setOutputKeyClass(reducerKey);
    job.setOutputValueClass(reducerValue);
    job.setOutputFormatClass(outputFormat);
    jobConf.set("mapred.output.dir", outputPath);
    return job;
}
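A hedged sketch of a call site for this full map/reduce variant; MyMapper, MyReducer, and the paths are placeholders:

// Hypothetical usage of the map/reduce prepareJob helper above.
Job job = HadoopUtils.prepareJob(
        "/data/in", "/data/out",
        TextInputFormat.class,
        MyMapper.class, Text.class, IntWritable.class,   // mapper and its output types
        MyReducer.class, Text.class, IntWritable.class,  // reducer and the job's output types
        TextOutputFormat.class,
        new Configuration());
job.waitForCompletion(true);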
From source file:com.github.milind.GlobalNumberAddition.java
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Global Addition of Numbers");
    job.setJarByClass(GlobalNumberAddition.class);
    job.setMapperClass(GlobalNumberAdditionMapper.class);
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file:com.github.milind.GlobalNumberAverage.java
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Global Average of Numbers");
    job.setJarByClass(GlobalNumberAverage.class);
    job.setMapperClass(GlobalNumberAverageMapper.class);
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}