List of usage examples for org.apache.hadoop.mapreduce.Job#waitForCompletion
public boolean waitForCompletion(boolean verbose) throws IOException, InterruptedException, ClassNotFoundException
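waitForCompletion(true) submits the job, blocks the client until the job finishes, and (because verbose is true) prints progress to the console; it returns true only if the job succeeded. Before the collected examples, here is a minimal, self-contained driver sketch. It is illustrative only, not taken from any source file below: the class name IdentityDriver is a placeholder, and the base Mapper/Reducer classes are used as identity stages so the sketch needs no custom code.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class IdentityDriver {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf, "identity job");
            job.setJarByClass(IdentityDriver.class);
            // The base Mapper and Reducer classes pass records through unchanged.
            job.setMapperClass(Mapper.class);
            job.setReducerClass(Reducer.class);
            // TextInputFormat (the default) produces LongWritable/Text pairs.
            job.setOutputKeyClass(LongWritable.class);
            job.setOutputValueClass(Text.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            // Submit, block until completion, and stream progress (verbose = true).
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }

Callers that want non-blocking submission can use job.submit() and poll job.isComplete() themselves; every example below uses the blocking waitForCompletion form.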
From source file: Analysis.A7_Total_Signups_By_Year.Total_Signup_by_Year_Driver.java

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Total Signups by Year");
        job.setJarByClass(Total_Signup_by_Year_Driver.class);
        job.setMapperClass(Total_Signup_by_Year_Mapper.class);
        job.setCombinerClass(Total_Signup_by_Year_Reducer.class);
        job.setReducerClass(Total_Signup_by_Year_Reducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: Analysis.A8_Top_10_Most_Popular_Tracks.Top_10_Most_Popular_Tracks_Driver.java

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Top 10 most popular tracks");
        job.setJarByClass(Top_10_Most_Popular_Tracks_Driver.class);
        job.setMapperClass(Top_10_Most_Popular_Tracks_Mapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setNumReduceTasks(1);
        job.setReducerClass(Top_10_Most_Popular_Tracks_Reducer.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: Analysis.A9_Max_Activity_By_Time_of_Day.Most_Listens_By_Time_of_Day_Driver.java

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Most listens by Time of the Day");
        job.setJarByClass(Most_Listens_By_Time_of_Day_Driver.class);
        job.setMapperClass(Most_Listens_By_Time_of_Day_Mapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        int code = job.waitForCompletion(true) ? 0 : 1;
        if (code == 0) {
            // Results are read back from custom counters rather than from the output files,
            // so the output directory is deleted once the job has finished.
            for (Counter counter : job.getCounters()
                    .getGroup(Most_Listens_By_Time_of_Day_Mapper.HOUR_COUNTER_GROUP)) {
                System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
            }
        }
        FileSystem.get(conf).delete(new Path(args[1]), true);
        System.exit(code);
    }
From source file: ar.edu.ungs.garules.CensusJob.java
License: Apache License

    /**
     * Main -> runs the process
     * @param args
     * @throws Exception
     */
    public static void main(String[] args) throws Exception {
        long time = System.currentTimeMillis();
        Individual<BitSet> bestInd = null;
        if (args.length != 2)
            args = DEFAULT_ARGS;

        // GA setup
        // --------------------------------------------------------------------------------
        Set<Individual<BitSet>> bestIndividuals = new HashSet<Individual<BitSet>>();
        List<Gene> genes = new ArrayList<Gene>();
        genes.add(genCondicionACampo);
        genes.add(genCondicionAOperador);
        genes.add(genCondicionAValor);
        genes.add(genCondicionBPresente);
        genes.add(genCondicionBCampo);
        genes.add(genCondicionBOperador);
        genes.add(genCondicionBValor);
        genes.add(genCondicionCPresente);
        genes.add(genCondicionCCampo);
        genes.add(genCondicionCOperador);
        genes.add(genCondicionCValor);
        genes.add(genPrediccionCampo);
        genes.add(genPrediccionValor);

        Map<Gene, Ribosome<BitSet>> translators = new HashMap<Gene, Ribosome<BitSet>>();
        for (Gene gene : genes)
            translators.put(gene, new BitSetToIntegerRibosome(0));

        Genome<BitSet> genome = new BitSetGenome("Chromosome 1", genes, translators);
        Parameter<BitSet> par = new Parameter<BitSet>(0.035, 0.9, 200,
                new DescendantAcceptEvaluator<BitSet>(), new CensusFitnessEvaluator(),
                new BitSetOnePointCrossover(), new BitSetFlipMutator(), null,
                new BitSetRandomPopulationInitializer(), null, new ProbabilisticRouletteSelector(),
                new GlobalSinglePopulation<BitSet>(genome), 500, 100d,
                new BitSetMorphogenesisAgent(), genome);
        ParallelFitnessEvaluationGA<BitSet> ga = new ParallelFitnessEvaluationGA<BitSet>(par);
        ga.init();
        // --------------------------------------------------------------------------------
        // End of GA setup

        // Iterate up to the maximum number of generations allowed
        for (int i = 0; i < par.getMaxGenerations(); i++) {
            ga.initGeneration();
            Configuration conf = new Configuration();

            // Debug
            //showPopulation(ga.getPopulation());
            //System.out.println((System.currentTimeMillis() - time) / 1000 + "s elapsed since start");

            // Pass the conditions to be evaluated as job parameters
            Iterator<Individual<BitSet>> ite = ga.getPopulation().iterator();
            int contador = 0;
            Set<String> expUnicas = new HashSet<String>();
            while (ite.hasNext()) {
                Individual<BitSet> ind = ite.next();
                String rep = RuleStringAdaptor.adapt(RuleAdaptor.adapt(ind));
                expUnicas.add(rep);
            }
            for (String rep : expUnicas)
                if (ocurrencias.get(rep) == null) {
                    conf.set(String.valueOf(contador), rep);
                    contador++;
                }

            // Configure job i
            Job job = new Job(conf, "GA rules - Generation " + i);
            job.setJarByClass(CensusJob.class);
            job.setMapperClass(CensusMapper.class);
            job.setCombinerClass(CensusReducer.class);
            job.setReducerClass(CensusReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            job.setOutputFormatClass(SequenceFileOutputFormat.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            SequenceFileOutputFormat.setOutputPath(job, new Path(args[1] + "g" + i));

            // Run the map-reduce job representing generation i
            job.waitForCompletion(true);

            // Compute fitness from the job output; if there is a better
            // individual, it is added to the set of best individuals
            llenarOcurrencias(conf, args[1] + "g" + i);

            // Run the GA for this generation
            Individual<BitSet> winnerGen = ga.run(new CensusFitnessEvaluator(ocurrencias));

            // Keep the best individuals
            if (bestInd == null) {
                bestInd = winnerGen;
                bestIndividuals.add(winnerGen);
            } else if (winnerGen.getFitness() > bestInd.getFitness()) {
                bestInd = winnerGen;
                bestIndividuals.add(winnerGen);
            }

            // Debug
            System.out.println("Best individual of generation " + i + " => "
                    + RuleAdaptor.adapt(bestInd) + " => Fitness = " + bestInd.getFitness());
        }

        // Sort and print the best individuals
        List<Individual<BitSet>> bestIndList = new ArrayList<Individual<BitSet>>(bestIndividuals);
        Collections.sort(bestIndList, new Comparator<Individual<BitSet>>() {
            public int compare(Individual<BitSet> o1, Individual<BitSet> o2) {
                return (o1.getFitness() > o2.getFitness() ? -1
                        : (o1.getFitness() == o2.getFitness() ? 0 : 1));
            }
        });
        showPopulation(bestIndList);
        System.out.println("Total running time " + (System.currentTimeMillis() - time) / 1000 + "s");
    }
From source file: arpserver.HadoopTool.java

    @Override
    public int run(String[] strings) throws Exception {
        Configuration conf = new Configuration();
        String in = strings[0];
        String out = strings[1];
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(new Path(out))) {
            fs.delete(new Path(out), true);
            fs.delete(new Path(out + "Src"), true);
            fs.delete(new Path(out + "Mitm"), true);
            fs.delete(new Path(out + "ArpScn"), true);
            fs.delete(new Path(out + "s"), true);
            fs.delete(new Path(out + "d"), true);
            fs.delete(new Path(out + "t"), true);
        }
        Job job = new Job();
        Job job2 = new Job();
        Job job3 = new Job();
        Job job4 = new Job();
        Job job5 = new Job();
        Job job6 = new Job();
        Job job7 = new Job();
        job.setJobName("Q");
        job2.setJobName("Src");
        job3.setJobName("ArpScn"); // job3 runs the ARP-scan stage
        job4.setJobName("Mitm");   // job4 runs the MITM stage
        job5.setJobName("s");
        job6.setJobName("d");
        job7.setJobName("time");
        job.setJarByClass(QuickDetect.class);
        job.setMapperClass(Qmapper.class);
        job.setReducerClass(Qreducer.class);
        job2.setMapperClass(Srcmapper.class);
        job2.setReducerClass(Srcreducer.class);
        job3.setMapperClass(ArpScanmapper.class);
        job3.setReducerClass(ArpScanreducer.class);
        job4.setMapperClass(Mitmmapper.class);
        job4.setReducerClass(Mitmreducer.class);
        job5.setMapperClass(Smapper.class);
        job5.setReducerClass(Sreducer.class);
        job6.setMapperClass(Dmapper.class);
        job6.setReducerClass(Dreducer.class);
        job7.setMapperClass(timemapper.class);
        job7.setReducerClass(timereducer.class);
        //testFinal168.txt
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);
        job2.setOutputKeyClass(NullWritable.class);
        job2.setOutputValueClass(Text.class);
        job3.setOutputKeyClass(NullWritable.class);
        job3.setOutputValueClass(IntWritable.class);
        job4.setOutputKeyClass(NullWritable.class);
        job4.setOutputValueClass(Text.class);
        job5.setOutputKeyClass(NullWritable.class);
        job5.setOutputValueClass(Text.class);
        job6.setOutputKeyClass(NullWritable.class);
        job6.setOutputValueClass(Text.class);
        job7.setOutputKeyClass(NullWritable.class);
        job7.setOutputValueClass(Text.class);
        job.setMapOutputKeyClass(QuickDetect.class);
        job.setMapOutputValueClass(IntWritable.class);
        //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
        job2.setMapOutputKeyClass(DetectSrc.class);
        job2.setMapOutputValueClass(IntWritable.class);
        job3.setMapOutputKeyClass(DetectArpScan.class);
        job3.setMapOutputValueClass(IntWritable.class);
        job4.setMapOutputKeyClass(DetectMitm.class);
        job4.setMapOutputValueClass(IntWritable.class);
        job5.setMapOutputKeyClass(SMac.class);
        job5.setMapOutputValueClass(IntWritable.class);
        job6.setMapOutputKeyClass(DMac.class);
        job6.setMapOutputValueClass(IntWritable.class);
        job7.setMapOutputKeyClass(timeMac.class);
        job7.setMapOutputValueClass(IntWritable.class);

        // Run the seven jobs sequentially over the same input, each writing
        // to its own output directory; stop at the first failure.
        FileInputFormat.addInputPath(job, new Path(in));
        FileOutputFormat.setOutputPath(job, new Path(out));
        if (!job.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job2, new Path(in));
        FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
        if (!job2.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job3, new Path(in));
        FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
        if (!job3.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job4, new Path(in));
        FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
        if (!job4.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job5, new Path(in));
        FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
        if (!job5.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job6, new Path(in));
        FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
        if (!job6.waitForCompletion(true)) {
            return 1;
        }
        FileInputFormat.addInputPath(job7, new Path(in));
        FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
        job7.waitForCompletion(true);
        return 0;
    }
From source file: AshleyIngram.FYP.Hadoop.WordCount.java
License: Apache License

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: wordcount <in> <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: assignment1.WordCount.LinkedSort.LinkedSort.java
License: Apache License

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "word count");
        job.setJarByClass(LinkedSort.class);
        job.setMapperClass(TokenizerMapper.class);
        //job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setPartitionerClass(SortPartitioner.class);
        job.setOutputKeyClass(WordAndLength.class);
        job.setOutputValueClass(IntWritable.class);
        job.setNumReduceTasks(2);
        for (int i = 0; i < otherArgs.length - 1; ++i) {
            FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
        }
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: assignment1.WordCount.WordCountInMap.java
License: Apache License

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "word count");
        job.setJarByClass(assignment1.WordCount.WordCountInMap.class);
        job.setMapperClass(TokenizerMapper.class);
        //job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        for (int i = 0; i < otherArgs.length - 1; ++i) {
            FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
        }
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: assignment1.WordCount.WordSort.java
License: Apache License

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
            System.exit(2);
        }
        Job job = new Job(conf, "word count");
        job.setJarByClass(WordSort.class);
        job.setMapperClass(TokenizerMapper.class);
        //job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setPartitionerClass(SortPartitioner.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setNumReduceTasks(2);
        for (int i = 0; i < otherArgs.length - 1; ++i) {
            FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
        }
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
From source file: Assignment2_P2_StockExchangeCount.StockPriceDriver.java

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Average Stock Price");
        job.setJarByClass(StockPriceDriver.class);
        job.setMapperClass(StockPrice_Mapper.class);
        // Note: reusing a reducer as a combiner is only safe when the reduce
        // operation is associative and commutative; a reducer that averaged
        // values directly would not combine correctly.
        job.setCombinerClass(StockPrice_Reducer.class);
        job.setReducerClass(StockPrice_Reducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FloatWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }