Example usage for org.apache.hadoop.conf.Configuration Configuration()

Introduction

On this page you can find example usage for the org.apache.hadoop.conf.Configuration default constructor, Configuration().

Prototype

public Configuration() 

Document

A new configuration.
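
As a quick illustration, here is a minimal sketch (not taken from the source files below; the class name ConfigurationExample and the property values are illustrative) showing the default constructor in use: properties can be set programmatically and read back, with optional fallback defaults.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationExample {
    public static void main(String[] args) {
        // Create a new configuration; default resources on the classpath are loaded automatically.
        Configuration conf = new Configuration();

        // Set a property programmatically and read it back.
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));

        // Read an integer property, falling back to a default value when it is unset.
        int replication = conf.getInt("dfs.replication", 3);
        System.out.println("dfs.replication = " + replication);
    }
}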

Usage

From source file:Analysis.A7_Total_Signups_By_Year.Total_Signup_by_Year_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Total Signups by Year");
    job.setJarByClass(Total_Signup_by_Year_Driver.class);
    job.setMapperClass(Total_Signup_by_Year_Mapper.class);

    job.setCombinerClass(Total_Signup_by_Year_Reducer.class);
    job.setReducerClass(Total_Signup_by_Year_Reducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:Analysis.A8_Top_10_Most_Popular_Tracks.Top_10_Most_Popular_Tracks_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Top 10 most popular tracks ");
    job.setJarByClass(Top_10_Most_Popular_Tracks_Driver.class);

    job.setMapperClass(Top_10_Most_Popular_Tracks_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

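    // Use a single reducer so that one reducer sees every record and can produce the global top 10.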
    job.setNumReduceTasks(1);
    job.setReducerClass(Top_10_Most_Popular_Tracks_Reducer.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:Analysis.A9_Max_Activity_By_Time_of_Day.Most_Listens_By_Time_of_Day_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Most listens by Time of the Day");
    job.setJarByClass(Most_Listens_By_Time_of_Day_Driver.class);

    job.setMapperClass(Most_Listens_By_Time_of_Day_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

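    // When the job succeeds, print each counter in the mapper's per-hour counter group.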
    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Most_Listens_By_Time_of_Day_Mapper.HOUR_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}

From source file:apex.benchmark.RedisHelper.java

License:Apache License

public void fillDB(String fileName) throws IOException {
    Path filePath = new Path(fileName);
    Configuration configuration = new Configuration();
    FileSystem fs = FileSystem.newInstance(filePath.toUri(), configuration);
    FSDataInputStream inputStream = fs.open(filePath);

    // try-with-resources closes the reader (and the underlying stream) even if reading fails
    try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {

        String line;
        while ((line = bufferedReader.readLine()) != null) {

            // each line is expected to hold two whitespace-separated ids
            String[] mapping = line.split("\\s+");

            if (mapping.length != 2) {
                continue;
            }

            jedis.sadd("campaigns", mapping[0]);
            jedis.set(mapping[1], mapping[0]);
        }
    }
}

From source file:application.Application.java

License:Open Source License

/**
 *    The main method takes the command-line arguments and first performs the training phase:
 *    it runs the MapReduce job for the items and, once that job's output is available, runs the second
 *    MapReduce job for the users; finally it performs the working phase, in which it estimates each user's
 *    rating and compares it with the rating the user actually gave.
 * @param args the first array element contains the input path,
 *  the second the output path for the ItemPDFJob run, the third the output path for the UserProfileJob run,
 *  the fourth is optional and contains the value K with which to run the process
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    long START_TIME = System.currentTimeMillis();

    System.out.println("\n--------------------------------------------------");
    System.out.println(
            " LCBM mapreduce: a fast and lightweight collaborative filtering algorithm for binary ratings.");
    System.out.println(" author: Fabio Petroni (http://www.fabiopetroni.com)");
    System.out.println("--------------------------------------------------\n");
    Globals GLOBALS = new Globals(args);
    System.out.println(" Parameters:");
    GLOBALS.print();
    System.out.println("\n");

    HashMap<Integer, ItemProfile> ITEM_STATE;
    HashMap<Integer, UserProfile> USER_STATE;
    Configuration conf = new Configuration();

    //begin ITEM PROFILING PROCEDURE
    System.out.println(" JOB1 - Computing item profiles... ");
    long local_START = System.currentTimeMillis();
    ItemPDFJob ItemPDF_DRIVER = new ItemPDFJob(GLOBALS, conf);
    ToolRunner.run(conf, ItemPDF_DRIVER, args);
    long local_END = System.currentTimeMillis();
    System.out.println("\ttime item profiling procedure: " + (local_END - local_START));
    System.out.println("\n");
    //end ITEM PROFILING PROCEDURE

    System.out.println(" JOB2 - Computing user profiles... ");
    local_START = System.currentTimeMillis();
    UserProfileJob UserProfiler_DRIVER = new UserProfileJob(GLOBALS, conf);
    ToolRunner.run(conf, UserProfiler_DRIVER, args);
    local_END = System.currentTimeMillis();
    System.out.println("\ttime user profiling procedure: " + (local_END - local_START));
    System.out.println("\n");
    //end USER PROFILING PROCEDURE

    ITEM_STATE = OutputReader.readOutputItemPDFJob(GLOBALS.getOUTPUT1(), conf);
    USER_STATE = OutputReader.readOutputUserProfileJob(GLOBALS.getOUTPUT2(), conf);

    //begin EVALUATION
    System.out.println("\n Evaluating algorithm performace... ");
    local_START = System.currentTimeMillis();
    RecommenderEvaluator RE = new RecommenderEvaluator(ITEM_STATE, USER_STATE, GLOBALS, conf);
    RE.evaluate();
    local_END = System.currentTimeMillis();
    double MCC = RE.getMCC();
    System.out.println("\ttime evaluator: " + (local_END - local_START));
    System.out.println("\tMCC: " + MCC);
    System.out.println("\n");
    //end EVALUATION   

    long END_TIME = System.currentTimeMillis();
    long TOTAL_TIME = END_TIME - START_TIME;
    System.out.println("  Total Time: " + TOTAL_TIME + "\n");
}

From source file:ar.edu.ungs.garules.CensusJob.java

License:Apache License

/**
 * Main - runs the process
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    long time = System.currentTimeMillis();
    Individual<BitSet> bestInd = null;
    if (args.length != 2)
        args = DEFAULT_ARGS;

    // GA setup
    // --------------------------------------------------------------------------------------------------------------
    Set<Individual<BitSet>> bestIndividuals = new HashSet<Individual<BitSet>>();
    List<Gene> genes = new ArrayList<Gene>();
    genes.add(genCondicionACampo);
    genes.add(genCondicionAOperador);
    genes.add(genCondicionAValor);
    genes.add(genCondicionBPresente);
    genes.add(genCondicionBCampo);
    genes.add(genCondicionBOperador);
    genes.add(genCondicionBValor);
    genes.add(genCondicionCPresente);
    genes.add(genCondicionCCampo);
    genes.add(genCondicionCOperador);
    genes.add(genCondicionCValor);
    genes.add(genPrediccionCampo);
    genes.add(genPrediccionValor);

    Map<Gene, Ribosome<BitSet>> translators = new HashMap<Gene, Ribosome<BitSet>>();
    for (Gene gene : genes)
        translators.put(gene, new BitSetToIntegerRibosome(0));

    Genome<BitSet> genome = new BitSetGenome("Chromosome 1", genes, translators);

    Parameter<BitSet> par = new Parameter<BitSet>(0.035, 0.9, 200, new DescendantAcceptEvaluator<BitSet>(),
            new CensusFitnessEvaluator(), new BitSetOnePointCrossover(), new BitSetFlipMutator(), null,
            new BitSetRandomPopulationInitializer(), null, new ProbabilisticRouletteSelector(),
            new GlobalSinglePopulation<BitSet>(genome), 500, 100d, new BitSetMorphogenesisAgent(), genome);

    ParallelFitnessEvaluationGA<BitSet> ga = new ParallelFitnessEvaluationGA<BitSet>(par);
    ga.init();
    // --------------------------------------------------------------------------------------------------------------
    // End of GA setup

    // Iterate up to the maximum number of allowed generations
    for (int i = 0; i < par.getMaxGenerations(); i++) {
        ga.initGeneration();
        Configuration conf = new Configuration();

        // Debug
        //showPopulation(ga.getPopulation());
        //System.out.println((System.currentTimeMillis()-time)/1000 + "s transcurridos desde el inicio");

        // Pass the conditions to be evaluated as parameters
        Iterator<Individual<BitSet>> ite = ga.getPopulation().iterator();
        int contador = 0;
        Set<String> expUnicas = new HashSet<String>();
        while (ite.hasNext()) {
            Individual<BitSet> ind = ite.next();
            String rep = RuleStringAdaptor.adapt(RuleAdaptor.adapt(ind));
            expUnicas.add(rep);
        }
        for (String rep : expUnicas)
            if (ocurrencias.get(rep) == null) {
                conf.set(String.valueOf(contador), rep);
                contador++;
            }

        // Configure job i
        Job job = new Job(conf, "GA rules - Generation " + i);
        job.setJarByClass(CensusJob.class);
        job.setMapperClass(CensusMapper.class);
        job.setCombinerClass(CensusReducer.class);
        job.setReducerClass(CensusReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        SequenceFileOutputFormat.setOutputPath(job, new Path(args[1] + "g" + i));

        // Run the map-reduce job representing generation i
        job.waitForCompletion(true);

        // Compute the fitness from the job's output and, if there is a better individual, add it to the set of best individuals.
        llenarOcurrencias(conf, args[1] + "g" + i);

        // Run the GA for this generation.
        Individual<BitSet> winnerGen = ga.run(new CensusFitnessEvaluator(ocurrencias));

        // Keep the best individuals
        if (bestInd == null) {
            bestInd = winnerGen;
            bestIndividuals.add(winnerGen);
        } else if (winnerGen.getFitness() > bestInd.getFitness()) {
            bestInd = winnerGen;
            bestIndividuals.add(winnerGen);
        }

        // Debug
        System.out.println("Mejor Individuo Generacion " + i + " => " + RuleAdaptor.adapt(bestInd)
                + " => Fitness = " + bestInd.getFitness());

    }

    // Sort and display the best individuals
    List<Individual<BitSet>> bestIndList = new ArrayList<Individual<BitSet>>(bestIndividuals);
    Collections.sort(bestIndList, new Comparator<Individual<BitSet>>() {
        public int compare(Individual<BitSet> o1, Individual<BitSet> o2) {
            return (o1.getFitness() > o2.getFitness() ? -1 : (o1.getFitness() == o2.getFitness() ? 0 : 1));
        }
    });
    showPopulation(bestIndList);
    System.out.println("Tiempo total de corrida " + (System.currentTimeMillis() - time) / 1000 + "s");

}

From source file:ark.util.HadoopUtil.java

License:Apache License

public static BufferedReader getFileReader(String path) {
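    // Open the path on the configured Hadoop FileSystem; any failure is swallowed and null is returned.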
    try {
        Path filePath = new Path(path);
        FileSystem fileSystem = FileSystem.get(new Configuration());
        BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(filePath)));
        return reader;
    } catch (Exception e) {
        return null;
    }
}

From source file:arpserver.HadoopTool.java

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = new Configuration();
    String in = strings[0];
    String out = strings[1];
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(new Path(out))) {
        fs.delete(new Path(out), true);
        fs.delete(new Path(out + "Src"), true);
        fs.delete(new Path(out + "Mitm"), true);
        fs.delete(new Path(out + "ArpScn"), true);
        fs.delete(new Path(out + "s"), true);
        fs.delete(new Path(out + "d"), true);
        fs.delete(new Path(out + "t"), true);
    }
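    // Seven jobs are created up front and chained below; each later job runs only when the previous one completes successfully.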
    Job job = new Job();
    Job job2 = new Job();
    Job job3 = new Job();
    Job job4 = new Job();
    Job job5 = new Job();
    Job job6 = new Job();
    Job job7 = new Job();
    job.setJobName("Q");
    job2.setJobName("Src");
    job3.setJobName("Mitm");
    job4.setJobName("ArpScn");
    job5.setJobName("s");
    job6.setJobName("d");
    job7.setJobName("time");
    job.setJarByClass(QuickDetect.class);

    job.setMapperClass(Qmapper.class);
    job.setReducerClass(Qreducer.class);

    job2.setMapperClass(Srcmapper.class);
    job2.setReducerClass(Srcreducer.class);

    job3.setMapperClass(ArpScanmapper.class);
    job3.setReducerClass(ArpScanreducer.class);

    job4.setMapperClass(Mitmmapper.class);
    job4.setReducerClass(Mitmreducer.class);

    job5.setMapperClass(Smapper.class);
    job5.setReducerClass(Sreducer.class);

    job6.setMapperClass(Dmapper.class);
    job6.setReducerClass(Dreducer.class);

    job7.setMapperClass(timemapper.class);
    job7.setReducerClass(timereducer.class);
    //testFinal168.txt
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);

    job2.setOutputKeyClass(NullWritable.class);
    job2.setOutputValueClass(Text.class);

    job3.setOutputKeyClass(NullWritable.class);
    job3.setOutputValueClass(IntWritable.class);

    job4.setOutputKeyClass(NullWritable.class);
    job4.setOutputValueClass(Text.class);

    job5.setOutputKeyClass(NullWritable.class);
    job5.setOutputValueClass(Text.class);

    job6.setOutputKeyClass(NullWritable.class);
    job6.setOutputValueClass(Text.class);

    job7.setOutputKeyClass(NullWritable.class);
    job7.setOutputValueClass(Text.class);

    job.setMapOutputKeyClass(QuickDetect.class);
    job.setMapOutputValueClass(IntWritable.class);
    //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
    job2.setMapOutputKeyClass(DetectSrc.class);
    job2.setMapOutputValueClass(IntWritable.class);

    job3.setMapOutputKeyClass(DetectArpScan.class);
    job3.setMapOutputValueClass(IntWritable.class);

    job4.setMapOutputKeyClass(DetectMitm.class);
    job4.setMapOutputValueClass(IntWritable.class);

    job5.setMapOutputKeyClass(SMac.class);
    job5.setMapOutputValueClass(IntWritable.class);

    job6.setMapOutputKeyClass(DMac.class);
    job6.setMapOutputValueClass(IntWritable.class);

    job7.setMapOutputKeyClass(timeMac.class);
    job7.setMapOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out));
    if (job.waitForCompletion(true)) {
        FileInputFormat.addInputPath(job2, new Path(in));
        FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
        if (job2.waitForCompletion(true)) {
            FileInputFormat.addInputPath(job3, new Path(in));
            FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
            if (job3.waitForCompletion(true)) {
                FileInputFormat.addInputPath(job4, new Path(in));
                FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
                if (job4.waitForCompletion(true)) {
                    FileInputFormat.addInputPath(job5, new Path(in));
                    FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
                    if (job5.waitForCompletion(true)) {
                        FileInputFormat.addInputPath(job6, new Path(in));
                        FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
                        if (job6.waitForCompletion(true)) {
                            FileInputFormat.addInputPath(job7, new Path(in));
                            FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
                            job7.waitForCompletion(true);
                        } else {
                            return 1;
                        }
                    } else {
                        return 1;
                    }
                } else {
                    return 1;
                }
            } else {
                return 1;
            }
        } else {
            return 1;
        }
    } else {
        return 1;
    }
    return 0;
}

From source file:arpserver.serverUdp.java

public void processHadoop(String inputfile) {
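    // Copy the local input file into HDFS, run the HadoopTool MapReduce driver on it, and then download the results.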
    input = this.hadooppath + "/bin/hadoop fs -copyFromLocal " + this.path + "/" + inputfile + ".txt "
            + this.hdfspath + "/" + inputfile + ".txt";
    io[0] = this.hdfspath + "/" + inputfile + ".txt";
    io[1] = this.hdfspath + "/" + inputfile + "output";
    filecheck = new File(this.path + "/" + inputfile + ".txt");
    System.out.println("====================================================");
    System.out.println(io[0]);
    System.out.println(io[1]);
    System.out.println("====================================================");
    if (filecheck.exists()) {
        uploadFile(this.path + "/" + inputfile + ".txt", io[0]);
        try {
            int res = ToolRunner.run(new Configuration(), new HadoopTool(), io);
        } catch (Exception ex) {
            Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
        }
        filecheck = new File(this.hdfspath + "/" + inputfile);
        if (!filecheck.exists()) {
            filecheck.mkdir();
        }
        downloadFile(io[1], "/" + inputfile);
        //try {
        //    int res = ToolRunner.run(new Configuration(), new hadoopTool(this.c), io);
        //} catch (Exception ex) {
        //    Logger.getLogger(serverUdp.class.getName()).log(Level.SEVERE, null, ex);
        //}
    } else {
        System.out.println("File Tidak ada");
    }

}

From source file:arrestsbyyear.ArrestsByYear.java

public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new ArrestsByYear(), args);
    System.exit(res);
}