Example usage for org.apache.hadoop.mapreduce Job waitForCompletion

Introduction

This page collects example usages of org.apache.hadoop.mapreduce.Job#waitForCompletion from open-source projects.

Prototype

public boolean waitForCompletion(boolean verbose)
        throws IOException, InterruptedException, ClassNotFoundException 

Document

Submit the job to the cluster and wait for it to finish.
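
The typical pattern is to configure a Job, submit it with waitForCompletion(true) (the verbose flag prints progress and counters to the console), and map the boolean result onto a process exit code. Below is a minimal sketch of that pattern; WordCountMapper and WordCountReducer are hypothetical placeholders for your own classes.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MinimalDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "minimal-example");
        job.setJarByClass(MinimalDriver.class);

        // Hypothetical mapper/reducer classes; substitute your own.
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // true = report progress to stdout while blocking until the job ends
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}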

Usage

From source file:com.asp.tranlog.ImportTsv.java

License:Apache License

/**
 * Main entry point.
 * 
 * @param args
 *            The command line parameters.
 * @throws Exception
 *             When running the job fails.
 */
public static void main(String[] args) throws Exception {
    System.out.println("==============================================");
    Configuration conf = HBaseConfiguration.create();

    LOG.info(PRE + "conf.toString() == " + conf.toString());

    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }

    String columns[] = conf.getStrings(COLUMNS_CONF_KEY);

    if (columns == null) {
        usage("No columns specified. Please specify with -D" + COLUMNS_CONF_KEY + "=...");
        System.exit(-1);
    }

    // Make sure at least one column in addition to the row key is specified
    if (columns.length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }

    Job job = createSubmittableJob(conf, otherArgs);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
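
Note the idiom in the final line: waitForCompletion(true) returns true only on success, so the ternary maps success to exit code 0 and failure to a non-zero code, which lets shell scripts and workflow schedulers detect failed runs.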

From source file:com.avira.couchdoop.demo.BenchmarkUpdater.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf();

    ArgsHelper.loadCliArgsIntoHadoopConf(conf, ExportArgs.ARGS_LIST, args);
    ExportArgs exportArgs = new ExportArgs(conf);

    Job job;
    boolean exitStatus = true;
    try {
        job = configureJob(conf, exportArgs.getInput());
        exitStatus = job.waitForCompletion(true);
    } catch (Exception e) {
        LOGGER.error(ExceptionUtils.getStackTrace(e));
        // Without this, a job that throws would still exit with status 0.
        exitStatus = false;
    }

    return exitStatus ? 0 : 2;
}

From source file:com.avira.couchdoop.demo.ExportDriver.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    if (args.length != 1) {
        System.err.println("Usage: <input_path>");
        return 1;
    }
    String input = args[0];

    Job job = Job.getInstance(getConf());
    job.setJarByClass(ExportDriver.class);

    // User classpath takes precedence over the Hadoop classpath.
    // This is because the Couchbase client requires a newer version of
    // org.apache.httpcomponents:httpcore.
    //        job.setUserClassesTakesPrecedence(true);

    // Input
    FileInputFormat.setInputPaths(job, input);

    // Mapper
    job.setMapperClass(ExportMapper.class);

    // Reducer
    job.setNumReduceTasks(0);

    // Output
    job.setOutputKeyClass(String.class);
    job.setOutputValueClass(CouchbaseAction.class);
    job.setOutputFormatClass(CouchbaseOutputFormat.class);

    if (!job.waitForCompletion(true)) {
        return 2;
    }

    return 0;
}
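
Setting the number of reduce tasks to 0 makes this a map-only job: mapper output goes straight to the configured OutputFormat and the shuffle/sort phase is skipped entirely, which is the usual choice for pure export jobs like this one.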

From source file:com.avira.couchdoop.demo.ImportDriver.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    if (args.length != 1) {
        System.err.println("Usage: <output_path>");
        return 1;
    }
    String output = args[0];

    Job job = Job.getInstance(getConf());
    job.setJarByClass(this.getClass());

    // User classpath takes precedence over the Hadoop classpath.
    // This is because the Couchbase client requires a newer version of
    // org.apache.httpcomponents:httpcore.
    job.setUserClassesTakesPrecedence(true);

    // Input
    job.setInputFormatClass(CouchbaseViewInputFormat.class);

    // Mapper
    job.setMapperClass(ImportMapper.class);

    // Reducer
    job.setNumReduceTasks(0);

    // Output
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileOutputFormat.setOutputPath(job, new Path(output));

    if (!job.waitForCompletion(true)) {
        return 2;
    }

    return 0;
}

From source file:com.avira.couchdoop.jobs.CouchbaseViewImporter.java

License:Apache License

@Override
public int run(String[] args) throws ArgsException {
    Configuration conf = getConf();

    ArgsHelper.loadCliArgsIntoHadoopConf(conf, ImportViewArgs.ARGS_LIST, args);
    ImportViewArgs importViewArgs = new ImportViewArgs(conf);

    Job job;
    boolean exitStatus = true;
    try {
        job = configureJob(conf, importViewArgs.getOutput());
        exitStatus = job.waitForCompletion(true);
    } catch (Exception e) {
        LOGGER.error(ExceptionUtils.getStackTrace(e));
        // Without this, a job that throws would still exit with status 0.
        exitStatus = false;
    }

    return exitStatus ? 0 : 2;
}

From source file:com.avira.couchdoop.jobs.CouchbaseViewToHBaseImporter.java

License:Apache License

@Override
public int run(String[] args) throws ArgsException {
    Configuration conf = getConf();

    ArgsHelper.loadCliArgsIntoHadoopConf(conf, ImportViewToHBaseArgs.ARGS_LIST, args);
    ImportViewToHBaseArgs importViewToHBaseArgs = new ImportViewToHBaseArgs(conf);

    Job job;
    boolean exitStatus = true;
    try {
        job = configureJob(conf, importViewToHBaseArgs.getTable());
        exitStatus = job.waitForCompletion(true);
    } catch (Exception e) {
        LOGGER.error(ExceptionUtils.getStackTrace(e));
        // Without this, a job that throws would still exit with status 0.
        exitStatus = false;
    }

    return exitStatus ? 0 : 2;
}

From source file:com.awcoleman.BouncyCastleGenericCDRHadoop.BasicDriverMapReduce.java

License:Apache License

public int run(String[] args) throws Exception {

    if (args.length < 2) {
        System.out.println("Missing input and output filenames. Exiting.");
        System.exit(1);
    }

    Job job = new Job(super.getConf());
    job.setJarByClass(BasicDriverMapReduce.class);
    job.setJobName("BasicDriver1");
    job.setMapperClass(BasicMapper.class);
    job.setReducerClass(BasicReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    job.setInputFormatClass(RawFileAsBinaryInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    return job.waitForCompletion(true) ? 0 : 1;
}
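
This example uses the Job(Configuration) constructor, which is deprecated in Hadoop 2 in favor of the static factory Job.getInstance(Configuration); the next example acknowledges the same constructor with @SuppressWarnings("deprecation").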

From source file:com.awcoleman.BouncyCastleGenericCDRHadoopWithWritable.BasicDriverMapReduce.java

License:Apache License

public int run(String[] args) throws Exception {

    if (args.length < 2) {
        System.out.println("Missing input and output filenames. Exiting.");
        System.exit(1);
    }

    @SuppressWarnings("deprecation")
    Job job = new Job(super.getConf());
    job.setJarByClass(BasicDriverMapReduce.class);
    job.setJobName("BasicDriverMapReduce");
    job.setMapperClass(BasicMapper.class);
    job.setReducerClass(BasicReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(CallDetailRecord.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    job.setInputFormatClass(RawFileAsBinaryInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    return job.waitForCompletion(true) ? 0 : 1;
}

From source file:com.awcoleman.ExampleJobSummaryLogWithOutput.BinRecToAvroRecDriver.java

License:Apache License

public int run(String[] args) throws Exception {

    String input = null;
    String output = null;

    if (args.length < 2) {
        System.err.printf("Usage: %s <input> <output>\n", this.getClass().getSimpleName());
        return -1;
    } else {
        input = args[0];
        output = args[1];
    }

    Job job = Job.getInstance(getConf(), "BinRecToAvroRecDriver");
    Configuration conf = job.getConfiguration();

    //Add job log to hold Driver logging (and any summary info about the dataset, job, or counters we want to write)
    String fapath = createTempFileAppender(job);

    //get schema
    Schema outSchema = ReflectData.get().getSchema(com.awcoleman.examples.avro.BinRecForPartitions.class);
    job.getConfiguration().set("outSchema", outSchema.toString());

    //Job conf settings
    job.setJarByClass(BinRecToAvroRecDriver.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setInputFormatClass(BinRecInputFormat.class);
    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    AvroJob.setOutputKeySchema(job, outSchema);

    AvroJob.setMapOutputKeySchema(job, Schema.create(Schema.Type.STRING));
    AvroJob.setMapOutputValueSchema(job, outSchema);

    //Job output compression
    FileOutputFormat.setCompressOutput(job, true);
    job.getConfiguration().set(AvroJob.CONF_OUTPUT_CODEC, DataFileConstants.DEFLATE_CODEC);

    //Input and Output Paths
    FileInputFormat.setInputPaths(job, new Path(input));
    Path outPath = new Path(output);
    FileOutputFormat.setOutputPath(job, outPath);
    outPath.getFileSystem(conf).delete(outPath, true);

    boolean jobCompletionStatus = job.waitForCompletion(true);

    //Print Custom Counters before exiting
    Counters counters = job.getCounters();
    for (MYJOB_CNTRS customCounter : MYJOB_CNTRS.values()) {
        Counter thisCounter = counters.findCounter(customCounter);
        System.out.println("Custom Counter " + customCounter + "=" + thisCounter.getValue());
    }

    long mycnt1 = job.getCounters()
            .findCounter("com.awcoleman.TestingGettingContainerLogger.BinRecToAvroRecDriver$MYJOB_CNTRS",
                    "MYCNT1")
            .getValue();
    long mycnt2 = job.getCounters()
            .findCounter("com.awcoleman.TestingGettingContainerLogger.BinRecToAvroRecDriver$MYJOB_CNTRS",
                    "MYCNT2")
            .getValue();
    long mycnt3 = job.getCounters()
            .findCounter("com.awcoleman.TestingGettingContainerLogger.BinRecToAvroRecDriver$MYJOB_CNTRS",
                    "MYCNT3")
            .getValue();

    long myfakekpi = mycnt1 - mycnt2;

    String msgMyfakekpi = "The Fake KPI of the Dataset: " + String.format("%,d", myfakekpi);
    System.out.println(msgMyfakekpi);
    logger.info(msgMyfakekpi);

    //Finished, so move job log to HDFS in _log dir, clean
    copyTempFileAppenderToHDFSOutpath(job, fapath, output);

    return jobCompletionStatus ? 0 : 1;
}

From source file:com.bah.applefox.main.plugins.fulltextindex.FTLoader.java

License:Apache License

/**
 * run takes the command-line args as arguments (in this case from a
 * configuration file), creates a new job, configures it, initiates it,
 * and waits for completion.
 *
 * @param args
 *            the command-line arguments (in this case from a configuration
 *            file)
 *
 * @return 0 if the job ran successfully and 1 if it did not
 */
public int run(String[] args) throws Exception {
    try {
        // Initialize variables
        FTLoader.articleFile = args[8];
        FTLoader.maxNGrams = Integer.parseInt(args[9]);
        FTLoader.stopWords = getStopWords();
        FTLoader.dTable = args[10];
        FTLoader.urlCheckedTable = args[11];
        FTLoader.divsFile = args[20];
        FTLoader.exDivs = getExDivs();

        // Give the job a name
        String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

        // Create job and set the jar
        Job job = new Job(getConf(), jobName);
        job.setJarByClass(this.getClass());

        String urlTable = args[5];

        job.setInputFormatClass(AccumuloInputFormat.class);
        InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), urlTable,
                new Authorizations());

        job.setMapperClass(MapperClass.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);

        job.setReducerClass(ReducerClass.class);
        job.setNumReduceTasks(Integer.parseInt(args[4]));

        job.setOutputFormatClass(AccumuloOutputFormat.class);
        job.setOutputKeyClass(Key.class);
        job.setOutputValueClass(Value.class);

        AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, urlTable);

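        // The boolean returned by waitForCompletion(true) already reflects
        // success; the job.isSuccessful() check below is equivalent.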
        job.waitForCompletion(true);

        return job.isSuccessful() ? 0 : 1;
    } catch (IOException | InterruptedException | ClassNotFoundException e) {
        // Multi-catch (Java 7+) collapses the three identical handlers;
        // passing the Throwable itself logs the full stack trace.
        log.error(e.getMessage() != null ? e.getMessage() : e.toString(), e);
    }
    return 1;
}