Example usage for org.apache.hadoop.conf Configuration setInt

List of usage examples for org.apache.hadoop.conf Configuration setInt

Introduction

On this page you can find example usage of org.apache.hadoop.conf Configuration.setInt.

Prototype

public void setInt(String name, int value) 

Document

Set the value of the name property to an int.
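
Before the full launcher examples below, here is a minimal, self-contained sketch of the call itself. The property name "example.num.workers" and the values are hypothetical, chosen only to illustrate that setInt stores an int under a key and that Configuration.getInt reads it back with a default:

import org.apache.hadoop.conf.Configuration;

public class SetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Store an int under a (hypothetical) property name.
        conf.setInt("example.num.workers", 8);

        // Read it back; the second argument is the default returned if the key is unset.
        int numWorkers = conf.getInt("example.num.workers", 1);
        System.out.println("example.num.workers = " + numWorkers);
    }
}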

Usage

From source file:edu.iu.daal_linreg.normaleq.LinRegDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.NUM_DEPVAR, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 3]);
    conf.set(HarpDAALConstants.TEST_TRUTH_PATH, args[init.getSysArgNum() + 4]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job linRegJob = init.createJob("linRegJob", LinRegDaalLauncher.class, LinRegDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = linRegJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        linRegJob.killJob();
        System.out.println("linRegJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_logitboost.LOGITBOOSTDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.NUM_CLASS, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.setInt(HarpDAALConstants.MAX_ITERATIONS, Integer.parseInt(args[init.getSysArgNum() + 3]));
    conf.setDouble(HarpDAALConstants.ACC_THRESHOLD, Double.parseDouble(args[init.getSysArgNum() + 4]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 5]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job logitboostJob = init.createJob("logitboostJob", LOGITBOOSTDaalLauncher.class,
            LOGITBOOSTDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = logitboostJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        logitboostJob.killJob();
        System.out.println("logitboostJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_mom.densedistri.MOMDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job momJob = init.createJob("momJob", MOMDaalLauncher.class, MOMDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = momJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        momJob.killJob();
        System.out.println("momJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_mom.MOMDaalLauncher.java

License:Apache License

private Job configureMOMJob(Path inputDir, int mem, int numMapTasks, int numThreadsPerWorker, Path modelDir,
        Path outputDir, Configuration configuration) throws IOException, URISyntaxException {

    // configuration.set(Constants.TEST_FILE_PATH, testDirPath);
    // configuration.set(Constants.TEST_TRUTH_PATH, testGroundTruthDirPath);
    configuration.setInt(Constants.NUM_MAPPERS, numMapTasks);
    configuration.setInt(Constants.NUM_THREADS, numThreadsPerWorker);
    // configuration.setInt(Constants.BATCH_SIZE, batchSize);

    Job job = Job.getInstance(configuration, "mom_job");
    JobConf jobConf = (JobConf) job.getConfiguration();

    jobConf.set("mapreduce.framework.name", "map-collective");

    jobConf.setInt("mapreduce.job.max.split.locations", 10000);

    // mapreduce.map.collective.memory.mb
    // 125000
    jobConf.setInt("mapreduce.map.collective.memory.mb", mem);

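    // Heap sizing: reserve ~2 GB of the collective memory budget, give half of the
    // remainder to the mapper heap (-Xmx/-Xms), and a quarter of that heap to the
    // young generation (-Xmn).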
    int xmx = (int) Math.ceil((mem - 2000) * 0.5);
    int xmn = (int) Math.ceil(0.25 * xmx);
    jobConf.set("mapreduce.map.collective.java.opts",
            "-Xmx" + xmx + "m -Xms" + xmx + "m" + " -Xmn" + xmn + "m");

    jobConf.setNumMapTasks(numMapTasks);

    FileInputFormat.setInputPaths(job, inputDir);
    FileOutputFormat.setOutputPath(job, outputDir);

    job.setInputFormatClass(MultiFileInputFormat.class);
    job.setJarByClass(MOMDaalLauncher.class);
    job.setMapperClass(MOMDaalCollectiveMapper.class);
    job.setNumReduceTasks(0);

    System.out.println("Launcher launched");
    return job;
}

From source file:edu.iu.daal_naive.csrdistri.NaiveDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.NUM_CLASS, Integer.parseInt(args[init.getSysArgNum()]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 1]);
    conf.set(HarpDAALConstants.TEST_TRUTH_PATH, args[init.getSysArgNum() + 2]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job nbJob = init.createJob("nbJob", NaiveDaalLauncher.class, NaiveDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = nbJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        nbJob.killJob();
        System.out.println("nbJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_naive.densedistri.NaiveDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();
    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();
    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.NUM_CLASS, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 3]);
    conf.set(HarpDAALConstants.TEST_TRUTH_PATH, args[init.getSysArgNum() + 4]);

    // config job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    Job naiveJob = init.createJob("naiveJob", NaiveDaalLauncher.class, NaiveDaalCollectiveMapper.class);

    // initialize centroids data
    JobConf thisjobConf = (JobConf) naiveJob.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    int nFeatures = Integer.parseInt(args[init.getSysArgNum() + 1]);

    //generate Data if required
    boolean generateData = Boolean.parseBoolean(args[init.getSysArgNum() + 5]);
    if (generateData) {
        Path inputPath = init.getInputPath();
        int nClass = Integer.parseInt(args[init.getSysArgNum() + 2]);
        int total_points = Integer.parseInt(args[init.getSysArgNum() + 6]);
        int total_files = Integer.parseInt(args[init.getSysArgNum() + 7]);

        Path testPath = new Path(args[init.getSysArgNum() + 3]);
        int total_test_points = Integer.parseInt(args[init.getSysArgNum() + 8]);
        Path testGroundTruthPath = new Path(args[init.getSysArgNum() + 4]);
        String tmpDirPathName = args[init.getSysArgNum() + 9];

        // replace it with naive specific data generator
        // generate training data
        DataGenerator.generateDenseDataAndIntLabelMulti(total_points, nFeatures, total_files, 2, 1, nClass, ",",
                inputPath, tmpDirPathName, fs);
        // generate test data a single file
        DataGenerator.generateDenseDataAndIntLabelMulti(total_test_points, nFeatures, 1, 2, 1, nClass, ",",
                testPath, tmpDirPathName, fs);
        // generate test groundtruth data a single file nFeature==1
        DataGenerator.generateDenseLabelMulti(total_test_points, 1, 1, nClass, ",", testGroundTruthPath,
                tmpDirPathName, fs);
    }

    // finish job
    boolean jobSuccess = naiveJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        naiveJob.killJob();
        System.out.println("naiveJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_naive.NaiveDaalLauncher.java

License:Apache License

private Job configureNaiveJob(Path inputDir, String testDirPath, String testGroundTruthDirPath, int mem,
        int vecsize, int num_class, int num_test, int numMapTasks, int numThreadsPerWorker, Path modelDir,
        Path outputDir, Configuration configuration) throws IOException, URISyntaxException {

    configuration.set(Constants.TEST_FILE_PATH, testDirPath);
    configuration.set(Constants.TEST_TRUTH_PATH, testGroundTruthDirPath);
    configuration.setInt(Constants.NUM_MAPPERS, numMapTasks);
    configuration.setInt(Constants.NUM_THREADS, numThreadsPerWorker);
    configuration.setInt(Constants.VECTOR_SIZE, vecsize);
    configuration.setInt(Constants.NUM_CLASS, num_class);
    configuration.setInt(Constants.NUM_TEST, num_test);

    Job job = Job.getInstance(configuration, "naive_job");
    JobConf jobConf = (JobConf) job.getConfiguration();

    jobConf.set("mapreduce.framework.name", "map-collective");

    jobConf.setInt("mapreduce.job.max.split.locations", 10000);

    // mapreduce.map.collective.memory.mb
    // 125000
    jobConf.setInt("mapreduce.map.collective.memory.mb", mem);
    // mapreduce.map.collective.java.opts
    // -Xmx120000m -Xms120000m
    // int xmx = (mem - 5000) > (mem * 0.5)
    //   ? (mem - 5000) : (int) Math.ceil(mem * 0.5);
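    // Heap sizing: reserve ~5 GB of the collective memory budget, give half of the
    // remainder to the mapper heap (-Xmx/-Xms), and a quarter of that heap to the
    // young generation (-Xmn).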
    int xmx = (int) Math.ceil((mem - 5000) * 0.5);
    int xmn = (int) Math.ceil(0.25 * xmx);
    jobConf.set("mapreduce.map.collective.java.opts",
            "-Xmx" + xmx + "m -Xms" + xmx + "m" + " -Xmn" + xmn + "m");

    jobConf.setInt("mapred.task.timeout", 1800000);

    jobConf.setNumMapTasks(numMapTasks);

    FileInputFormat.setInputPaths(job, inputDir);
    FileOutputFormat.setOutputPath(job, outputDir);

    job.setInputFormatClass(MultiFileInputFormat.class);
    job.setJarByClass(NaiveDaalLauncher.class);
    job.setMapperClass(NaiveDaalCollectiveMapper.class);
    job.setNumReduceTasks(0);

    System.out.println("Launcher launched");
    return job;
}

From source file:edu.iu.daal_nn.NNDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.BATCH_SIZE, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 3]);
    conf.set(HarpDAALConstants.TEST_TRUTH_PATH, args[init.getSysArgNum() + 4]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job nnJob = init.createJob("nnJob", NNDaalLauncher.class, NNDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = nnJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        nnJob.killJob();
        System.out.println("nnJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_normalization.MinMaxDenseBatch.MinMaxDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setDouble(Constants.UPPER_BOUND, Double.parseDouble(args[init.getSysArgNum() + 1]));
    conf.setDouble(Constants.LOWER_BOUND, Double.parseDouble(args[init.getSysArgNum() + 2]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job minmaxJob = init.createJob("minmaxJob", MinMaxDaalLauncher.class, MinMaxDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = minmaxJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        minmaxJob.killJob();
        System.out.println("minmaxJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_normalization.ZscoreDenseBatch.ZSEDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job zscoreJob = init.createJob("zscoreJob", ZSEDaalLauncher.class, ZSEDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = zscoreJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        zscoreJob.killJob();
        System.out.println("zscoreJob failed");
    }

    return 0;
}