Example usage for org.apache.hadoop.conf.Configuration.setInt

List of usage examples for org.apache.hadoop.conf.Configuration.setInt

Introduction

On this page you can find usage examples for org.apache.hadoop.conf.Configuration.setInt.

Prototype

public void setInt(String name, int value) 

Document

Set the value of the name property to an int.
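
As a quick, self-contained sketch (the property key and the values below are made up for illustration and do not come from the projects listed under Usage), setInt stores the integer under the given property name, and Configuration.getInt reads it back, returning the supplied default when the key is absent:

import org.apache.hadoop.conf.Configuration;

public class SetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // store an int under a property name (illustrative key)
        conf.setInt("example.feature.dim", 20);

        // read it back; the second argument is the default returned when the key is unset
        int featureDim = conf.getInt("example.feature.dim", -1); // -> 20
        int missing = conf.getInt("example.not.set", 7);         // -> 7

        System.out.println(featureDim + " " + missing);
    }
}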

Usage

From source file: edu.iu.daal_outlier.multidensebatch.ODMultiDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job odJob = init.createJob("odJob", ODMultiDaalLauncher.class, ODMultiDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = odJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        odJob.killJob();
        System.out.println("odJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_outlier.unidensebatch.ODUniDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job odJob = init.createJob("odJob", ODUniDaalLauncher.class, ODUniDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = odJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        odJob.killJob();
        System.out.println("odJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_pca.cordensedistr.PCADaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {
    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();
    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));

    // config job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    Job pcaJob = init.createJob("pcaJob", PCADaalLauncher.class, PCADaalCollectiveMapper.class);

    // get the job configuration and file system handles
    JobConf thisjobConf = (JobConf) pcaJob.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    int nFeatures = Integer.parseInt(args[init.getSysArgNum() + 1]);
    Path workPath = init.getWorkPath();

    //generate Data if required
    boolean generateData = Boolean.parseBoolean(args[init.getSysArgNum() + 2]);
    if (generateData) {
        Path inputPath = init.getInputPath();
        int total_points = Integer.parseInt(args[init.getSysArgNum() + 3]);
        int total_files = Integer.parseInt(args[init.getSysArgNum() + 4]);
        String tmpDirPathName = args[init.getSysArgNum() + 5];

        DataGenerator.generateDenseDataMulti(total_points, nFeatures, total_files, 2, 1, ",", inputPath,
                tmpDirPathName, fs);
    }

    // finish job
    boolean jobSuccess = pcaJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        pcaJob.killJob();
        System.out.println("pcaJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_pca.PCADaalLauncher.java

License: Apache License

private Job configurePCAJob(int numOfDataPoints, int vectorSize, int numPointFiles, int numMapTasks,
        int numThreads, int mem, Path dataDir, Path outDir, Configuration configuration)
        throws IOException, URISyntaxException {
    Job job = Job.getInstance(configuration, "PCA_job");

    FileInputFormat.setInputPaths(job, dataDir);
    FileOutputFormat.setOutputPath(job, outDir);

    job.setInputFormatClass(MultiFileInputFormat.class);
    job.setJarByClass(PCADaalLauncher.class);
    job.setMapperClass(PCADaalCollectiveMapper.class);
    org.apache.hadoop.mapred.JobConf jobConf = (JobConf) job.getConfiguration();
    jobConf.set("mapreduce.framework.name", "map-collective");
    jobConf.setNumMapTasks(numMapTasks);
    jobConf.setInt("mapreduce.job.max.split.locations", 10000);

    // mapreduce.map.collective.memory.mb
    // 125000
    jobConf.setInt("mapreduce.map.collective.memory.mb", mem);
    int xmx = (int) Math.ceil((mem - 2000) * 0.5);
    int xmn = (int) Math.ceil(0.25 * xmx);
    jobConf.set("mapreduce.map.collective.java.opts",
            "-Xmx" + xmx + "m -Xms" + xmx + "m" + " -Xmn" + xmn + "m");

    job.setNumReduceTasks(0);
    Configuration jobConfig = job.getConfiguration();
    jobConfig.setInt(Constants.POINTS_PER_FILE, numOfDataPoints / numPointFiles);
    jobConfig.setInt(Constants.VECTOR_SIZE, vectorSize);
    jobConfig.setInt(Constants.NUM_MAPPERS, numMapTasks);
    jobConfig.setInt(Constants.NUM_THREADS, numThreads);
    return job;
}
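
For reference, with mem = 125000 (the value noted in the comment above) the heap sizing works out to xmx = ceil((125000 - 2000) * 0.5) = 61500 and xmn = ceil(0.25 * 61500) = 15375, so the collective mappers run with -Xmx61500m -Xms61500m -Xmn15375m: roughly half of the container memory goes to the heap, and a quarter of the heap to the young generation.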

From source file: edu.iu.daal_pca.svddensedistr.PCADaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {
    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));

    // config job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    Job pcaJob = init.createJob("pcaJob", PCADaalLauncher.class, PCADaalCollectiveMapper.class);

    // get the job configuration and file system handles
    JobConf thisjobConf = (JobConf) pcaJob.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    int nFeatures = Integer.parseInt(args[init.getSysArgNum() + 1]);
    Path workPath = init.getWorkPath();

    //generate Data if required
    boolean generateData = Boolean.parseBoolean(args[init.getSysArgNum() + 2]);
    if (generateData) {
        Path inputPath = init.getInputPath();
        int total_points = Integer.parseInt(args[init.getSysArgNum() + 3]);
        int total_files = Integer.parseInt(args[init.getSysArgNum() + 4]);
        String tmpDirPathName = args[init.getSysArgNum() + 5];

        DataGenerator.generateDenseDataMulti(total_points, nFeatures, total_files, 2, 1, ",", inputPath,
                tmpDirPathName, fs);
    }

    // finish job
    boolean jobSuccess = pcaJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        pcaJob.killJob();
        System.out.println("pcaJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_pivoted_qr.PQRDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job pqrJob = init.createJob("pqrJob", PQRDaalLauncher.class, PQRDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = pqrJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        pqrJob.killJob();
        System.out.println("pqrJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_qr.QRDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();
    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job qrJob = init.createJob("qrJob", QRDaalLauncher.class, QRDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = qrJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        qrJob.killJob();
        System.out.println("qrJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_quality_metrics.LinRegMetrics.LINREGMESDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.NUM_DEPVAR, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.setInt(Constants.IBETA_ONE, Integer.parseInt(args[init.getSysArgNum() + 3]));
    conf.setInt(Constants.IBETA_TWO, Integer.parseInt(args[init.getSysArgNum() + 4]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job meJob = init.createJob("meJob", LINREGMESDaalLauncher.class, LINREGMESDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = meJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        meJob.killJob();
        System.out.println("meJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_quality_metrics.SVMMultiMetrics.SVMMultiMESDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.setInt(HarpDAALConstants.NUM_CLASS, Integer.parseInt(args[init.getSysArgNum() + 2]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 3]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job meJob = init.createJob("meJob", SVMMultiMESDaalLauncher.class, SVMMultiMESDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = meJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        meJob.killJob();
        System.out.println("meJob failed");
    }

    return 0;
}

From source file: edu.iu.daal_quantile.QTEDaalLauncher.java

License: Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job quantileJob = init.createJob("quantileJob", QTEDaalLauncher.class, QTEDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = quantileJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        quantileJob.killJob();
        System.out.println("quantileJob failed");
    }

    return 0;
}