Example usage for org.apache.hadoop.conf.Configuration.getInt

List of usage examples for org.apache.hadoop.conf.Configuration.getInt

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int.
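
For reference, here is a minimal, self-contained sketch of how getInt behaves. The property names example.num.threads and example.num.iterations are made up for this illustration and do not come from any of the projects quoted below.

import org.apache.hadoop.conf.Configuration;

public class GetIntExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // "example.num.threads" is a hypothetical property used only for this sketch.
        conf.setInt("example.num.threads", 8);

        // The property is set, so the stored value (8) is returned and the default is ignored.
        int numThreads = conf.getInt("example.num.threads", 4);

        // "example.num.iterations" was never set, so the supplied default (100) is returned.
        int numIterations = conf.getInt("example.num.iterations", 100);

        System.out.println("numThreads = " + numThreads);       // 8
        System.out.println("numIterations = " + numIterations); // 100
    }
}

Note that the default only applies when the property is missing; a value that is set but cannot be parsed as an int causes getInt to throw a NumberFormatException.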

Usage

From source file: edu.iu.lda.LDAMPCollectiveMapper.java

License: Apache License

/**
 * Mapper configuration.
 */
@Override
protected void setup(Context context) {
    LOG.info(
            "start setup: " + new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()));
    long startTime = System.currentTimeMillis();
    Configuration configuration = context.getConfiguration();
    numTopics = configuration.getInt(Constants.NUM_TOPICS, 100);
    alpha = configuration.getDouble(Constants.ALPHA, 0.1);
    beta = configuration.getDouble(Constants.BETA, 0.001);
    numIterations = configuration.getInt(Constants.NUM_ITERATIONS, 100);
    numThreads = configuration.getInt(Constants.NUM_THREADS, 16);
    scheduleRatio = configuration.getDouble(Constants.SCHEDULE_RATIO, 2.0);
    minBound = configuration.getInt(Constants.MIN_BOUND, Constants.TRAIN_MIN_THRESHOLD);
    maxBound = configuration.getInt(Constants.MAX_BOUND, Constants.TRAIN_MAX_THRESHOLD);
    if (minBound <= 0 || minBound > 100) {
        minBound = Constants.TRAIN_MIN_THRESHOLD;
    }
    if (maxBound <= 0 || maxBound > 100) {
        maxBound = Constants.TRAIN_MAX_THRESHOLD;
    }
    if (maxBound < minBound) {
        maxBound = minBound;
    }
    if (maxBound == 100) {
        minBound = 100;
        enableTuning = false;
    } else {
        enableTuning = true;
    }
    time = enableTuning ? 1000L : 1000000000L;
    hasOverTrained = false;
    lastUnderTrainIte = 0;
    breakPeriod = 0;
    modelDirPath = configuration.get(Constants.MODEL_DIR, "");
    printModel = configuration.getBoolean(Constants.PRINT_MODEL, false);
    printInterval = 10;
    freeInterval = 10;
    numModelSlices = 2;
    computeTime = 0L;
    waitTime = 0L;
    long endTime = System.currentTimeMillis();
    LOG.info("config (ms): " + (endTime - startTime));
    LOG.info("Num Topics " + numTopics);
    LOG.info("Alpha " + alpha);
    LOG.info("Beta " + beta);
    LOG.info("Num Iterations " + numIterations);
    LOG.info("numThreads\\scheduleRaito " + numThreads + "\\" + scheduleRatio);
    LOG.info("enableTuning\\Time\\Bounds " + enableTuning + "\\" + time + "\\" + minBound + "\\" + maxBound);
    LOG.info("Model Dir Path " + modelDirPath);
    LOG.info("Print Model " + printModel);
    LOG.info("Model Slices " + numModelSlices);
    LOG.info("Container Memory " + configuration.get("mapreduce.map.collective.memory.mb"));
    LOG.info("Java Memory " + configuration.get("mapreduce.map.collective.java.opts"));
}

From source file: edu.iu.mds.MDSAllgatherMapper.java

License: Apache License

public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    numPoints = conf.getInt(MDSConstants.NUMPOINTS, 10);
    xWidth = conf.getInt(MDSConstants.XWIDTH, 3);
    iteration = conf.getInt(MDSConstants.ITERATION, 1);
}

From source file: edu.iu.mds.MDSAllgatherMultiThreadMapper.java

License: Apache License

public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    numPoints = conf.getInt(MDSConstants.NUMPOINTS, 10);
    xWidth = conf.getInt(MDSConstants.XWIDTH, 3);
    iteration = conf.getInt(MDSConstants.ITERATION, 1);
    numMappers = conf.getInt(MDSConstants.NUM_MAPS, 2);
    partitionPerWorker = conf.getInt(MDSConstants.PARTITION_PER_WORKER, 1);
    totalPartitions = partitionPerWorker * numMappers;
}

From source file: edu.iu.pagerank.PRMultiThreadMapper.java

License: Apache License

public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    totalVtx = conf.getInt(PRConstants.TOTAL_VTX, 10);
    numMaps = conf.getInt(PRConstants.NUM_MAPS, 3);
    partitionPerWorker = conf.getInt(PRConstants.PARTITION_PER_WORKER, 8);
    iteration = conf.getInt(PRConstants.ITERATION, 1);
}

From source file: edu.iu.sgd.SGDCollectiveMapper.java

License: Apache License

/**
 * Mapper configuration.
 */
@Override
protected void setup(Context context) {
    LOG.info(
            "start setup: " + new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()));
    long startTime = System.currentTimeMillis();
    Configuration configuration = context.getConfiguration();
    r = configuration.getInt(Constants.R, 100);
    lambda = configuration.getDouble(Constants.LAMBDA, 0.001);
    epsilon = configuration.getDouble(Constants.EPSILON, 0.001);
    numIterations = configuration.getInt(Constants.NUM_ITERATIONS, 100);
    trainRatio = configuration.getInt(Constants.TRAIN_RATIO, Constants.TARGET_BOUND);
    if (trainRatio <= 0 || trainRatio > 100) {
        trainRatio = Constants.TARGET_BOUND;
    }
    if (trainRatio == 100) {
        enableTuning = false;
    } else {
        enableTuning = true;
    }
    time = enableTuning ? 1000L : 1000000000L;
    numThreads = configuration.getInt(Constants.NUM_THREADS, 16);
    scheduleRatio = configuration.getDouble(Constants.SCHEDULE_RATIO, 2.0);
    modelDirPath = configuration.get(Constants.MODEL_DIR, "");
    testFilePath = configuration.get(Constants.TEST_FILE_PATH, "");
    numModelSlices = 2;
    rmseIteInterval = 5;
    freeInterval = 20;
    rmse = 0.0;
    testRMSE = 0.0;
    computeTime = 0L;
    waitTime = 0L;

    totalNumV = 0L;
    totalNumCols = 0L;
    oneOverSqrtR = 1.0 / Math.sqrt(r);
    random = new Random(System.currentTimeMillis());
    long endTime = System.currentTimeMillis();
    LOG.info("config (ms): " + (endTime - startTime));
    LOG.info("R " + r);
    LOG.info("Lambda " + lambda);
    LOG.info("Epsilon " + epsilon);
    LOG.info("Num Iterations " + numIterations);
    LOG.info("Num Threads " + numThreads + " " + scheduleRatio);
    LOG.info("enableTuning\\Time\\Bound " + enableTuning + "\\" + time + "\\" + trainRatio);
    LOG.info("Model Slices " + numModelSlices);
    LOG.info("Model Dir Path " + modelDirPath);
    LOG.info("TEST FILE PATH " + testFilePath);
    LOG.info("Container Memory " + configuration.get("mapreduce.map.collective.memory.mb"));
    LOG.info("Java Memory " + configuration.get("mapreduce.map.collective.java.opts"));
}

From source file: edu.iu.subgraph.SCCollectiveMapper.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {

    LOG.info("start setup");

    Configuration configuration = context.getConfiguration();
    numMappers = configuration.getInt(SCConstants.NUM_MAPPERS, 10);
    templateFile = configuration.get(SCConstants.TEMPLATE_PATH);
    useLocalMultiThread = configuration.getBoolean(SCConstants.USE_LOCAL_MULTITHREAD, true);
    rotation_pipeline = configuration.getBoolean(SCConstants.ROTATION_PIPELINE, true);

    LOG.info("init templateFile");
    LOG.info(templateFile);

    numThreads = configuration.getInt(SCConstants.THREAD_NUM, 10);
    numCores = configuration.getInt(SCConstants.CORE_NUM, 24);
    affinity = configuration.get(SCConstants.THD_AFFINITY);
    tpc = configuration.getInt(SCConstants.TPC, 2);

    //always use the maximum hardware threads to load in data and convert data 
    harpThreads = Runtime.getRuntime().availableProcessors();
    LOG.info("Num Threads " + numThreads);
    LOG.info("Num harp load data threads " + harpThreads);

    send_array_limit = (configuration.getInt(SCConstants.SENDLIMIT, 250)) * 1024L * 1024L;

    numIteration = configuration.getInt(SCConstants.NUM_ITERATION, 10);
    LOG.info("Subgraph Counting Iteration: " + numIteration);

    numModelSlices = 2;
}

From source file: edu.rosehulman.CollocMapper.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration conf = context.getConfiguration();
    this.maxShingleSize = conf.getInt(MAX_SHINGLE_SIZE, DEFAULT_MAX_SHINGLE_SIZE);

    this.emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);

    if (log.isInfoEnabled()) {
        log.info("Max Ngram size is {}", this.maxShingleSize);
        log.info("Emit Unitgrams is {}", emitUnigrams);
    }
}

From source file: edu.rosehulman.TFPartialVectorReducer.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration conf = context.getConfiguration();

    dimension = conf.getInt(PartialVectorMerger.DIMENSION, Integer.MAX_VALUE);
    sequentialAccess = conf.getBoolean(PartialVectorMerger.SEQUENTIAL_ACCESS, false);
    namedVector = conf.getBoolean(PartialVectorMerger.NAMED_VECTOR, false);
    maxNGramSize = conf.getInt(DictionaryVectorizer.MAX_NGRAMS, maxNGramSize);

    //MAHOUT-1247
    Path dictionaryFile = HadoopUtil.getSingleCachedFile(conf);
    // key is word value is id
    for (Pair<Writable, IntWritable> record : new SequenceFileIterable<Writable, IntWritable>(dictionaryFile,
            true, conf)) {
        dictionary.put(record.getFirst().toString(), record.getSecond().get());
    }
}

From source file: edu.stolaf.cs.wmrserver.JobServiceHandler.java

License: Apache License

public JobServiceHandler(Configuration conf) throws IOException {
    _homeDir = getHome(conf);
    _tempDir = getTempDir(conf);
    _langSupportDir = new File(conf.get("wmr.lang.support.dir", "lang-support"));
    _enforceInputContainment = conf.getBoolean("wmr.input.containment.enforce", false);
    _disallowLocalInput = conf.getBoolean("wmr.input.disallow.local", true);
    _outputPageSize = getOutputPageSize(conf);
    _quotaEnabled = conf.getBoolean("wmr.quota.enable", true) && conf.getBoolean("wmr.quota.user.enable", true);
    _quotaAttempts = conf.getInt("wmr.quota.user.attempts", 20);
    _quotaDuration = conf.getInt("wmr.quota.user.duration", 10);

    // Resolve relative lang support dir
    if (!_langSupportDir.isAbsolute())
        _langSupportDir = new File(System.getProperty("wmr.home.dir"), _langSupportDir.toString());

    // Load language configuration
    File wmrConfFile = new File(_langSupportDir, LANG_CONF_FILE);
    if (!wmrConfFile.exists())
        throw new IOException("Language configuration could not be found: " + wmrConfFile.toString());
    try {
        _languageConf = new HierarchicalINIConfiguration(wmrConfFile);
    } catch (ConfigurationException ex) {
        throw new IOException("The language configuration could not be loaded.", ex);
    }

    _hadoopEngine = new HadoopEngine(conf);
    _testJobEngine = new TestJobEngine(conf);
}

From source file: edu.stolaf.cs.wmrserver.JobServiceHandler.java

License: Apache License

public static int getOutputPageSize(Configuration conf) {
    return conf.getInt("wmr.status.output.pagesize", 0x80000 /* 512 K */);
}