Example usage for org.apache.hadoop.conf Configuration getInt


Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.getInt.

Prototype

public int getInt(String name, int defaultValue) 

Document

Get the value of the name property as an int; if no such property exists, the provided default value is returned.
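
As a quick illustration (a minimal sketch, not taken from any of the projects below; the demo.* property names are made up for the example), getInt parses the stored string and falls back to the supplied default when the property is unset:

import org.apache.hadoop.conf.Configuration;

public class GetIntDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("demo.buffer.size", "128");

        // property present: the string value is parsed as an int
        int bufferSize = conf.getInt("demo.buffer.size", 64); // -> 128

        // property absent: the default is returned
        int retries = conf.getInt("demo.retries", 3); // -> 3

        System.out.println(bufferSize + " " + retries);
    }
}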

Usage

From source file:be.ugent.intec.halvade.utils.HalvadeConf.java

License:Open Source License

public static String[] getKnownSitesOnHDFS(Configuration conf) {
    int size = conf.getInt(numberOfSites, 0);
    String[] sites = new String[size];
    for (int i = 0; i < size; i++) {
        sites[i] = conf.get(sitesOnHDFSName + i);
    }
    return sites;
}
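
The getter above reads back a string list that was flattened into the Configuration as a count plus indexed keys. A sketch of the matching writer (hypothetical, mirroring the same numberOfSites and sitesOnHDFSName constants) stores the count with setInt:

public static void setKnownSitesOnHDFS(Configuration conf, String[] sites) {
    conf.setInt(numberOfSites, sites.length); // count read back by getInt above
    for (int i = 0; i < sites.length; i++) {
        conf.set(sitesOnHDFSName + i, sites[i]); // one indexed key per element
    }
}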

From source file:be.ugent.intec.halvade.utils.HalvadeConf.java

License:Open Source License

public static int getMapContainerCount(Configuration conf) {
    return conf.getInt(totalContainers, 1);
}

From source file:be.ugent.intec.halvade.utils.HalvadeConf.java

License:Open Source License

public static int getMapTasksLeft(Configuration conf) throws IOException, URISyntaxException {
    int containers = conf.getInt(totalContainers, 1);
    int tasks = 0;
    String filedir = conf.get(outdir) + tasksDone;
    FileSystem fs = FileSystem.get(new URI(filedir), conf);
    FileStatus[] files = fs.listStatus(new Path(filedir));
    for (FileStatus file : files) {
        if (!file.isDirectory()) {
            tasks++;
        }
    }
    int tasksLeft = Integer.parseInt(conf.get("mapred.map.tasks")) - tasks;
    Logger.DEBUG("containers left: " + tasksLeft);
    return tasksLeft;
}

From source file:be.ugent.intec.halvade.utils.HalvadeConf.java

License:Open Source License

public static SAMSequenceDictionary getSequenceDictionary(Configuration conf) throws IOException {
    int counter = conf.getInt(dictionaryCount, 0);
    SAMSequenceDictionary dict = new SAMSequenceDictionary();
    for (int i = 0; i < counter; i++) {
        String seqName = conf.get(dictionarySequenceName + i);
        int seqLength = conf.getInt(dictionarySequenceLength + i, 0);
        SAMSequenceRecord seq = new SAMSequenceRecord(seqName, seqLength);
        dict.addSequence(seq);
    }
    return dict;
}

From source file:be.ugent.intec.halvade.utils.HalvadeConf.java

License:Open Source License

public static int getMinChrLength(Configuration conf) {
    return conf.getInt(minChrSize, DEFAULT_MIN_CHR_SIZE);
}

From source file:boa.aggregators.MLAggregator.java

License:Apache License

public void saveModel(Object model) {
     FSDataOutputStream out = null;
     FileSystem fileSystem = null;
     Path filePath = null;
     try {
         JobContext context = (JobContext) getContext();
         Configuration configuration = context.getConfiguration();
         int boaJobId = configuration.getInt("boa.hadoop.jobid", 0);
         JobConf job = new JobConf(configuration);
         Path outputPath = FileOutputFormat.getOutputPath(job);
         fileSystem = outputPath.getFileSystem(context.getConfiguration());

         fileSystem.mkdirs(new Path("/boa", new Path("" + boaJobId)));
         filePath = new Path("/boa",
                 new Path("" + boaJobId, new Path(("" + getKey()).split("\\[")[0] + "ML.model")));

         if (fileSystem.exists(filePath))
             return;

         out = fileSystem.create(filePath);
         ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
         ObjectOutputStream objectOut = new ObjectOutputStream(byteOutStream);
         objectOut.writeObject(model);
         objectOut.close();

         byte[] serializedObject = byteOutStream.toByteArray();
         out.write(serializedObject, 0, serializedObject.length);

         this.collect(filePath.toString());

     } catch (Exception e) {
         e.printStackTrace();
     } finally {
         try {
             if (out != null)
                 out.close();
         } catch (final Exception e) {
             e.printStackTrace();
         }
     }
 }
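
A hypothetical inverse of saveModel (not part of the Boa source shown here) would open the same HDFS path and deserialize the object back:

public Object loadModel(Configuration configuration, int boaJobId, String key) throws Exception {
    // mirrors the path layout written by saveModel above
    Path filePath = new Path("/boa", new Path("" + boaJobId, new Path(key + "ML.model")));
    FileSystem fileSystem = filePath.getFileSystem(configuration);
    try (ObjectInputStream objectIn = new ObjectInputStream(fileSystem.open(filePath))) {
        return objectIn.readObject();
    }
}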

From source file:bsc.spark.examples.terasort.ehiggs.TeraScheduler.java

License:Apache License

public TeraScheduler(FileSplit[] realSplits, Configuration conf) throws IOException {
    this.realSplits = realSplits;
    this.slotsPerHost = conf.getInt(TTConfig.TT_MAP_SLOTS, 4);
    Map<String, Host> hostTable = new HashMap<String, Host>();
    splits = new Split[realSplits.length];
    for (FileSplit realSplit : realSplits) {
        Split split = new Split(realSplit.getPath().toString());
        splits[remainingSplits++] = split;
        for (String hostname : realSplit.getLocations()) {
            Host host = hostTable.get(hostname);
            if (host == null) {
                host = new Host(hostname);
                hostTable.put(hostname, host);
                hosts.add(host);
            }
            host.splits.add(split);
            split.locations.add(host);
        }
    }
}

From source file:cascading.stats.hadoop.CounterCache.java

License:Open Source License

protected CounterCache(CascadingStats stats, Configuration configuration) {
    this.stats = stats;
    this.timeout = configuration.getInt(COUNTER_TIMEOUT_PROPERTY, DEFAULT_TIMEOUT_TIMEOUT_SEC);
    this.maxFetchAttempts = configuration.getInt(COUNTER_FETCH_RETRIES_PROPERTY, DEFAULT_FETCH_RETRIES);
    this.maxAge = configuration.getInt(COUNTER_MAX_AGE_PROPERTY, DEFAULT_CACHED_AGE_MAX);
}
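
All three knobs are plain int properties, so a job can override them before the cache is constructed. A sketch, assuming the COUNTER_* property constants above are accessible (otherwise substitute the literal property keys):

Configuration configuration = new Configuration();
// values here are arbitrary; the DEFAULT_* fallbacks apply for anything left unset
configuration.setInt(COUNTER_TIMEOUT_PROPERTY, 30);      // seconds before a counter fetch times out
configuration.setInt(COUNTER_FETCH_RETRIES_PROPERTY, 5); // fetch attempts before giving up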

From source file:cascading.stats.hadoop.HadoopNodeCounterCache.java

License:Open Source License

protected HadoopNodeCounterCache(FlowNodeStats flowNodeStats, Configuration configuration) {
    super(flowNodeStats, configuration);
    this.flowNodeStats = flowNodeStats;

    // age matters here since we are aggregating task reports vs getting a pre-aggregated value at the node level
    this.maxAge = configuration.getInt(NODE_COUNTER_MAX_AGE_PROPERTY, DEFAULT_NODE_CACHED_AGE_MAX);
}

From source file:cascading.tuple.hadoop.util.DeserializerComparator.java

License:Open Source License

public static Comparator[] getFieldComparatorsFrom(Configuration conf, String name) {
    String value = conf.get(name);

    if (value == null)
        return new Comparator[conf.getInt(name + ".size", 1)];

    try {
        return HadoopUtil.deserializeBase64(value, conf, Fields.class).getComparators();
    } catch (IOException exception) {
        throw new CascadingException("unable to deserialize comparators for: " + name, exception);
    }
}
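
The null branch relies on a name + ".size" convention: the writer records how many comparators exist so the reader can size the array even when no serialized Fields value is present. A hypothetical writer-side helper (not part of Cascading's API) illustrating the convention:

public static void setFieldComparatorSize(Configuration conf, String name, Fields fields) {
    conf.setInt(name + ".size", fields.size()); // read back via getInt above
}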