Example usage for org.apache.hadoop.conf Configuration getBoolean

List of usage examples for org.apache.hadoop.conf Configuration getBoolean

Introduction

On this page you can find example usages of org.apache.hadoop.conf Configuration getBoolean.

Prototype

public boolean getBoolean(String name, boolean defaultValue) 

Document

Get the value of the name property as a boolean. If no such property exists, or if the stored value is not a valid boolean ("true" or "false", case-insensitive), then defaultValue is returned.
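
A minimal, self-contained sketch of these semantics. The property names below are made up for illustration; the fallback behavior (unset or unparseable values yield defaultValue) follows the Hadoop javadoc:

import org.apache.hadoop.conf.Configuration;

public class GetBooleanDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false); // skip loading default resources

        conf.set("demo.feature.enabled", "TRUE");  // matched case-insensitively
        conf.set("demo.feature.garbled", "yes");   // not a valid boolean string

        System.out.println(conf.getBoolean("demo.feature.enabled", false)); // true
        System.out.println(conf.getBoolean("demo.feature.garbled", false)); // false (default used)
        System.out.println(conf.getBoolean("demo.feature.missing", true));  // true  (unset, default used)
    }
}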

Usage

From source file: edu.usc.pgroup.louvain.hadoop.MapCommunity.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();

    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    nb_pass = configuration.getInt(LouvainMR.NB_PASS, 0);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    outpath = configuration.get(LouvainMR.OUT_PATH);

    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);

}

From source file: edu.usc.pgroup.louvain.hadoop.ReduceCommunity.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    Configuration configuration = context.getConfiguration();
    verbose = configuration.getBoolean(LouvainMR.VERBOSE, false);
    precision = configuration.getDouble(LouvainMR.PRECISION, 0.000001);
    display_level = configuration.getInt(LouvainMR.DISPLAY_LEVEL, -1);
    this.outpath = configuration.get(LouvainMR.OUT_PATH);
    System.out.println("verbose = " + verbose);
    System.out.println("display_level = " + display_level);
    System.out.println("outpath = " + outpath);

    super.setup(context);
}
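
Both setup() methods above read the same job-level keys. For context, here is a hedged sketch of the driver-side writes that would pair with those reads; the LouvainMR constant names are taken from the snippets, but their string values and the project's actual driver code are not shown here, so treat this as illustrative only (LouvainMR is assumed to be on the classpath):

import org.apache.hadoop.conf.Configuration;

public class LouvainDriverSketch {
    static Configuration buildConf() {
        Configuration conf = new Configuration();
        conf.setBoolean(LouvainMR.VERBOSE, true);         // read back via getBoolean(LouvainMR.VERBOSE, false)
        conf.setInt(LouvainMR.NB_PASS, 1);
        conf.setDouble(LouvainMR.PRECISION, 0.000001);
        conf.setInt(LouvainMR.DISPLAY_LEVEL, -1);
        conf.set(LouvainMR.OUT_PATH, "/tmp/louvain/out"); // hypothetical output path
        return conf;
    }
}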

From source file: eu.scape_project.tpid.TomarPrepareInputdata.java

License: Apache License

/**
 * Start Hadoop job
 *
 * @param conf Hadoop job configuration
 */
public static void startHadoopJob(Configuration conf) {
    try {
        Job job = new Job(conf, "tpid_" + conf.getInt("num_items_per_task", 0));
        if (conf.getBoolean("pseudo_distributed", false)) {
            job.getConfiguration().set("mapred.job.tracker", "local");
            job.getConfiguration().set("fs.default.name", "file:///");
        }
        job.setJarByClass(TomarPrepareInputdata.class);

        job.setMapperClass(TomarPrepareInputdata.ContainerProcessingMapper.class);
        // No reducer needed
        job.setNumReduceTasks(0);

        job.setInputFormatClass(TextInputFormat.class);

        MultipleOutputs.addNamedOutput(job, "keyfilmapping", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "tomarinput", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "error", TextOutputFormat.class, Text.class, Text.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(ObjectWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(ObjectWritable.class);

        // 'config' is a field of the enclosing class in the original source
        // (a job settings object), not the 'conf' parameter of this method.
        TextInputFormat.addInputPath(job, new Path(config.getInputStr()));
        String outpath = StringUtils.normdir(conf.get("joboutput_hdfs_path", "tpid_joboutput"))
                + System.currentTimeMillis();
        FileOutputFormat.setOutputPath(job, new Path(outpath));
        LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
        job.waitForCompletion(true);
        // print output path (taverna integration)
        System.out.print(outpath);
        System.exit(0);
    } catch (Exception e) {
        LOG.error("I/O error", e);
    }
}
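
Flags such as pseudo_distributed typically reach the Configuration from the command line. Below is a sketch of that plumbing, assuming the -D generic options are parsed with GenericOptionsParser; whether this project's driver actually does so is not shown above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class OptionParseDemo {
    public static void main(String[] args) throws Exception {
        // e.g. invoked with: -Dpseudo_distributed=true
        Configuration conf = new Configuration();
        new GenericOptionsParser(conf, args); // copies -D key=value pairs into conf
        System.out.println(conf.getBoolean("pseudo_distributed", false));
    }
}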

From source file: fi.tkk.ics.hadoop.bam.AnySAMInputFormat.java

License: Open Source License

/** Creates a new input format, reading {@link #TRUST_EXTS_PROPERTY} from
 * the given <code>Configuration</code>.
 */
public AnySAMInputFormat(Configuration conf) {
    this.formatMap = new HashMap<Path, SAMFormat>();
    this.conf = conf;
    this.trustExts = conf.getBoolean(TRUST_EXTS_PROPERTY, true);
    this.givenMap = false;
}

From source file: fi.tkk.ics.hadoop.bam.VCFInputFormat.java

License: Open Source License

/** Creates a new input format, reading {@link #TRUST_EXTS_PROPERTY} from
 * the given <code>Configuration</code>.
 */
public VCFInputFormat(Configuration conf) {
    this.formatMap = new HashMap<Path, VCFFormat>();
    this.conf = conf;
    this.trustExts = conf.getBoolean(TRUST_EXTS_PROPERTY, true);
    this.givenMap = false;
}
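
Both input formats consult the same flag with a default of true, i.e. file extensions are trusted unless the job says otherwise. A hedged sketch of turning that off; it assumes TRUST_EXTS_PROPERTY is a public constant on the class, which the javadoc reference suggests but the snippet does not prove:

import org.apache.hadoop.conf.Configuration;
import fi.tkk.ics.hadoop.bam.AnySAMInputFormat;

public class NoTrustExtsDemo {
    static AnySAMInputFormat newFormat() {
        Configuration conf = new Configuration();
        // false presumably forces format detection by file content instead of extension
        conf.setBoolean(AnySAMInputFormat.TRUST_EXTS_PROPERTY, false);
        return new AnySAMInputFormat(conf);
    }
}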

From source file: format.OverlapInputFormat.java

License: BSD License

@Override
protected List<FileStatus> listStatus(JobContext job) throws IOException {
    List<FileStatus> files = super.listStatus(job);
    List<FileStatus> results = new ArrayList<FileStatus>();
    //        Configuration conf = HadoopUtils.getConfiguration(job);
    Configuration conf = job.getConfiguration();
    boolean recursive = conf.getBoolean("mapred.input.dir.recursive", false);
    Iterator<FileStatus> it = files.iterator();
    while (it.hasNext()) {
        FileStatus fileStatus = it.next();
        FileSystem fs = fileStatus.getPath().getFileSystem(conf);
        addInputPath(results, fs, fileStatus, recursive);
    }

    LOG.debug("Total pcap input paths to process: " + results.size());
    return results;
}
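
The key read here, mapred.input.dir.recursive, is the pre-2.x spelling of a stock Hadoop property (later releases name it mapreduce.input.fileinputformat.input.dir.recursive). Enabling it on the job side might look like this sketch (the job name is made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class RecursiveInputDemo {
    static Job newJob() throws Exception {
        Job job = Job.getInstance(new Configuration(), "pcap-overlap");
        // makes the listStatus() above descend into subdirectories
        job.getConfiguration().setBoolean("mapred.input.dir.recursive", true);
        return job;
    }
}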

From source file: FormatStorage1.IFileInfo.java

License: Open Source License

public IFileInfo(Configuration conf) throws IOException {
    this.workStatus = ConstVar.WS_Init;
    this.conf = conf;
    fs = FileSystem.get(conf);

    this.confSegmentSize = conf.getLong("dfs.block.size", ConstVar.DefaultSegmentSize);
    this.confUnitSize = conf.getLong(ConstVar.ConfUnitSize, ConstVar.DefaultUnitSize);
    this.conf.setInt("io.compression.codec.lzo.buffersize", 128 * 1024);
    this.currentline = 0;
    this.printlog = conf.getBoolean("printlog", false);
}

From source file: fr.ens.biologie.genomique.eoulsan.modules.expression.hadoop.HTSeqCountMapper.java

License: LGPL

@Override
public void setup(final Context context) throws IOException, InterruptedException {

    EoulsanLogger.initConsoleHandler();
    getLogger().info("Start of setup()");

    try {

        final Configuration conf = context.getConfiguration();

        final URI[] localCacheFiles = context.getCacheFiles();

        if (localCacheFiles == null || localCacheFiles.length == 0) {
            throw new IOException("Unable to retrieve genome index");
        }

        if (localCacheFiles.length > 1) {
            throw new IOException("Retrieve more than one file in distributed cache");
        }

        getLogger().info("Genome index compressed file (from distributed cache): " + localCacheFiles[0]);

        // The same localCacheFiles array is re-checked before loading the annotation index
        if (localCacheFiles == null || localCacheFiles.length == 0) {
            throw new IOException("Unable to retrieve annotation index");
        }

        if (localCacheFiles.length > 1) {
            throw new IOException("Retrieve more than one file in distributed cache");
        }

        // Load features
        this.features.load(PathUtils.createInputStream(new Path(localCacheFiles[0]), conf));

        // Counter group
        this.counterGroup = conf.get(CommonHadoop.COUNTER_GROUP_KEY);
        if (this.counterGroup == null) {
            throw new IOException("No counter group defined");
        }

        // Get the genome description filename
        final String genomeDescFile = conf.get(ExpressionHadoopModule.GENOME_DESC_PATH_KEY);

        if (genomeDescFile == null) {
            throw new IOException("No genome desc file set");
        }

        // Load genome description object
        final GenomeDescription genomeDescription = GenomeDescription
                .load(PathUtils.createInputStream(new Path(genomeDescFile), conf));

        // Set the chromosomes sizes in the parser
        this.parser.getFileHeader().setSequenceDictionary(SAMUtils.newSAMSequenceDictionary(genomeDescription));

        // Get the "stranded" parameter
        this.stranded = StrandUsage.getStrandUsageFromName(conf.get(STRANDED_PARAM));

        // Get the "overlap mode" parameter
        this.overlapMode = OverlapMode.getOverlapModeFromName(conf.get(OVERLAP_MODE_PARAM));

        // Get the "no ambiguous cases" parameter
        this.removeAmbiguousCases = conf.getBoolean(REMOVE_AMBIGUOUS_CASES, true);

    } catch (IOException e) {
        getLogger().severe("Error while loading annotation data in Mapper: " + e.getMessage());
    }

    getLogger().info("End of setup()");
}

From source file: gobblin.util.limiter.stressTest.MRStressTest.java

License: Apache License

static Limiter createLimiter(Configuration configuration, SharedResourcesBroker<SimpleScopeType> broker) {
    try {
        Limiter limiter = new NoopLimiter();

        long localQps = configuration.getLong(LOCALLY_ENFORCED_QPS, 0);
        if (localQps > 0) {
            log.info("Setting up local qps " + localQps);
            limiter = new MultiLimiter(limiter, new RateBasedLimiter(localQps));
        }

        if (configuration.getBoolean(USE_THROTTLING_SERVER, false)) {
            log.info("Setting up remote throttling.");
            String resourceId = configuration.get(RESOURCE_ID);
            Limiter globalLimiter = broker.getSharedResource(new RestliLimiterFactory<SimpleScopeType>(),
                    new SharedLimiterKey(resourceId));
            limiter = new MultiLimiter(limiter, globalLimiter);
        }
        return limiter;
    } catch (NotConfiguredException nce) {
        throw new RuntimeException(nce);
    }
}
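
To exercise the remote-throttling branch, both keys read above have to be populated. A sketch, assuming the LOCALLY_ENFORCED_QPS, USE_THROTTLING_SERVER, and RESOURCE_ID constants are accessible on MRStressTest (their string values are not shown in the snippet, and the resource id below is hypothetical):

import org.apache.hadoop.conf.Configuration;

public class ThrottledStressConf {
    static Configuration build() {
        Configuration configuration = new Configuration();
        configuration.setLong(MRStressTest.LOCALLY_ENFORCED_QPS, 100L); // optional local rate limit
        configuration.setBoolean(MRStressTest.USE_THROTTLING_SERVER, true);
        configuration.set(MRStressTest.RESOURCE_ID, "stress-test-resource");
        return configuration;
    }
}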

From source file: gov.jgi.meta.exec.BlastCommand.java

License: Open Source License

/**
 * new blast command based on values stored in the configuration.
 * <p/>
 * Looks for the following config values: blast.commandline,
 * blast.commandpath, and blast.tmpdir, blast.cleanup
 *
 * @param config is the hadoop configuration with overriding values
 *               for commandline options and paths
 * @throws IOException if executable can not be found
 */
public BlastCommand(Configuration config) throws IOException {
    log.info("initializing");
    String c;

    log.info("initializing new blast command");

    if ((c = config.get("blast.commandline")) != null) {
        commandLine = c;
    }
    if ((c = config.get("blast.commandpath")) != null) {
        commandPath = c;
    }
    if ((c = config.get("formatdb.commandline")) != null) {
        formatdbCommandLine = c;
    }
    if ((c = config.get("formatdb.commandpath")) != null) {
        formatdbCommandPath = c;
    }

    if ((c = config.get("blast.tmpdir")) != null) {
        tmpDir = c;
    }

    docleanup = config.getBoolean("blast.cleanup", true);

    effectiveSize = config.getLong("blast.effectivedatabasesize", 0);
    useScaledEValue = config.getBoolean("blast.usescaledevalue", false);
    useEffectiveSize = config.getBoolean("blast.useeffectivesize", false);
    useEValue = config.getFloat("blast.useevalue", 10F);

    /*
     * do sanity check to make sure all paths exist
     */
    checkFileExists(commandLine);
    checkFileExists(commandPath);
    checkDirExists(tmpDir);

    /*
     * if all is good, create a working space inside tmpDir
     */

    tmpDirFile = MetaUtils.createTempDir("blast_", tmpDir);

    log.info("done initializing: tmp dir = " + tmpDirFile);
}
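
Note the two idioms side by side: the string options use get() with a null check so that unset keys keep the class's built-in defaults, while the boolean and numeric options push the default into the get* call itself. Overriding the cleanup flag from a driver might look like this sketch (property names are taken from the snippet above; the values are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

public class BlastCommandDemo {
    static BlastCommand newDebugCommand() throws IOException {
        Configuration config = new Configuration();
        config.setBoolean("blast.cleanup", false);  // keep the temporary working dir for inspection
        config.setBoolean("blast.usescaledevalue", true);
        return new BlastCommand(config);
    }
}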