Example usage for org.apache.hadoop.conf Configuration setIfUnset

List of usage examples for org.apache.hadoop.conf Configuration setIfUnset

Introduction

On this page you can find example usages of the org.apache.hadoop.conf.Configuration.setIfUnset method.

Prototype

public synchronized void setIfUnset(String name, String value) 

Source Link

Document

Sets a property if it is currently unset.

Usage

From source file: org.culturegraph.mf.cluster.job.ingest.WikipediaIngest.java

License:Apache License

@Override
protected Configuration prepareConf(final Configuration conf) {
    // Turn off speculative map tasks unless the caller configured them
    // explicitly — duplicate mappers would ingest the same XML slice twice.
    // NOTE(review): "mapred.map.tasks.speculative.execution" is the Hadoop 1
    // key (Hadoop 2+ maps it from "mapreduce.map.speculative") — confirm the
    // targeted Hadoop version still honors it.
    conf.setIfUnset("mapred.map.tasks.speculative.execution", "false");

    // Tell the XML input format which element delimits one record.
    conf.set(XmlInputFormat.END_TAG_KEY, END_TAG);
    conf.set(XmlInputFormat.START_TAG_KEY, START_TAG);

    // Layer the HBase defaults on top before handing the configuration back.
    return HBaseConfiguration.create(conf);
}

From source file: org.culturegraph.mf.cluster.job.match.CopyOfMatcher.java

License:Apache License

@Override
protected Configuration prepareConf(final Configuration conf) {
    // Build a human-readable job name out of the input table and morph definition.
    final String inputTable = getConf().get(ConfigConst.INPUT_TABLE);
    final String morphDefForName = getConf().get(ConfigConst.MORPH_DEF);
    setJobName(JOB_NAME + " '" + inputTable + "' with '" + morphDefForName + "'");

    addOptionalArguments(ConfigConst.ALGORITHM_NAME);
    addRequiredArguments(ConfigConst.MORPH_DEF, ConfigConst.INPUT_TABLE, ConfigConst.OUTPUT_TABLE);

    // Default the algorithm name to the one declared in the morph definition's
    // metadata, without overriding a value the caller set explicitly.
    final String morphLocation = conf.get(ConfigConst.MORPH_DEF);
    if (morphLocation != null) {
        // NOTE(review): getValue may return null if the morph declares no
        // "name" metadata; setIfUnset rejects null values — confirm upstream.
        conf.setIfUnset(ConfigConst.ALGORITHM_NAME,
                new Metamorph(morphLocation).getValue(Metamorph.METADATA, "name"));
    }

    // Layer the HBase defaults on top before handing the configuration back.
    return HBaseConfiguration.create(conf);
}

From source file: org.opencb.opencga.storage.hadoop.variant.HadoopVariantStorageEngine.java

License:Apache License

/**
 * Creates a new storage pipeline for loading variants into Hadoop/HBase.
 *
 * @param connected    whether a DB adaptor should be obtained (and its Hadoop
 *                     configuration reused); when {@code false} the adaptor is
 *                     {@code null} and the configuration is built from options
 * @param extraOptions per-call options layered on top of the engine-wide
 *                     variant options; may be {@code null}
 * @return a direct or standard Hadoop variant storage pipeline, depending on
 *         the {@code HADOOP_LOAD_DIRECT} option
 * @throws StorageEngineException if the pipeline cannot be created
 */
public AbstractHadoopVariantStoragePipeline newStorageETL(boolean connected,
        Map<? extends String, ?> extraOptions) throws StorageEngineException {
    // Start from the engine-wide variant options and overlay any extras.
    ObjectMap options = new ObjectMap(
            configuration.getStorageEngine(STORAGE_ENGINE_ID).getVariant().getOptions());
    if (extraOptions != null) {
        options.putAll(extraOptions);
    }
    boolean directLoad = options.getBoolean(HADOOP_LOAD_DIRECT, HADOOP_LOAD_DIRECT_DEFAULT);

    // Only obtain a DB adaptor when connected; prefer its Hadoop configuration
    // and fall back to building one from the options.
    VariantHadoopDBAdaptor dbAdaptor = connected ? getDBAdaptor() : null;
    Configuration hadoopConfiguration = dbAdaptor == null ? null : dbAdaptor.getConfiguration();
    if (hadoopConfiguration == null) {
        hadoopConfiguration = getHadoopConfiguration(options);
    }
    // Default the archive table compression to Snappy unless already set.
    hadoopConfiguration.setIfUnset(ArchiveDriver.CONFIG_ARCHIVE_TABLE_COMPRESSION, Algorithm.SNAPPY.getName());

    HBaseCredentials archiveCredentials = buildCredentials(
            getArchiveTableName(options.getInt(Options.STUDY_ID.key()), options));

    // Both pipelines take the same arguments; only the concrete class differs.
    if (directLoad) {
        return new HadoopDirectVariantStoragePipeline(configuration, storageEngineId, dbAdaptor,
                getMRExecutor(options), hadoopConfiguration, archiveCredentials,
                getVariantReaderUtils(hadoopConfiguration), options);
    }
    return new HadoopVariantStoragePipeline(configuration, storageEngineId, dbAdaptor,
            getMRExecutor(options), hadoopConfiguration, archiveCredentials,
            getVariantReaderUtils(hadoopConfiguration), options);
}