Example usage for org.apache.hadoop.conf Configuration iterator

List of usage examples for org.apache.hadoop.conf Configuration iterator

Introduction

On this page you can find usage examples for org.apache.hadoop.conf.Configuration#iterator().

Prototype

@Override
public Iterator<Map.Entry<String, String>> iterator() 

Document

Get an Iterator to go through the list of String key-value pairs in the configuration.
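
For orientation, here is a minimal, self-contained sketch of iterating over every key-value pair (it assumes only hadoop-common on the classpath; the class name is illustrative):

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

public class ConfigurationIteratorDemo {
    public static void main(String[] args) {
        // false: skip loading the default resources (core-default.xml, core-site.xml)
        Configuration conf = new Configuration(false);
        conf.set("example.key", "example.value");

        // Configuration implements Iterable<Map.Entry<String, String>>,
        // so the enhanced for loop delegates to iterator()
        for (Map.Entry<String, String> entry : conf) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}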

Usage

From source file:edu.mit.ll.graphulo.pig.backend.AbstractGraphuloStorage.java

License:Apache License

/**
 * Replaces the given entries in the configuration by clearing the
 * Configuration and re-adding the elements that aren't in the map of
 * entries to unset.
 *
 * @param conf the Configuration to modify in place
 * @param entriesToUnset keys to remove; the map's values are ignored
 */
protected void clearUnset(Configuration conf, Map<String, String> entriesToUnset) {
    // Gets a copy of the entries
    Iterator<Entry<String, String>> originalEntries = conf.iterator();
    conf.clear();

    while (originalEntries.hasNext()) {
        Entry<String, String> originalEntry = originalEntries.next();

        // Only re-set() the pairs that aren't in our collection of keys to
        // unset
        if (!entriesToUnset.containsKey(originalEntry.getKey())) {
            conf.set(originalEntry.getKey(), originalEntry.getValue());
        }
    }
}
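
A hypothetical call, for illustration (invoked from a subclass, since the method is protected; java.util.Collections is assumed imported, and the key is illustrative):

// Drop "mapreduce.job.name" while keeping every other entry;
// only the map's keys are consulted, so a null value is fine
Map<String, String> toUnset = Collections.singletonMap("mapreduce.job.name", null);
clearUnset(conf, toUnset);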

From source file:gobblin.util.JobConfigurationUtils.java

License:Apache License

/**
 * Put all configuration properties in a given {@link Configuration} object into a given
 * {@link Properties} object.
 *
 * @param configuration the given {@link Configuration} object
 * @param properties the given {@link Properties} object
 */
public static void putConfigurationIntoProperties(Configuration configuration, Properties properties) {
    for (Iterator<Entry<String, String>> it = configuration.iterator(); it.hasNext();) {
        Entry<String, String> entry = it.next();
        properties.put(entry.getKey(), entry.getValue());
    }
}
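
A short usage sketch (the fs.defaultFS lookup is just an example key):

Configuration configuration = new Configuration(); // loads default resources
Properties properties = new Properties();
JobConfigurationUtils.putConfigurationIntoProperties(configuration, properties);
// properties now holds a flat String-to-String copy of every entry
System.out.println(properties.getProperty("fs.defaultFS"));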

From source file:hitune.analysis.mapreduce.AnalysisConfiguration.java

License:Apache License

/**
 * Loads the given configuration folder recursively.
 *
 * If a list.xml file exists in the folder, descend one level further.
 *
 * @param folder the folder to scan
 * @param conf the parent Configuration to inherit settings from
 */
public void LoadConfiguration(String folder, Configuration conf) {
    log.debug("scan folder: " + folder);
    File listfile = new File(folder + "/list.xml");

    File conffile = new File(folder + "/conf.xml");
    Configuration newconf = new Configuration(conf);
    newconf.addResource(new Path(conffile.getAbsolutePath()));

    try {

        if (listfile.exists()) {
            Configuration tempconf = new Configuration(newconf);
            tempconf.addResource(new Path(listfile.getAbsolutePath()));

            Configuration _conf = new Configuration(false);
            _conf.addResource(new Path(listfile.getAbsolutePath()));
            Iterator<Map.Entry<String, String>> iter = _conf.iterator();
            while (iter.hasNext()) {
                Map.Entry<String, String> pairs = iter.next();
                String key = pairs.getKey();
                LoadConfiguration(tempconf.get(key), newconf);
            }
        } else {
            String datasource = newconf.get(AnalysisProcessorConfiguration.datasource);
            log.debug("datasource: " + datasource);
            List<Configuration> cflist = null;
            if (conflist.containsKey(datasource)) {
                cflist = conflist.get(datasource);
            } else {
                cflist = new ArrayList<Configuration>();
            }
            cflist.add(newconf);
            log.debug("add conf: " + newconf);
            conflist.put(datasource, cflist);
        }
    } catch (Exception e) {
        // Log the failure and continue rather than propagating it
        e.printStackTrace();
    }
}
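
The new Configuration(false) idiom above is worth noting: passing false skips Hadoop's default resources, so iterating over the object yields only the keys from explicitly added files. A minimal sketch (the path is hypothetical):

Configuration onlyListed = new Configuration(false); // no core-default.xml / core-site.xml
onlyListed.addResource(new Path("/path/to/list.xml")); // hypothetical path
for (Map.Entry<String, String> entry : onlyListed) {
    System.out.println("listed key: " + entry.getKey());
}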

From source file:org.apache.accumulo.pig.TestUtils.java

License:Apache License

public static void assertConfigurationsEqual(Configuration expectedConf, Configuration actualConf) {
    // Basically, for all the keys in expectedConf, make sure the values in both confs are equal
    Iterator<Entry<String, String>> expectedIter = expectedConf.iterator();
    while (expectedIter.hasNext()) {
        Entry<String, String> e = expectedIter.next();
        assertEquals(actualConf.get(e.getKey()), expectedConf.get(e.getKey()));
    }

    // Basically, for all the keys in actualConf, make sure the values in both confs are equal
    Iterator<Entry<String, String>> actualIter = actualConf.iterator();
    while (actualIter.hasNext()) {
        Entry<String, String> e = actualIter.next();
        assertEquals(actualConf.get(e.getKey()), expectedConf.get(e.getKey()));
    }
}
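
Both passes matter: the first catches keys missing from actualConf, the second catches extras. A hedged sketch of what the second pass detects (assertEquals is assumed statically imported, as in the original test):

Configuration expected = new Configuration(false);
expected.set("a", "1");
Configuration actual = new Configuration(false);
actual.set("a", "1");
actual.set("b", "2"); // extra key: actual.get("b") is "2" but expected.get("b") is null
assertConfigurationsEqual(expected, actual); // fails on key "b"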

From source file:org.apache.ambari.servicemonitor.utils.DFSUtils.java

License:Apache License

public static TreeSet<String> sortedConfigList(Configuration conf) {
    TreeSet<String> keys = new TreeSet<String>();
    Iterator<Map.Entry<String, String>> iterator = conf.iterator();
    while (iterator.hasNext()) {
        Map.Entry<String, String> next = iterator.next();
        keys.add(next.getKey());
    }
    return keys;
}
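
Since TreeSet iterates in natural key order, dumping the sorted key list is a one-liner (illustrative usage):

for (String key : DFSUtils.sortedConfigList(new Configuration())) {
    System.out.println(key);
}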

From source file:org.apache.connectors.td.TeradataManager.java

License:Apache License

@Override
public void exportTable(ExportJobContext context) throws IOException, ExportException {

    context.setConnManager(this);
    Configuration conf = context.getOptions().getConf();
    Iterator<Map.Entry<String, String>> i = conf.iterator();

    Options tdOpts = TeradataExportOptions.create();
    try {
        tdOpts.set("classname", "com.teradata.jdbc.TeraDriver");
        tdOpts.set("username", context.getOptions().getUsername());
        tdOpts.set("password", context.getOptions().getPassword());
        tdOpts.set("url", context.getOptions().getConnectString());
        tdOpts.set("targetpaths", context.getOptions().getExportDir());
        tdOpts.set("targettable", context.getOptions().getTableName());

        if (context.getOptions().doHiveImport()) {
            LOG.info("Hive target set.");
            tdOpts.set("jobtype", "hive");
        }
        while (i.hasNext()) {
            Map.Entry<String, String> val = i.next();
            LOG.debug(val.getKey() + " = " + val.getValue());

            if (val.getKey().startsWith(TD_PREFIX)) {
                tdOpts.set(val.getKey().substring(TD_PREFIX.length()), val.getValue());
            }
        }
    } catch (ParameterValidationException e) {
        throw new IOException(e);
    }

    // TODO: Next version should instantiate the job and its context.

    String[] parameters = tdOpts.build();
    LOG.debug("Parameters to TD: " + join(parameters, " "));
    ConnectorExportTool.main(parameters);
}
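
The prefix-filtering loop above is a common way to pass namespaced options through a Configuration. A standalone sketch of the same pattern (the "td." prefix value is an assumption, since the excerpt does not show TD_PREFIX's definition; java.util.HashMap is assumed imported):

static final String TD_PREFIX = "td."; // assumed value, not shown in the excerpt

Map<String, String> tdOptions = new HashMap<String, String>();
for (Map.Entry<String, String> e : conf) {
    if (e.getKey().startsWith(TD_PREFIX)) {
        // strip the prefix before handing the option downstream
        tdOptions.put(e.getKey().substring(TD_PREFIX.length()), e.getValue());
    }
}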

From source file:org.apache.connectors.td.TeradataManager.java

License:Apache License

@Override
public void importTable(ImportJobContext context) throws IOException, ImportException {

    context.setConnManager(this);
    Configuration conf = context.getOptions().getConf();
    Iterator<Map.Entry<String, String>> i = conf.iterator();

    Options tdOpts;

    tdOpts = TeradataImportOptions.create(context);

    try {

        tdOpts.mapOptions();

        while (i.hasNext()) {
            Map.Entry<String, String> val = i.next();
            LOG.debug(val.getKey() + " = " + val.getValue());

            if (val.getKey().startsWith(TD_PREFIX)) {
                tdOpts.set(val.getKey().substring(TD_PREFIX.length()), val.getValue());
            }
        }
    } catch (Exception e) {
        throw new IOException(e);
    }

    String[] parameters = tdOpts.build();

    LOG.info("Parameters to TD: " + join(parameters, " "));

    ConnectorImportTool.main(parameters);
}

From source file:org.apache.crunch.io.avro.trevni.TrevniRecordWriter.java

License:Apache License

static ColumnFileMetaData filterMetadata(final Configuration configuration) {
    final ColumnFileMetaData meta = new ColumnFileMetaData();
    Iterator<Entry<String, String>> keyIterator = configuration.iterator();

    while (keyIterator.hasNext()) {
        Entry<String, String> confEntry = keyIterator.next();
        if (confEntry.getKey().startsWith(META_PREFIX)) {
            meta.put(confEntry.getKey().substring(META_PREFIX.length()),
                    confEntry.getValue().getBytes(MetaData.UTF8));
        }
    }

    return meta;
}

From source file:org.apache.hconf2prop.Main.java

License:Apache License

public static void convert(String arg) throws IOException {

    File input = new File(arg);
    if (!input.isFile() || !input.canRead()) {
        throw new IllegalArgumentException(arg);
    }

    Configuration conf = new Configuration();
    conf.addResource(new Path(input.toURI()));

    TreeMap<String, String> map = new TreeMap<String, String>();

    Iterator<Map.Entry<String, String>> entries = conf.iterator();
    while (entries.hasNext()) {
        Map.Entry<String, String> entry = entries.next();
        map.put(entry.getKey(), entry.getValue());
    }

    String base = FilenameUtils.getBaseName(input.getName());
    File output = new File(input.getParentFile(), base + ".prop");
    // try-with-resources closes the writer even if an entry fails to write
    try (BufferedWriter buffer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(output)))) {
        // navigableKeySet() iterates in ascending key order
        for (String key : map.navigableKeySet()) {
            buffer.write(String.format("%s = %s", key, map.get(key)));
            buffer.newLine();
        }
    }

}
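
A hedged usage sketch (the path is illustrative): convert writes a .prop file next to the input, one sorted "key = value" line per entry:

Main.convert("/etc/hadoop/conf/core-site.xml");
// writes /etc/hadoop/conf/core-site.prop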

From source file:org.apache.kylin.source.kafka.config.KafkaConsumerProperties.java

License:Apache License

public static Properties extractKafkaConfigToProperties(Configuration configuration) {
    Set<String> configNames = new HashSet<String>();
    try {
        configNames = ConsumerConfig.configNames();
    } catch (Error e) {
        // ConsumerConfig.configNames() exists only in Kafka 0.10.1.0+; on older
        // clients the call fails with NoSuchMethodError (an Error, not an Exception),
        // so fall back to a hard-coded list of consumer config names
        String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms,"
                + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type,"
                + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password,"
                + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms,"
                + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes,"
                + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms,"
                + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset")
                        .split(",");
        configNames.addAll(Arrays.asList(configNamesArray));
    }

    Properties result = new Properties();
    for (Iterator<Map.Entry<String, String>> it = configuration.iterator(); it.hasNext();) {
        Map.Entry<String, String> entry = it.next();
        String key = entry.getKey();
        String value = entry.getValue();
        if (configNames.contains(key)) {
            result.put(key, value);
        }
    }
    return result;
}
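
A sketch of the filter in action (bootstrap.servers is a real Kafka consumer setting; the surrounding setup is illustrative):

Configuration conf = new Configuration(false);
conf.set("bootstrap.servers", "broker1:9092"); // recognized consumer key: kept
conf.set("mapreduce.job.name", "kylin-build"); // not a consumer key: filtered out
Properties consumerProps = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);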