Example usage for java.util Properties put

Introduction

This page lists usage examples for java.util.Properties.put, collected from the source files named below.

Prototype

@Override
public synchronized Object put(Object key, Object value)
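
Because Properties extends Hashtable, put accepts arbitrary Objects as keys and values, while getProperty only returns values that are actually Strings. The minimal sketch below (not taken from any of the projects listed here) illustrates that difference:

import java.util.Properties;

public class PutDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("host", "localhost");                 // same effect as setProperty for String values
        props.put("port", 8080);                        // legal, but invisible to getProperty
        System.out.println(props.getProperty("host"));  // localhost
        System.out.println(props.getProperty("port"));  // null, because the value is an Integer
        System.out.println(props.get("port"));          // 8080, via the underlying Hashtable view
    }
}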

Usage

From source file:Main.java

public static Properties extractProperties(Node node) {
    Properties props = new Properties();
    NamedNodeMap attributes = node.getAttributes();
    if (attributes != null) {
        for (int i = 0; i < attributes.getLength(); i++) {
            Node item = attributes.item(i);
            props.put(item.getNodeName(), item.getNodeValue());
        }
    }
    return props;
}
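
A hedged usage sketch for the helper above (assuming it lives in a class named Main, as the source file name suggests): parse a small XML snippet and copy the root element's attributes into a Properties object.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Node;

public class ExtractPropertiesDemo {
    public static void main(String[] args) throws Exception {
        String xml = "<server host=\"localhost\" port=\"8080\"/>";
        Node root = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)))
                .getDocumentElement();

        // Each attribute becomes a String key/value pair in the Properties object
        Properties props = Main.extractProperties(root);
        System.out.println(props.getProperty("host")); // localhost
        System.out.println(props.getProperty("port")); // 8080
    }
}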

From source file:com.addthis.hydra.kafka.consumer.ConsumerUtils.java

public static ConsumerConfig newConsumerConfig(String zookeeper, Map<String, String> overrides) {
    Properties config = new Properties();
    config.put("zookeeper.connect", zookeeper);
    config.put("num.consumer.fetchers", "1");
    config.putAll(overrides);
    return new ConsumerConfig(config);
}

From source file:CollectionUtils.java

/**
 * Converts the specified map to {@link java.util.Properties}. Keys and String values
 * are copied unchanged; other value types are {@link Object#toString() converted to String}.
 * @param map the map to convert.
 * @return the converted map as Properties.
 */
public static Properties asProperties(Map<String, ?> map) {
    Properties props = new Properties();
    for (Map.Entry<String, ?> entry : map.entrySet()) {
        Object v = entry.getValue();
        if (v != null) {
            props.put(entry.getKey(), v.toString());
        }
    }
    return props;
}
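
A brief usage sketch (assuming the helper above sits in a class named CollectionUtils, as the source file name suggests): non-String values end up as their toString() form, and null values are skipped entirely.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class AsPropertiesDemo {
    public static void main(String[] args) {
        Map<String, Object> map = new HashMap<>();
        map.put("timeout", 30);       // Integer, stored as "30" via toString()
        map.put("name", "demo");      // String, copied as-is
        map.put("missing", null);     // null, silently dropped

        Properties props = CollectionUtils.asProperties(map);
        System.out.println(props.getProperty("timeout")); // 30
        System.out.println(props.containsKey("missing")); // false
    }
}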

From source file:com.qubole.quark.planner.test.LatticeTest.java

@BeforeClass
public static void setUpClass() throws Exception {
    Properties info = new Properties();
    info.put("unitTestMode", "true");
    info.put("schemaFactory", "com.qubole.quark.planner.test.LatticeTest$SchemaFactory");

    ImmutableList<String> defaultSchema = ImmutableList.of("FOODMART");
    final ObjectMapper mapper = new ObjectMapper();

    info.put("defaultSchema", mapper.writeValueAsString(defaultSchema));

    parser = new SqlQueryParser(info);
}

From source file:com.gisgraphy.util.ConvertUtil.java

/**
 * Converts a ResourceBundle to a Properties object.
 *
 * @param rb
 *                a given resource bundle
 * @return Properties a populated properties object
 */
public static Properties convertBundleToProperties(ResourceBundle rb) {
    Properties props = new Properties();

    for (Enumeration<String> keys = rb.getKeys(); keys.hasMoreElements();) {
        String key = keys.nextElement();
        props.put(key, rb.getString(key));
    }

    return props;
}
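
A minimal usage sketch (assuming the ConvertUtil class above is on the classpath): build a ResourceBundle in memory rather than loading one from the classpath, then convert it.

import java.io.StringReader;
import java.util.Properties;
import java.util.PropertyResourceBundle;
import java.util.ResourceBundle;

public class BundleToPropertiesDemo {
    public static void main(String[] args) throws Exception {
        // PropertyResourceBundle can read key=value pairs from any Reader
        ResourceBundle rb = new PropertyResourceBundle(new StringReader("greeting=hello\nlang=en"));
        Properties props = ConvertUtil.convertBundleToProperties(rb);
        System.out.println(props.getProperty("greeting")); // hello
    }
}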

From source file:com.qualogy.qafe.business.resource.rdb.DataSourceConnectionFactory.java

/**
 * Gets the Oracle proxy connection properties
 * used for making a proxy connection to the Oracle database.
 *
 * @param dataId the data identifier used to look up the user name
 * @return the populated proxy connection properties
 */
public static Properties getOracleProxyConnectionProperties(DataIdentifier dataId) {
    Properties properties = new Properties();

    String userName = (String) DataStore.getValue(dataId,
            ProxyConnectionPropertiesNameEnum.USERNAME.propertyName());
    properties.put(ProxyConnectionPropertiesNameEnum.USERNAME.propertyName(), userName);

    return properties;
}

From source file:uk.ac.cam.cl.dtg.picky.client.analytics.Analytics.java

private static void fillProperties(Properties properties, List<FileEntry> fileEntries) {
    if (fileEntries != null)
        properties.put(KEY_FILE_MATCHES, "" + fileEntries.size());
}

From source file:fredboat.db.DatabaseManager.java

/**
 * @param jdbcUrl JDBC URL used to connect to the database
 * @param dialect set to null or an empty String to have Hibernate autodetect it; the chosen JDBC driver must support that
 * @param poolSize maximum size of the HikariCP connection pool
 */
public static void startup(String jdbcUrl, String dialect, int poolSize) {
    state = DatabaseState.INITIALIZING;

    try {

        if (Config.CONFIG.isUseSshTunnel()) {
            connectSSH();
        }

        //These are now located in the resources directory as XML
        Properties properties = new Properties();
        properties.put("configLocation", "hibernate.cfg.xml");

        properties.put("hibernate.connection.provider_class",
                "org.hibernate.hikaricp.internal.HikariCPConnectionProvider");
        properties.put("hibernate.connection.url", jdbcUrl);
        if (dialect != null && !"".equals(dialect))
            properties.put("hibernate.dialect", dialect);
        properties.put("hibernate.cache.region.factory_class",
                "org.hibernate.cache.ehcache.EhCacheRegionFactory");

        //properties.put("hibernate.show_sql", "true");

        //automatically update the tables we need
        //caution: only add new columns, don't remove or alter old ones, otherwise manual db table migration needed
        properties.put("hibernate.hbm2ddl.auto", "update");

        properties.put("hibernate.hikari.maximumPoolSize", Integer.toString(poolSize));
        properties.put("hibernate.hikari.idleTimeout", Integer.toString(Config.HIKARI_TIMEOUT_MILLISECONDS));

        LocalContainerEntityManagerFactoryBean emfb = new LocalContainerEntityManagerFactoryBean();
        emfb.setPackagesToScan("fredboat.db.entity");
        emfb.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
        emfb.setJpaProperties(properties);
        emfb.setPersistenceUnitName("fredboat.test");
        emfb.setPersistenceProviderClass(HibernatePersistenceProvider.class);
        emfb.afterPropertiesSet();
        emf = emfb.getObject();

        log.info("Started Hibernate");
        state = DatabaseState.READY;
    } catch (Exception ex) {
        state = DatabaseState.FAILED;
        throw new RuntimeException("Failed starting database connection", ex);
    }
}

From source file:org.biopax.validator.Main.java

private static void setUpLogger() {
    //set defaults
    Properties properties = new Properties();
    properties.put("log4j.rootLogger", "ERROR, Console");
    properties.put("log4j.appender.Console", "org.apache.log4j.ConsoleAppender");
    properties.put("log4j.appender.Console.layout", "org.apache.log4j.PatternLayout");
    properties.put("log4j.appender.Console.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
    PropertyConfigurator.configure(properties);

    properties = new Properties(properties);

    try {
        properties.load(new FileReader("log4j.properties"));
    } catch (IOException e) {
        throw new RuntimeException("Failed to load cPath2 properties " + "from log4j.properties", e);
    }

    PropertyConfigurator.configure(properties);
}

From source file:hydrograph.engine.cascading.scheme.hive.parquet.HiveParquetSchemeHelper.java

public static Properties getTableProperties(HiveTableDescriptor hiveTableDescriptor) {

    Properties properties = new Properties();
    String columns = StringUtils.join(hiveTableDescriptor.getColumnNames(), ",");
    String columnTypes = StringUtils.join(hiveTableDescriptor.getColumnTypes(), ":");
    properties.put(COLUMNS, columns);
    properties.put(COLUMNS_TYPES, columnTypes);
    return properties;
}