Example usage for java.util.Properties.get

A list of usage examples for java.util.Properties.get

Introduction

On this page you can find example usages of java.util.Properties.get, drawn from open-source projects.

Prototype

@Override
public Object get(Object key)
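
As the @Override in the prototype indicates, Properties overrides Hashtable.get: the method returns the raw stored Object and, unlike getProperty, does not consult the defaults table passed to the Properties constructor. A minimal sketch of the difference (class name and values are illustrative):

import java.util.Properties;

public class PropertiesGetDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("timeout", "30");

        Properties props = new Properties(defaults);
        props.setProperty("host", "localhost");

        // get(Object) returns the raw stored Object and ignores the defaults table.
        System.out.println(props.get("host"));    // localhost
        System.out.println(props.get("timeout")); // null -- defaults are not consulted

        // getProperty(String) walks the defaults chain and returns a String.
        System.out.println(props.getProperty("timeout")); // 30
    }
}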

Usage

From source file: com.mongodb.hadoop.hive.MongoStorageHandler.java

/**
 * Helper function to copy properties.
 */
private void copyJobProperties(final Properties from, final Map<String, String> to) {
    // Copy Hive-specific properties used directly by
    // HiveMongoInputFormat, BSONSerDe.
    if (from.containsKey(serdeConstants.LIST_COLUMNS)) {
        to.put(serdeConstants.LIST_COLUMNS, (String) from.get(serdeConstants.LIST_COLUMNS));
    }
    if (from.containsKey(serdeConstants.LIST_COLUMN_TYPES)) {
        to.put(serdeConstants.LIST_COLUMN_TYPES, (String) from.get(serdeConstants.LIST_COLUMN_TYPES));
    }
    if (from.containsKey(MONGO_COLS)) {
        to.put(MONGO_COLS, (String) from.get(MONGO_COLS));
    }
    if (from.containsKey(TABLE_LOCATION)) {
        to.put(TABLE_LOCATION, (String) from.get(TABLE_LOCATION));
    }

    // First, merge properties from the given properties file, if there
    // was one. These can be overwritten by other table properties later.
    String propertiesFilePathString = from.getProperty(PROPERTIES_FILE_PATH);
    if (propertiesFilePathString != null) {
        try {
            Properties properties = getProperties(getConf(), propertiesFilePathString);
            for (Map.Entry<Object, Object> prop : properties.entrySet()) {
                String key = (String) prop.getKey();
                String value = (String) prop.getValue();
                if (key.equals(MONGO_URI)) {
                    // Copy to input/output URI.
                    to.put(MongoConfigUtil.INPUT_URI, value);
                    to.put(MongoConfigUtil.OUTPUT_URI, value);
                } else {
                    to.put(key, value);
                }
            }
        } catch (IOException e) {
            LOG.error("Error while trying to read properties file " + propertiesFilePathString, e);
        }
    }

    // Copy general connector properties, such as ones defined in
    // MongoConfigUtil. These are all prefixed with "mongo.".
    for (Entry<Object, Object> entry : from.entrySet()) {
        String key = (String) entry.getKey();
        if (key.startsWith("mongo.")) {
            to.put(key, (String) from.get(key));
        }
    }

    // Update the keys for MONGO_URI per MongoConfigUtil.
    if (from.containsKey(MONGO_URI)) {
        String mongoURIStr = (String) from.get(MONGO_URI);
        to.put(MongoConfigUtil.INPUT_URI, mongoURIStr);
        to.put(MongoConfigUtil.OUTPUT_URI, mongoURIStr);
    }
}
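
Each copy above casts the result of get to String before inserting it into the Map<String, String>. The cast is needed because get returns Object; had a non-String value been stored with put, the cast would throw a ClassCastException. An illustrative variant of one copy step using getProperty, which instead returns null for non-String values (same constants as above assumed):

String columns = from.getProperty(serdeConstants.LIST_COLUMNS);
if (columns != null) {
    to.put(serdeConstants.LIST_COLUMNS, columns);
}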

From source file: com.glaf.core.jdbc.connection.HikariCPConnectionProvider.java

public void configure(Properties props) throws RuntimeException {
    Properties properties = new Properties();

    for (Iterator<?> ii = props.keySet().iterator(); ii.hasNext();) {
        String key = (String) ii.next();
        properties.put(key, props.get(key));
        if (key.startsWith("hikari.")) {
            String newKey = key.substring(7);
            properties.put(newKey, props.get(key));
        }
    }

    String jdbcDriverClass = properties.getProperty(DBConfiguration.JDBC_DRIVER);
    String jdbcUrl = properties.getProperty(DBConfiguration.JDBC_URL);
    Properties connectionProps = ConnectionProviderFactory.getConnectionProperties(properties);

    log.info("HikariCP using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl);
    log.info("Connection properties: " + PropertiesHelper.maskOut(connectionProps, "password"));

    autocommit = PropertiesHelper.getBoolean(DBConfiguration.JDBC_AUTOCOMMIT, properties);
    log.info("autocommit mode: " + autocommit);

    if (jdbcDriverClass == null) {
        log.warn("No JDBC Driver class was specified by property " + DBConfiguration.JDBC_DRIVER);
    } else {
        try {
            Class.forName(jdbcDriverClass);
        } catch (ClassNotFoundException cnfe) {
            try {
                ClassUtils.classForName(jdbcDriverClass);
            } catch (Exception e) {
                String msg = "JDBC Driver class not found: " + jdbcDriverClass;
                log.error(msg, e);
                throw new RuntimeException(msg, e);
            }
        }
    }

    try {

        String validationQuery = properties.getProperty(ConnectionConstants.PROP_VALIDATIONQUERY);

        Integer initialPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_INITIALSIZE, properties);
        Integer minPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MINACTIVE, properties);
        Integer maxPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXACTIVE, properties);
        if (initialPoolSize == null && minPoolSize != null) {
            properties.put(ConnectionConstants.PROP_INITIALSIZE, String.valueOf(minPoolSize).trim());
        }

        Integer maxWait = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXWAIT, properties);

        if (maxPoolSize == null) {
            maxPoolSize = 50;
        }

        String dbUser = properties.getProperty(DBConfiguration.JDBC_USER);
        String dbPassword = properties.getProperty(DBConfiguration.JDBC_PASSWORD);

        if (dbUser == null) {
            dbUser = "";
        }

        if (dbPassword == null) {
            dbPassword = "";
        }

        HikariConfig config = new HikariConfig();
        config.setDriverClassName(jdbcDriverClass);
        config.setJdbcUrl(jdbcUrl);
        config.setUsername(dbUser);
        config.setPassword(dbPassword);
        config.setMaximumPoolSize(maxPoolSize);
        config.setDataSourceProperties(properties);
        if (StringUtils.isNotEmpty(validationQuery)) {
            config.setConnectionTestQuery(validationQuery);
        }
        if (maxWait != null) {
            config.setConnectionTimeout(maxWait * 1000L);
        }

        config.setMaxLifetime(1000L * 3600 * 8);

        String isolationLevel = properties.getProperty(DBConfiguration.JDBC_ISOLATION);
        if (isolationLevel == null) {
            isolation = null;
        } else {
            isolation = Integer.valueOf(isolationLevel);
            log.info("JDBC isolation level: " + DBConfiguration.isolationLevelToString(isolation.intValue()));
        }

        if (StringUtils.isNotEmpty(isolationLevel)) {
            config.setTransactionIsolation(isolationLevel);
        }

        ds = new HikariDataSource(config);

    } catch (Exception ex) {
        ex.printStackTrace();
        log.error("could not instantiate HikariCP connection pool", ex);
        throw new RuntimeException("Could not instantiate HikariCP connection pool", ex);
    }

    Connection conn = null;
    try {
        conn = ds.getConnection();
        if (conn == null) {
            throw new RuntimeException("HikariCP connection pool can't get jdbc connection");
        }
    } catch (SQLException ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        JdbcUtils.close(conn);
    }

}
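
The first loop above copies every entry and additionally re-registers any key carrying the "hikari." prefix under its unprefixed name. A standalone sketch of that prefix-stripping idiom (class and method names here are illustrative, not part of the original provider):

import java.util.Properties;

public class PrefixStripSketch {
    static Properties withUnprefixedKeys(Properties src, String prefix) {
        Properties out = new Properties();
        for (String key : src.stringPropertyNames()) {
            String value = src.getProperty(key);
            out.setProperty(key, value);
            if (key.startsWith(prefix)) {
                // Re-register under the unprefixed name as well.
                out.setProperty(key.substring(prefix.length()), value);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Properties p = new Properties();
        p.setProperty("hikari.maximumPoolSize", "20");
        // Prints a Properties containing both hikari.maximumPoolSize and maximumPoolSize.
        System.out.println(withUnprefixedKeys(p, "hikari."));
    }
}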

From source file: com.redhat.rcm.maven.plugin.buildmetadata.BuildReportRenderer.java

private boolean hasPropertiesProvided(final Properties buildMetaDataProperties, final List<String> properties) {
    for (final String key : properties) {
        final Object value = buildMetaDataProperties.get(key);
        if (value != null && StringUtils.isNotBlank(String.valueOf(value))) {
            return true;
        }
    }

    final Set<String> selectedProperties = createSelectedProperties();
    for (final String key : selectedProperties) {
        final Object value = buildMetaDataProperties.get(key);
        if (value != null && StringUtils.isNotBlank(String.valueOf(value))) {
            return true;
        }
    }

    return false;
}

From source file: es.itecban.deployment.resource.taxonomy.DefaultTaxonomyImpl.java

private void initTaxonomy() {
    categories = new Hashtable<String, Category>();
    Properties types = new Properties();
    try {
        types.load(supportedTypes.getInputStream());
    } catch (Exception e) {
        e.printStackTrace();
    }
    Enumeration<Object> keys = types.keys();

    while (keys.hasMoreElements()) {
        String categoryName = (String) keys.nextElement();
        String categoryDescription = (String) types.get(categoryName);
        addCategory(categoryName, categoryDescription);
    }
}
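
One caveat with keys(), and with get above: both skip any defaults table supplied to the Properties constructor, whereas stringPropertyNames() walks the defaults chain. A small sketch (key names are illustrative):

import java.util.Collections;
import java.util.Properties;

public class KeysVsDefaultsDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("fallback", "yes");

        Properties p = new Properties(defaults);
        p.setProperty("own", "value");

        // keys() enumerates only entries stored directly in p.
        System.out.println(Collections.list(p.keys()));  // [own]
        // stringPropertyNames() also includes keys from the defaults table.
        System.out.println(p.stringPropertyNames());     // contains both own and fallback
    }
}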

From source file: com.impetus.client.neo4j.Neo4JClientFactory.java

/**
 * Creates a Neo4J embedded graph DB instance that acts as a connection
 * repository for Neo4J. If a Neo4J-specific client properties file is
 * specified in persistence.xml, the DB instance is initialized with those
 * properties; otherwise it is initialized with default properties.
 */
@Override
protected Object createPoolOrConnection() {
    if (log.isInfoEnabled())
        log.info("Initializing Neo4J database connection...");

    PersistenceUnitMetadata puMetadata = kunderaMetadata.getApplicationMetadata()
            .getPersistenceUnitMetadata(getPersistenceUnit());

    Properties props = puMetadata.getProperties();

    String datastoreFilePath = null;
    if (externalProperties != null) {
        datastoreFilePath = (String) externalProperties.get(PersistenceProperties.KUNDERA_DATASTORE_FILE_PATH);
    }
    if (StringUtils.isBlank(datastoreFilePath)) {
        datastoreFilePath = (String) props.get(PersistenceProperties.KUNDERA_DATASTORE_FILE_PATH);
    }

    if (StringUtils.isBlank(datastoreFilePath)) {
        throw new PersistenceUnitConfigurationException(
                "For Neo4J, it's mandatory to specify kundera.datastore.file.path property in persistence.xml");
    }

    Neo4JSchemaMetadata nsmd = Neo4JPropertyReader.nsmd;
    ClientProperties cp = nsmd != null ? nsmd.getClientProperties() : null;

    GraphDatabaseService graphDb = (GraphDatabaseService) getConnectionPoolOrConnection();

    if (cp != null && graphDb == null) {
        DataStore dataStore = nsmd != null ? nsmd.getDataStore() : null;

        Properties properties = dataStore != null && dataStore.getConnection() != null
                ? dataStore.getConnection().getProperties()
                : null;

        if (properties != null) {
            Map<String, String> config = new HashMap<String, String>((Map) properties);

            GraphDatabaseBuilder builder = new GraphDatabaseFactory()
                    .newEmbeddedDatabaseBuilder(datastoreFilePath);
            builder.setConfig(config);

            graphDb = builder.newGraphDatabase();
            // registerShutdownHook(graphDb);
        }
    }

    if (graphDb == null) {
        graphDb = new GraphDatabaseFactory().newEmbeddedDatabase(datastoreFilePath);
        // registerShutdownHook(graphDb);
    }

    return graphDb;
}
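
The two-step lookup above (externalProperties first, then the persistence-unit properties) is a common override pattern. A small hypothetical helper capturing it; lookup and its signature are illustrative, not part of Kundera:

// Hypothetical helper: the external value wins when present and non-blank.
private static String lookup(Map<String, Object> external, Properties fallback, String key) {
    String value = external != null ? (String) external.get(key) : null;
    if (StringUtils.isBlank(value)) {
        value = (String) fallback.get(key);
    }
    return value;
}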

From source file: com.liferay.blade.cli.command.CreateCommand.java

private File _getDefaultExtDir() throws Exception {
    BladeCLI bladeCLI = getBladeCLI();

    BaseArgs args = bladeCLI.getArgs();

    File base = new File(args.getBase());

    File baseDir = base.getCanonicalFile();

    if (!isWorkspace(baseDir)) {
        return baseDir;
    }

    Properties properties = getWorkspaceProperties();

    String extDirProperty = (String) properties.get(WorkspaceConstants.DEFAULT_EXT_DIR_PROPERTY);

    if (extDirProperty == null) {
        extDirProperty = WorkspaceConstants.DEFAULT_EXT_DIR;
    }

    WorkspaceProvider workspaceProvider = bladeCLI.getWorkspaceProvider(baseDir);

    File projectDir = workspaceProvider.getWorkspaceDir(baseDir);

    File extDir = new File(projectDir, extDirProperty);

    if (_containsDir(baseDir, extDir)) {
        return baseDir;
    }

    return extDir;
}
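
The cast plus null check above can also be written with the two-argument getProperty, which returns the supplied default when the key is missing. One caveat: getProperty only sees String values, so this is equivalent only if the property was stored as a String (a sketch using the same constants):

String extDirProperty = properties.getProperty(
    WorkspaceConstants.DEFAULT_EXT_DIR_PROPERTY, WorkspaceConstants.DEFAULT_EXT_DIR);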

From source file: org.esgf.web.SearchConfigurationController.java

/**
 * This method responds to a request (issued by esgf-web-fe/web/scripts/esgf/solr.js) with the facets defined in the file facets.properties.
 * The logic places all facets (delimited by ';' for the time being) into a JSON array, which is then parsed in solr.js.
 *
 * @return String JSON representation of the facet array
 *
 * @throws IOException
 * @throws JSONException
 * @throws ParserConfigurationException
 */
@RequestMapping(method = RequestMethod.GET)
public @ResponseBody String doGet() throws IOException, ParserConfigurationException, JSONException {

    Properties properties = new Properties();
    //String propertiesFile = WRITEABLE_SEARCHCONFIG_FILE;

    String propertiesFile = SEARCHCONFIG_PROPERTIES_FILE;

    SearchConfiguration searchConfig = new SearchConfiguration();

    try (FileInputStream in = new FileInputStream(propertiesFile)) {
        properties.load(in);

        for (Object key : properties.keySet()) {

            String value = (String) properties.get(key);

            //grab the globus online parameter
            if (key.equals("enableGlobusOnline")) {
                searchConfig.setEnableGlobusOnline(value);
            }
        }

    } catch (FileNotFoundException fe) {

        System.out.println("---------------------------------------------------------------------");
        System.out.println("Search Configuration file not found.  Setting the following defaults:");
        System.out.println("\tGlobus Online Enabled: " + searchConfig.getEnableGlobusOnline());
        System.out.println("---------------------------------------------------------------------");
    } catch (Exception e) {
        e.printStackTrace();
    }

    String json = searchConfig.toJSON();

    return json;

}
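
Since the loop only ever consults a single key, a direct getProperty lookup would express the same thing without iterating (an illustrative simplification):

String value = properties.getProperty("enableGlobusOnline");
if (value != null) {
    searchConfig.setEnableGlobusOnline(value);
}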

From source file: com.evolveum.midpoint.tools.gui.PropertiesGenerator.java

private PropertiesStatistics mergeProperties(Properties baseProperties, Properties targetProperties) {
    PropertiesStatistics stats = new PropertiesStatistics();

    Set<Object> keySet = baseProperties.keySet();
    for (Object key : keySet) {
        if (targetProperties.containsKey(key)) {
            continue;
        }

        targetProperties.setProperty((String) key, (String) baseProperties.get(key));
        stats.incrementAdded();
    }

    keySet = new HashSet<Object>();
    keySet.addAll(targetProperties.keySet());
    for (Object key : keySet) {
        if (baseProperties.containsKey(key)) {
            continue;
        }

        targetProperties.remove(key);
        stats.incrementDeleted();
    }

    return stats;
}
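
To make the two-pass merge concrete: keys present only in baseProperties are added to targetProperties, keys present only in targetProperties are removed, and shared keys keep their target value. A hypothetical driver (assuming mergeProperties were reachable, e.g. from a test):

Properties base = new Properties();
base.setProperty("added.key", "from-base");
base.setProperty("shared.key", "from-base");

Properties target = new Properties();
target.setProperty("shared.key", "from-target"); // kept: base also has this key
target.setProperty("stale.key", "old");          // removed: base lacks this key

mergeProperties(base, target);
// target now holds: added.key=from-base, shared.key=from-target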

From source file: com.liferay.blade.cli.command.CreateCommand.java

private File _getDefaultWarsDir() throws Exception {
    BladeCLI bladeCLI = getBladeCLI();

    BaseArgs args = bladeCLI.getArgs();

    File base = new File(args.getBase());

    File baseDir = base.getCanonicalFile();

    if (!isWorkspace(baseDir)) {
        return baseDir;
    }

    Properties properties = getWorkspaceProperties();

    String warsDirValue = (String) properties.get(WorkspaceConstants.DEFAULT_WARS_DIR_PROPERTY);

    if (warsDirValue == null) {
        warsDirValue = WorkspaceConstants.DEFAULT_WARS_DIR;
    }

    if (warsDirValue.contains(",")) {
        warsDirValue = warsDirValue.split(",")[0];
    }

    WorkspaceProvider workspaceProvider = bladeCLI.getWorkspaceProvider(baseDir);

    File projectDir = workspaceProvider.getWorkspaceDir(baseDir);

    File warsDir = new File(projectDir, warsDirValue);

    if (_containsDir(baseDir, warsDir)) {
        return baseDir;
    }

    return warsDir;
}

From source file: com.hortonworks.registries.schemaregistry.examples.avro.KafkaAvroSerDesApp.java

private Map<String, Object> createProducerConfig(Properties props) {
    String bootstrapServers = props.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG);
    Map<String, Object> config = new HashMap<>();
    config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), props.get(SCHEMA_REGISTRY_URL));
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    config.put(ProducerConfig.BATCH_SIZE_CONFIG, 1024);
    return config;
}
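
Note that props.get(SCHEMA_REGISTRY_URL) returns null when the key is absent, and that null would be stored in the config map without complaint. A defensive variant (illustrative only):

Object registryUrl = props.get(SCHEMA_REGISTRY_URL);
if (registryUrl == null) {
    throw new IllegalArgumentException("Missing schema registry URL in properties");
}
config.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), registryUrl);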