Example usage for java.util Properties putAll

List of usage examples for java.util Properties putAll

Introduction

On this page you can find usage examples for java.util.Properties.putAll.

Prototype

@Override
public synchronized void putAll(Map<?, ?> t)

Source Link

Usage
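
Before the project examples, here is a minimal self-contained sketch of the pattern most of them share: copying the entries of a Map<String, String> (or of another Properties object) into a fresh Properties instance. The configuration keys and values below are made up purely for illustration.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropertiesPutAllExample {
    public static void main(String[] args) {
        // A typical configuration map, similar to the ones returned by the APIs
        // in the examples below (keys and values here are hypothetical).
        Map<String, String> config = new HashMap<>();
        config.put("jdbc.url", "jdbc:h2:mem:test");
        config.put("jdbc.user", "sa");

        // Copy every entry of the map into a fresh Properties instance.
        Properties props = new Properties();
        props.putAll(config);

        System.out.println(props.getProperty("jdbc.url")); // jdbc:h2:mem:test
        System.out.println(props.size());                  // 2
    }
}

Copying into a new Properties object, as several of the examples below do, also gives callers a defensive copy: later changes to the returned Properties do not affect the original map.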

From source file:org.apache.openaz.xacml.admin.model.PDPPIPContainer.java

private void initialize() {
    assert this.data != null;
    //
    // Get the list of configurations
    //
    Set<PDPPIPConfig> configs = null;
    if (this.isPDPGroup()) {
        configs = ((PDPGroup) this.data).getPipConfigs();
    } else if (this.isPDP()) {
        configs = ((PDP) this.data).getPipConfigs();
    } else {
        throw new IllegalArgumentException("This container only supported PDPGroup and PDP objects.");
    }
    //
    // Map these to a list of PIPConfiguration objects. That
    // way we can match them up to the database.
    //
    for (PDPPIPConfig config : configs) {
        Properties properties = new Properties();
        properties.putAll(config.getConfiguration());
        try {
            PIPConfiguration pipConfig = new PIPConfiguration(config.getId(), properties);
            if (logger.isDebugEnabled()) {
                logger.debug("Found config: " + pipConfig);
            }
            this.configurations.add(pipConfig);
        } catch (PIPException e) {
            logger.error("Failed to create PIPConfiguration: " + e.getLocalizedMessage());
        }
    }
}

From source file:org.apache.zeppelin.interpreter.Interpreter.java

@ZeppelinApi
public Properties getProperties() {
    Properties p = new Properties();
    p.putAll(properties);
    replaceContextParameters(p);
    return p;
}

From source file:org.cloudifysource.esc.driver.provisioning.storage.openstack.OpenstackStorageDriver.java

private void initDeployer() {

    if (deployer != null) {
        return;
    }

    try {
        logger.fine("Creating JClouds context deployer for Openstack with user: " + cloud.getUser().getUser());
        final Properties props = new Properties();
        props.putAll(computeTemplate.getOverrides());

        deployer = new JCloudsDeployer(cloud.getProvider().getProvider(), cloud.getUser().getUser(),
                cloud.getUser().getApiKey(), props);
    } catch (final Exception e) {
        publishEvent("connection_to_cloud_api_failed", cloud.getProvider().getProvider());
        throw new IllegalStateException("Failed to create cloud Deployer", e);
    }
}

From source file:org.apache.gobblin.service.modules.scheduler.GobblinServiceJobScheduler.java

/** {@inheritDoc} */
@Override
public void onAddSpec(Spec addedSpec) {
    if (this.helixManager.isPresent() && !this.helixManager.get().isConnected()) {
        // Specs in store will be notified when Scheduler is added as listener to FlowCatalog, so ignore
        // .. Specs if in cluster mode and Helix is not yet initialized
        _log.info("System not yet initialized. Skipping Spec Addition: " + addedSpec);
        return;
    }

    _log.info("New Flow Spec detected: " + addedSpec);

    if (addedSpec instanceof FlowSpec) {
        try {
            FlowSpec flowSpec = (FlowSpec) addedSpec;
            Properties jobConfig = new Properties();
            Properties flowSpecProperties = ((FlowSpec) addedSpec).getConfigAsProperties();
            jobConfig.putAll(this.properties);
            jobConfig.setProperty(ConfigurationKeys.JOB_NAME_KEY, addedSpec.getUri().toString());
            jobConfig.setProperty(ConfigurationKeys.JOB_GROUP_KEY,
                    flowSpec.getConfig().getValue(ConfigurationKeys.FLOW_GROUP_KEY).toString());
            jobConfig.setProperty(ConfigurationKeys.FLOW_RUN_IMMEDIATELY, ConfigUtils
                    .getString((flowSpec).getConfig(), ConfigurationKeys.FLOW_RUN_IMMEDIATELY, "false"));
            if (flowSpecProperties.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY) && StringUtils
                    .isNotBlank(flowSpecProperties.getProperty(ConfigurationKeys.JOB_SCHEDULE_KEY))) {
                jobConfig.setProperty(ConfigurationKeys.JOB_SCHEDULE_KEY,
                        flowSpecProperties.getProperty(ConfigurationKeys.JOB_SCHEDULE_KEY));
            }

            this.scheduledFlowSpecs.put(addedSpec.getUri().toString(), addedSpec);

            if (jobConfig.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
                _log.info("{} Scheduling flow spec: {} ", this.serviceName, addedSpec);
                scheduleJob(jobConfig, null);
                if (PropertiesUtils.getPropAsBoolean(jobConfig, ConfigurationKeys.FLOW_RUN_IMMEDIATELY,
                        "false")) {
                    _log.info("RunImmediately requested, hence executing FlowSpec: " + addedSpec);
                    this.jobExecutor
                            .execute(new NonScheduledJobRunner(flowSpec.getUri(), false, jobConfig, null));
                }
            } else {
                _log.info("No FlowSpec schedule found, so running FlowSpec: " + addedSpec);
                this.jobExecutor.execute(new NonScheduledJobRunner(flowSpec.getUri(), true, jobConfig, null));
            }
        } catch (JobException je) {
            _log.error("{} Failed to schedule or run FlowSpec {}", serviceName, addedSpec, je);
        }
    }
}

From source file:org.apache.ftpserver.message.MessageResourceImpl.java

/**
 * Get all messages.
 */
public Properties getMessages(String language) {
    Properties messages = new Properties();

    // load properties sequentially 
    // (default,custom,default language,custom language)
    PropertiesPair pair = (PropertiesPair) m_messages.get(null);
    if (pair != null) {
        messages.putAll(pair.m_default);
        messages.putAll(pair.m_custom);
    }
    if (language != null) {
        language = language.toLowerCase();
        pair = (PropertiesPair) m_messages.get(language);
        if (pair != null) {
            messages.putAll(pair.m_default);
            messages.putAll(pair.m_custom);
        }
    }
    return messages;
}

From source file:org.apache.sqoop.connector.jdbc.oracle.OracleJdbcCommonInitializer.java

private boolean testDynamicallyGeneratedOracleRacInstanceConnection(String url, String userName,
        String password, Map<String, String> jdbcProperties, boolean showInstanceSysTimestamp,
        String instanceDescription) {

    boolean result = false;

    // Test the connection...
    try {
        Properties additionalProps = new Properties();
        if (jdbcProperties != null) {
            additionalProps.putAll(jdbcProperties);
        }
        Connection testConnection = OracleConnectionFactory.createOracleJdbcConnection(
                OracleJdbcConnectorConstants.ORACLE_JDBC_DRIVER_CLASS, url, userName, password,
                additionalProps);

        // Show the system time on each instance...
        if (showInstanceSysTimestamp) {
            LOG.info(String.format("\tDatabase time on %s is %s", instanceDescription,
                    OracleQueries.getSysTimeStamp(testConnection)));
        }

        testConnection.close();
        result = true;
    } catch (SQLException ex) {
        LOG.warn(String.format("The dynamically generated JDBC URL \"%s\" was unable to "
                + "connect to an instance in the Oracle RAC.", url), ex);
    }

    return result;
}

From source file:de.innovationgate.webgate.api.jdbc.pool.DBCPConnectionProvider.java

public void configure(Map propsMap) throws HibernateException {
    try {
        log.debug("Configure DBCPConnectionProvider");
        Properties props = new Properties();
        props.putAll(propsMap);

        String jdbcUrl = (String) props.getProperty(Environment.URL);

        // DBCP properties used to create the BasicDataSource
        Properties dbcpProperties = new Properties();

        // DriverClass & url
        String jdbcDriverClass = props.getProperty(Environment.DRIVER);

        // Try to determine driver by jdbc-URL
        if (jdbcDriverClass == null) {
            Driver driver = DriverManager.getDriver(jdbcUrl);
            if (driver != null) {
                jdbcDriverClass = driver.getClass().getName();
            } else {
                throw new HibernateException("Driver class not available");
            }
        }

        dbcpProperties.put("driverClassName", jdbcDriverClass);
        dbcpProperties.put("url", jdbcUrl);

        // Username / password
        String username = props.getProperty(Environment.USER);
        if (username != null) {
            dbcpProperties.put("username", username);
        }

        String password = props.getProperty(Environment.PASS);
        if (password != null) {
            dbcpProperties.put("password", password);
        }

        // Isolation level
        String isolationLevel = props.getProperty(Environment.ISOLATION);
        if ((isolationLevel != null) && (isolationLevel.trim().length() > 0)) {
            dbcpProperties.put("defaultTransactionIsolation", isolationLevel);
        }

        // Turn off autocommit (unless autocommit property is set) 
        String autocommit = props.getProperty(AUTOCOMMIT);
        if ((autocommit != null) && (autocommit.trim().length() > 0)) {
            dbcpProperties.put("defaultAutoCommit", autocommit);
        } else {
            dbcpProperties.put("defaultAutoCommit", String.valueOf(Boolean.FALSE));
        }

        // Pool size
        String poolSize = props.getProperty(Environment.POOL_SIZE);
        if ((poolSize != null) && (poolSize.trim().length() > 0) && (Integer.parseInt(poolSize) > 0)) {
            dbcpProperties.put("maxActive", poolSize);
        }

        // Copy all "driver" properties into "connectionProperties"
        Properties driverProps = ConnectionProviderInitiator.getConnectionProperties(props);
        if (driverProps.size() > 0) {
            StringBuffer connectionProperties = new StringBuffer();
            for (Iterator iter = driverProps.keySet().iterator(); iter.hasNext();) {
                String key = (String) iter.next();
                String value = driverProps.getProperty(key);
                connectionProperties.append(key).append('=').append(value);
                if (iter.hasNext()) {
                    connectionProperties.append(';');
                }
            }
            dbcpProperties.put("connectionProperties", connectionProperties.toString());
        }

        // Copy all DBCP properties removing the prefix
        for (Iterator iter = props.keySet().iterator(); iter.hasNext();) {
            String key = String.valueOf(iter.next());
            if (key.startsWith(PREFIX)) {
                String property = key.substring(PREFIX.length());
                String value = props.getProperty(key);
                dbcpProperties.put(property, value);
            }
        }

        // Backward-compatibility
        if (props.getProperty(DBCP_PS_MAXACTIVE) != null) {
            dbcpProperties.put("poolPreparedStatements", String.valueOf(Boolean.TRUE));
            dbcpProperties.put("maxOpenPreparedStatements", props.getProperty(DBCP_PS_MAXACTIVE));
        }
        if (props.getProperty(DBCP_MAXACTIVE) != null) {
            dbcpProperties.put("maxTotal", props.getProperty(DBCP_MAXACTIVE));
        }
        if (props.getProperty(DBCP_MAXWAIT) != null) {
            dbcpProperties.put("maxWaitMillis", props.getProperty(DBCP_MAXWAIT));
        }

        // Some debug info
        if (log.isDebugEnabled()) {
            log.debug("Creating a DBCP BasicDataSource with the following DBCP factory properties:");
            StringWriter sw = new StringWriter();
            dbcpProperties.list(new PrintWriter(sw, true));
            log.debug(sw.toString());
        }

        String dbKey = (String) props.get("hibernate.dbcp.dbkey");
        String databaseServerId = (String) props.get("hibernate.dbcp.dbserver.id");

        // Enable DBCP2 JMX monitoring information
        if (dbKey != null) {
            dbcpProperties.put("jmxName",
                    JMX_DBCP2_DBPOOLS_ADDRESS + ",pool=" + JmxManager.normalizeJmxKey(dbKey));
        } else if (databaseServerId != null) {
            String entityTitle = props.getProperty("hibernate.dbcp.dbserver.title");
            dbcpProperties.put("jmxName",
                    JMX_DBCP2_SERVERPOOLS_ADDRESS + ",pool=" + JmxManager.normalizeJmxKey(entityTitle));
        }

        // Let the factory create the pool
        _ds = BasicDataSourceFactory.createDataSource(dbcpProperties);
        _ds.setLogExpiredConnections(false);

        // The BasicDataSource has lazy initialization
        // borrowing a connection will start the DataSource
        // and make sure it is configured correctly.
        Connection conn = _ds.getConnection();
        conn.close();

        // Create Legacy JMX monitoring information, provided by WGA
        if ("true".equals(props.getProperty("hibernate.dbcp.legacyJMX"))) {
            try {
                if (dbKey != null) {
                    _entityKey = dbKey;
                    _entityTitle = dbKey;
                    _jmxManager = new JmxManager(new DBCPPoolInformation(this),
                            new ObjectName(JMX_DBPOOLS_ADDRESS + ",pool=" + JmxManager.normalizeJmxKey(dbKey)));
                } else if (databaseServerId != null) {
                    _server = true;
                    _entityKey = databaseServerId;
                    _entityTitle = (String) props.get("hibernate.dbcp.dbserver.title");
                    _jmxManager = new JmxManager(new DBCPPoolInformation(this), new ObjectName(
                            JMX_SERVERPOOLS_ADDRESS + ",pool=" + JmxManager.normalizeJmxKey(_entityTitle)));
                }
            } catch (Throwable e) {
                log.error("Error enabling JMX metrics for connection pool", e);
            }
        }

    } catch (Exception e) {
        String message = "Could not create a DBCP pool";
        if (_ds != null) {
            try {
                _ds.close();
            } catch (Exception e2) {
                // ignore
            }
            _ds = null;
        }
        throw new HibernateException(message, e);
    }
    log.debug("Configure DBCPConnectionProvider complete");

}

From source file:com.facebook.presto.jdbc.PrestoConnection.java

@Override
public Properties getClientInfo() throws SQLException {
    Properties properties = new Properties();
    properties.putAll(clientInfo);
    return properties;
}

From source file:com.netflix.blitz4j.LoggingConfiguration.java

/**
 * Reconfigure log4j at run-time.
 * 
 * @throws FileNotFoundException
 * @throws ConfigurationException
 */
private void reConfigure() throws ConfigurationException, FileNotFoundException {

    Properties consolidatedProps = new Properties();
    consolidatedProps.putAll(props);
    logger.info("Updated properties is :" + updatedProps);
    consolidatedProps.putAll(updatedProps);
    logger.info("The root category for log4j.rootCategory now is "
            + consolidatedProps.getProperty("log4j.rootCategory"));
    logger.info("The root category for log4j.rootLogger now is "
            + consolidatedProps.getProperty("log4j.rootLogger"));

    // Pause the async appenders so that the appenders are not accessed
    for (String originalAppenderName : originalAsyncAppenderNameMap.keySet()) {
        MessageBatcher asyncBatcher = BatcherFactory
                .getBatcher(AsyncAppender.class.getName() + "." + originalAppenderName);
        if (asyncBatcher == null) {
            continue;
        }
        asyncBatcher.pause();
    }

    // Configure log4j using the new set of properties
    configureLog4j(consolidatedProps);
    // Resume all the batchers to continue logging
    for (String originalAppenderName : originalAsyncAppenderNameMap.keySet()) {
        MessageBatcher asyncBatcher = BatcherFactory
                .getBatcher(AsyncAppender.class.getName() + "." + originalAppenderName);
        if (asyncBatcher == null) {
            continue;
        }
        asyncBatcher.resume();
    }
}

From source file:org.bitstrings.maven.plugins.portallocator.PortAllocatorMojo.java

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    try {
        if (portAllocators != null) {
            for (PortAllocator portAllocator : portAllocators) {
                initPortAllocator(portAllocator);

                final PortAllocatorService existingPas = PORT_ALLOCATOR_SERVICE_MAP.get(portAllocator.getId());

                if (portAllocator.isPermitOverride() || (existingPas == null)
                        || (portAllocator.getId().equals(PORT_ALLOCATOR_DEFAULT_ID)
                                && (existingPas == PORT_ALLOCATOR_SERVICE_DEFAULT))) {
                    PORT_ALLOCATOR_SERVICE_MAP.put(portAllocator.getId(),
                            createPortAllocatorService(portAllocator));

                    if (!quiet && getLog().isInfoEnabled()) {
                        getLog().info("Registering port allocator [" + portAllocator.getId() + "]");
                    }
                }
            }
        }

        if (ports != null) {
            if ((ports.getPortAllocatorRef() != null) && (ports.getPortAllocator() != null)) {
                throw new MojoExecutionException(
                        "Either use a port allocator reference or define an inner allocator but you can use both.");
            }

            PortAllocatorService pas = ports.getPortAllocator() == null
                    ? PORT_ALLOCATOR_SERVICE_MAP
                            .get(firstNonNull(ports.getPortAllocatorRef(), PORT_ALLOCATOR_DEFAULT_ID))
                    : createPortAllocatorService(initPortAllocator(ports.getPortAllocator()));

            if (pas == null) {
                throw new MojoExecutionException(
                        "Cannot find port allocator [" + ports.getPortAllocatorRef() + "]");
            }

            // assign
            final LinkedListMultimap<String, Port> portGroupMap = LinkedListMultimap.create();

            for (Port port : ports) {
                final String offsetFrom = port.getOffsetFrom();
                final String portGroupName = findGroupRoot(port, portGroupMap);

                portGroupMap.put(portGroupName, port);

                if ((offsetFrom != null) && !executionPortMap.containsKey(getPortName(portGroupName))) {
                    throw new MojoExecutionException(
                            "Port [" + port.getName() + "] using offset from undefined [" + offsetFrom + "].");
                }

                Iterator<Port> portIterator = Iterators.singletonIterator(port);

                while (portIterator.hasNext()) {
                    final Port portToAllocate = portIterator.next();

                    final Integer previousPort = executionPortMap.remove(getPortName(portToAllocate.getName()));
                    executionPortMap.remove(getOffsetName(portToAllocate.getName()));

                    if (!allocatePort(pas, portToAllocate)) {
                        if (portToAllocate.getOffsetFrom() != null) {
                            portIterator = portGroupMap.get(portGroupName).listIterator();
                        }
                    }

                    ALLOCATION_LOCK.lock();
                    ALLOCATED_PORTS.remove(previousPort);
                    ALLOCATION_LOCK.unlock();
                }
            }

            // log ports
            for (Port port : ports) {
                if (!quiet && getLog().isInfoEnabled()) {
                    String name = getPortName(port.getName());
                    Integer value = executionPortMap.get(name);

                    if (value != null) {
                        getLog().info("Assigning port [" + value + "] to property [" + name + "]");
                    }

                    name = getOffsetName(port.getName());
                    value = executionPortMap.get(name);

                    if (value != null) {
                        getLog().info("Assigning offset [" + value + "] " + "using preferred port ["
                                + port.getPreferredPort() + "] " + "to property [" + name + "]");
                    }
                }
            }
        }

        if (writePropertiesFile != null) {
            final File parent = writePropertiesFile.getParentFile();

            if ((parent != null) && !parent.exists()) {
                parent.mkdirs();
            }

            try (final Writer out = new BufferedWriter(new FileWriter(writePropertiesFile))) {
                if (!quiet && getLog().isInfoEnabled()) {
                    getLog().info("Writing ports file [" + writePropertiesFile + "]");
                }

                final Properties outProps = new Properties();
                outProps.putAll(Maps.transformValues(executionPortMap, new Function<Integer, String>() {
                    @Override
                    public String apply(Integer input) {
                        return input.toString();
                    }
                }));
                outProps.store(out, null);
            } catch (Exception e) {
                throw new MojoExecutionException("Problem writing ports file [" + writePropertiesFile + "]", e);
            }
        }
    } catch (MojoExecutionException e) {
        throw e;
    } catch (Exception e) {
        throw new MojoExecutionException(e.getLocalizedMessage(), e);
    }
}
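
One caveat worth keeping in mind when using putAll (a property of the JDK API itself, not something taken from the examples above): because Properties extends Hashtable<Object, Object>, putAll will copy non-String keys and values without complaint, but such entries are invisible to getProperty and cause store(...) to throw a ClassCastException. A small sketch:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropertiesPutAllCaveat {
    public static void main(String[] args) {
        Map<Object, Object> mixed = new HashMap<>();
        mixed.put("port", 8080);        // Integer value, not a String
        mixed.put("host", "localhost"); // ordinary String entry

        Properties props = new Properties();
        props.putAll(mixed); // both entries are copied without complaint

        System.out.println(props.get("port"));         // 8080 (raw Hashtable lookup)
        System.out.println(props.getProperty("port")); // null - the value is not a String
        System.out.println(props.getProperty("host")); // localhost

        // props.store(System.out, null); // would throw ClassCastException because of "port"
    }
}

Checking that the source map holds only String keys and values before calling putAll avoids both surprises.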