Example usage for java.util.Properties.putAll

List of usage examples for java.util.Properties.putAll

Introduction

On this page you can find example usage for java.util.Properties.putAll, drawn from open-source projects.

Prototype

@Override
public synchronized void putAll(Map<?, ?> t)
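
The method copies every mapping from the given Map into this Properties table, replacing values for keys that are already present. Before the project examples below, here is a minimal, self-contained sketch (all names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PutAllExample {
    public static void main(String[] args) {
        Map<String, String> overrides = new HashMap<>();
        overrides.put("host", "example.org");
        overrides.put("port", "8080");

        Properties props = new Properties();
        props.setProperty("host", "localhost");

        // putAll copies every entry; the existing "host" value is replaced.
        props.putAll(overrides);

        System.out.println(props.getProperty("host")); // example.org
        System.out.println(props.getProperty("port")); // 8080
    }
}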

Usage

From source file:org.abstracthorizon.proximity.storage.local.ReadOnlyFileSystemStorage.java

/**
 * Store item properties.
 * 
 * @param iProps the i props
 * 
 * @throws StorageException the storage exception
 */
protected void storeItemProperties(ItemProperties iProps) throws StorageException {
    if (!iProps.isFile()) {
        throw new IllegalArgumentException("Only files can be stored!");
    }
    logger.debug("Storing metadata in [{}] in storage directory {}", iProps.getPath(), getMetadataBaseDir());
    try {

        File target = new File(getMetadataBaseDir(), iProps.getPath());
        target.getParentFile().mkdirs();
        Properties metadata = new Properties();
        metadata.putAll(iProps.getAllMetadata());
        FileOutputStream os = new FileOutputStream(target);
        try {
            metadata.store(os, "Written by " + this.getClass());
            os.flush();
        } finally {
            os.close();
        }
        target.setLastModified(iProps.getLastModified().getTime());

    } catch (IOException ex) {
        throw new StorageException("IOException in FS storage " + getMetadataBaseDir(), ex);
    }
}
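
The example above copies an item's metadata map into a Properties object so that it can be persisted with store. The same pattern in isolation, as a minimal sketch (the file name and key are illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class StoreMetadataSketch {
    public static void main(String[] args) throws IOException {
        Map<String, String> metadata = new HashMap<>();
        metadata.put("item.path", "/repo/example.jar");

        // Copy the map into a Properties object, then persist it.
        Properties props = new Properties();
        props.putAll(metadata);

        try (OutputStream os = new FileOutputStream("metadata.properties")) {
            props.store(os, "Written by StoreMetadataSketch");
        }
    }
}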

From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java

/**
 * Get the DDL script to update the database.
 */
public String getUpdateDDLScript() {
    Dialect dialect = Dialect.getDialect(config.getProperties());
    Properties props = new Properties();
    //      ConnectionProvider connectionProvider = null;
    DatabaseMetadata dm = null;

    props.putAll(dialect.getDefaultProperties());
    props.putAll(config.getProperties());
    /*      connectionProvider = ConnectionProviderFactory.newConnectionProvider(props);
            
          try {
             dm = new DatabaseMetadata(connectionProvider.getConnection(), dialect);
          } catch (SQLException e) {
             log.debug("Could not get database DDL script", e);
          }
    */
    String[] script = config.generateSchemaUpdateScript(dialect, dm);
    return ArrayUtil.toString(script);
}
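
The two putAll calls above layer configuration: the dialect defaults are copied first, then the Hibernate configuration properties, so entries copied later override earlier ones. A sketch of that layering idea (the property name and values are made up for illustration):

import java.util.Properties;

public class LayeredDefaults {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("hibernate.jdbc.batch_size", "15");

        Properties userConfig = new Properties();
        userConfig.setProperty("hibernate.jdbc.batch_size", "50");

        Properties props = new Properties();
        props.putAll(defaults);   // baseline values first
        props.putAll(userConfig); // later copies win

        System.out.println(props.getProperty("hibernate.jdbc.batch_size")); // 50
    }
}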

From source file:com.glaf.jbpm.connection.DruidConnectionProvider.java

public void configure(Properties props) {
    Properties properties = new Properties();
    properties.putAll(props);

    for (Iterator<Object> ii = props.keySet().iterator(); ii.hasNext();) {
        String key = (String) ii.next();
        if (key.startsWith("druid.")) {
            String newKey = key.substring(6);
            properties.put(newKey, props.get(key));
        }
    }

    Properties connectionProps = ConnectionProviderFactory.getConnectionProperties(properties);
    log.info("Connection properties: " + PropertiesHelper.maskOut(connectionProps, Environment.PASS));

    String jdbcDriverClass = properties.getProperty(Environment.DRIVER);
    String jdbcUrl = properties.getProperty(Environment.URL);

    log.info("Druid using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl);

    autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT, properties);
    log.info("autocommit mode: " + autocommit);

    if (jdbcDriverClass == null) {
        log.warn("No JDBC Driver class was specified by property " + Environment.DRIVER);
    } else {
        try {
            Class.forName(jdbcDriverClass);
        } catch (ClassNotFoundException cnfe) {
            try {
                ReflectUtils.instantiate(jdbcDriverClass);
            } catch (Exception e) {
                String msg = "JDBC Driver class not found: " + jdbcDriverClass;
                log.error(msg, e);
                throw new RuntimeException(msg, e);
            }
        }
    }

    try {

        Integer maxPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXACTIVE, properties);
        Integer maxStatements = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXSTATEMENTS, properties);

        Integer timeBetweenEvictionRuns = PropertiesHelper
                .getInteger(ConnectionConstants.PROP_TIMEBETWEENEVICTIONRUNS, properties);

        Integer maxWait = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXWAIT, properties);

        String validationQuery = properties.getProperty(ConnectionConstants.PROP_VALIDATIONQUERY);

        if (maxPoolSize == null) {
            maxPoolSize = 50;
        }

        if (timeBetweenEvictionRuns == null) {
            timeBetweenEvictionRuns = 60;
        }

        if (maxWait == null) {
            maxWait = 60;
        }

        String dbUser = properties.getProperty(Environment.USER);
        String dbPassword = properties.getProperty(Environment.PASS);

        if (dbUser == null) {
            dbUser = "";
        }

        if (dbPassword == null) {
            dbPassword = "";
        }

        ds = new DruidDataSource();

        DruidDataSourceFactory.config(ds, properties);
        ds.setConnectProperties(properties);
        ds.setDriverClassName(jdbcDriverClass);
        ds.setUrl(jdbcUrl);
        ds.setUsername(dbUser);
        ds.setPassword(dbPassword);

        ds.setInitialSize(1);
        ds.setMinIdle(3);
        ds.setMaxActive(maxPoolSize);
        ds.setMaxWait(maxWait * 1000L);

        ds.setConnectionErrorRetryAttempts(30);
        ds.setDefaultAutoCommit(true);

        ds.setTestOnReturn(false);
        ds.setTestOnBorrow(false);
        ds.setTestWhileIdle(false);

        if (StringUtils.isNotEmpty(validationQuery)) {
            log.debug("validationQuery:" + validationQuery);
            ds.setValidationQuery(validationQuery);
            ds.setTestWhileIdle(true); // validate idle connections with the query above
        }

        ds.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns * 1000L); // interval between evictor runs
        ds.setMinEvictableIdleTimeMillis(1000L * 60L * 120L); // evict connections idle for over 120 minutes

        if (maxStatements != null) {
            ds.setPoolPreparedStatements(true);
            ds.setMaxOpenPreparedStatements(maxStatements);
            ds.setMaxPoolPreparedStatementPerConnectionSize(200);
        }

        ds.setRemoveAbandoned(false); // abandoned-connection reclamation disabled
        ds.setRemoveAbandonedTimeout(7200); // 7200 seconds = 120 minutes
        ds.setLogAbandoned(true); // log stack traces of abandoned connections

        ds.init();
    } catch (Exception e) {
        log.error("could not instantiate Druid connection pool", e);
        throw new RuntimeException("Could not instantiate Druid connection pool", e);
    }

    String i = properties.getProperty(Environment.ISOLATION);
    if (i == null) {
        isolation = null;
    } else {
        isolation = Integer.valueOf(i);
    }

}
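
The loop above first copies all incoming properties, then adds an un-prefixed duplicate of every key that starts with "druid.". The same idea as a stand-alone sketch, using stringPropertyNames instead of a raw key iterator (the property names are illustrative):

import java.util.Properties;

public class PrefixStripSketch {
    public static void main(String[] args) {
        Properties source = new Properties();
        source.setProperty("druid.maxActive", "50");
        source.setProperty("jdbc.url", "jdbc:h2:mem:test");

        // Copy everything, then add un-prefixed copies of "druid.*" keys.
        Properties merged = new Properties();
        merged.putAll(source);
        for (String key : source.stringPropertyNames()) {
            if (key.startsWith("druid.")) {
                merged.setProperty(key.substring("druid.".length()), source.getProperty(key));
            }
        }

        System.out.println(merged.getProperty("maxActive")); // 50
    }
}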

From source file:org.apache.gobblin.service.SimpleKafkaSpecConsumer.java

@Override
public Future<? extends List<Pair<SpecExecutor.Verb, Spec>>> changedSpecs() {
    List<Pair<SpecExecutor.Verb, Spec>> changesSpecs = new ArrayList<>();
    initializeWatermarks();
    this.currentPartitionIdx = -1;
    while (!allPartitionsFinished()) {
        if (currentPartitionFinished()) {
            moveToNextPartition();
            continue;
        }
        if (this.messageIterator == null || !this.messageIterator.hasNext()) {
            try {
                this.messageIterator = fetchNextMessageBuffer();
            } catch (Exception e) {
                log.error(String.format(
                        "Failed to fetch next message buffer for partition %s. Will skip this partition.",
                        getCurrentPartition()), e);
                moveToNextPartition();
                continue;
            }
            if (this.messageIterator == null || !this.messageIterator.hasNext()) {
                moveToNextPartition();
                continue;
            }
        }
        while (!currentPartitionFinished()) {
            if (!this.messageIterator.hasNext()) {
                break;
            }

            KafkaConsumerRecord nextValidMessage = this.messageIterator.next();

            // Even though we ask Kafka to give us a message buffer starting from offset x, it may
            // return a buffer that starts from offset smaller than x, so we need to skip messages
            // until we get to x.
            if (nextValidMessage.getOffset() < _nextWatermark.get(this.currentPartitionIdx)) {
                continue;
            }

            _nextWatermark.set(this.currentPartitionIdx, nextValidMessage.getNextOffset());
            try {
                final AvroJobSpec record;

                if (nextValidMessage instanceof ByteArrayBasedKafkaRecord) {
                    record = decodeRecord((ByteArrayBasedKafkaRecord) nextValidMessage);
                } else if (nextValidMessage instanceof DecodeableKafkaRecord) {
                    record = ((DecodeableKafkaRecord<?, AvroJobSpec>) nextValidMessage).getValue();
                } else {
                    throw new IllegalStateException(
                            "Unsupported KafkaConsumerRecord type. The returned record can either be ByteArrayBasedKafkaRecord"
                                    + " or DecodeableKafkaRecord");
                }

                JobSpec.Builder jobSpecBuilder = JobSpec.builder(record.getUri());

                Properties props = new Properties();
                props.putAll(record.getProperties());
                jobSpecBuilder.withJobCatalogURI(record.getUri()).withVersion(record.getVersion())
                        .withDescription(record.getDescription()).withConfigAsProperties(props);

                if (!record.getTemplateUri().isEmpty()) {
                    jobSpecBuilder.withTemplate(new URI(record.getTemplateUri()));
                }

                String verbName = record.getMetadata().get(VERB_KEY);
                SpecExecutor.Verb verb = SpecExecutor.Verb.valueOf(verbName);

                changesSpecs.add(new ImmutablePair<SpecExecutor.Verb, Spec>(verb, jobSpecBuilder.build()));
            } catch (Throwable t) {
                log.error("Could not decode record at partition " + this.currentPartitionIdx + " offset "
                        + nextValidMessage.getOffset());
            }
        }
    }

    return new CompletedFuture(changesSpecs, null);
}

From source file:org.sakaiproject.hierarchy.tool.vm.spring.VelocityConfigurer.java

/**
 * Prepare the VelocityEngine instance and return it.
 *
 * @return the VelocityEngine instance
 * @throws IOException
 *         if the config file wasn't found
 * @throws VelocityException
 *         on Velocity initialization failure
 */
public VelocityEngine createVelocityEngine() throws IOException, VelocityException {
    VelocityEngine velocityEngine = newVelocityEngine();
    if (servletContextHolder == null || servletContextHolder.getServletContext() == null) {
        logger.warn("Servlet ContextHolder  not set, you will not be able to use WebApp based templates, "
                + "please \n"
                + "1. Add a ServletContextHolder bean into applicationContext.xml with the name \n"
                + "2. add the VelocityContextListener to web.xml ");
    } else {
        logger.info("ServletContextHolder Set on path "
                + servletContextHolder.getServletContext().getRealPath("/"));
        velocityEngine.setApplicationAttribute(ServletContext.class.getName(),
                servletContextHolder.getServletContext());

    }

    Properties props = new Properties();

    // Merge local properties if set.
    if (!this.velocityProperties.isEmpty()) {
        props.putAll(this.velocityProperties);
    }

    // Apply properties to VelocityEngine.
    for (Iterator it = props.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        if (!(entry.getKey() instanceof String)) {
            throw new IllegalArgumentException(
                    "Illegal property key [" + entry.getKey() + "]: only Strings allowed");
        }

        velocityEngine.setProperty((String) entry.getKey(), entry.getValue());
    }

    try {
        // Perform actual initialization.
        velocityEngine.init();
    } catch (IOException ex) {
        throw ex;
    } catch (VelocityException ex) {
        throw ex;
    } catch (RuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        logger.error("Why does VelocityEngine throw a generic checked exception, after all?", ex);
        throw new VelocityException(ex.getMessage());
    }

    return velocityEngine;
}

From source file:it.geosolutions.geobatch.imagemosaic.GranuleRemoverOnlineTest.java

/**
 * * deletes existing store
 * * create a brand new mosaic dir
 * * create the mosaic on GS
 * 
 */
protected ImageMosaicConfiguration createMosaicConfig() throws IOException {

    // create datastore file
    Properties datastore = new Properties();
    datastore.putAll(getPostgisParams());
    datastore.remove(PostgisNGDataStoreFactory.DBTYPE.key);
    datastore.setProperty("SPI", "org.geotools.data.postgis.PostgisNGDataStoreFactory");
    datastore.setProperty(PostgisNGDataStoreFactory.LOOSEBBOX.key, "true");
    datastore.setProperty(PostgisNGDataStoreFactory.ESTIMATED_EXTENTS.key, "false");
    File datastoreFile = new File(getTempDir(), "datastore.properties");
    LOGGER.info("Creating  " + datastoreFile);
    FileOutputStream out = new FileOutputStream(datastoreFile);
    datastore.store(out, "Datastore file created from fixtures");
    out.flush();
    out.close();
    //        datastore.store(System.out, "Datastore created from fixtures");
    //        datastore.list(System.out);

    // config
    ImageMosaicConfiguration conf = new ImageMosaicConfiguration("", "", "");
    conf.setTimeRegex("(?<=_)\\d{8}");
    conf.setTimeDimEnabled("true");
    conf.setTimePresentationMode("LIST");
    conf.setGeoserverURL(getFixture().getProperty("gs_url"));
    conf.setGeoserverUID(getFixture().getProperty("gs_user"));
    conf.setGeoserverPWD(getFixture().getProperty("gs_password"));
    conf.setDatastorePropertiesPath(datastoreFile.getAbsolutePath());
    conf.setDefaultNamespace(WORKSPACE);
    conf.setDefaultStyle("raster");
    conf.setCrs("EPSG:4326");

    return conf;
}

From source file:com.glaf.core.jdbc.connection.DruidConnectionProvider.java

public void configure(Properties props) {
    Properties properties = new Properties();
    properties.putAll(props);

    for (Iterator<Object> ii = props.keySet().iterator(); ii.hasNext();) {
        String key = (String) ii.next();
        if (key.startsWith("druid.")) {
            String newKey = key.substring(6);
            properties.put(newKey, props.get(key));
        }
    }

    Properties connectionProps = ConnectionProviderFactory.getConnectionProperties(properties);
    log.info("Connection properties: " + PropertiesHelper.maskOut(connectionProps, "password"));

    String jdbcDriverClass = properties.getProperty("jdbc.driver");
    String jdbcUrl = properties.getProperty("jdbc.url");

    log.info("Druid using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl);

    autocommit = PropertiesHelper.getBoolean("jdbc.autocommit", properties);
    log.info("autocommit mode: " + autocommit);

    if (jdbcDriverClass == null) {
        log.warn("No JDBC Driver class was specified by property jdbc.driver");
    } else {
        try {
            Class.forName(jdbcDriverClass);
        } catch (ClassNotFoundException cnfe) {
            try {
                ReflectUtils.instantiate(jdbcDriverClass);
            } catch (Exception e) {
                String msg = "JDBC Driver class not found: " + jdbcDriverClass;
                log.error(msg, e);
                throw new RuntimeException(msg, e);
            }
        }
    }

    try {

        Integer maxPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXACTIVE, properties);
        Integer maxStatements = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXSTATEMENTS, properties);

        Integer timeBetweenEvictionRuns = PropertiesHelper
                .getInteger(ConnectionConstants.PROP_TIMEBETWEENEVICTIONRUNS, properties);

        Integer maxWait = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXWAIT, properties);

        String validationQuery = properties.getProperty(ConnectionConstants.PROP_VALIDATIONQUERY);

        if (maxPoolSize == null) {
            maxPoolSize = 50;
        }

        if (timeBetweenEvictionRuns == null) {
            timeBetweenEvictionRuns = 60;
        }

        if (maxWait == null) {
            maxWait = 60;
        }

        String dbUser = properties.getProperty("jdbc.user");
        String dbPassword = properties.getProperty("jdbc.password");

        if (dbUser == null) {
            dbUser = "";
        }

        if (dbPassword == null) {
            dbPassword = "";
        }

        ds = new DruidDataSource();

        DruidDataSourceFactory.config(ds, properties);
        ds.setConnectProperties(properties);
        ds.setDriverClassName(jdbcDriverClass);
        ds.setUrl(jdbcUrl);
        ds.setUsername(dbUser);
        ds.setPassword(dbPassword);

        ds.setInitialSize(1);
        ds.setMinIdle(3);
        ds.setMaxActive(maxPoolSize);
        ds.setMaxWait(maxWait * 1000L);

        ds.setConnectionErrorRetryAttempts(30);
        ds.setDefaultAutoCommit(true);

        ds.setTestOnReturn(false);
        ds.setTestOnBorrow(false);
        ds.setTestWhileIdle(false);

        if (StringUtils.isNotEmpty(validationQuery)) {
            log.debug("validationQuery:" + validationQuery);
            ds.setValidationQuery(validationQuery);
            ds.setTestWhileIdle(true); // validate idle connections with the query above
        }

        ds.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRuns * 1000L); // interval between evictor runs
        ds.setMinEvictableIdleTimeMillis(1000L * 60L * 120L); // evict connections idle for over 120 minutes

        if (maxStatements != null) {
            ds.setPoolPreparedStatements(true);
            ds.setMaxOpenPreparedStatements(maxStatements);
            ds.setMaxPoolPreparedStatementPerConnectionSize(200);
        }

        ds.setRemoveAbandoned(false); // abandoned-connection reclamation disabled
        ds.setRemoveAbandonedTimeout(7200); // 7200 seconds = 120 minutes
        ds.setLogAbandoned(true); // log stack traces of abandoned connections

        ds.init();
    } catch (Exception ex) {
        ex.printStackTrace();
        log.error("could not instantiate Druid connection pool", ex);
        throw new RuntimeException("Could not instantiate Druid connection pool", ex);
    }

    String i = properties.getProperty("jdbc.isolation");
    if (i == null) {
        isolation = null;
    } else {
        isolation = Integer.valueOf(i);
    }

}

From source file:org.paxml.launch.LaunchModel.java

private void populateResourceMap(Map<PaxmlResource, List<Settings>> map, PaxmlResource res, Group group) {
    Settings settings = new Settings(group == null ? "" : group.getId());
    Properties properties = settings.getProperties();
    properties.putAll(getGlobalSettings().getProperties());
    if (group != null) {
        properties.putAll(group.getSettings().getProperties());
    }
    Map<String, Factor> factorMap = settings.getFactors();

    // merge the global factors
    for (Map.Entry<String, Factor> globalFactorEntry : getGlobalSettings().getFactors().entrySet()) {
        final String key = globalFactorEntry.getKey();
        Factor factor = factorMap.get(key);
        if (factor == null) {
            factor = new Factor();
            factor.setName(key);
            factorMap.put(key, factor);
        }
        factor.getValues().addAll(globalFactorEntry.getValue().getValues());
    }
    // copy my own factors
    if (group != null) {
        factorMap.putAll(group.getSettings().getFactors());
    }
    List<Settings> list = map.get(res);
    if (list == null) {
        list = new Vector<Settings>();
        map.put(res, list);
    }
    list.add(settings);
}

From source file:com.mkwhitacre.kafka.mapreduce.utils.KafkaBrokerTestHarness.java

/**
 * Returns properties for either a Kafka producer or consumer.
 *
 * @return Combined producer and consumer properties.
 */
public Properties getProps() {
    // Combine producer and consumer properties.
    Properties props = getProducerProps();
    props.putAll(getConsumerProps());
    return props;
}

From source file:org.grails.datastore.mapping.gemfire.GemfireDatastore.java

public void afterPropertiesSet() throws Exception {
    CacheFactoryBean cacheFactory = new CacheFactoryBean();
    if (connectionDetails != null) {
        if (connectionDetails.containsKey(SETTING_CACHE_XML)) {
            Object entry = connectionDetails.remove(SETTING_CACHE_XML);
            if (entry instanceof Resource) {
                cacheFactory.setCacheXml((Resource) entry);
            } else {
                cacheFactory.setCacheXml(new ClassPathResource(entry.toString()));
            }
        }

        if (connectionDetails.containsKey(SETTING_PROPERTIES)) {
            Object entry = connectionDetails.get(SETTING_PROPERTIES);
            if (entry instanceof Properties) {
                cacheFactory.setProperties((Properties) entry);
            } else if (entry instanceof Map) {
                final Properties props = new Properties();
                props.putAll((Map) entry);
                cacheFactory.setProperties(props);
            }
        }
    }

    try {
        if (gemfireCache == null) {
            cacheFactory.afterPropertiesSet();
            gemfireCache = cacheFactory.getObject();
        }
        initializeRegions(gemfireCache, mappingContext);
        initializeConverters(mappingContext);
    } catch (Exception e) {
        throw new DatastoreConfigurationException(
                "Failed to configure Gemfire cache and regions: " + e.getMessage(), e);
    }
}
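
Note why the Gemfire example copies the Map into a fresh Properties object: Properties extends Hashtable<Object, Object>, so putAll accepts non-String keys and values, but getProperty silently ignores any value that is not a String. A minimal sketch of that pitfall (names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class NonStringValuePitfall {
    public static void main(String[] args) {
        Map<String, Object> settings = new HashMap<>();
        settings.put("timeout", 30); // Integer value, not a String

        Properties props = new Properties();
        props.putAll(settings);

        System.out.println(props.get("timeout"));         // 30
        System.out.println(props.getProperty("timeout")); // null, because the value is not a String
    }
}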