List of usage examples for java.util.Properties.putAll
@Override public synchronized void putAll(Map<?, ?> t)
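Before the per-project examples, here is a minimal, self-contained sketch (class and property names are purely illustrative, not taken from any project below) of what putAll does to a Properties instance: every mapping from the argument Map is copied in, existing values for the same key are overwritten, and non-String values are stored but ignored by getProperty.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PutAllBasics {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost");
        props.setProperty("port", "8080");

        Map<String, String> overrides = new HashMap<>();
        overrides.put("port", "9090");   // overwrites the existing value
        overrides.put("timeout", "30");  // adds a new key

        props.putAll(overrides);

        System.out.println(props.getProperty("port"));     // 9090
        System.out.println(props.getProperty("timeout"));  // 30

        // Non-String values can be inserted via putAll because Properties
        // extends Hashtable<Object,Object>, but getProperty() only returns Strings.
        Map<String, Object> raw = new HashMap<>();
        raw.put("retries", 3);
        props.putAll(raw);
        System.out.println(props.getProperty("retries"));  // null (value is an Integer)
        System.out.println(props.get("retries"));          // 3
    }
}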
From source file: com.enonic.cms.business.localization.resource.LocalizationResourceBundleServiceImpl.java

private LocalizationResourceBundle createResourceBundle(Locale locale, ResourceKey defaultLocalizationResourceKey) {
    Properties props = new Properties();
    String lang = locale.getLanguage();
    String country = locale.getCountry();
    String variant = locale.getVariant();

    props.putAll(loadBundle(defaultLocalizationResourceKey, ""));
    if (StringUtils.isNotEmpty(lang)) {
        lang = lang.toLowerCase();
        props.putAll(loadBundle(defaultLocalizationResourceKey, "_" + lang));
    }
    if (StringUtils.isNotEmpty(country)) {
        country = country.toLowerCase();
        props.putAll(loadBundle(defaultLocalizationResourceKey, "_" + lang + "_" + country));
    }
    if (StringUtils.isNotEmpty(variant)) {
        variant = variant.toLowerCase();
        props.putAll(loadBundle(defaultLocalizationResourceKey, "_" + lang + "_" + country + "_" + variant));
    }
    return new LocalizationResourceBundle(props);
}
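The method above layers progressively more specific bundles over a base bundle; because putAll overwrites duplicate keys, the most specific bundle loaded last wins while untouched keys fall through from the base. A reduced sketch of the same layering with hypothetical in-memory bundles (the keys and values are invented for illustration):

import java.util.Map;
import java.util.Properties;

public class BundleLayering {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.putAll(Map.of("greeting", "Hello", "farewell", "Goodbye")); // base bundle
        props.putAll(Map.of("greeting", "Hallo"));                        // "_de" bundle
        props.putAll(Map.of("greeting", "Grüezi"));                       // "_de_ch" bundle

        System.out.println(props.getProperty("greeting")); // Grüezi  (most specific wins)
        System.out.println(props.getProperty("farewell")); // Goodbye (inherited from base)
    }
}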
From source file: ome.client.utests.Preferences3Test.java

@Test(groups = "ticket:1058")
public void testOmeroUserIsProperlySetWithSpring2_5_5Manual() {
    Server s = new Server("localhost", 1099);
    Login l = new Login("me", "password");
    Properties p = s.asProperties();
    p.putAll(l.asProperties());

    // This is copied from OmeroContext. This is the parent context which
    // should contain the properties;
    Properties copy = new Properties(p);
    ConstructorArgumentValues ctorArg1 = new ConstructorArgumentValues();
    ctorArg1.addGenericArgumentValue(copy);
    BeanDefinition definition1 = new RootBeanDefinition(Properties.class, ctorArg1, null);
    StaticApplicationContext staticContext = new StaticApplicationContext();
    staticContext.registerBeanDefinition("properties", definition1);
    staticContext.refresh();

    // This is the child context and contains a definition of a
    // PlaceHolderConfigurer
    // as well as a user of
    StaticApplicationContext childContext = new StaticApplicationContext();
    MutablePropertyValues mpv2 = new MutablePropertyValues();
    mpv2.addPropertyValue("properties", new RuntimeBeanReference("properties"));
    mpv2.addPropertyValue("systemPropertiesModeName", "SYSTEM_PROPERTIES_MODE_FALLBACK");
    mpv2.addPropertyValue("localOverride", "true");
    BeanDefinition definitionConfigurer = new RootBeanDefinition(PreferencesPlaceholderConfigurer.class, null, mpv2);
    childContext.registerBeanDefinition("propertiesPlaceholderConfigurer", definitionConfigurer);
    ConstructorArgumentValues cav2 = new ConstructorArgumentValues();
    cav2.addGenericArgumentValue("${omero.user}");
    BeanDefinition definitionTest = new RootBeanDefinition(String.class, cav2, null);
    childContext.registerBeanDefinition("test", definitionTest);
    childContext.setParent(staticContext);
    childContext.refresh();

    String test = (String) childContext.getBean("test");
    assertEquals(test, "me");
}
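One detail worth noting in this test: p.putAll(l.asProperties()) physically copies the login entries into p, whereas the later new Properties(p) only links p as a defaults table rather than copying its entries. Defaults are visible through getProperty() but not through keySet() or another putAll. A small sketch of that difference (the key name is illustrative):

import java.util.Properties;

public class CopyVersusDefaults {
    public static void main(String[] args) {
        Properties base = new Properties();
        base.setProperty("omero.user", "me");

        // putAll copies entries: they become part of the receiving table.
        Properties copied = new Properties();
        copied.putAll(base);
        System.out.println(copied.keySet());                   // [omero.user]

        // new Properties(base) only registers base as the defaults table.
        Properties wrapped = new Properties(base);
        System.out.println(wrapped.getProperty("omero.user")); // me (resolved via defaults)
        System.out.println(wrapped.keySet());                  // [] (defaults are not entries)

        // Consequence: copying 'wrapped' with putAll silently drops the defaults.
        Properties copyOfWrapped = new Properties();
        copyOfWrapped.putAll(wrapped);
        System.out.println(copyOfWrapped.getProperty("omero.user")); // null
    }
}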
From source file: iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.Evaluator.java

@Override
public void run(CommandLine commandLine) throws IOException {
    File outputFile = new File(commandLine.getOptionValue("output"));
    File inputFile = new File(commandLine.getOptionValue("input"));
    ParameterFile parameterFile = new ParameterFile(new File(commandLine.getOptionValue("parameterFile")));

    // sanity check to ensure input hasn't been modified after the output
    if (!commandLine.hasOption("force") && (outputFile.lastModified() > 0L)
            && (inputFile.lastModified() > outputFile.lastModified())) {
        throw new FrameworkException("input appears to be newer than output");
    }

    // open the resources and begin processing
    try {
        problem = ProblemFactory.getInstance().getProblem(commandLine.getOptionValue("problem"));

        try {
            input = new SampleReader(new FileReader(inputFile), parameterFile);

            try {
                if (commandLine.hasOption("metrics")) {
                    NondominatedPopulation referenceSet = null;

                    // load reference set and create the quality indicator
                    if (commandLine.hasOption("reference")) {
                        referenceSet = new NondominatedPopulation(
                                PopulationIO.readObjectives(new File(commandLine.getOptionValue("reference"))));
                    } else {
                        referenceSet = ProblemFactory.getInstance()
                                .getReferenceSet(commandLine.getOptionValue("problem"));
                    }

                    if (referenceSet == null) {
                        throw new FrameworkException("no reference set available");
                    }

                    QualityIndicator indicator = new QualityIndicator(problem, referenceSet);
                    output = new MetricFileWriter(indicator, outputFile);
                } else {
                    output = new ResultFileWriter(problem, outputFile, !commandLine.hasOption("novariables"));
                }

                // resume at the last good output
                for (int i = 0; i < output.getNumberOfEntries(); i++) {
                    if (input.hasNext()) {
                        input.next();
                    } else {
                        throw new FrameworkException("output has more entries than input");
                    }
                }

                // setup any default parameters
                Properties defaultProperties = new Properties();

                if (commandLine.hasOption("properties")) {
                    for (String property : commandLine.getOptionValues("properties")) {
                        String[] tokens = property.split("=");

                        if (tokens.length == 2) {
                            defaultProperties.setProperty(tokens[0], tokens[1]);
                        } else {
                            throw new FrameworkException("malformed property argument");
                        }
                    }
                }

                if (commandLine.hasOption("epsilon")) {
                    defaultProperties.setProperty("epsilon", commandLine.getOptionValue("epsilon"));
                }

                // seed the pseudo-random number generator
                if (commandLine.hasOption("seed")) {
                    PRNG.setSeed(Long.parseLong(commandLine.getOptionValue("seed")));
                }

                // process the remaining runs
                while (input.hasNext()) {
                    Properties properties = input.next();
                    properties.putAll(defaultProperties);
                    process(commandLine.getOptionValue("algorithm"), properties);
                }
            } finally {
                if (output != null) {
                    output.close();
                }
            }
        } finally {
            if (input != null) {
                input.close();
            }
        }
    } finally {
        if (problem != null) {
            problem.close();
        }
    }
}
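Note the direction of the merge in the final loop: properties.putAll(defaultProperties) is invoked on the sampled properties, so any key that also appears in defaultProperties ends up with the command-line value. A stripped-down sketch of that ordering (the parameter names are invented):

import java.util.Properties;

public class MergeOrder {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("populationSize", "200"); // e.g. supplied via --properties

        Properties sampled = new Properties();
        sampled.setProperty("populationSize", "100");  // e.g. read from the sample file
        sampled.setProperty("sbx.rate", "0.9");

        // target.putAll(source): values from 'source' win on key collisions.
        sampled.putAll(defaults);
        System.out.println(sampled.getProperty("populationSize")); // 200
        System.out.println(sampled.getProperty("sbx.rate"));       // 0.9
    }
}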
From source file: com.pl.plugins.commons.dal.utils.reports.JasperReportService.java

public void afterPropertiesSet() throws Exception {
    Properties systemProperties = System.getProperties();
    systemProperties.putAll(jasperConfig);
}
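System.getProperties() returns the live system-property table rather than a copy, so the putAll above makes the Jasper settings visible process-wide through System.getProperty(). A minimal sketch with a made-up property name:

import java.util.Properties;

public class SystemPropertiesMerge {
    public static void main(String[] args) {
        Properties jasperConfig = new Properties();
        jasperConfig.setProperty("net.sf.jasperreports.example.flag", "true"); // illustrative key

        // The returned Properties object is the one the JVM consults, not a snapshot.
        System.getProperties().putAll(jasperConfig);

        System.out.println(System.getProperty("net.sf.jasperreports.example.flag")); // true
    }
}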
From source file: com.glaf.jbpm.connection.HikariCPConnectionProvider.java

public void configure(Properties props) throws RuntimeException {
    Properties properties = new Properties();
    properties.putAll(props);
    for (Iterator<?> ii = props.keySet().iterator(); ii.hasNext();) {
        String key = (String) ii.next();
        if (key.startsWith("hikari.")) {
            String newKey = key.substring(7);
            properties.put(newKey, props.get(key));
        }
    }

    String jdbcDriverClass = properties.getProperty(Environment.DRIVER);
    String jdbcUrl = properties.getProperty(Environment.URL);
    Properties connectionProps = ConnectionProviderFactory.getConnectionProperties(properties);

    log.info("HikariCP using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl);
    log.info("Connection properties: " + PropertiesHelper.maskOut(connectionProps, Environment.PASS));

    autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT, props);
    log.info("autocommit mode: " + autocommit);

    if (jdbcDriverClass == null) {
        log.warn("No JDBC Driver class was specified by property " + Environment.DRIVER);
    } else {
        try {
            Class.forName(jdbcDriverClass);
        } catch (ClassNotFoundException cnfe) {
            try {
                ClassUtils.classForName(jdbcDriverClass);
            } catch (Exception e) {
                String msg = "JDBC Driver class not found: " + jdbcDriverClass;
                log.error(msg, e);
                throw new RuntimeException(msg, e);
            }
        }
    }

    try {
        String validationQuery = properties.getProperty(ConnectionConstants.PROP_VALIDATIONQUERY);
        Integer initialPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_INITIALSIZE, properties);
        Integer minPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MINACTIVE, properties);
        Integer maxPoolSize = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXACTIVE, properties);
        if (initialPoolSize == null && minPoolSize != null) {
            properties.put(ConnectionConstants.PROP_INITIALSIZE, String.valueOf(minPoolSize).trim());
        }
        Integer maxWait = PropertiesHelper.getInteger(ConnectionConstants.PROP_MAXWAIT, properties);
        if (maxPoolSize == null) {
            maxPoolSize = 50;
        }

        String dbUser = properties.getProperty(Environment.USER);
        String dbPassword = properties.getProperty(Environment.PASS);
        if (dbUser == null) {
            dbUser = "";
        }
        if (dbPassword == null) {
            dbPassword = "";
        }

        HikariConfig config = new HikariConfig();
        config.setDriverClassName(jdbcDriverClass);
        config.setJdbcUrl(jdbcUrl);
        config.setUsername(dbUser);
        config.setPassword(dbPassword);
        config.setMaximumPoolSize(maxPoolSize);
        config.setDataSourceProperties(properties);
        if (StringUtils.isNotEmpty(validationQuery)) {
            config.setConnectionTestQuery(validationQuery);
        }
        if (maxWait != null) {
            config.setConnectionTimeout(maxWait * 1000L);
        }
        config.setMaxLifetime(1000L * 3600 * 8);

        String isolationLevel = properties.getProperty(Environment.ISOLATION);
        if (isolationLevel == null) {
            isolation = null;
        } else {
            isolation = new Integer(isolationLevel);
            log.info("JDBC isolation level: " + Environment.isolationLevelToString(isolation.intValue()));
        }
        if (StringUtils.isNotEmpty(isolationLevel)) {
            config.setTransactionIsolation(isolationLevel);
        }

        ds = new HikariDataSource(config);
    } catch (Exception ex) {
        ex.printStackTrace();
        log.error("could not instantiate HikariCP connection pool", ex);
        throw new RuntimeException("Could not instantiate HikariCP connection pool", ex);
    }
}
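The loop at the top of configure() first clones the incoming properties with putAll and then re-adds every "hikari."-prefixed entry under its unprefixed name. The same namespace-flattening pattern in isolation (the prefix and keys are examples, not the provider's actual configuration):

import java.util.Properties;

public class PrefixFlattening {
    public static void main(String[] args) {
        Properties source = new Properties();
        source.setProperty("hikari.maximumPoolSize", "20");
        source.setProperty("hibernate.connection.url", "jdbc:h2:mem:test");

        Properties merged = new Properties();
        merged.putAll(source); // keep every entry as-is first
        for (String key : source.stringPropertyNames()) {
            if (key.startsWith("hikari.")) {
                // also expose the pool settings under their short names
                merged.setProperty(key.substring("hikari.".length()), source.getProperty(key));
            }
        }

        System.out.println(merged.getProperty("maximumPoolSize"));        // 20
        System.out.println(merged.getProperty("hikari.maximumPoolSize")); // 20 (original key kept)
    }
}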
From source file: com.twitter.ambrose.cascading3.AmbroseCascadingNotifier.java

/**
 * The onStarting event is fired when a Flow instance receives the start() message. A Flow is cut
 * down into executing units called stepFlow. A stepFlow contains a stepFlowJob which represents
 * the mapreduce job to be submitted to Hadoop. The ambrose graph is constructed from the step
 * graph found in the flow object.
 *
 * @param flow the flow.
 */
@Override
@SuppressWarnings("unchecked")
public void onStarting(Flow flow) {
    // init flow
    List<FlowStep> steps = flow.getFlowSteps();
    totalNumberOfJobs = steps.size();
    currentFlowId = flow.getID();

    Properties props = new Properties();
    props.putAll(flow.getConfigAsProperties());
    try {
        statsWriteService.initWriteService(props);
    } catch (IOException e) {
        LOG.error("Failed to initialize statsWriteService", e);
    }

    // convert graph from cascading to jgrapht
    FlowStepGraph flowStepGraph = Flows.getStepGraphFrom(flow);
    DirectedGraph graph = new DefaultDirectedGraph<BaseFlowStep, FlowGraphEdge>(
            new EdgeFactory<BaseFlowStep, FlowGraphEdge>() {
                @Override
                public FlowGraphEdge createEdge(BaseFlowStep src, BaseFlowStep dest) {
                    return new FlowGraphEdge(src.getID(), dest.getID());
                }
            });
    for (FlowStep v : flowStepGraph.vertexSet()) {
        graph.addVertex(v);
    }
    for (ProcessEdge e : flowStepGraph.edgeSet()) {
        graph.addEdge(e.getSourceProcessID(), e.getSinkProcessID());
    }

    // convert graph from jgrapht to ambrose
    AmbroseCascadingGraphConverter converter = new AmbroseCascadingGraphConverter(graph, nodesByName);
    converter.convert();
    AmbroseUtils.sendDagNodeNameMap(statsWriteService, currentFlowId, nodesByName);
}
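Here putAll is used to snapshot the flow's configuration map into a fresh Properties instance before handing it to the stats writer, so later changes to the original map cannot leak into the writer. A reduced sketch of that defensive copy (the configuration key is invented):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class ConfigSnapshot {
    public static void main(String[] args) {
        Map<Object, Object> flowConfig = new HashMap<>();
        flowConfig.put("mapreduce.job.name", "example-flow");

        // Copy into an independent Properties object.
        Properties props = new Properties();
        props.putAll(flowConfig);

        // Mutating the original map afterwards does not affect the snapshot.
        flowConfig.put("mapreduce.job.name", "renamed-flow");
        System.out.println(props.getProperty("mapreduce.job.name")); // example-flow
    }
}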
From source file: net.sf.sze.config.WebMvcConfig.java

/**
 * Initiates the message resolver.
 *
 * @return a message source.
 */
@Bean(name = "messageSource")
public MessageSource configureMessageSource() {
    ReloadableResourceBundleMessageSource messageSource = new ReloadableResourceBundleMessageSource();
    messageSource.setBasenames(MESSAGE_SOURCE, APP_MESSAGE_SOURCE, HELP_MESSAGE_SOURCE, MESSAGE_SOURCE_OVAL);
    messageSource.setCacheSeconds(MESSAGE_CACHE);
    messageSource.setFallbackToSystemLocale(false);
    // Make sure apostrophes are always doubled.
    messageSource.setAlwaysUseMessageFormat(true);
    // This persister doubles apostrophes.
    messageSource.setPropertiesPersister(
            new RecursivePropertiesPersister(new ApostropheEscapingPropertiesPersister()));

    final Class<?>[] classes = URL.class.getDeclaredClasses();
    final UrlDefinitionsToMessages urlDefinitions = new UrlDefinitionsToMessages(classes);
    urlDefinitions.addParamGroupAsMessages();
    urlDefinitions.addParamsAsMessages();
    urlDefinitions.addUrlsAsMessagesWithNamedParameters();

    Properties staticMessages = urlDefinitions.getMessages();
    final EntityPropertiesToMessages epm = new EntityPropertiesToMessages("net.sf.sze.model");
    staticMessages.putAll(epm.getProperties());

    final String version = buildNr.replace("SNAPSHOT", Long.toString(System.currentTimeMillis()));
    staticMessages.put("app.version", version);
    messageSource.setCommonMessages(staticMessages);
    return messageSource;
}
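Because Properties itself implements Map<Object,Object>, putAll can merge one Properties object straight into another, as done with the generated entity messages above, and put/setProperty can then layer computed values on top. A compact sketch with invented message keys:

import java.util.Properties;

public class MessageMerging {
    public static void main(String[] args) {
        Properties urlMessages = new Properties();
        urlMessages.setProperty("url.home", "/home");

        Properties entityMessages = new Properties();
        entityMessages.setProperty("pupil.name", "Name");

        // Merge one Properties into another, then add a computed entry.
        Properties staticMessages = new Properties();
        staticMessages.putAll(urlMessages);
        staticMessages.putAll(entityMessages);
        staticMessages.setProperty("app.version", "1.0-" + System.currentTimeMillis());

        // Prints url.home, pupil.name and app.version (iteration order is unspecified).
        System.out.println(staticMessages.stringPropertyNames());
    }
}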
From source file: org.apache.wiki.util.PropertyReader.java

/**
 * Implement the cascade functionality.
 *
 * @param context where to read the cascade from
 * @param defaultProperties properties to merge the cascading properties to
 * @since 2.5.x
 */
private static void loadWebAppPropsCascade(ServletContext context, Properties defaultProperties) {
    if (getInitParameter(context, PARAM_CUSTOMCONFIG_CASCADEPREFIX + "1") == null) {
        LOG.debug(" No cascading properties defined for this context");
        return;
    }

    // get into cascade...
    int depth = 0;
    boolean more = true;
    InputStream propertyStream = null;
    while (more) {
        depth++;
        String propertyFile = getInitParameter(context, PARAM_CUSTOMCONFIG_CASCADEPREFIX + depth);

        if (propertyFile == null) {
            more = false;
            break;
        }

        try {
            LOG.info(" Reading additional properties from " + propertyFile + " and merge to cascade.");
            Properties additionalProps = new Properties();
            propertyStream = new FileInputStream(new File(propertyFile));
            additionalProps.load(propertyStream);
            defaultProperties.putAll(additionalProps);
        } catch (Exception e) {
            LOG.error(" " + Release.APPNAME + ": Unable to load and setup properties from " + propertyFile
                    + "." + e.getMessage());
        } finally {
            IOUtils.closeQuietly(propertyStream);
        }
    }

    return;
}
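The cascade loop loads each configured file into a temporary Properties object and folds it into the defaults with putAll, so files later in the cascade override earlier ones key by key. A self-contained sketch of the same merge, assuming the file paths are supplied by the caller:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class CascadeLoader {
    /** Loads the given files in order; later files override earlier ones. */
    public static Properties loadCascade(String... propertyFiles) throws IOException {
        Properties merged = new Properties();
        for (String propertyFile : propertyFiles) {
            Properties additional = new Properties();
            try (InputStream in = new FileInputStream(propertyFile)) {
                additional.load(in);
            }
            merged.putAll(additional); // same-named keys from this file win
        }
        return merged;
    }

    public static void main(String[] args) throws IOException {
        Properties props = loadCascade(args); // e.g. base.properties site.properties
        props.list(System.out);
    }
}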
From source file: org.apache.openjpa.jdbc.schema.DBCPDriverDataSource.java

/**
 * Merge the passed in properties with a copy of the existing _connectionProperties
 *
 * @param props
 * @return Merged properties
 */
private Properties mergeConnectionProperties(final Properties props) {
    Properties mergedProps = new Properties();
    mergedProps.putAll(getConnectionProperties());

    // need to map "user" to "username" for Commons DBCP
    String uid = removeProperty(mergedProps, "user");
    if (uid != null) {
        mergedProps.setProperty("username", uid);
    }

    // now, merge in any passed in properties
    if (props != null && !props.isEmpty()) {
        for (Iterator<Object> itr = props.keySet().iterator(); itr.hasNext();) {
            String key = (String) itr.next();
            String value = props.getProperty(key);

            // need to map "user" to "username" for Commons DBCP
            if ("user".equalsIgnoreCase(key)) {
                key = "username";
            }

            // case-insensitive search for existing key
            String existingKey = hasKey(mergedProps, key);
            if (existingKey != null) {
                // update existing entry
                mergedProps.setProperty(existingKey, value);
            } else {
                // add property to the merged set
                mergedProps.setProperty(key, value);
            }
        }
    }
    return mergedProps;
}
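The extra bookkeeping in this method exists because putAll itself is strictly case-sensitive: merging "Username" over an existing "username" key would leave two distinct entries rather than overriding one. A tiny sketch of that pitfall (the property names are illustrative):

import java.util.Properties;

public class CaseSensitiveMerge {
    public static void main(String[] args) {
        Properties pool = new Properties();
        pool.setProperty("username", "app");

        Properties overrides = new Properties();
        overrides.setProperty("Username", "admin"); // note the capital U

        // A blind putAll does not treat "Username" and "username" as the same key.
        pool.putAll(overrides);
        System.out.println(pool.getProperty("username")); // app   (unchanged)
        System.out.println(pool.getProperty("Username")); // admin (second, separate entry)
        System.out.println(pool.size());                  // 2
    }
}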
From source file: org.apache.samza.sql.runner.SamzaSqlApplicationConfig.java

private UdfResolver createUdfResolver(Map<String, String> config) {
    String udfResolveValue = config.get(CFG_UDF_RESOLVER);
    Validate.notEmpty(udfResolveValue, "udfResolver config is not set or empty");
    HashMap<String, String> domainConfig = getDomainProperties(config,
            String.format(CFG_FMT_UDF_RESOLVER_DOMAIN, udfResolveValue), false);
    Properties props = new Properties();
    props.putAll(domainConfig);
    HashMap<String, String> udfConfig = getDomainProperties(config, CFG_UDF_CONFIG_DOMAIN, false);
    return new ConfigBasedUdfResolver(props, new MapConfig(udfConfig));
}
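Since the resolved config domain is a plain Map<String,String>, putAll is a one-line way to turn it into the Properties object the resolver constructor expects; because every value is a String, getProperty works normally afterwards. A minimal sketch with invented config keys:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class MapToProperties {
    public static void main(String[] args) {
        Map<String, String> domainConfig = new HashMap<>();
        domainConfig.put("udfResolver.class", "org.example.MyUdfResolver"); // illustrative entry

        Properties props = new Properties();
        props.putAll(domainConfig); // String keys and values, so getProperty() sees them

        System.out.println(props.getProperty("udfResolver.class"));
    }
}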