List of usage examples for java.util Properties putAll
@Override public synchronized void putAll(Map<?, ?> t)
From source file:org.apache.maven.model.building.DefaultModelBuilder.java
/**
 * Makes dependencies imported via {@code <scope>import</scope>} available to inheritance
 * processing ([MNG-5971]) by building a throw-away "intermediate" lineage that is
 * property/repository-merged and interpolated, then copying the interpolated import
 * dependencies back into the original lineage before importing dependency management.
 *
 * @param lineage the model lineage, child first, super-POM last; models are updated in place
 * @param request the current model building request
 * @param problems collector for any problems encountered; its source is mutated as a side effect
 */
private void processImports(final List<ModelData> lineage, final ModelBuildingRequest request,
        final DefaultModelProblemCollector problems) {
    // [MNG-5971] Imported dependencies should be available to inheritance processing
    // Creates an intermediate model with property and repository inheritance.
    final List<Model> intermediateLineage = new ArrayList<>(lineage.size());
    for (int i = 0, s0 = lineage.size(); i < s0; i++) {
        final Model model = lineage.get(i).getModel();
        // Clone so interpolation below cannot disturb the real lineage models.
        intermediateLineage.add(model.clone());
    }
    // Walk parent -> child (last element is the root ancestor) merging properties and
    // repositories downwards; child entries win over parent entries.
    for (int i = intermediateLineage.size() - 2; i >= 0; i--) {
        final Model parent = intermediateLineage.get(i + 1);
        final Model child = intermediateLineage.get(i);
        final Properties properties = new Properties();
        properties.putAll(parent.getProperties());
        properties.putAll(child.getProperties()); // child overrides parent
        child.setProperties(properties);
        final List<Repository> repositories = new ArrayList<>();
        repositories.addAll(child.getRepositories());
        for (final Repository parentRepository : parent.getRepositories()) {
            if (!repositories.contains(parentRepository)) {
                repositories.add(parentRepository);
            }
        }
        child.setRepositories(repositories);
    }
    // Interpolates the intermediate model.
    for (int i = 0, s0 = intermediateLineage.size(); i < s0; i++) {
        final Model model = intermediateLineage.get(i);
        problems.setSource(model);
        this.interpolateModel(model, request, problems);
    }
    // Exchanges 'import' scope dependencies in the original lineage with possibly interpolated values.
    // Relies on the intermediate lineage having the same dependency indices as the original.
    for (int i = 0, s0 = lineage.size(); i < s0; i++) {
        final Model model = lineage.get(i).getModel();
        if (model.getDependencyManagement() != null) {
            for (int j = 0, s1 = model.getDependencyManagement().getDependencies().size(); j < s1; j++) {
                final Dependency dependency = model.getDependencyManagement().getDependencies().get(j);
                if ("import".equals(dependency.getScope()) && "pom".equals(dependency.getType())) {
                    final Dependency interpolated = intermediateLineage.get(i).getDependencyManagement()
                            .getDependencies().get(j);
                    model.getDependencyManagement().getDependencies().set(j, interpolated);
                }
            }
        }
    }
    // [MNG-4488] [regression] Parent POMs resolved from repository are validated in strict mode
    // Downgrade validation to Maven 2.0 level so repository parents are validated leniently.
    ModelBuildingRequest lenientRequest = request;
    if (request.getValidationLevel() > ModelBuildingRequest.VALIDATION_LEVEL_MAVEN_2_0) {
        lenientRequest = new FilterModelBuildingRequest(request) {
            @Override
            public int getValidationLevel() {
                return ModelBuildingRequest.VALIDATION_LEVEL_MAVEN_2_0;
            }
        };
    }
    // Imports dependencies into the original model using the repositories of the intermediate model.
    for (int i = 0, s0 = lineage.size(), superModelIdx = lineage.size() - 1; i < s0; i++) {
        final Model model = lineage.get(i).getModel();
        this.configureResolver(lenientRequest.getModelResolver(), intermediateLineage.get(i), problems, true);
        this.importDependencyManagement(model, "import", lenientRequest, problems, new HashSet<String>());
        // The synthetic super model is never raw-validated.
        if (i != superModelIdx) {
            problems.setSource(model);
            modelValidator.validateRawModel(model, lenientRequest, problems);
        }
    }
}
From source file:com.alibaba.doris.common.config.ConfigManagerImpl.java
/**
 * Initializes the configuration manager: merges client properties over the defaults,
 * connects the {@code AdminConnector}, and (optionally) starts the background
 * configuration-fetch thread. Safe to call more than once; subsequent calls only log a warning.
 *
 * @throws ConfigException if a client property key is not a {@code String}, or if no
 *         admin server (main or backup) is reachable
 */
public void initConfig() throws ConfigException {
    if (initialized) {
        logger.warn("ConfigManagerImpl is already initialized!");
        return;
    }
    Properties defaultProperties = loadDefaultConfig();
    // Either load properties from the configured location, or sanitize the injected
    // Properties object so that every key is a String.
    if (properties == null) {
        this.properties = PropertiesLoadUtil.loadProperties(location);
    } else {
        Properties tempProperties = new Properties();
        Enumeration<Object> keysEnum = properties.keys();
        while (keysEnum.hasMoreElements()) {
            Object key = keysEnum.nextElement();
            if (key instanceof String) {
                tempProperties.setProperty((String) key, properties.get(key).toString());
            } else {
                throw new ConfigException(String.format(
                        "The key[%s]'type of client properties file must be 'String',not support the type:%s",
                        key, key.getClass()));
            }
        }
        this.properties = tempProperties;
    }
    // Client-supplied properties override the defaults.
    if (defaultProperties != null) {
        defaultProperties.putAll(properties);
        properties = defaultProperties;
    }
    loadClientConfiguration(properties);
    // initialize the connector
    AdminConnector adminConnector = AdminConnector.getInstance();
    adminConnector.init(properties); // this init must be executed when the client is initialized
    if (!adminConnector.isConnected()) {
        String mainAdminUrl = properties.getProperty("doris.config.adminserver.main.url");
        String backupAdminUrl = properties.getProperty("doris.config.adminserver.backup.url");
        throw new ConfigException(
                "admin server is not available,main admin:" + mainAdminUrl + ",backup admin:" + backupAdminUrl);
    }
    // Auto-fetch defaults to enabled unless explicitly turned off.
    String autoFetchProp = properties.getProperty("doris.admin.config.autofetch.enable", "true");
    boolean autoFetchEnable = Boolean.parseBoolean(autoFetchProp);
    if (autoFetchEnable) {
        // the interval to detect configuration changes.
        String intervalConfig = properties.getProperty("doris.config.fetch.interval");
        if (StringUtils.isEmpty(intervalConfig)) {
            throw new IllegalArgumentException("confg 'doris.config.fetch.interval' is not valid.");
        }
        this.interval = Long.parseLong(intervalConfig.trim());
        // start fetch thread.
        fetchThread = new Thread(new ConfigFetchTask(), "doris-config-fetcher");
        fetchThread.setDaemon(true);
        fetchThread.start();
    }
    // initialize completed.
    initialized = true;
}
From source file:com.bluexml.side.Framework.alfresco.notification.NotificationHelper.java
/** * This method loads the propertyFileName file from the Dictionary path of Alfresco repository (and if not found from classpath) * and then overwrite the properties with the properties files successively contains in resourceCustomPaths[i]+"/cm:"+propertyFileName * @param propertyFileName the property file name to load; if null, set to "notification.properties" * @param resourceCustomPaths a list of folder where the propertyFileName may be loaded * @return the set of properties/*from w w w . ja v a2 s . c o m*/ * @throws Exception */ public Properties getProperties(String language, String propertyFileName, ArrayList<String> resourceCustomPaths) throws Exception { // load properties Properties prop = new Properties(); if (propertyFileName == null) propertyFileName = PROPERTIES_PATH; // search in alfresco dictionary prop.load(getPropertiesInputStream(language, propertyFileName, PROPERTIES_IN_DICTIONARY_PATH, true)); // custom properties if (resourceCustomPaths != null && !resourceCustomPaths.isEmpty()) { for (String resourceCustomPath : resourceCustomPaths) { Properties customProp = new Properties(); InputStream s = getPropertiesInputStream(language, propertyFileName, resourceCustomPath, false); if (s != null) { customProp.load(s); prop.putAll(customProp); } } } logger.trace("Properties loaded :" + prop); return prop; }
From source file:fr.fastconnect.factory.tibco.bw.maven.run.LaunchDesignerMojo.java
private void updateAliasesFile() throws IOException { File aliasesFile = getAliasesFile(); File designer5Prefs = getDesigner5Prefs(); Properties prefs = new SortedProperties(); FileInputStream fisPrefs = new FileInputStream(designer5Prefs); prefs.load(fisPrefs);//from www . ja v a 2 s .c om fisPrefs.close(); Integer maxFileAliasPref = 0; for (Object k : prefs.keySet()) { String key = (String) k; if (key.startsWith(FILE_ALIAS_PREFIX)) { maxFileAliasPref++; } } Properties aliases = new Properties(); FileInputStream fis = new FileInputStream(aliasesFile); aliases.load(fis); fis.close(); String projectVersion = getProject().getVersion(); Properties duplicates = new Properties(); for (Object k : aliases.keySet()) { String key = (String) k; String value = aliases.getProperty(key); if (key.contains(projectVersion) && key.endsWith(":jar")) { getLog().debug(key); key = key.replace(projectVersion, "${project.version}"); duplicates.put(key, value); } } if (!duplicates.isEmpty()) { for (Object k : duplicates.keySet()) { String key = (String) k; String value = duplicates.getProperty(key); key = key.replace(TIBCO_ALIAS_PREFIX, ""); prefs.put(FILE_ALIAS_PREFIX + maxFileAliasPref.toString(), key + "=" + value); maxFileAliasPref++; } FileOutputStream fosPrefs = new FileOutputStream(designer5Prefs); prefs.store(fosPrefs, ""); fis.close(); aliases.putAll(duplicates); FileOutputStream fos = new FileOutputStream(aliasesFile); aliases.store(fos, ""); fis.close(); } }
From source file:com.legstar.mq.client.AbstractCicsMQ.java
/** * Given the endpoint parameters, setup a JNDI context to lookup JMS * resources.//from w w w .j a v a2s. co m * * @param cicsMQEndpoint the endpoint paramers * @return the JNDI context * @throws CicsMQConnectionException if JNDI context cannot be created */ protected Context createJndiContext(final CicsMQEndpoint cicsMQEndpoint) throws CicsMQConnectionException { try { Properties env = new Properties(); if (cicsMQEndpoint.getInitialContextFactory() != null && cicsMQEndpoint.getInitialContextFactory().length() > 0) { env.put(Context.INITIAL_CONTEXT_FACTORY, cicsMQEndpoint.getInitialContextFactory()); } if (cicsMQEndpoint.getJndiProviderURL() != null && cicsMQEndpoint.getJndiProviderURL().length() > 0) { env.put(Context.PROVIDER_URL, cicsMQEndpoint.getJndiProviderURL()); } if (cicsMQEndpoint.getJndiUrlPkgPrefixes() != null && cicsMQEndpoint.getJndiUrlPkgPrefixes().length() > 0) { env.put(Context.URL_PKG_PREFIXES, cicsMQEndpoint.getJndiUrlPkgPrefixes()); } if (cicsMQEndpoint.getJndiProperties() != null) { env.putAll(getProperties(cicsMQEndpoint.getJndiProperties())); } if (env.size() > 0) { return new InitialContext(env); } else { return new InitialContext(); } } catch (NamingException e) { throw new CicsMQConnectionException(e); } }
From source file:io.druid.indexing.kafka.supervisor.KafkaSupervisor.java
private KafkaConsumer<byte[], byte[]> getKafkaConsumer() { final Properties props = new Properties(); props.putAll(ioConfig.getConsumerProperties()); props.setProperty("enable.auto.commit", "false"); props.setProperty("metadata.max.age.ms", "10000"); props.setProperty("group.id", String.format("kafka-supervisor-%s", getRandomId())); ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader(); try {//from w w w. j a v a 2s. c om Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); return new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer()); } finally { Thread.currentThread().setContextClassLoader(currCtxCl); } }
From source file:org.codehaus.mojo.sql.SqlExecMojo.java
/** * Creates a new Connection as using the driver, url, userid and password * specified.//from w w w.j a va 2s . co m * * The calling method is responsible for closing the connection. * * @return Connection the newly created connection. * @throws MojoExecutionException if the UserId/Password/Url is not set or there * is no suitable driver or the driver fails to load. * @throws SQLException if there is problem getting connection with valid url * */ private Connection getConnection() throws MojoExecutionException, SQLException { getLog().debug("connecting to " + getUrl()); Properties info = new Properties(); info.put("user", getUsername()); if (!enableAnonymousPassword) { info.put("password", getPassword()); } info.putAll(this.getDriverProperties()); Driver driverInstance = null; try { Class dc = Class.forName(getDriver()); driverInstance = (Driver) dc.newInstance(); } catch (ClassNotFoundException e) { throw new MojoExecutionException("Driver class not found: " + getDriver(), e); } catch (Exception e) { throw new MojoExecutionException("Failure loading driver: " + getDriver(), e); } Connection conn = driverInstance.connect(getUrl(), info); if (conn == null) { // Driver doesn't understand the URL throw new SQLException("No suitable Driver for " + getUrl()); } conn.setAutoCommit(autocommit); return conn; }
From source file:org.apache.maven.model.building.DefaultModelBuilder.java
/**
 * Phase-1 model building: activates profiles, reads the raw model, assembles the parent
 * lineage up to the super POM, performs import/inheritance processing and interpolation,
 * and records the effective model. Runs phase 2 immediately unless the request asks for
 * two-phase building.
 *
 * @param request the model building request
 * @return the model building result holding the effective model
 * @throws ModelBuildingException if the model cannot be built (e.g. a parent cycle)
 */
@Override
public ModelBuildingResult build(ModelBuildingRequest request) throws ModelBuildingException {
    // phase 1
    DefaultModelBuildingResult result = new DefaultModelBuildingResult();
    DefaultModelProblemCollector problems = new DefaultModelProblemCollector(result);
    // profile activation
    DefaultProfileActivationContext profileActivationContext = getProfileActivationContext(request);
    problems.setSource("(external profiles)");
    List<Profile> activeExternalProfiles = profileSelector.getActiveProfiles(request.getProfiles(),
            profileActivationContext, problems);
    result.setActiveExternalProfiles(activeExternalProfiles);
    if (!activeExternalProfiles.isEmpty()) {
        // External profile properties become user properties, but explicit user
        // properties still win (they are putAll'd last).
        Properties profileProps = new Properties();
        for (Profile profile : activeExternalProfiles) {
            profileProps.putAll(profile.getProperties());
        }
        profileProps.putAll(profileActivationContext.getUserProperties());
        profileActivationContext.setUserProperties(profileProps);
    }
    // read and validate raw model
    Model inputModel = request.getRawModel();
    if (inputModel == null) {
        inputModel = readModel(request.getModelSource(), request.getPomFile(), request, problems);
    }
    problems.setRootModel(inputModel);
    ModelData resultData = new ModelData(request.getModelSource(), inputModel);
    ModelData superData = new ModelData(null, getSuperModel());
    Collection<String> parentIds = new LinkedHashSet<>();
    List<ModelData> lineage = new ArrayList<>();
    // Walk the parent chain child -> super POM, normalizing and injecting profiles
    // into a clone of each raw model along the way.
    for (ModelData currentData = resultData; currentData != null;) {
        lineage.add(currentData);
        Model rawModel = currentData.getModel();
        currentData.setRawModel(rawModel);
        Model tmpModel = rawModel.clone();
        currentData.setModel(tmpModel);
        problems.setSource(tmpModel);
        // model normalization
        modelNormalizer.mergeDuplicates(tmpModel, request, problems);
        profileActivationContext.setProjectProperties(tmpModel.getProperties());
        List<Profile> activePomProfiles = profileSelector.getActiveProfiles(rawModel.getProfiles(),
                profileActivationContext, problems);
        currentData.setActiveProfiles(activePomProfiles);
        Map<String, Activation> interpolatedActivations = getProfileActivations(rawModel, false);
        injectProfileActivations(tmpModel, interpolatedActivations);
        // profile injection
        for (Profile activeProfile : activePomProfiles) {
            profileInjector.injectProfile(tmpModel, activeProfile, request, problems);
        }
        // External profiles are only injected into the topmost (result) model.
        if (currentData == resultData) {
            for (Profile activeProfile : activeExternalProfiles) {
                profileInjector.injectProfile(tmpModel, activeProfile, request, problems);
            }
        }
        if (currentData == superData) {
            break; // the super POM terminates the lineage
        }
        configureResolver(request.getModelResolver(), tmpModel, problems);
        ModelData parentData = readParent(tmpModel, currentData.getSource(), request, problems);
        if (parentData == null) {
            currentData = superData;
        } else if (currentData == resultData) {
            // First iteration - add initial id after version resolution.
            currentData.setGroupId(currentData.getRawModel().getGroupId() == null ? parentData.getGroupId()
                    : currentData.getRawModel().getGroupId());
            currentData.setVersion(currentData.getRawModel().getVersion() == null ? parentData.getVersion()
                    : currentData.getRawModel().getVersion());
            currentData.setArtifactId(currentData.getRawModel().getArtifactId());
            parentIds.add(currentData.getId());
            // Reset - only needed for 'getId'.
            currentData.setGroupId(null);
            currentData.setArtifactId(null);
            currentData.setVersion(null);
            currentData = parentData;
        } else if (!parentIds.add(parentData.getId())) {
            // Re-seeing a parent id means the parent chain loops back on itself.
            String message = "The parents form a cycle: ";
            for (String modelId : parentIds) {
                message += modelId + " -> ";
            }
            message += parentData.getId();
            problems.add(
                    new ModelProblemCollectorRequest(ModelProblem.Severity.FATAL, ModelProblem.Version.BASE)
                            .setMessage(message));
            throw problems.newModelBuildingException();
        } else {
            currentData = parentData;
        }
    }
    problems.setSource(inputModel);
    checkPluginVersions(lineage, request, problems);
    // [MNG-5971] Imported dependencies should be available to inheritance processing
    processImports(lineage, request, problems);
    // inheritance assembly
    assembleInheritance(lineage, request, problems);
    Model resultModel = resultData.getModel();
    problems.setSource(resultModel);
    problems.setRootModel(resultModel);
    // model interpolation
    resultModel = interpolateModel(resultModel, request, problems);
    resultData.setModel(resultModel);
    // url normalization
    modelUrlNormalizer.normalize(resultModel, request);
    // Now the fully interpolated model is available: reconfigure the resolver
    configureResolver(request.getModelResolver(), resultModel, problems, true);
    resultData.setGroupId(resultModel.getGroupId());
    resultData.setArtifactId(resultModel.getArtifactId());
    resultData.setVersion(resultModel.getVersion());
    result.setEffectiveModel(resultModel);
    // Record per-model ids, active profiles, and raw models; the super POM uses "".
    for (ModelData currentData : lineage) {
        String modelId = (currentData != superData) ? currentData.getId() : "";
        result.addModelId(modelId);
        result.setActivePomProfiles(modelId, currentData.getActiveProfiles());
        result.setRawModel(modelId, currentData.getRawModel());
    }
    if (!request.isTwoPhaseBuilding()) {
        build(request, result);
    }
    return result;
}
From source file:org.apache.hadoop.hive.ql.plan.PlanUtils.java
public static TableDesc getDefaultTableDesc(CreateTableDesc directoryDesc, String cols, String colTypes) { TableDesc ret = getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols, colTypes, false); ;//from ww w .j a v a 2 s .co m if (directoryDesc == null) { return ret; } try { Properties properties = ret.getProperties(); if (directoryDesc.getFieldDelim() != null) { properties.setProperty(serdeConstants.FIELD_DELIM, directoryDesc.getFieldDelim()); properties.setProperty(serdeConstants.SERIALIZATION_FORMAT, directoryDesc.getFieldDelim()); } if (directoryDesc.getLineDelim() != null) { properties.setProperty(serdeConstants.LINE_DELIM, directoryDesc.getLineDelim()); } if (directoryDesc.getCollItemDelim() != null) { properties.setProperty(serdeConstants.COLLECTION_DELIM, directoryDesc.getCollItemDelim()); } if (directoryDesc.getMapKeyDelim() != null) { properties.setProperty(serdeConstants.MAPKEY_DELIM, directoryDesc.getMapKeyDelim()); } if (directoryDesc.getFieldEscape() != null) { properties.setProperty(serdeConstants.ESCAPE_CHAR, directoryDesc.getFieldEscape()); } if (directoryDesc.getSerName() != null) { properties.setProperty(serdeConstants.SERIALIZATION_LIB, directoryDesc.getSerName()); } if (directoryDesc.getOutputFormat() != null) { ret.setOutputFileFormatClass(JavaUtils.loadClass(directoryDesc.getOutputFormat())); } if (directoryDesc.getNullFormat() != null) { properties.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, directoryDesc.getNullFormat()); } if (directoryDesc.getTblProps() != null) { properties.putAll(directoryDesc.getTblProps()); } } catch (ClassNotFoundException e) { // mimicking behaviour in CreateTableDesc tableDesc creation // returning null table description for output. LOG.warn("Unable to find class in getDefaultTableDesc: " + e.getMessage(), e); return null; } return ret; }
From source file:org.callistasoftware.maven.plugins.propertyscanner.MyMojo.java
private Properties loadProperties(Log logger) throws MojoExecutionException { FilenameFilter filter = new SuffixFileFilter(".properties"); Properties allProperties = new Properties(); for (File propertiesDirectory : propertiesDirectories) { if (!propertiesDirectory.exists()) { throw new MojoExecutionException("Could not find properties directory: " + propertiesDirectory); }//from ww w.j ava2s . c o m File[] propertiesFiles = propertiesDirectory.listFiles(filter); for (File propertiesFile : propertiesFiles) { if (!propertiesFile.exists()) { throw new MojoExecutionException("Could not find properties file: " + propertiesFile); } //loading properties Properties properties = new Properties(); FileReader r = null; try { r = new FileReader(propertiesFile); properties.load(r); } catch (IOException e) { throw new MojoExecutionException( "Error loading properties from translation file: " + propertiesFile, e); } finally { try { r.close(); } catch (Exception e) { //nothing } } logger.debug("Loaded properties, read " + properties.size() + " entries"); allProperties.putAll(properties); } } logger.info("Total properties loaded: " + allProperties.size()); return allProperties; }