List of usage examples for java.util Properties keySet
@Override
public Set<Object> keySet()
From source file:edu.ku.brc.specify.web.SpecifyExplorer.java
/** * @param fdi/* w w w . j a va 2s . c o m*/ * @param title * @return */ protected String makeURLLink(final FormDataObjIFace fdi, final String title, final Properties props) { StringBuffer sb = new StringBuffer("<a href=\"" + servletURL + "?"); if (fdi != null) { sb.append("cls="); sb.append(fdi.getDataClass().getSimpleName()); sb.append("&id="); sb.append(fdi.getId()); } boolean needsAmp = fdi != null; if (props != null) { for (Object key : props.keySet()) { if (!needsAmp) { needsAmp = true; } else { sb.append('&'); } sb.append(key.toString()); sb.append('='); sb.append(props.get(key)); } } sb.append("\">"); sb.append(title); sb.append("</a>"); return sb.toString(); }
From source file:org.apache.struts2.views.velocity.VelocityManager.java
/** * load optional velocity properties using the following loading strategy * <ul>/*from ww w . ja va2 s .c o m*/ * <li>relative to the servlet context path</li> * <li>relative to the WEB-INF directory</li> * <li>on the classpath</li> * </ul> * * @param context the current ServletContext. may <b>not</b> be null * @return the optional properties if struts.velocity.configfile was specified, an empty Properties file otherwise */ public Properties loadConfiguration(ServletContext context) { if (context == null) { String gripe = "Error attempting to create a loadConfiguration from a null ServletContext!"; LOG.error(gripe); throw new IllegalArgumentException(gripe); } Properties properties = new Properties(); // now apply our systemic defaults, then allow user to override applyDefaultConfiguration(context, properties); String defaultUserDirective = properties.getProperty("userdirective"); /** * if the user has specified an external velocity configuration file, we'll want to search for it in the * following order * * 1. relative to the context path * 2. relative to /WEB-INF * 3. in the class path */ String configfile; if (customConfigFile != null) { configfile = customConfigFile; } else { configfile = "velocity.properties"; } configfile = configfile.trim(); InputStream in = null; String resourceLocation = null; try { if (context.getRealPath(configfile) != null) { // 1. relative to context path, i.e. /velocity.properties String filename = context.getRealPath(configfile); if (filename != null) { File file = new File(filename); if (file.isFile()) { resourceLocation = file.getCanonicalPath() + " from file system"; in = new FileInputStream(file); } // 2. if nothing was found relative to the context path, search relative to the WEB-INF directory if (in == null) { file = new File(context.getRealPath("/WEB-INF/" + configfile)); if (file.isFile()) { resourceLocation = file.getCanonicalPath() + " from file system"; in = new FileInputStream(file); } } } } // 3. 
finally, if there's no physical file, how about something in our classpath if (in == null) { in = VelocityManager.class.getClassLoader().getResourceAsStream(configfile); if (in != null) { resourceLocation = configfile + " from classloader"; } } // if we've got something, load 'er up if (in != null) { if (LOG.isInfoEnabled()) { LOG.info("Initializing velocity using " + resourceLocation); } properties.load(in); } } catch (IOException e) { if (LOG.isWarnEnabled()) { LOG.warn("Unable to load velocity configuration " + resourceLocation, e); } } finally { if (in != null) { try { in.close(); } catch (IOException e) { } } } // overide with programmatically set properties if (this.velocityProperties != null) { Iterator keys = this.velocityProperties.keySet().iterator(); while (keys.hasNext()) { String key = (String) keys.next(); properties.setProperty(key, this.velocityProperties.getProperty(key)); } } String userdirective = properties.getProperty("userdirective"); if ((userdirective == null) || userdirective.trim().equals("")) { userdirective = defaultUserDirective; } else { userdirective = userdirective.trim() + "," + defaultUserDirective; } properties.setProperty("userdirective", userdirective); // for debugging purposes, allows users to dump out the properties that have been configured if (LOG.isDebugEnabled()) { LOG.debug("Initializing Velocity with the following properties ..."); for (Iterator iter = properties.keySet().iterator(); iter.hasNext();) { String key = (String) iter.next(); String value = properties.getProperty(key); if (LOG.isDebugEnabled()) { LOG.debug(" '" + key + "' = '" + value + "'"); } } } return properties; }
From source file:org.apache.maven.archetype.creator.FilesetArchetypeCreator.java
/**
 * Writes the archetype-ized pom for one module: concrete groupId / artifactId /
 * version values are replaced with archetype placeholders, then the original
 * pom is scanned for pre-existing placeholder references and a warning is
 * logged for each one found.
 *
 * @param pom                    the parsed module model (mutated in place when CDATA is not preserved)
 * @param rootArtifactId         the root artifact id to replace with ${rootArtifactId}
 * @param archetypeFilesDirectory directory receiving the generated archetype pom
 * @param pomReversedProperties  reversed property values used for substitution
 * @param initialPomFile         the original, unmodified pom file
 * @param preserveCData          when true, do a raw text substitution so CDATA sections survive
 * @param keepParent             unused here; kept for signature compatibility with callers
 * @throws IOException if reading or writing a pom fails
 */
private void createModulePom(Model pom, String rootArtifactId, File archetypeFilesDirectory,
        Properties pomReversedProperties, File initialPomFile, boolean preserveCData, boolean keepParent)
        throws IOException {
    File outputFile = FileUtils.resolveFile(archetypeFilesDirectory, Constants.ARCHETYPE_POM);
    if (preserveCData) {
        getLogger().debug("Preserving CDATA parts of pom");
        File inputFile = FileUtils.resolveFile(archetypeFilesDirectory, Constants.ARCHETYPE_POM + ".tmp");
        FileUtils.copyFile(initialPomFile, inputFile);
        Reader in = null;
        Writer out = null;
        try {
            in = ReaderFactory.newXmlReader(inputFile);
            String initialcontent = IOUtil.toString(in);
            String content = getReversedContent(initialcontent, pomReversedProperties);
            outputFile.getParentFile().mkdirs();
            out = WriterFactory.newXmlWriter(outputFile);
            IOUtil.copy(content, out);
        } finally {
            IOUtil.close(in);
            IOUtil.close(out);
        }
        inputFile.delete();
    } else {
        if (pom.getParent() != null) {
            pom.getParent().setGroupId(StringUtils.replace(pom.getParent().getGroupId(),
                    pomReversedProperties.getProperty(Constants.GROUP_ID), "${" + Constants.GROUP_ID + "}"));
            if (pom.getParent().getArtifactId() != null
                    && pom.getParent().getArtifactId().indexOf(rootArtifactId) >= 0) {
                pom.getParent().setArtifactId(StringUtils.replace(pom.getParent().getArtifactId(),
                        rootArtifactId, "${rootArtifactId}"));
            }
            if (pom.getParent().getVersion() != null) {
                pom.getParent().setVersion("${" + Constants.VERSION + "}");
            }
        }
        pom.setModules(null);
        if (pom.getGroupId() != null) {
            pom.setGroupId(StringUtils.replace(pom.getGroupId(),
                    pomReversedProperties.getProperty(Constants.GROUP_ID), "${" + Constants.GROUP_ID + "}"));
        }
        pom.setArtifactId("${" + Constants.ARTIFACT_ID + "}");
        if (pom.getVersion() != null) {
            pom.setVersion("${" + Constants.VERSION + "}");
        }
        pom.setName(getReversedPlainContent(pom.getName(), pomReversedProperties));
        pom.setDescription(getReversedPlainContent(pom.getDescription(), pomReversedProperties));
        pom.setUrl(getReversedPlainContent(pom.getUrl(), pomReversedProperties));
        rewriteReferences(pom, rootArtifactId, pomReversedProperties.getProperty(Constants.GROUP_ID));
        pomManager.writePom(pom, outputFile, initialPomFile);
    }

    Reader in = null;
    try {
        in = ReaderFactory.newXmlReader(initialPomFile);
        String initialcontent = IOUtil.toString(in);
        for (Iterator<?> properties = pomReversedProperties.keySet().iterator(); properties.hasNext();) {
            String property = (String) properties.next();
            // BUG FIX: was `indexOf(...) > 0`, which silently missed a property
            // reference occurring at the very start of the file (index 0).
            if (initialcontent.indexOf("${" + property + "}") >= 0) {
                getLogger().warn("OldArchetype uses ${" + property + "} for internal processing, but file "
                        + initialPomFile + " contains this property already");
            }
        }
    } finally {
        IOUtil.close(in);
    }
}
From source file:edu.ucsd.library.dams.api.DAMSAPIServlet.java
public List<String> list(Properties props, String prefix, String suffix) { List<String> values = new ArrayList<String>(); for (Iterator it = props.keySet().iterator(); it.hasNext();) { String key = (String) it.next(); if (key != null && key.startsWith(prefix) && key.endsWith(suffix)) { String s = key.substring(prefix.length(), key.length() - suffix.length()); if (!values.contains(s)) { values.add(s);//from w w w .j a va 2 s. c o m } } } return values; }
From source file:com.googlecode.flyway.core.Flyway.java
/** * Configures Flyway with these properties. This overwrites any existing configuration. Property names are * documented in the flyway maven plugin. * * @param properties Properties used for configuration. * @throws FlywayException when the configuration failed. *///from w w w. j a v a2 s . c o m public void configure(Properties properties) { String driverProp = properties.getProperty("flyway.driver"); String urlProp = properties.getProperty("flyway.url"); String userProp = properties.getProperty("flyway.user"); String passwordProp = properties.getProperty("flyway.password"); if (StringUtils.hasText(driverProp) && StringUtils.hasText(urlProp) && StringUtils.hasText(userProp) && (passwordProp != null)) { // All datasource properties set setDataSource(new DriverDataSource(driverProp, urlProp, userProp, passwordProp)); } else if (StringUtils.hasText(driverProp) || StringUtils.hasText(urlProp) || StringUtils.hasText(userProp) || (passwordProp != null)) { // Some, but not all datasource properties set LOG.warn("Discarding INCOMPLETE dataSource configuration!" 
+ " At least one of flyway.driver, flyway.url, flyway.user or flyway.password missing."); } String locationsProp = properties.getProperty("flyway.locations"); if (locationsProp != null) { setLocations(StringUtils.tokenizeToStringArray(locationsProp, ",")); } String baseDirProp = properties.getProperty("flyway.baseDir"); if (baseDirProp != null) { setBaseDir(baseDirProp); } String basePackageProp = properties.getProperty("flyway.basePackage"); if (basePackageProp != null) { setBasePackage(basePackageProp); } String placeholderPrefixProp = properties.getProperty("flyway.placeholderPrefix"); if (placeholderPrefixProp != null) { setPlaceholderPrefix(placeholderPrefixProp); } String placeholderSuffixProp = properties.getProperty("flyway.placeholderSuffix"); if (placeholderSuffixProp != null) { setPlaceholderSuffix(placeholderSuffixProp); } String sqlMigrationPrefixProp = properties.getProperty("flyway.sqlMigrationPrefix"); if (sqlMigrationPrefixProp != null) { setSqlMigrationPrefix(sqlMigrationPrefixProp); } String sqlMigrationSuffixProp = properties.getProperty("flyway.sqlMigrationSuffix"); if (sqlMigrationSuffixProp != null) { setSqlMigrationSuffix(sqlMigrationSuffixProp); } String encodingProp = properties.getProperty("flyway.encoding"); if (encodingProp != null) { setEncoding(encodingProp); } String schemasProp = properties.getProperty("flyway.schemas"); if (schemasProp != null) { setSchemas(StringUtils.tokenizeToStringArray(schemasProp, ",")); } String tableProp = properties.getProperty("flyway.table"); if (tableProp != null) { setTable(tableProp); } String validationErrorModeProp = properties.getProperty("flyway.validationErrorMode"); if (validationErrorModeProp != null) { setValidationErrorMode(ValidationErrorMode.valueOf(validationErrorModeProp)); } String validationModeProp = properties.getProperty("flyway.validationMode"); if (validationModeProp != null) { setValidationMode(ValidationMode.valueOf(validationModeProp)); } String initialVersionProp = 
properties.getProperty("flyway.initialVersion"); if (initialVersionProp != null) { setInitialVersion(new SchemaVersion(initialVersionProp)); } String initialDescriptionProp = properties.getProperty("flyway.initialDescription"); if (initialDescriptionProp != null) { setInitialDescription(initialDescriptionProp); } String disableInitCheckProp = properties.getProperty("flyway.disableInitCheck"); if (disableInitCheckProp != null) { setDisableInitCheck(Boolean.parseBoolean(disableInitCheckProp)); } String targetProp = properties.getProperty("flyway.target"); if (targetProp != null) { setTarget(new SchemaVersion(targetProp)); } Map<String, String> placeholdersFromProps = new HashMap<String, String>(); for (Object property : properties.keySet()) { String propertyName = (String) property; if (propertyName.startsWith(PLACEHOLDERS_PROPERTY_PREFIX) && propertyName.length() > PLACEHOLDERS_PROPERTY_PREFIX.length()) { String placeholderName = propertyName.substring(PLACEHOLDERS_PROPERTY_PREFIX.length()); String placeholderValue = properties.getProperty(propertyName); placeholdersFromProps.put(placeholderName, placeholderValue); } } setPlaceholders(placeholdersFromProps); }
From source file:org.apache.hadoop.hive.conf.HiveConf.java
public Properties getChangedProperties() { Properties ret = new Properties(); Properties newProp = getAllProperties(); for (Object one : newProp.keySet()) { String oneProp = (String) one; String oldValue = origProp.getProperty(oneProp); if (!StringUtils.equals(oldValue, newProp.getProperty(oneProp))) { ret.setProperty(oneProp, newProp.getProperty(oneProp)); }//from www .j a v a 2s .co m } return (ret); }
From source file:org.apache.hadoop.mapred.JobTracker.java
/**
 * Constructs and starts a JobTracker: binds the RPC and HTTP servers, wires up
 * security, scheduling, history and recovery, and blocks until the system
 * directory on the (possibly not-yet-ready) filesystem has been prepared.
 *
 * NOTE(review): this constructor performs blocking I/O and retries in a loop;
 * it only returns once HDFS access succeeded or the thread was interrupted.
 *
 * @param conf       job tracker configuration
 * @param identifier tracker identifier used for this instance
 * @param clock      clock abstraction (injectable for tests)
 * @param qm         queue manager to use
 * @throws IOException          on unrecoverable filesystem/RPC errors
 * @throws InterruptedException if interrupted while waiting for the filesystem
 */
JobTracker(final JobConf conf, String identifier, Clock clock, QueueManager qm)
        throws IOException, InterruptedException {
    this.queueManager = qm;
    this.clock = clock;
    // Set ports, start RPC servers, setup security policy etc.
    InetSocketAddress addr = getAddress(conf);
    this.localMachine = addr.getHostName();
    this.port = addr.getPort();
    // find the owner of the process
    // get the desired principal to load
    UserGroupInformation.setConfiguration(conf);
    SecurityUtil.login(conf, JT_KEYTAB_FILE, JT_USER_NAME, localMachine);

    // delegation-token secret manager lifetimes come from configuration
    long secretKeyInterval = conf.getLong(DELEGATION_KEY_UPDATE_INTERVAL_KEY,
            DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT);
    long tokenMaxLifetime = conf.getLong(DELEGATION_TOKEN_MAX_LIFETIME_KEY,
            DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT);
    long tokenRenewInterval = conf.getLong(DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
            DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
    secretManager = new DelegationTokenSecretManager(secretKeyInterval, tokenMaxLifetime, tokenRenewInterval,
            DELEGATION_TOKEN_GC_INTERVAL);
    secretManager.startThreads();

    MAX_JOBCONF_SIZE = conf.getLong(MAX_USER_JOBCONF_SIZE_KEY, MAX_JOBCONF_SIZE);
    //
    // Grab some static constants
    //
    TASKTRACKER_EXPIRY_INTERVAL = conf.getLong("mapred.tasktracker.expiry.interval", 10 * 60 * 1000);
    RETIRE_JOB_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.interval", 24 * 60 * 60 * 1000);
    RETIRE_JOB_CHECK_INTERVAL = conf.getLong("mapred.jobtracker.retirejob.check", 60 * 1000);
    retiredJobsCacheSize = conf.getInt("mapred.job.tracker.retiredjobs.cache.size", 1000);
    MAX_COMPLETE_USER_JOBS_IN_MEMORY = conf.getInt("mapred.jobtracker.completeuserjobs.maximum", 100);

    // values related to heuristic graylisting (a "fault" is a per-job
    // blacklisting; too many faults => node is graylisted across all jobs):
    TRACKER_FAULT_TIMEOUT_WINDOW = // 3 hours
            conf.getInt("mapred.jobtracker.blacklist.fault-timeout-window", 3 * 60);
    TRACKER_FAULT_BUCKET_WIDTH = // 15 minutes
            conf.getInt("mapred.jobtracker.blacklist.fault-bucket-width", 15);
    TRACKER_FAULT_THRESHOLD = conf.getInt("mapred.max.tracker.blacklists", 4);
    // future: rename to "mapred.jobtracker.blacklist.fault-threshold" for
    // namespace consistency
    if (TRACKER_FAULT_BUCKET_WIDTH > TRACKER_FAULT_TIMEOUT_WINDOW) {
        TRACKER_FAULT_BUCKET_WIDTH = TRACKER_FAULT_TIMEOUT_WINDOW;
    }
    TRACKER_FAULT_BUCKET_WIDTH_MSECS = (long) TRACKER_FAULT_BUCKET_WIDTH * 60 * 1000;
    // ideally, TRACKER_FAULT_TIMEOUT_WINDOW should be an integral multiple of
    // TRACKER_FAULT_BUCKET_WIDTH, but round up just in case:
    NUM_FAULT_BUCKETS = (TRACKER_FAULT_TIMEOUT_WINDOW + TRACKER_FAULT_BUCKET_WIDTH - 1)
            / TRACKER_FAULT_BUCKET_WIDTH;

    // heartbeat rate is clamped to sane minimums
    NUM_HEARTBEATS_IN_SECOND = conf.getInt(JT_HEARTBEATS_IN_SECOND, DEFAULT_NUM_HEARTBEATS_IN_SECOND);
    if (NUM_HEARTBEATS_IN_SECOND < MIN_NUM_HEARTBEATS_IN_SECOND) {
        NUM_HEARTBEATS_IN_SECOND = DEFAULT_NUM_HEARTBEATS_IN_SECOND;
    }
    HEARTBEATS_SCALING_FACTOR = conf.getFloat(JT_HEARTBEATS_SCALING_FACTOR, DEFAULT_HEARTBEATS_SCALING_FACTOR);
    if (HEARTBEATS_SCALING_FACTOR < MIN_HEARTBEATS_SCALING_FACTOR) {
        HEARTBEATS_SCALING_FACTOR = DEFAULT_HEARTBEATS_SCALING_FACTOR;
    }

    // This configuration is there solely for tuning purposes and
    // once this feature has been tested in real clusters and an appropriate
    // value for the threshold has been found, this config might be taken out.
    AVERAGE_BLACKLIST_THRESHOLD = conf.getFloat("mapred.cluster.average.blacklist.threshold", 0.5f);

    // This is a directory of temporary submission files. We delete it
    // on startup, and can delete any files that we're done with
    this.conf = conf;
    JobConf jobConf = new JobConf(conf);

    initializeTaskMemoryRelatedConfig();

    // Read the hosts/exclude files to restrict access to the jobtracker.
    this.hostsReader = new HostsFileReader(conf.get("mapred.hosts", ""), conf.get("mapred.hosts.exclude", ""));

    aclsManager = new ACLsManager(conf, new JobACLsManager(conf), queueManager);

    LOG.info("Starting jobtracker with owner as " + getMROwner().getShortUserName());

    // Create the scheduler
    Class<? extends TaskScheduler> schedulerClass = conf.getClass("mapred.jobtracker.taskScheduler",
            JobQueueTaskScheduler.class, TaskScheduler.class);
    taskScheduler = (TaskScheduler) ReflectionUtils.newInstance(schedulerClass, conf);

    // Set service-level authorization security policy
    if (conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, false)) {
        ServiceAuthorizationManager.refresh(conf, new MapReducePolicyProvider());
    }

    int handlerCount = conf.getInt("mapred.job.tracker.handler.count", 10);
    this.interTrackerServer = RPC.getServer(this, addr.getHostName(), addr.getPort(), handlerCount, false, conf,
            secretManager);
    if (LOG.isDebugEnabled()) {
        // dump the full JVM system properties for startup debugging
        Properties p = System.getProperties();
        for (Iterator it = p.keySet().iterator(); it.hasNext();) {
            String key = (String) it.next();
            String val = p.getProperty(key);
            LOG.debug("Property '" + key + "' is " + val);
        }
    }

    String infoAddr = NetUtils.getServerAddress(conf, "mapred.job.tracker.info.bindAddress",
            "mapred.job.tracker.info.port", "mapred.job.tracker.http.address");
    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
    String infoBindAddress = infoSocAddr.getHostName();
    int tmpInfoPort = infoSocAddr.getPort();
    this.startTime = clock.getTime();
    infoServer = new HttpServer("job", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf,
            aclsManager.getAdminsAcl());
    infoServer.setAttribute("job.tracker", this);

    // initialize history parameters.
    final JobTracker jtFinal = this;
    getMROwner().doAs(new PrivilegedExceptionAction<Boolean>() {
        @Override
        public Boolean run() throws Exception {
            JobHistory.init(jtFinal, conf, jtFinal.localMachine, jtFinal.startTime);
            return true;
        }
    });

    infoServer.addServlet("reducegraph", "/taskgraph", TaskGraphServlet.class);
    infoServer.start();

    this.trackerIdentifier = identifier;

    createInstrumentation();

    // The rpc/web-server ports can be ephemeral ports...
    // ... ensure we have the correct info
    this.port = interTrackerServer.getListenerAddress().getPort();
    this.conf.set("mapred.job.tracker", (this.localMachine + ":" + this.port));
    this.localFs = FileSystem.getLocal(conf);
    LOG.info("JobTracker up at: " + this.port);
    this.infoPort = this.infoServer.getPort();
    this.conf.set("mapred.job.tracker.http.address", infoBindAddress + ":" + this.infoPort);
    LOG.info("JobTracker webserver: " + this.infoServer.getPort());

    // start the recovery manager
    recoveryManager = new RecoveryManager();

    // Loop until the system dir has been cleaned/recovered; each iteration
    // retries after FS_ACCESS_RETRY_PERIOD if HDFS is not yet available.
    while (!Thread.currentThread().isInterrupted()) {
        try {
            // if we haven't contacted the namenode go ahead and do it
            if (fs == null) {
                fs = getMROwner().doAs(new PrivilegedExceptionAction<FileSystem>() {
                    public FileSystem run() throws IOException {
                        return FileSystem.get(conf);
                    }
                });
            }
            // clean up the system dir, which will only work if hdfs is out of
            // safe mode
            if (systemDir == null) {
                systemDir = new Path(getSystemDir());
            }
            try {
                FileStatus systemDirStatus = fs.getFileStatus(systemDir);
                if (!systemDirStatus.getOwner().equals(getMROwner().getShortUserName())) {
                    throw new AccessControlException("The systemdir " + systemDir + " is not owned by "
                            + getMROwner().getShortUserName());
                }
                if (!systemDirStatus.getPermission().equals(SYSTEM_DIR_PERMISSION)) {
                    LOG.warn("Incorrect permissions on " + systemDir + ". Setting it to "
                            + SYSTEM_DIR_PERMISSION);
                    fs.setPermission(systemDir, new FsPermission(SYSTEM_DIR_PERMISSION));
                }
            } catch (FileNotFoundException fnf) {
            } //ignore
            // Make sure that the backup data is preserved
            FileStatus[] systemDirData = fs.listStatus(this.systemDir);
            // Check if the history is enabled .. as we cant have persistence with
            // history disabled
            if (conf.getBoolean("mapred.jobtracker.restart.recover", false) && systemDirData != null) {
                for (FileStatus status : systemDirData) {
                    try {
                        recoveryManager.checkAndAddJob(status);
                    } catch (Throwable t) {
                        LOG.warn("Failed to add the job " + status.getPath().getName(), t);
                    }
                }
                // Check if there are jobs to be recovered
                hasRestarted = recoveryManager.shouldRecover();
                if (hasRestarted) {
                    break; // if there is something to recover else clean the sys dir
                }
            }
            LOG.info("Cleaning up the system directory");
            fs.delete(systemDir, true);
            if (FileSystem.mkdirs(fs, systemDir, new FsPermission(SYSTEM_DIR_PERMISSION))) {
                break;
            }
            LOG.error("Mkdirs failed to create " + systemDir);
        } catch (AccessControlException ace) {
            LOG.warn("Failed to operate on mapred.system.dir (" + systemDir + ") because of permissions.");
            LOG.warn("Manually delete the mapred.system.dir (" + systemDir
                    + ") and then start the JobTracker.");
            LOG.warn("Bailing out ... ", ace);
            throw ace;
        } catch (IOException ie) {
            LOG.info("problem cleaning system directory: " + systemDir, ie);
        }
        Thread.sleep(FS_ACCESS_RETRY_PERIOD);
    }

    if (Thread.currentThread().isInterrupted()) {
        throw new InterruptedException();
    }

    // Same with 'localDir' except it's always on the local disk.
    if (!hasRestarted) {
        jobConf.deleteLocalFiles(SUBDIR);
    }

    // Initialize history DONE folder
    FileSystem historyFS = getMROwner().doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            JobHistory.initDone(conf, fs);
            final String historyLogDir = JobHistory.getCompletedJobHistoryLocation().toString();
            infoServer.setAttribute("historyLogDir", historyLogDir);
            infoServer.setAttribute("serialNumberDirectoryDigits",
                    Integer.valueOf(JobHistory.serialNumberDirectoryDigits()));
            infoServer.setAttribute("serialNumberTotalDigits",
                    Integer.valueOf(JobHistory.serialNumberTotalDigits()));
            return new Path(historyLogDir).getFileSystem(conf);
        }
    });
    infoServer.setAttribute("fileSys", historyFS);
    infoServer.setAttribute("jobConf", conf);
    infoServer.setAttribute("aclManager", aclsManager);

    if (JobHistoryServer.isEmbedded(conf)) {
        LOG.info("History server being initialized in embedded mode");
        jobHistoryServer = new JobHistoryServer(conf, aclsManager, infoServer);
        jobHistoryServer.start();
        LOG.info("Job History Server web address: " + JobHistoryServer.getAddress(conf));
    }

    this.dnsToSwitchMapping = ReflectionUtils.newInstance(conf.getClass("topology.node.switch.mapping.impl",
            ScriptBasedMapping.class, DNSToSwitchMapping.class), conf);
    this.numTaskCacheLevels = conf.getInt("mapred.task.cache.levels", NetworkTopology.DEFAULT_HOST_LEVEL);

    //initializes the job status store
    completedJobStatusStore = new CompletedJobStatusStore(conf, aclsManager);
}
From source file:org.apache.archiva.metadata.repository.file.FileMetadataRepository.java
/**
 * Loads the stored metadata for one project version from its on-disk
 * properties file and reconstructs the ProjectVersionMetadata object
 * (SCM / CI / issue-management / organization info, licenses, mailing
 * lists, dependencies and metadata facets).
 *
 * @param repoId         the repository identifier
 * @param namespace      the project namespace (group)
 * @param projectId      the project identifier
 * @param projectVersion the version whose metadata is read
 * @return the reconstructed metadata, or null if no "id" property is stored
 * @throws MetadataResolutionException wrapping any underlying IOException
 */
@Override
public ProjectVersionMetadata getProjectVersion(String repoId, String namespace, String projectId,
        String projectVersion) throws MetadataResolutionException {
    try {
        File directory = new File(getDirectory(repoId), namespace + "/" + projectId + "/" + projectVersion);
        Properties properties = readOrCreateProperties(directory, PROJECT_VERSION_METADATA_KEY);
        String id = properties.getProperty("id");
        ProjectVersionMetadata versionMetadata = null;
        if (id != null) {
            versionMetadata = new ProjectVersionMetadata();
            versionMetadata.setId(id);
            versionMetadata.setName(properties.getProperty("name"));
            versionMetadata.setDescription(properties.getProperty("description"));
            versionMetadata.setUrl(properties.getProperty("url"));
            versionMetadata.setIncomplete(Boolean.valueOf(properties.getProperty("incomplete", "false")));

            String scmConnection = properties.getProperty("scm.connection");
            String scmDeveloperConnection = properties.getProperty("scm.developerConnection");
            String scmUrl = properties.getProperty("scm.url");
            if (scmConnection != null || scmDeveloperConnection != null || scmUrl != null) {
                Scm scm = new Scm();
                scm.setConnection(scmConnection);
                scm.setDeveloperConnection(scmDeveloperConnection);
                scm.setUrl(scmUrl);
                versionMetadata.setScm(scm);
            }

            String ciSystem = properties.getProperty("ci.system");
            String ciUrl = properties.getProperty("ci.url");
            if (ciSystem != null || ciUrl != null) {
                CiManagement ci = new CiManagement();
                ci.setSystem(ciSystem);
                ci.setUrl(ciUrl);
                versionMetadata.setCiManagement(ci);
            }

            String issueSystem = properties.getProperty("issue.system");
            String issueUrl = properties.getProperty("issue.url");
            if (issueSystem != null || issueUrl != null) {
                IssueManagement issueManagement = new IssueManagement();
                issueManagement.setSystem(issueSystem);
                issueManagement.setUrl(issueUrl);
                versionMetadata.setIssueManagement(issueManagement);
            }

            String orgName = properties.getProperty("org.name");
            String orgUrl = properties.getProperty("org.url");
            if (orgName != null || orgUrl != null) {
                Organization org = new Organization();
                org.setName(orgName);
                org.setUrl(orgUrl);
                versionMetadata.setOrganization(org);
            }

            // licenses are stored as license.<i>.name / license.<i>.url until the
            // first index with neither property present
            boolean done = false;
            int i = 0;
            while (!done) {
                String licenseName = properties.getProperty("license." + i + ".name");
                String licenseUrl = properties.getProperty("license." + i + ".url");
                if (licenseName != null || licenseUrl != null) {
                    License license = new License();
                    license.setName(licenseName);
                    license.setUrl(licenseUrl);
                    versionMetadata.addLicense(license);
                } else {
                    done = true;
                }
                i++;
            }

            // mailing lists follow the same indexed scheme, keyed by .name
            done = false;
            i = 0;
            while (!done) {
                String mailingListName = properties.getProperty("mailingList." + i + ".name");
                if (mailingListName != null) {
                    MailingList mailingList = new MailingList();
                    mailingList.setName(mailingListName);
                    mailingList.setMainArchiveUrl(properties.getProperty("mailingList." + i + ".archive"));
                    String p = properties.getProperty("mailingList." + i + ".otherArchives");
                    if (p != null && p.length() > 0) {
                        mailingList.setOtherArchives(Arrays.asList(p.split(",")));
                    } else {
                        mailingList.setOtherArchives(Collections.<String>emptyList());
                    }
                    mailingList.setPostAddress(properties.getProperty("mailingList." + i + ".post"));
                    mailingList.setSubscribeAddress(properties.getProperty("mailingList." + i + ".subscribe"));
                    mailingList
                            .setUnsubscribeAddress(properties.getProperty("mailingList." + i + ".unsubscribe"));
                    versionMetadata.addMailingList(mailingList);
                } else {
                    done = true;
                }
                i++;
            }

            // dependencies, indexed and keyed by .artifactId
            done = false;
            i = 0;
            while (!done) {
                String dependencyArtifactId = properties.getProperty("dependency." + i + ".artifactId");
                if (dependencyArtifactId != null) {
                    Dependency dependency = new Dependency();
                    dependency.setArtifactId(dependencyArtifactId);
                    dependency.setGroupId(properties.getProperty("dependency." + i + ".groupId"));
                    dependency.setClassifier(properties.getProperty("dependency." + i + ".classifier"));
                    // BUG FIX: setOptional was previously invoked twice with the
                    // identical value; the duplicate call has been removed.
                    dependency.setOptional(
                            Boolean.valueOf(properties.getProperty("dependency." + i + ".optional")));
                    dependency.setScope(properties.getProperty("dependency." + i + ".scope"));
                    dependency.setSystemPath(properties.getProperty("dependency." + i + ".systemPath"));
                    dependency.setType(properties.getProperty("dependency." + i + ".type"));
                    dependency.setVersion(properties.getProperty("dependency." + i + ".version"));
                    versionMetadata.addDependency(dependency);
                } else {
                    done = true;
                }
                i++;
            }

            String facetIds = properties.getProperty("facetIds", "");
            if (facetIds.length() > 0) {
                for (String facetId : facetIds.split(",")) {
                    MetadataFacetFactory factory = metadataFacetFactories.get(facetId);
                    if (factory == null) {
                        log.error("Attempted to load unknown project version metadata facet: {}", facetId);
                    } else {
                        MetadataFacet facet = factory.createMetadataFacet();
                        Map<String, String> map = new HashMap<>();
                        // snapshot the key set (parameterized, not raw) so facet
                        // population cannot be disturbed by concurrent changes
                        for (Object key : new ArrayList<Object>(properties.keySet())) {
                            String property = (String) key;
                            if (property.startsWith(facet.getFacetId())) {
                                map.put(property.substring(facet.getFacetId().length() + 1),
                                        properties.getProperty(property));
                            }
                        }
                        facet.fromProperties(map);
                        versionMetadata.addFacet(facet);
                    }
                }
            }
            updateProjectVersionFacets(versionMetadata, properties);
        }
        return versionMetadata;
    } catch (IOException e) {
        throw new MetadataResolutionException(e.getMessage(), e);
    }
}