List of usage examples for java.util.Map.containsValue
boolean containsValue(Object value);
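Before the real-world examples, here is a minimal, self-contained sketch (not taken from any of the source files listed below) illustrating what containsValue does: it returns true if at least one mapping in the map has the given value, compared with equals(). Class and variable names are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class ContainsValueDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        // true: at least one entry maps to 30
        System.out.println(ages.containsValue(30));
        // false: no entry maps to 99
        System.out.println(ages.containsValue(99));
        // values are compared with equals(), so autoboxing makes this equivalent to containsValue(25)
        System.out.println(ages.containsValue(Integer.valueOf(25)));
    }
}

Note that for a HashMap, containsValue scans all entries (O(n)), unlike containsKey. Several of the examples below rely on exactly that behavior, checking whether any entry already carries a candidate value before inserting a new one.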
From source file:gov.nih.nci.ncicb.tcga.dcc.qclive.common.action.validation.VcfValidator.java
/**
 * Figures out what to return based on results of each file validation. In this case, will return true if
 * all files returned true, and false otherwise.
 *
 * @param results the results of each processFile call
 * @param context the qc context
 * @return true if all files were valid, false otherwise
 */
@Override
protected Boolean getReturnValue(final Map<File, Boolean> results, final QcContext context) {
    return !results.containsValue(false);
}
From source file:net.e2.bw.idreg.db2ldif.Db2Ldif.java
/**
 * Generate LDIF for all users
 * @param db the database
 * @param ldif the LDIF file to append to
 */
@SuppressWarnings("all")
private void importUsers(TeamworkDB db, StringBuilder ldif, Map<String, String> userNames) {
    long t0 = System.currentTimeMillis();
    String sql = loadResourceText("/users.sql");
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = db.getConnection();
        stmt = conn.prepareStatement(sql);
        ResultSet rs = stmt.executeQuery();
        int index = 0;
        while (rs.next()) {
            String userName = TeamworkDB.getString(rs, "userName");
            String entryUUID = TeamworkDB.getString(rs, "userId");
            String userFirstName = TeamworkDB.getString(rs, "userFirstName");
            String userLastName = TeamworkDB.getString(rs, "userLastName");
            String userEmail = TeamworkDB.getString(rs, "userEmail");
            String companyId = TeamworkDB.getString(rs, "companyName");
            String userImage = TeamworkDB.getString(rs, "userImage");

            // Normalize user and company names
            userName = userName.replace('.', '_');
            companyId = companyId(companyId);

            // Make sure the name is unique
            if (userNames.containsValue(userName)) {
                int suffix = 2;
                while (userNames.containsValue(userName + suffix)) {
                    suffix++;
                }
                userName = userName + suffix;
            }
            userNames.put(entryUUID, userName);

            ldif.append("dn: uid=").append(userName).append(",").append(peopleDN).append(NL);
            ldif.append("objectclass: top").append(NL);
            ldif.append("objectclass: organizationalPerson").append(NL);
            ldif.append("objectclass: inetOrgPerson").append(NL);
            ldif.append("objectclass: maritimeResource").append(NL);
            ldif.append("ou: people").append(NL);
            ldif.append("mrn: ").append("urn:mrn:mc:user:").append(companyId).append(":").append(userName)
                    .append(NL);
            ldif.append("uid: ").append(userName).append(NL);
            ldif.append("cn: ").append(userFirstName).append(" ").append(userLastName).append(NL);
            ldif.append("givenName: ").append(userFirstName).append(NL);
            ldif.append("sn: ").append(userLastName).append(NL);
            ldif.append("mail: ").append(userEmail).append(NL);
            ldif.append("userpassword:: ").append("e1NTSEF9QTM3TkF4K0l1Z25UZS8vTHJPbWFOczdZeGVNSk4xeVQ=")
                    .append(NL);
            ldif.append("entryUUID: ").append(new UUID(Long.parseLong(entryUUID), 0L).toString()).append(NL);
            if (includePhotos) {
                byte[] jpg = fetchJPEG(userImage);
                if (jpg != null) {
                    wrapLine(ldif, "jpegPhoto:: ", Base64.getEncoder().encodeToString(jpg));
                }
            }
            ldif.append(NL);
            index++;
        }
        rs.close();
        System.out.println(String.format("Fetched %d users in %d ms", index, System.currentTimeMillis() - t0));
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        try { if (stmt != null) stmt.close(); } catch (Exception ex) { }
        try { if (conn != null) conn.close(); } catch (Exception ex) { }
    }
}
From source file:com.coverity.report.analysis.ProtecodeSCToolProcessor.java
@Override
public List<String> validate() {
    List<String> errors = config.validate();
    if (!errors.isEmpty()) {
        return errors;
    }
    Map<String, String> products = null;
    try {
        products = enumerateScopes();
    } catch (Exception x) {
        errors.add(x.getMessage());
        return errors;
    }
    if (errors.isEmpty()) {
        if (config.productId == null) {
            // Use the product name to get the product ID
            String productNameStr = config.productName;
            String productIdStr = products.get(productNameStr);
            if (productIdStr == null) {
                errors.add("Protecode SC product " + productNameStr + " no longer exists.");
            } else {
                try {
                    config.productId = Long.parseLong(productIdStr);
                } catch (NumberFormatException x) {
                    throw new RuntimeException(
                            "Protecode SC product ID \"" + productIdStr + "\" is not a number");
                }
            }
        } else {
            // Validate the productId by finding it in the list.
            if (!products.containsValue(config.productId.toString())) {
                errors.add("The Protecode SC product with ID " + config.productId + " no longer exists.");
            }
        }
    }
    return errors;
}
From source file:edu.cornell.mannlib.vitro.webapp.sparql.GetAllPrefix.java
/**
 * Returns a map of prefixes for use in building queries. Will manufacture a
 * prefix for any namespace that doesn't have an associated owl:Ontology resource
 * with a prefix annotation
 * @param wadf
 * @return map of prefix strings to namespace URIs
 */
private Map<String, String> getPrefixMap(WebappDaoFactory wadf) {
    Map<String, String> prefixMap = new HashMap<String, String>();
    OntologyDao oDao = wadf.getOntologyDao();
    for (Ontology o : oDao.getAllOntologies()) {
        if (o.getPrefix() != null) {
            prefixMap.put(o.getPrefix(), o.getURI());
        }
    }
    // add standard namespaces
    addPrefixIfNecessary("owl", OWL.getURI(), prefixMap);
    addPrefixIfNecessary("rdf", RDF.getURI(), prefixMap);
    addPrefixIfNecessary("rdfs", RDFS.getURI(), prefixMap);
    addPrefixIfNecessary("swrl", "http://www.w3.org/2003/11/swrl#", prefixMap);
    addPrefixIfNecessary("swrlb", "http://www.w3.org/2003/11/swrlb#", prefixMap);
    addPrefixIfNecessary("xsd", XSD.getURI(), prefixMap);
    addPrefixIfNecessary("vitro", VitroVocabulary.vitroURI, prefixMap);
    // we also need to manufacture prefixes for namespaces used by any class or
    // property, regardless of whether there's an associated owl:Ontology.
    int newPrefixCount = 0;
    List<BaseResourceBean> ontEntityList = new ArrayList<BaseResourceBean>();
    ontEntityList.addAll(wadf.getVClassDao().getAllVclasses());
    ontEntityList.addAll(wadf.getObjectPropertyDao().getAllObjectProperties());
    ontEntityList.addAll(wadf.getDataPropertyDao().getAllDataProperties());
    for (BaseResourceBean ontEntity : ontEntityList) {
        if (!ontEntity.isAnonymous() && !prefixMap.containsValue(ontEntity.getNamespace())) {
            newPrefixCount++;
            prefixMap.put("p." + Integer.toString(newPrefixCount), ontEntity.getNamespace());
        }
    }
    return prefixMap;
}
From source file:org.sakaiproject.evaluation.tool.TemplateBBean.java
/**
 * NB - this implementation depends on Hibernate reference equality
 * semantics!! Guarantees output sequence is consecutive without duplicates,
 * and will prefer honoring user sequence requests so long as they are not
 * inconsistent.
 */
// TODO: This method needs to be invoked via a BeanGuard, trapping any
// access to templateItemWBL.*.displayOrder
// Current Jquery implementation is only working as a result of auto-commit
// bug in DAO wrapper implementation.
public void saveReorder() {
    LOG.info("save items reordering");
    Map<String, EvalTemplateItem> delivered = templateItemWBL.getDeliveredBeans();
    List<EvalTemplateItem> l = authoringService.getTemplateItemsForTemplate(templateId, new String[] {},
            new String[] {}, new String[] {});
    List<EvalTemplateItem> ordered = TemplateItemUtils.getNonChildItems(l);
    for (int i = 1; i <= ordered.size();) {
        EvalTemplateItem item = (EvalTemplateItem) ordered.get(i - 1);
        int itnum = item.getDisplayOrder();
        if (i < ordered.size()) {
            EvalTemplateItem next = (EvalTemplateItem) ordered.get(i);
            int nextnum = next.getDisplayOrder();
            // only make a write or adjustment if we would be about to commit two
            // items with the same index.
            if (itnum == nextnum) {
                // if the user requested this item XOR it is in the right place,
                // emit this one second. That is, if the user wants it here and there
                // is no conflict, write it here.
                if (delivered.containsValue(item) ^ (itnum == i)) {
                    emit(next, i++);
                    emit(item, i++);
                    continue;
                } else {
                    emit(item, i++);
                    emit(next, i++);
                    continue;
                }
            }
        }
        emit(item, i++);
    }
    // this will seem a little odd but we are saving the template to validate the order of all templateItems
    localTemplateLogic.saveTemplate(localTemplateLogic.fetchTemplate(templateId));
}
From source file:cn.afterturn.easypoi.excel.imports.ExcelImportService.java
/**
 * Checks that the uploaded sheet's title row matches the expected import template.
 * (Original comments and log messages were mis-encoded Chinese; reconstructed in English from context.)
 */
private void checkIsValidTemplate(Map<Integer, String> titlemap, Map<String, ExcelImportEntity> excelParams,
        ImportParams params, List<ExcelCollectionParams> excelCollection) {
    if (params.getImportFields() != null) {
        if (params.isNeedCheckOrder()) {
            // strict check: same number of columns, in the same order
            if (params.getImportFields().length != titlemap.size()) {
                LOGGER.error("excel title row size does not match the configured import fields");
                throw new ExcelImportException(ExcelImportEnum.IS_NOT_A_VALID_TEMPLATE);
            }
            int i = 0;
            for (String title : titlemap.values()) {
                if (!StringUtils.equals(title, params.getImportFields()[i++])) {
                    LOGGER.error("excel title row does not match the configured import field order");
                    throw new ExcelImportException(ExcelImportEnum.IS_NOT_A_VALID_TEMPLATE);
                }
            }
        } else {
            for (int i = 0, le = params.getImportFields().length; i < le; i++) {
                if (!titlemap.containsValue(params.getImportFields()[i])) {
                    throw new ExcelImportException(ExcelImportEnum.IS_NOT_A_VALID_TEMPLATE);
                }
            }
        }
    } else {
        Collection<ExcelImportEntity> collection = excelParams.values();
        for (ExcelImportEntity excelImportEntity : collection) {
            if (excelImportEntity.isImportField() && !titlemap.containsValue(excelImportEntity.getName())) {
                LOGGER.error(excelImportEntity.getName() + " is missing from the excel title row");
                throw new ExcelImportException(ExcelImportEnum.IS_NOT_A_VALID_TEMPLATE);
            }
        }
        for (int i = 0, le = excelCollection.size(); i < le; i++) {
            ExcelCollectionParams collectionparams = excelCollection.get(i);
            collection = collectionparams.getExcelParams().values();
            for (ExcelImportEntity excelImportEntity : collection) {
                if (excelImportEntity.isImportField() && !titlemap
                        .containsValue(collectionparams.getExcelName() + "_" + excelImportEntity.getName())) {
                    throw new ExcelImportException(ExcelImportEnum.IS_NOT_A_VALID_TEMPLATE);
                }
            }
        }
    }
}
From source file:org.apache.sling.resourceresolver.impl.ResourceResolverImpl.java
/**
 * full implementation - apply sling:alias from the resource path - apply
 * /etc/map mappings (inkl. config backwards compat) - return absolute uri
 * if possible
 *
 * @see org.apache.sling.api.resource.ResourceResolver#map(javax.servlet.http.HttpServletRequest,
 *      java.lang.String)
 */
@Override
public String map(final HttpServletRequest request, final String resourcePath) {
    checkClosed();

    // find a fragment or query
    int fragmentQueryMark = resourcePath.indexOf('#');
    if (fragmentQueryMark < 0) {
        fragmentQueryMark = resourcePath.indexOf('?');
    }

    // cut fragment or query off the resource path
    String mappedPath;
    final String fragmentQuery;
    if (fragmentQueryMark >= 0) {
        fragmentQuery = resourcePath.substring(fragmentQueryMark);
        mappedPath = resourcePath.substring(0, fragmentQueryMark);
        logger.debug("map: Splitting resource path '{}' into '{}' and '{}'",
                new Object[] { resourcePath, mappedPath, fragmentQuery });
    } else {
        fragmentQuery = null;
        mappedPath = resourcePath;
    }

    // cut off scheme and host, if the same as requested
    final String schemehostport;
    final String schemePrefix;
    if (request != null) {
        schemehostport = MapEntry.getURI(request.getScheme(), request.getServerName(), request.getServerPort(),
                "/");
        schemePrefix = request.getScheme().concat("://");
        logger.debug("map: Mapping path {} for {} (at least with scheme prefix {})",
                new Object[] { resourcePath, schemehostport, schemePrefix });
    } else {
        schemehostport = null;
        schemePrefix = null;
        logger.debug("map: Mapping path {} for default", resourcePath);
    }

    ParsedParameters parsed = new ParsedParameters(mappedPath);
    final Resource res = resolveInternal(parsed.getRawPath(), parsed.getParameters());

    if (res != null) {
        // keep, what we might have cut off in internal resolution
        final String resolutionPathInfo = res.getResourceMetadata().getResolutionPathInfo();

        logger.debug("map: Path maps to resource {} with path info {}", res, resolutionPathInfo);

        // find aliases for segments. we can't walk the parent chain
        // since the request session might not have permissions to
        // read all parents SLING-2093
        final LinkedList<String> names = new LinkedList<String>();

        Resource current = res;
        String path = res.getPath();
        while (path != null) {
            String alias = null;
            if (current != null && !path.endsWith(JCR_CONTENT_LEAF)) {
                if (factory.getMapEntries().isOptimizeAliasResolutionEnabled()) {
                    logger.debug("map: Optimize Alias Resolution is Enabled");
                    String parentPath = ResourceUtil.getParent(path);
                    if (parentPath != null) {
                        final Map<String, String> aliases = factory.getMapEntries().getAliasMap(parentPath);
                        if (aliases != null && aliases.containsValue(current.getName())) {
                            for (String key : aliases.keySet()) {
                                if (current.getName().equals(aliases.get(key))) {
                                    alias = key;
                                    break;
                                }
                            }
                        }
                    }
                } else {
                    logger.debug("map: Optimize Alias Resolution is Disabled");
                    alias = ResourceResolverControl.getProperty(current, PROP_ALIAS);
                }
            }
            if (alias == null || alias.length() == 0) {
                alias = ResourceUtil.getName(path);
            }
            names.add(alias);
            path = ResourceUtil.getParent(path);
            if ("/".equals(path)) {
                path = null;
            } else if (path != null) {
                current = res.getResourceResolver().resolve(path);
            }
        }

        // build path from segment names
        final StringBuilder buf = new StringBuilder();

        // construct the path from the segments (or root if none)
        if (names.isEmpty()) {
            buf.append('/');
        } else {
            while (!names.isEmpty()) {
                buf.append('/');
                buf.append(names.removeLast());
            }
        }

        // reappend the resolutionPathInfo
        if (resolutionPathInfo != null) {
            buf.append(resolutionPathInfo);
        }

        // and then we have the mapped path to work on
        mappedPath = buf.toString();

        logger.debug("map: Alias mapping resolves to path {}", mappedPath);
    }

    boolean mappedPathIsUrl = false;
    for (final MapEntry mapEntry : this.factory.getMapEntries().getMapMaps()) {
        final String[] mappedPaths = mapEntry.replace(mappedPath);
        if (mappedPaths != null) {
            logger.debug("map: Match for Entry {}", mapEntry);

            mappedPathIsUrl = !mapEntry.isInternal();

            if (mappedPathIsUrl && schemehostport != null) {
                mappedPath = null;

                for (final String candidate : mappedPaths) {
                    if (candidate.startsWith(schemehostport)) {
                        mappedPath = candidate.substring(schemehostport.length() - 1);
                        mappedPathIsUrl = false;
                        logger.debug("map: Found host specific mapping {} resolving to {}", candidate,
                                mappedPath);
                        break;
                    } else if (candidate.startsWith(schemePrefix) && mappedPath == null) {
                        mappedPath = candidate;
                    }
                }

                if (mappedPath == null) {
                    mappedPath = mappedPaths[0];
                }
            } else {
                // we can only go with assumptions selecting the first entry
                mappedPath = mappedPaths[0];
            }

            logger.debug("resolve: MapEntry {} matches, mapped path is {}", mapEntry, mappedPath);
            break;
        }
    }

    // this should not be the case, since mappedPath is primed
    if (mappedPath == null) {
        mappedPath = resourcePath;
    }

    // [scheme:][//authority][path][?query][#fragment]
    try {
        // use commons-httpclient's URI instead of java.net.URI, as it can
        // actually accept *unescaped* URIs, such as the "mappedPath" and
        // return them in proper escaped form, including the path, via
        // toString()
        final URI uri = new URI(mappedPath, false);

        // 1. mangle the namespaces in the path
        String path = mangleNamespaces(uri.getPath());

        // 2. prepend servlet context path if we have a request
        if (request != null && request.getContextPath() != null && request.getContextPath().length() > 0) {
            path = request.getContextPath().concat(path);
        }

        // update the path part of the URI
        uri.setPath(path);

        mappedPath = uri.toString();
    } catch (final URIException e) {
        logger.warn("map: Unable to mangle namespaces for " + mappedPath + " returning unmangled", e);
    }

    logger.debug("map: Returning URL {} as mapping for path {}", mappedPath, resourcePath);

    // reappend fragment and/or query
    if (fragmentQuery != null) {
        mappedPath = mappedPath.concat(fragmentQuery);
    }

    return mappedPath;
}
From source file:org.jahia.tools.maven.plugins.LegalArtifactAggregator.java
private void processJarFile(InputStream inputStream, String jarFilePath, JarMetadata contextJarMetadata,
        boolean processMavenPom, int level, boolean lookForNotice, boolean lookForLicense,
        boolean processingSources) throws IOException {
    // if we don't need to find either a license or notice, don't process the jar at all
    if (!lookForLicense && !lookForNotice) {
        return;
    }

    final String indent = getIndent(level);
    output(indent, "Processing JAR " + jarFilePath + "...", false, true);

    // JarFile realJarFile = new JarFile(jarFile);
    JarInputStream jarInputStream = new JarInputStream(inputStream);
    String bundleLicense = null;
    Manifest manifest = jarInputStream.getManifest();
    if (manifest != null && manifest.getMainAttributes() != null) {
        bundleLicense = manifest.getMainAttributes().getValue("Bundle-License");
        if (bundleLicense != null) {
            output(indent, "Found Bundle-License attribute with value:" + bundleLicense);
            KnownLicense knownLicense = getKnowLicenseByName(bundleLicense);
            // this data is not reliable, especially on the ServiceMix repackaged bundles
        }
    }
    String pomFilePath = null;
    byte[] pomByteArray = null;

    final String jarFileName = getJarFileName(jarFilePath);

    if (contextJarMetadata == null) {
        contextJarMetadata = jarDatabase.get(jarFileName);
        if (contextJarMetadata == null) {
            // compute project name
            contextJarMetadata = new JarMetadata(jarFilePath, jarFileName);
        }
        jarDatabase.put(jarFileName, contextJarMetadata);
    }

    Notice notice;
    JarEntry curJarEntry = null;
    while ((curJarEntry = jarInputStream.getNextJarEntry()) != null) {

        if (!curJarEntry.isDirectory()) {

            final String fileName = curJarEntry.getName();
            if (lookForNotice && isNotice(fileName, jarFilePath)) {

                output(indent, "Processing notice found in " + curJarEntry + "...");

                InputStream noticeInputStream = jarInputStream;
                List<String> noticeLines = IOUtils.readLines(noticeInputStream);
                notice = new Notice(noticeLines);

                Map<String, Notice> notices = contextJarMetadata.getNoticeFiles();
                if (notices == null) {
                    notices = new TreeMap<>();
                    notices.put(fileName, notice);
                    output(indent, "Found first notice " + curJarEntry);
                } else if (!notices.containsValue(notice)) {
                    output(indent, "Found additional notice " + curJarEntry);
                    notices.put(fileName, notice);
                } else {
                    output(indent, "Duplicated notice in " + curJarEntry);
                    notices.put(fileName, notice);
                    duplicatedNotices.add(jarFilePath);
                }

                // IOUtils.closeQuietly(noticeInputStream);
            } else if (processMavenPom && fileName.endsWith("pom.xml")) {
                // remember pom file path in case we need it
                pomFilePath = curJarEntry.getName();
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(jarInputStream, byteArrayOutputStream);
                pomByteArray = byteArrayOutputStream.toByteArray();

            } else if (lookForLicense && isLicense(fileName, jarFilePath)) {

                output(indent, "Processing license found in " + curJarEntry + "...");
                InputStream licenseInputStream = jarInputStream;
                List<String> licenseLines = IOUtils.readLines(licenseInputStream);

                LicenseFile licenseFile = new LicenseFile(jarFilePath, fileName, jarFilePath, licenseLines);

                resolveKnownLicensesByText(licenseFile);

                if (StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText())
                        && StringUtils.isNotBlank(licenseFile.getAdditionalLicenseText().trim())) {
                    KnownLicense knownLicense = new KnownLicense();
                    knownLicense.setId(FilenameUtils.getBaseName(jarFilePath) + "-additional-terms");
                    knownLicense
                            .setName("Additional license terms from " + FilenameUtils.getBaseName(jarFilePath));
                    List<TextVariant> textVariants = new ArrayList<>();
                    TextVariant textVariant = new TextVariant();
                    textVariant.setId("default");
                    textVariant.setDefaultVariant(true);
                    textVariant.setText(Pattern.quote(licenseFile.getAdditionalLicenseText()));
                    textVariants.add(textVariant);
                    knownLicense.setTextVariants(textVariants);
                    knownLicense.setTextToUse(licenseFile.getAdditionalLicenseText());
                    knownLicense.setViral(licenseFile.getText().toLowerCase().contains("gpl"));
                    knownLicenses.getLicenses().put(knownLicense.getId(), knownLicense);
                    licenseFile.getKnownLicenses().add(knownLicense);
                    licenseFile.getKnownLicenseKeys().add(knownLicense.getId());
                }

                for (KnownLicense knownLicense : licenseFile.getKnownLicenses()) {
                    SortedSet<LicenseFile> licenseFiles = knownLicensesFound.get(knownLicense);
                    if (licenseFiles != null) {
                        if (!licenseFiles.contains(licenseFile)) {
                            licenseFiles.add(licenseFile);
                        }
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    } else {
                        licenseFiles = new TreeSet<>();
                        licenseFiles.add(licenseFile);
                        knownLicensesFound.put(knownLicense, licenseFiles);
                    }
                }

                Map<String, LicenseFile> licenseFiles = contextJarMetadata.getLicenseFiles();
                if (licenseFiles == null) {
                    licenseFiles = new TreeMap<>();
                }
                if (licenseFiles.containsKey(fileName)) {
                    // warning we already have a license file here, what should we do ?
                    output(indent, "License file already exists for " + jarFilePath + " will override it !",
                            true, false);
                    licenseFiles.remove(fileName);
                }
                licenseFiles.put(fileName, licenseFile);

                // IOUtils.closeQuietly(licenseInputStream);

            } else if (fileName.endsWith(".jar")) {
                InputStream embeddedJarInputStream = jarInputStream;
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(embeddedJarInputStream, byteArrayOutputStream);
                final JarMetadata embeddedJarMetadata = new JarMetadata(jarFilePath, getJarFileName(fileName));

                if (embeddedJarMetadata != null) {
                    embeddedJarMetadata.setJarContents(byteArrayOutputStream.toByteArray());
                    contextJarMetadata.getEmbeddedJars().add(embeddedJarMetadata);
                }
            } else if (fileName.endsWith(".class")) {
                String className = fileName.substring(0, fileName.length() - ".class".length()).replaceAll("/",
                        ".");
                int lastPoint = className.lastIndexOf(".");
                String packageName = null;
                if (lastPoint > 0) {
                    packageName = className.substring(0, lastPoint);
                    SortedSet<String> currentJarPackages = jarDatabase
                            .get(FilenameUtils.getBaseName(jarFilePath)).getPackages();
                    if (currentJarPackages == null) {
                        currentJarPackages = new TreeSet<>();
                    }
                    currentJarPackages.add(packageName);
                }
            }

        }
        jarInputStream.closeEntry();
    }

    jarInputStream.close();
    jarInputStream = null;

    if (!contextJarMetadata.getEmbeddedJars().isEmpty()) {
        for (JarMetadata embeddedJarMetadata : contextJarMetadata.getEmbeddedJars()) {
            if (embeddedJarMetadata.getJarContents() != null) {
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(
                        embeddedJarMetadata.getJarContents());
                processJarFile(byteArrayInputStream, contextJarMetadata.toString(), null, true, level, true,
                        true, processingSources);
            } else {
                output(indent, "Couldn't find dependency for embedded JAR " + contextJarMetadata, true, false);
            }
        }
    }

    if (processMavenPom) {
        if (pomFilePath == null) {
            output(indent, "No POM found in " + jarFilePath);
        } else {
            output(indent, "Processing POM found at " + pomFilePath + " in " + jarFilePath + "...");
            ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(pomByteArray);
            processJarPOM(byteArrayInputStream, pomFilePath, jarFilePath, contextJarMetadata, lookForNotice,
                    lookForLicense, contextJarMetadata.getEmbeddedJars(), level + 1, processingSources);
        }
    }

    if (lookForLicense || lookForNotice) {
        if (lookForLicense) {
            output(indent, "No license found in " + jarFilePath);
        }
        if (lookForNotice) {
            output(indent, "No notice found in " + jarFilePath);
        }

        if (pomFilePath == null && lookForLicense && lookForNotice) {
            if (StringUtils.isBlank(contextJarMetadata.getVersion())) {
                output(indent, "Couldn't resolve version for JAR " + contextJarMetadata
                        + ", can't query Maven Central repository without version !");
            } else {
                List<Artifact> mavenCentralArtifacts = findArtifactInMavenCentral(contextJarMetadata.getName(),
                        contextJarMetadata.getVersion(), contextJarMetadata.getClassifier());
                if (mavenCentralArtifacts != null && mavenCentralArtifacts.size() == 1) {
                    Artifact mavenCentralArtifact = mavenCentralArtifacts.get(0);
                    Artifact resolvedArtifact = resolveArtifact(mavenCentralArtifact, level);
                    if (resolvedArtifact != null) {
                        // we have a copy of the local artifact, let's request the sources for it.
                        if (!processingSources && !"sources".equals(contextJarMetadata.getClassifier())) {
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), "sources", "jar",
                                    resolvedArtifact.getVersion());
                            File sourceJar = getArtifactFile(artifact, level);
                            if (sourceJar != null && sourceJar.exists()) {
                                FileInputStream sourceJarInputStream = new FileInputStream(sourceJar);
                                processJarFile(sourceJarInputStream, sourceJar.getPath(), contextJarMetadata,
                                        false, level + 1, lookForNotice, lookForLicense, true);
                                IOUtils.closeQuietly(sourceJarInputStream);
                            }
                        } else {
                            // we are already processing a sources artifact, we need to load the pom artifact to extract information from there
                            final Artifact artifact = new DefaultArtifact(resolvedArtifact.getGroupId(),
                                    resolvedArtifact.getArtifactId(), null, "pom",
                                    resolvedArtifact.getVersion());
                            File artifactPom = getArtifactFile(artifact, level);
                            if (artifactPom != null && artifactPom.exists()) {
                                output(indent, "Processing POM for " + artifact + "...");
                                processPOM(lookForNotice, lookForLicense, jarFilePath, contextJarMetadata,
                                        contextJarMetadata.getEmbeddedJars(), level + 1,
                                        new FileInputStream(artifactPom), processingSources);
                            }
                        }
                    } else {
                        output(indent, "===> Couldn't resolve artifact " + mavenCentralArtifact
                                + " in Maven Central. Please resolve license and notice files manually!",
                                false, true);
                    }
                } else {
                    output(indent, "===> Couldn't find nor POM, license or notice. Please check manually!",
                            false, true);
                }
            }
        }
    }

    output(indent, "Done processing JAR " + jarFilePath + ".", false, true);
}
From source file:com.taobao.android.builder.tasks.app.prepare.PreparePackageIdsTask.java
/**
 * packageid range: 30 - 127
 * (Original comments and exception messages were mis-encoded; reconstructed in English from context.)
 */
@TaskAction
void generate() throws IOException {
    File packageIdFile = appVariantContext.getAtlasExtension().getTBuildConfig().getPackageIdFile();
    File apPackageIdFile = appVariantContext.apContext.getPackageIdFile();
    boolean isAutoPackageId = appVariantContext.getAtlasExtension().getTBuildConfig().isAutoPackageId();

    Map<String, String> autoConfigMap = new HashMap<String, String>();
    if (null != apPackageIdFile && apPackageIdFile.exists()) {
        autoConfigMap.putAll(loadPackageIdProperties(apPackageIdFile));
    } else if (null != packageIdFile && packageIdFile.exists()) {
        autoConfigMap.putAll(loadPackageIdProperties(packageIdFile));
    }

    AtlasDependencyTree atlasDependencyTree = AtlasBuildContext.androidDependencyTrees.get(getVariantName());
    for (AwbBundle awbBundle : atlasDependencyTree.getAwbBundles()) {
        String key = awbBundle.getResolvedCoordinates().getGroupId() + ":"
                + awbBundle.getResolvedCoordinates().getArtifactId();
        if (autoConfigMap.containsKey(key)) {
            continue;
        }
        File customPackageIDFile = new File(awbBundle.getAndroidLibrary().getFolder(), "customPackageID.txt");
        String packageId = getCustomPackageId(customPackageIDFile);
        if (StringUtils.isNotEmpty(packageId) && StringUtils.isNumeric(packageId)) {
            autoConfigMap.put(key, packageId);
        } else {
            autoConfigMap.put(key, "");
        }
    }

    if (isAutoPackageId && autoConfigMap.containsValue("")) {
        // auto-assign a free package id (30-127) to every bundle that does not have one yet
        List<String> keys = new ArrayList<String>(autoConfigMap.keySet());
        Collections.sort(keys);
        for (String key : keys) {
            if ("".equals(autoConfigMap.get(key))) {
                for (int i = 30; i <= 127; i++) {
                    if (!autoConfigMap.values().contains(String.valueOf(i))) {
                        autoConfigMap.put(key, String.valueOf(i));
                        break;
                    }
                }
            }
        }
    }

    AtlasBuildContext.customPackageIdMaps = autoConfigMap;

    // write the package id file
    File outPkgFile = new File(getProject().getBuildDir(), "outputs/packageIdFile.properties");
    writeProperties(autoConfigMap, outPkgFile);
    appBuildInfo.setPackageIdFile(outPkgFile);

    // check that every bundle ended up with a package id
    if (autoConfigMap.containsValue("")) {
        getLogger().error(JSON.toJSONString(autoConfigMap, true));
        throw new GradleException("some bundles have no packageId assigned");
    }

    if (autoConfigMap.size() != new HashSet(autoConfigMap.values()).size()) {
        // getLogger().error(JSON.toJSONString(autoConfigMap, true));
        Map<String, PackageIdItem> idItemMap = new HashMap<String, PackageIdItem>();
        for (String key : autoConfigMap.keySet()) {
            String customPackageId = autoConfigMap.get(key);
            PackageIdItem packageIdItem = idItemMap.get(customPackageId);
            if (null == packageIdItem) {
                String[] split = customPackageId.split("\\.");
                packageIdItem = new PackageIdItem();
                packageIdItem.packageId = Integer.valueOf(split[0]);
                if (split.length > 1) {
                    packageIdItem.typeIdOffset = Integer.valueOf(split[1]);
                }
                idItemMap.put(customPackageId, packageIdItem);
            }
            packageIdItem.bundles.add(key);
        }
        Collection<PackageIdItem> collection = idItemMap.values();
        List<PackageIdItem> packageList = new ArrayList<PackageIdItem>(collection);
        Collections.sort(packageList, new Comparator<PackageIdItem>() {
            @Override
            public int compare(PackageIdItem o1, PackageIdItem o2) {
                return o1.packageId - o2.packageId;
            }
        });
        getLogger().error(JSON.toJSONString(packageList, true));
        throw new GradleException("duplicate packageId values detected");
    }

    // check if packageID is not used
    Map<String, String> autoConfigMap2 = new HashMap<>(autoConfigMap);
    for (AwbBundle awbBundle : atlasDependencyTree.getAwbBundles()) {
        String key = awbBundle.getResolvedCoordinates().getGroupId() + ":"
                + awbBundle.getResolvedCoordinates().getArtifactId();
        autoConfigMap2.remove(key);
    }
    if (autoConfigMap2.size() > 0) {
        File outPkgFile2 = new File(getProject().getBuildDir(),
                "outputs/warning-unusedPackageIdFile.properties");
        writeProperties(autoConfigMap2, outPkgFile2);
    }
}
From source file:com.snowplowanalytics.refererparser.Parser.java
/**
 * Builds the map of hosts to referers from the
 * input YAML file.
 *
 * @param referersYaml An InputStream containing the
 *                     referers database in YAML format.
 *
 * @return a Map where the key is the hostname of each
 *         referer and the value (RefererLookup)
 *         contains all known info about this referer
 */
private Map<String, RefererLookup> loadReferers(InputStream referersYaml) throws CorruptYamlException {

    Yaml yaml = new Yaml(new SafeConstructor());
    Map<String, Map<String, Map>> rawReferers = (Map<String, Map<String, Map>>) yaml.load(referersYaml);

    // This will store all of our referers
    Map<String, RefererLookup> referers = new HashMap<String, RefererLookup>();

    // Outer loop is all referers under a given medium
    for (Map.Entry<String, Map<String, Map>> mediumReferers : rawReferers.entrySet()) {

        Medium medium = Medium.fromString(mediumReferers.getKey());

        // Inner loop is individual referers
        for (Map.Entry<String, Map> referer : mediumReferers.getValue().entrySet()) {

            String sourceName = referer.getKey();
            Map<String, List<String>> refererMap = referer.getValue();

            // Validate
            List<String> parameters = refererMap.get("parameters");
            if (medium == Medium.SEARCH) {
                if (parameters == null) {
                    throw new CorruptYamlException(
                            "No parameters found for search referer '" + sourceName + "'");
                }
            } else {
                if (parameters != null) {
                    throw new CorruptYamlException(
                            "Parameters not supported for non-search referer '" + sourceName + "'");
                }
            }
            List<String> domains = refererMap.get("domains");
            if (domains == null) {
                throw new CorruptYamlException("No domains found for referer '" + sourceName + "'");
            }

            // Our hash needs referer domain as the
            // key, so let's expand
            for (String domain : domains) {
                if (referers.containsValue(domain)) {
                    throw new CorruptYamlException("Duplicate of domain '" + domain + "' found");
                }
                referers.put(domain, new RefererLookup(medium, sourceName, parameters));
            }
        }
    }

    return referers;
}