List of usage examples for java.util Set stream
default Stream<E> stream()
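Before the project examples below, a minimal sketch of the basic pattern, assuming Java 9+ for Set.of and using invented sample data: stream the set, filter and map its elements, then collect the result into a new Set.

import java.util.Set;
import java.util.stream.Collectors;

public class SetStreamBasics {
    public static void main(String[] args) {
        Set<String> names = Set.of("alice", "bob", "carol");
        // stream() yields a sequential Stream over the set's elements (encounter order is unspecified)
        Set<String> shortNames = names.stream()
                .filter(n -> n.length() <= 3)
                .map(String::toUpperCase)
                .collect(Collectors.toSet());
        System.out.println(shortNames); // prints [BOB]
    }
}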
From source file:com.hurence.logisland.plugin.PluginManager.java
private static void installPlugin(String artifact, String logislandHome) {
    Optional<ModuleInfo> moduleInfo = findPluginMeta().entrySet().stream()
            .filter(e -> artifact.equals(e.getKey().getArtifact()))
            .map(Map.Entry::getKey)
            .findFirst();
    if (moduleInfo.isPresent()) {
        System.err.println("A component already matches the artifact " + artifact + ". Please remove it first.");
        System.exit(-1);
    }
    try {
        IvySettings settings = new IvySettings();
        settings.load(new File(logislandHome, "conf/ivy.xml"));
        Ivy ivy = Ivy.newInstance(settings);
        ivy.bind();
        System.out.println("\nDownloading dependencies. Please hold on...\n");
        String[] parts = Arrays.stream(artifact.split(":")).map(String::trim).toArray(String[]::new);
        if (parts.length != 3) {
            throw new IllegalArgumentException(
                    "Unrecognized artifact format. It should be groupId:artifactId:version");
        }
        ModuleRevisionId revisionId = new ModuleRevisionId(new ModuleId(parts[0], parts[1]), parts[2]);
        Set<ArtifactDownloadReport> toBePackaged = downloadArtifacts(ivy, revisionId,
                new String[] { "default", "compile", "runtime" });
        ArtifactDownloadReport artifactJar = toBePackaged.stream()
                .filter(a -> a.getArtifact().getModuleRevisionId().equals(revisionId))
                .findFirst()
                .orElseThrow(() -> new IllegalStateException("Unable to find artifact " + artifact
                        + ". Please check the name is correct and the repositories on ivy.xml are correctly configured"));
        Manifest manifest = new JarFile(artifactJar.getLocalFile()).getManifest();
        File libDir = new File(logislandHome, "lib");
        if (manifest.getMainAttributes().containsKey(ManifestAttributes.MODULE_ARTIFACT)) {
            // We have a logisland plugin. Just copy it.
            org.apache.commons.io.FileUtils.copyFileToDirectory(artifactJar.getLocalFile(), libDir);
            System.out.println(String.format("Found logisland plugin %s version %s\n" + "It will provide:",
                    manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_NAME),
                    manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_VERSION)));
            Arrays.stream(manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_EXPORTS).split(","))
                    .map(String::trim)
                    .forEach(s -> System.out.println("\t" + s));
        } else {
            System.out.println("Repackaging artifact and its dependencies");
            Set<ArtifactDownloadReport> environment = downloadArtifacts(ivy, revisionId,
                    new String[] { "provided" });
            Set<ArtifactDownloadReport> excluded = toBePackaged.stream()
                    .filter(adr -> excludeGroupIds.stream()
                            .anyMatch(s -> s.matches(adr.getArtifact().getModuleRevisionId().getOrganisation()))
                            || excludedArtifactsId.stream()
                                    .anyMatch(s -> s.matches(adr.getArtifact().getModuleRevisionId().getName())))
                    .collect(Collectors.toSet());
            toBePackaged.removeAll(excluded);
            environment.addAll(excluded);
            Repackager rep = new Repackager(artifactJar.getLocalFile(), new LogislandPluginLayoutFactory());
            rep.setMainClass("");
            File destFile = new File(libDir, "logisland-component-" + artifactJar.getLocalFile().getName());
            rep.repackage(destFile, callback -> toBePackaged.stream()
                    .filter(adr -> adr.getLocalFile() != null)
                    .filter(adr -> !adr.getArtifact().getModuleRevisionId().equals(revisionId))
                    .map(adr -> new Library(adr.getLocalFile(), LibraryScope.COMPILE))
                    .forEach(library -> {
                        try {
                            callback.library(library);
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                    }));
            Thread.currentThread().setContextClassLoader(new URLClassLoader(
                    environment.stream()
                            .filter(adr -> adr.getLocalFile() != null)
                            .map(adr -> {
                                try {
                                    return adr.getLocalFile().toURI().toURL();
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                            })
                            .toArray(URL[]::new),
                    Thread.currentThread().getContextClassLoader()));
            // Now clean up the package and write the manifest.
            String newArtifact = "com.hurence.logisland.repackaged:" + parts[1] + ":" + parts[2];
            LogislandRepackager.execute(destFile.getAbsolutePath(), "BOOT-INF/lib-provided", parts[2], newArtifact,
                    "Logisland Component for " + artifact, "Logisland Component for " + artifact,
                    new String[] { "org.apache.kafka.*" }, new String[0],
                    "org.apache.kafka.connect.connector.Connector");
        }
        System.out.println("Install done!");
    } catch (Exception e) {
        System.err.println("Unable to install artifact " + artifact);
        e.printStackTrace();
        System.exit(-1);
    }
}
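The central Set.stream() idiom in the example above is locating one matching element and failing loudly when it is absent (filter + findFirst + orElseThrow). A stripped-down sketch of that pattern; the Report type and data are invented for illustration.

import java.util.Set;

class Report {
    final String id;
    Report(String id) { this.id = id; }
}

public class FindFirstSketch {
    static Report findById(Set<Report> reports, String wanted) {
        // filter + findFirst narrows the set to the one element we care about;
        // orElseThrow turns "not found" into an explicit failure instead of a null
        return reports.stream()
                .filter(r -> wanted.equals(r.id))
                .findFirst()
                .orElseThrow(() -> new IllegalStateException("Unable to find report " + wanted));
    }

    public static void main(String[] args) {
        Set<Report> reports = Set.of(new Report("a"), new Report("b"));
        System.out.println(findById(reports, "a").id); // prints a
    }
}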
From source file:edu.wpi.checksims.algorithm.output.SimilarityMatrix.java
/**
 * Fill the results matrix
 *
 * NOTE that this is inefficient; we calculate twice for each pair, as (i,j) and (j,i).
 * If speed optimizations are required, recommend starting here.
 *
 * @param submissions Submissions to generate results matrix from
 * @param algorithm   Algorithm to use when detecting plagiarism
 * @return Array of algorithm results, with results[i,j] being the results of comparing students i and j
 */
public static SimilarityMatrix generate(Set<Submission> submissions, SimilarityDetector algorithm) {
    checkNotNull(submissions);
    checkArgument(submissions.size() >= 2);
    checkNotNull(algorithm);

    Logger logs = LoggerFactory.getLogger(SimilarityMatrix.class);

    float[][] results = new float[submissions.size()][submissions.size()];

    logs.debug("Sorting submissions prior to algorithm pass...");
    List<Submission> submissionsSorted = Ordering
            .from((Submission a, Submission b) -> a.getName().compareTo(b.getName()))
            .immutableSortedCopy(submissions);

    // Generate all possible unordered pairs of submissions
    Set<Submission> submissionsAsSet = ImmutableSet.copyOf(submissions);
    Set<Pair<Submission, Submission>> pairs = PairGenerator.generatePairs(submissionsAsSet);

    // Get results for all possible pairs of submissions
    Set<AlgorithmResults> algorithmResults = AlgorithmRunner.runAlgorithm(pairs, algorithm);

    // First, zero the diagonal of the results array
    for (int i = 0; i < submissionsSorted.size(); i++) {
        results[i][i] = 0.00f; // Same submission, ignore
    }

    // For each result, fill corresponding spots in the results matrix
    algorithmResults.stream().forEach((result) -> {
        int indexFirst = submissionsSorted.indexOf(result.a);
        int indexSecond = submissionsSorted.indexOf(result.b);

        if (indexFirst == -1) {
            throw new RuntimeException("Could not find index of submission " + result.a.getName());
        } else if (indexSecond == -1) {
            throw new RuntimeException("Could not find index of submission " + result.b.getName());
        }

        results[indexFirst][indexSecond] = result.percentMatchedA();
        results[indexSecond][indexFirst] = result.percentMatchedB();
    });

    logs.info("Done performing similarity detection");

    return new SimilarityMatrix(submissionsSorted, results);
}
From source file:fr.landel.utils.commons.StringFormatUtils.java
private static StringBuilder replaceAndClear(final Set<Group> groups, final StringBuilder sb) {
    groups.stream().sorted(Group.COMPARATOR.desc()).forEachOrdered((g) -> {
        if (g.remove) {
            sb.replace(g.start, g.end, EMPTY);
        } else {
            g.asterisk = false;
            sb.replace(g.start, g.end, g.toString());
        }
    });
    return sb;
}
From source file:com.onyxscheduler.domain.Job.java
public static Job fromQuartzJobDetailAndTriggers(JobDetail jobDetail,
        Set<? extends org.quartz.Trigger> triggers) {
    try {
        Job job = (Job) jobDetail.getJobClass().newInstance();
        org.quartz.JobKey jobKey = jobDetail.getKey();
        job.setId(UUID.fromString((String) jobDetail.getJobDataMap().remove(ID_DATAMAP_KEY)));
        job.setName(jobKey.getName());
        job.setGroup(jobKey.getGroup());
        job.setTriggers(triggers.stream().map(Trigger::fromQuartzTrigger).collect(Collectors.toSet()));
        job.initFromDataMap(jobDetail.getJobDataMap());
        return job;
    } catch (InstantiationException | IllegalAccessException e) {
        throw Throwables.propagate(e);
    }
}
From source file:org.fcrepo.transform.transformations.LDPathTransform.java
/**
 * Pull a node-type specific transform out of JCR
 * @param node the node
 * @param key the key
 * @return node-type specific transform
 * @throws RepositoryException if repository exception occurred
 */
public static LDPathTransform getNodeTypeTransform(final Node node, final String key)
        throws RepositoryException {
    final Node programNode = node.getSession().getNode(CONFIGURATION_FOLDER + key);
    LOGGER.debug("Found program node: {}", programNode.getPath());

    final NodeType primaryNodeType = node.getPrimaryNodeType();
    final Set<NodeType> supertypes = orderedBy(BY_NAME).add(primaryNodeType.getSupertypes()).build();
    final Set<NodeType> mixinTypes = orderedBy(BY_NAME).add(node.getMixinNodeTypes()).build();

    // start with mixins, primary type, and supertypes of primary type
    final ImmutableList.Builder<NodeType> nodeTypesB = builder();
    nodeTypesB.addAll(mixinTypes).add(primaryNodeType).addAll(supertypes);

    // add supertypes of mixins
    mixinTypes.stream().map(mixin -> orderedBy(BY_NAME).add(mixin.getDeclaredSupertypes()).build())
            .forEach(nodeTypesB::addAll);

    final List<NodeType> nodeTypes = nodeTypesB.build();
    LOGGER.debug("Discovered node types: {}", nodeTypes);

    for (final NodeType nodeType : nodeTypes) {
        if (programNode.hasNode(nodeType.toString())) {
            return new LDPathTransform(programNode.getNode(nodeType.toString()).getNode(JCR_CONTENT)
                    .getProperty(JCR_DATA).getBinary().getStream());
        }
    }

    throw new WebApplicationException(
            new Exception("Couldn't find transformation for " + node.getPath()
                    + " and transformation key " + key),
            SC_BAD_REQUEST);
}
From source file:com.uber.okbuck.core.util.ProjectUtil.java
public static Map<ComponentIdentifier, ResolvedArtifactResult> downloadSources(Project project,
        Set<ResolvedArtifactResult> artifacts) {
    // Download sources if enabled via intellij extension
    if (!ProjectUtil.getOkBuckExtension(project).getIntellijExtension().downloadSources()) {
        return new HashMap<>();
    }

    DependencyHandler dependencies = project.getDependencies();

    try {
        Set<ComponentIdentifier> identifiers = artifacts.stream()
                .filter(artifact -> canHaveSources(artifact.getFile()))
                .map(artifact -> artifact.getId().getComponentIdentifier())
                .collect(Collectors.toSet());

        @SuppressWarnings("unchecked")
        Class<? extends Artifact>[] artifactTypesArray =
                (Class<? extends Artifact>[]) new Class<?>[] { SourcesArtifact.class };

        Set<ComponentArtifactsResult> results = dependencies.createArtifactResolutionQuery()
                .forComponents(identifiers)
                .withArtifacts(JvmLibrary.class, artifactTypesArray)
                .execute()
                .getResolvedComponents();

        // Only has one type.
        Class<? extends Artifact> type = artifactTypesArray[0];

        return results.stream()
                .map(artifactsResult -> artifactsResult.getArtifacts(type))
                .flatMap(Set::stream)
                .filter(artifactResult -> artifactResult instanceof ResolvedArtifactResult)
                .map(artifactResult -> (ResolvedArtifactResult) artifactResult)
                .filter(artifactResult -> FileUtil.isZipFile(artifactResult.getFile()))
                .collect(Collectors.toMap(
                        resolvedArtifact -> resolvedArtifact.getId().getComponentIdentifier(),
                        resolvedArtifact -> resolvedArtifact));
    } catch (Throwable t) {
        System.out.println(
                "Unable to download sources for project " + project.toString() + " with error " + t.toString());
        return new HashMap<>();
    }
}
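The example above streams the artifact set twice: once to collect component identifiers, and once flattening the per-component artifact sets with flatMap(Set::stream) before indexing them with Collectors.toMap. A simplified, self-contained sketch of that flatten-and-index step, with invented file names standing in for resolved artifacts.

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class FlattenAndIndex {
    public static void main(String[] args) {
        // A set of sets, standing in for "each resolution result exposes a set of artifacts"
        Set<Set<String>> resultsPerComponent = Set.of(
                Set.of("lib-a-1.0-sources.jar"),
                Set.of("lib-b-2.1-sources.jar", "lib-c-0.3-sources.jar"));

        // Flatten the nested sets into one stream, then index each file name by a derived key
        Map<String, String> byName = resultsPerComponent.stream()
                .flatMap(Set::stream)
                .collect(Collectors.toMap(
                        file -> file.split("-")[1], // key: the short artifact id (second dash-separated token)
                        file -> file));             // value: the file name itself

        System.out.println(byName.get("b")); // prints lib-b-2.1-sources.jar
    }
}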
From source file:com.netflix.genie.core.jpa.specifications.JpaCommandSpecs.java
/**
 * Get all the commands given the specified parameters.
 *
 * @param applicationId The id of the application that is registered with these commands
 * @param statuses      The status of the commands
 * @return The specification
 */
public static Specification<CommandEntity> findCommandsForApplication(final String applicationId,
        final Set<CommandStatus> statuses) {
    return (final Root<CommandEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();
        final Join<CommandEntity, ApplicationEntity> application = root.join(CommandEntity_.applications);
        predicates.add(cb.equal(application.get(ApplicationEntity_.id), applicationId));

        if (statuses != null && !statuses.isEmpty()) {
            final List<Predicate> orPredicates = statuses.stream()
                    .map(status -> cb.equal(root.get(CommandEntity_.status), status))
                    .collect(Collectors.toList());
            predicates.add(cb.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
        }

        return cb.and(predicates.toArray(new Predicate[predicates.size()]));
    };
}
From source file:com.yahoo.bard.webservice.web.filters.QueryParameterNormalizationFilter.java
/**
 * Build a parameter map that searches for @QueryParam annotations on the jersey endpoints
 * and extracts their names.
 * This map enables us to perform case insensitive translations.
 *
 * Detail:
 * This method extracts all classes contained within the packages specified as "jersey provider packages."
 * It then conditions these methods by enumerating all methods and keeping only those that are annotated
 * as JAX-RS endpoints (+ the bard @PATCH annotation). After harvesting this list of methods, it then enumerates
 * all of the parameters for each method and keeps a list of the @QueryParam values. It then extracts the values
 * from these @QueryParam annotations to retain its codified casing while also constructing a map of lowercase'd
 * values to map in a case insensitive way.
 *
 * NOTE: The ClassLoader provided with the ResourceConfig is used to find classes.
 *
 * @param providers   Set of provider classes to seek for @QueryParam
 * @param classLoader Class loader to use while searching for specified packages
 * @return Parameter map containing all of the case insensitive to case sensitive @QueryParam values
 */
private static Map<String, String> buildParameterMap(Set<Class<?>> providers, ClassLoader classLoader) {
    if (providers == null) {
        LOG.warn("No providers defined. Disabling QueryParameterNormalizationFilter.");
        return Collections.emptyMap();
    } else if (classLoader == null) {
        LOG.warn("No valid ClassLoader found from context. Disabling QueryParameterNormalizationFilter.");
        return Collections.emptyMap();
    }

    return providers.stream()
            // Extract all of the corresponding methods from these classes
            .flatMap(QueryParameterNormalizationFilter::extractMethods)
            // Determine which methods are annotated as a JAX-RS endpoint
            .filter(QueryParameterNormalizationFilter::isWebEndpoint)
            // For each of these methods, extract the @QueryParam annotations from the parameter list
            .flatMap(QueryParameterNormalizationFilter::extractQueryParameters)
            // Extract the parameter value
            .map(QueryParam::value)
            .distinct()
            // Map the lower-case'd parameter value to its @QueryParam case'd counterpart
            .collect(Collectors.toMap(
                    param -> param.toLowerCase(Locale.ENGLISH),
                    Function.identity(),
                    QueryParameterNormalizationFilter::resolveMapConflicts));
}
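The case-insensitive lookup built above reduces to collecting distinct values into a map whose keys are lower-cased and whose merge function resolves collisions. A minimal sketch of that collection step, independent of Jersey; the parameter names are invented.

import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

public class CaseInsensitiveIndex {
    public static void main(String[] args) {
        Set<String> declaredParams = Set.of("pageSize", "PageSize", "sortBy");

        // Key: lower-cased name; value: the originally declared casing.
        // The merge function decides which casing wins when two declarations collide.
        Map<String, String> canonical = declaredParams.stream()
                .distinct()
                .collect(Collectors.toMap(
                        p -> p.toLowerCase(Locale.ENGLISH),
                        Function.identity(),
                        (first, second) -> first)); // keep the first casing seen

        System.out.println(canonical.get("pagesize")); // pageSize or PageSize, depending on encounter order
    }
}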
From source file:org.eclipse.sw360.datahandler.common.SW360Utils.java
public static Set<String> filterBUSet(String organisation, Set<String> strings) {
    if (strings == null || isNullOrEmpty(organisation)) {
        return new HashSet<String>();
    }
    String bu = getBUFromOrganisation(organisation);
    return strings.stream().filter(string -> bu.equals(string)).collect(Collectors.toSet());
}
From source file:com.netflix.genie.web.data.repositories.jpa.specifications.JpaCommandSpecs.java
/**
 * Get all the commands given the specified parameters.
 *
 * @param applicationId The id of the application that is registered with these commands
 * @param statuses      The status of the commands
 * @return The specification
 */
public static Specification<CommandEntity> findCommandsForApplication(final String applicationId,
        @Nullable final Set<CommandStatus> statuses) {
    return (final Root<CommandEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();
        final Join<CommandEntity, ApplicationEntity> application = root.join(CommandEntity_.applications);
        predicates.add(cb.equal(application.get(ApplicationEntity_.uniqueId), applicationId));

        if (statuses != null && !statuses.isEmpty()) {
            predicates.add(
                    cb.or(statuses.stream()
                            .map(status -> cb.equal(root.get(CommandEntity_.status), status))
                            .toArray(Predicate[]::new)));
        }

        return cb.and(predicates.toArray(new Predicate[predicates.size()]));
    };
}
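Unlike the earlier Genie variant, this version skips the intermediate List and materializes the predicates directly via toArray(Predicate[]::new). The same one-step stream-to-array pattern with plain strings, purely for illustration.

import java.util.Set;

public class StreamToArray {
    public static void main(String[] args) {
        Set<String> statuses = Set.of("ACTIVE", "DEPRECATED");

        // Map each element and hand the stream an array constructor reference,
        // avoiding the collect-to-List-then-toArray round trip
        String[] labels = statuses.stream()
                .map(s -> "status=" + s)
                .toArray(String[]::new);

        for (String label : labels) {
            System.out.println(label);
        }
    }
}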