Example usage for java.util List parallelStream

List of usage examples for java.util List parallelStream

Introduction

On this page you can find example usage for java.util List parallelStream.

Prototype

default Stream<E> parallelStream() 

Source Link

Document

Returns a possibly parallel Stream with this collection as its source.

Usage

From source file:de.dkfz.roddy.core.Analysis.java

/**
 * Checks the processing status of every dataset matching the given pids.
 *
 * @param pids         dataset identifiers used to filter the loaded datasets
 * @param suppressInfo whether informational output is suppressed while loading
 * @return a map of dataset to status flag, ordered by dataset id for
 *         deterministic iteration
 */
public Map<DataSet, Boolean> checkStatus(List<String> pids, boolean suppressInfo) {
    List<DataSet> dataSets = getRuntimeService().loadDatasetsWithFilter(this, pids, suppressInfo);

    // Evaluate each dataset's status in parallel. Collecting via
    // toConcurrentMap is safe for parallel pipelines, unlike the previous
    // pattern of mutating a shared LinkedHashMap inside forEach under a
    // manual synchronized block.
    Map<DataSet, Boolean> results = dataSets.parallelStream()
            .collect(Collectors.toConcurrentMap(ds -> ds, this::checkStatusForDataset));

    // Emit the results sorted by dataset id so callers see a stable order
    // regardless of parallel completion order.
    Map<DataSet, Boolean> sortedMap = new LinkedHashMap<>();
    results.keySet().stream()
            .sorted(Comparator.comparing(DataSet::getId))
            .forEach(ds -> sortedMap.put(ds, results.get(ds)));
    return sortedMap;
}

From source file:com.hack23.cia.web.impl.ui.application.views.common.chartfactory.impl.DocumentChartDataManagerImpl.java

@Override
public void createDocumentHistoryPartyChart(final AbstractOrderedLayout content, final String org) {
    final DataSeries dataSeries = new DataSeries();
    final Series series = new Series();

    // Normalise the party key the same way the summary map keys were stored:
    // upper-case, underscores removed, trimmed.
    final String partyKey = org.toUpperCase(Locale.ENGLISH).replace(UNDER_SCORE, EMPTY_STRING).trim();

    final List<ViewRiksdagenPartyDocumentDailySummary> itemList =
            getViewRiksdagenPartyDocumentDailySummaryMap().get(partyKey);

    if (itemList != null) {
        // Bucket the daily summaries by document type, mapping blank types to NO_INFO.
        final Map<String, List<ViewRiksdagenPartyDocumentDailySummary>> byDocumentType =
                itemList.parallelStream()
                        .filter(summary -> summary != null)
                        .collect(Collectors.groupingBy(summary -> StringUtils
                                .defaultIfBlank(summary.getEmbeddedId().getDocumentType(), NO_INFO)));

        addDocumentHistoryByPartyData(dataSeries, series, byDocumentType);
    }

    addChart(content, "Document history party", new DCharts().setDataSeries(dataSeries)
            .setOptions(chartOptions.createOptionsXYDateFloatLegendOutside(series)).show());
}

From source file:com.hack23.cia.web.impl.ui.application.views.common.chartfactory.impl.DocumentChartDataManagerImpl.java

@Override
public void createPersonDocumentHistoryChart(final AbstractOrderedLayout content, final String personId) {
    final SimpleDateFormat simpleDateFormat = new SimpleDateFormat(DD_MMM_YYYY, Locale.ENGLISH);
    final DataSeries dataSeries = new DataSeries();
    final Series series = new Series();

    // Normalise the person key the same way the summary map keys were stored:
    // upper-case, underscores removed, trimmed.
    final String personKey = personId.toUpperCase(Locale.ENGLISH).replace(UNDER_SCORE, EMPTY_STRING).trim();

    final List<ViewRiksdagenPoliticianDocumentDailySummary> itemList =
            getViewRiksdagenPoliticianDocumentDailySummaryMap().get(personKey);

    if (itemList != null) {
        // Bucket the daily summaries by document type, mapping blank types to NO_INFO.
        final Map<String, List<ViewRiksdagenPoliticianDocumentDailySummary>> byDocumentType =
                itemList.parallelStream()
                        .filter(summary -> summary != null)
                        .collect(Collectors.groupingBy(summary -> StringUtils
                                .defaultIfBlank(summary.getEmbeddedId().getDocumentType(), NO_INFO)));

        addDocumentHistoryByPersonData(simpleDateFormat, dataSeries, series, byDocumentType);
    }

    addChart(content, "Document history ", new DCharts().setDataSeries(dataSeries)
            .setOptions(chartOptions.createOptionsXYDateFloatLegendOutside(series)).show());
}

From source file:uk.co.flax.biosolr.elasticsearch.mapper.ontology.owl.OntologyHelper.java

/**
 * Resolves each property URI to its OWL annotation property and collects every
 * annotation name found for the given entry IRI.
 *
 * @param propertyUris annotation property URIs to look up
 * @param iri          the entry whose annotations are read
 * @return the de-duplicated set of annotation name strings
 */
private Collection<String> findPropertyValueStrings(List<String> propertyUris, IRI iri) {
    Collection<String> classNames = new HashSet<>();

    OWLDataFactory odf = ontology.getOWLOntologyManager().getOWLDataFactory();

    // For every property URI, find the annotations for this entry.
    // Sequential stream on purpose: the previous parallelStream() called
    // forEach(classNames::addAll) on a plain HashSet, which is not
    // thread-safe and risked lost or corrupted entries.
    propertyUris.stream().map(uri -> odf.getOWLAnnotationProperty(IRI.create(uri)))
            .map(prop -> findAnnotationNames(iri, prop)).forEach(classNames::addAll);

    return classNames;
}

From source file:de.steilerdev.myVerein.server.controller.admin.DivisionManagementController.java

/**
 * This function gathers the complete division tree that the user is administrating. The function is invoked by GETting the URI /api/admin/division/divisionTree.
 * @param currentUser The currently logged in user.
 * @return An HTTP response with a status code. In case of success a list of tree nodes, that represent the division tree, are bundled with the status code, otherwise just the error code is returned.
 */
@RequestMapping(value = "divisionTree", produces = "application/json", method = RequestMethod.GET)
public ResponseEntity<List<TreeNode>> getDivisionTree(@CurrentUser User currentUser) {
    logger.trace("[" + currentUser + "]  Gathering the division tree");
    List<Division> divisions = getOptimizedSetOfAdministratedDivisions(currentUser);
    if (divisions != null) {
        if (!divisions.isEmpty()) {
            // Build the tree sequentially: the previous implementation used
            // parallelStream().forEach(...) to add into a plain ArrayList,
            // which is not thread-safe and could drop or corrupt entries.
            List<TreeNode> divisionTree = new ArrayList<>();
            divisions.forEach(div -> divisionTree.add(getSubTree(div)));
            if (divisionTree.isEmpty()) {
                logger.warn("[" + currentUser + "]  The division tree is empty");
                return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
            } else {
                logger.debug("[" + currentUser + "]  Returning the division tree");
                return new ResponseEntity<>(divisionTree, HttpStatus.OK);
            }
        } else {
            logger.warn("[" + currentUser
                    + "]  The optimized set of administrated divisions for the user is empty");
            return new ResponseEntity<>(HttpStatus.FORBIDDEN);
        }
    } else {
        logger.warn("[" + currentUser + "]  Unable to find divisions for the user");
        return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
    }
}

From source file:com.civprod.writerstoolbox.NaturalLanguage.util.RegexStringTokenizer.java

/**
 * Tokenizes the given text by successively splitting it with each pattern in
 * {@code applyOrder}, then drops every token matching a removal pattern.
 *
 * @param Text the raw input text to tokenize
 * @return the resulting tokens, with removal-pattern matches filtered out
 */
@Override
public List<String> tokenize(String Text) {
    // Start with the whole input as a single token.
    List<String> tokens = new java.util.ArrayList<>(1);
    tokens.add(Text);
    List<Pattern> ignorePatterns = new java.util.ArrayList<>(ignoreMapping.size());
    for (Pattern curPattern : applyOrder) {
        tokens = Tokenize(tokens, curPattern, ignorePatterns, tokens.size());
        // Once a pattern has been applied, its ignore pattern (if any)
        // protects already-produced tokens from later splits.
        if (ignoreMapping.containsKey(curPattern)) {
            ignorePatterns.add(ignoreMapping.get(curPattern));
        }
    }
    // Filter in a single pass instead of rebuilding the whole list once per
    // removal pattern; noneMatch yields the same result as the chained filters.
    return tokens.stream()
            .filter(word -> removePatterns.stream().noneMatch(p -> p.matcher(word).matches()))
            .collect(Collectors.toList());
}

From source file:org.artifactory.post.services.CallHomeService.java

/**
 * Collects features metadata {@see RTFACT-8412}.
 *
 * @param request the call-home request the collected repository features are added to
 */
private void addFeatures(CallHomeRequest request) {
    // NOTE(review): featureGroups is populated below but never attached to the
    // request (only repositoriesFeature is) — confirm whether it is needed.
    FeatureGroup featureGroups = new FeatureGroup();

    FeatureGroup repositoriesFeature = new FeatureGroup("repositories");
    FeatureGroup localRepositoriesFeature = new FeatureGroup("local repositories");
    FeatureGroup remoteRepositoriesFeature = new FeatureGroup("remote repositories");

    // Single pass over the repositories: classify each one and count locals,
    // instead of streaming the list once to add features and a second time
    // (in parallel) just to count.
    List<RealRepo> localAndRemoteRepositories = repoService.getLocalAndRemoteRepositories();
    long localCount = 0;
    for (RealRepo rr : localAndRemoteRepositories) {
        if (rr.isLocal()) {
            addLocalRepoFeatures(localRepositoriesFeature, rr);
            localCount++;
        } else {
            addRemoteRepoFeatures(remoteRepositoriesFeature, rr);
        }
    }
    localRepositoriesFeature.addFeatureAttribute("number_of_repositories", localCount);
    remoteRepositoriesFeature.addFeatureAttribute("number_of_repositories",
            localAndRemoteRepositories.size() - localCount);

    repositoriesFeature.addFeature(localRepositoriesFeature);
    repositoriesFeature.addFeature(remoteRepositoriesFeature);

    // virtual repos
    FeatureGroup virtualRepositoriesFeature = new FeatureGroup("virtual repositories");
    List<VirtualRepo> virtualRepositories = repoService.getVirtualRepositories();
    virtualRepositoriesFeature.addFeatureAttribute("number_of_repositories",
            getVirtualReposSize(virtualRepositories));
    addVirtualRepoFeatures(virtualRepositoriesFeature, virtualRepositories);
    repositoriesFeature.addFeature(virtualRepositoriesFeature);

    featureGroups.addFeature(repositoriesFeature);
    request.addCallHomeFeature(repositoriesFeature);
}

From source file:at.medevit.elexis.emediplan.core.internal.EMediplanServiceImpl.java

@Override
public void setPresciptionsToMedicament(Medication medication, Medicament medicament) {
    // Nothing to resolve without a patient id on the medication.
    if (medication.Patient == null || medication.Patient.patientId == null) {
        return;
    }
    if (medicament.artikelstammItem != null) {
        // Load the patient's prescriptions, most recently updated first.
        Query<Prescription> query = new Query<>(Prescription.class);
        query.add(Prescription.FLD_PATIENT_ID, Query.LIKE, medication.Patient.patientId);
        query.orderBy(true, PersistentObject.FLD_LASTUPDATE);

        List<Prescription> allPrescriptions = query.execute();

        // Prescriptions stopping within the next 5 seconds count as stopped.
        TimeTool cutoff = new TimeTool();
        cutoff.add(TimeTool.SECOND, 5);

        List<Prescription> activePrescriptions = allPrescriptions.parallelStream()
                .filter(prescription -> !prescription.isStopped(cutoff))
                .collect(Collectors.toList());

        setMedicamentState(medicament, activePrescriptions);
    }
    setMedicamentStateInfo(medicament);
}

From source file:org.wso2.security.tools.reposcanner.scanner.GitHubRepoScanner.java

/**
 * Scans all configured GitHub accounts: downloads each repository tag into a
 * temporary folder, locates every pom.xml inside it, extracts the Maven ID for
 * each POM, and persists the artifact (or a RepoError on failure) to storage.
 *
 * @param storage the persistence backend receiving discovered artifacts and errors
 * @throws Exception if pre-existing temp-folder cleanup or final cleanup fails
 */
public void scan(Storage storage) throws Exception {
    String consoleTag = "[GIT] ";
    log.info(consoleTag + "GIT repository scanning started.");

    ArtifactInfoGenerator mavenArtifactInfoGenerator = new MavenArtifactInfoGenerator();
    RepoDownloader gitRepoDownloader = new GitHubTagDownloader();

    //Create temp folder for storing downloaded repository content
    File gitTempFolder = new File("temp-git");
    if (gitTempFolder.exists()) {
        // Remove leftovers from a previous (possibly aborted) run.
        FileUtils.deleteDirectory(gitTempFolder);
    }
    if (gitTempFolder.mkdir()) {
        log.info(consoleTag + "Temporary folder created at: " + gitTempFolder.getAbsolutePath());
    } else {
        // Without a working temp folder no download can proceed; abort the scan.
        log.error(consoleTag + "Unable to create temporary folder at: " + gitTempFolder.getAbsolutePath());
        return;
    }

    //Get list of repositories from GitHub
    RepoInfoGenerator repoInfoGenerator = new GitHubRepoInfoGenerator(oAuth2Token);
    List<Repo> repoList = null;
    if (AppConfig.getGithubAccounts() == null || AppConfig.getGithubAccounts().isEmpty()) {
        log.error(consoleTag + "No GitHub user accounts provided for the scan. Terminating...");
        return;
    } else {
        repoList = repoInfoGenerator.getRepoList(consoleTag, AppConfig.getGithubAccounts());
    }

    if (!AppConfig.isSkipScan()) {
        // Each repository/tag is processed on its own worker thread; each
        // worker uses its own sub-folder of gitTempFolder, so they do not
        // share mutable state beyond the (presumably thread-safe) storage.
        repoList.parallelStream().forEach(repo -> {
            String newConsoleTag = consoleTag + "[User:" + repo.getUser() + ",Repo:" + repo.getRepositoryUrl()
                    + ",Tag:" + repo.getTagName() + "] ";
            try {
                // Skip repos already indexed unless a re-scan was requested.
                if (AppConfig.isRescanRepos() || !storage.isRepoPresent(repo)) {

                    log.info(newConsoleTag + "[Adding] Adding repo to scanning pool");

                    //Create folder to store files from Github
                    String identifier = repo.getRepositoryName() + "-Tag-" + repo.getTagName();
                    File artifactTempFolder = new File(
                            gitTempFolder.getAbsoluteFile() + File.separator + identifier);
                    artifactTempFolder.mkdir();
                    log.info(consoleTag + "Temporary folder created at: "
                            + artifactTempFolder.getAbsolutePath());

                    try {
                        //Download from GitHub and extract ZIP
                        log.info(consoleTag + "Downloading started");
                        gitRepoDownloader.downloadRepo(repo, artifactTempFolder);
                        log.info(consoleTag + "Downloading completed");

                        //Locate POM files within the extracted ZIP
                        log.info(consoleTag + "POM searching started");
                        Collection<File> sourceFiles = FileUtils.listFiles(artifactTempFolder,
                                TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);
                        List<File> mavenBuildConfigFiles = sourceFiles.stream()
                                .filter(file -> file.getName().equals("pom.xml")).collect(Collectors.toList());
                        log.info(consoleTag + "POM searching completed");

                        //Execute maven executor plugin on each POM to get Maven ID (groupId, artifactId, packaging, version)
                        // NOTE(review): the executor service and callable list below are
                        // vestigial — the Callable-based variant is commented out and POMs
                        // are now processed inline; confirm before removing.
                        ExecutorService artifactWorkerExecutorService = Executors.newSingleThreadExecutor();

                        List<Callable<RepoArtifact>> callableArrayList = new ArrayList<>();

                        mavenBuildConfigFiles.stream().forEach(mavenBuildConfigFile -> {
                            try {
                                // Derive the POM's repo-relative path (strip the temp-folder
                                // prefix and the leading path segment).
                                String path = mavenBuildConfigFile.getAbsolutePath().substring(
                                        artifactTempFolder.getAbsolutePath().length(),
                                        mavenBuildConfigFile.getAbsolutePath().length());
                                path = path.substring(path.indexOf(File.separator, 1), path.length());
                                String finalPath = path.replace("pom.xml", "");

                                String pathIncludedConsoleTag = consoleTag + "[" + path + "] ";

                                //If this is a repo re-scan, only the artifacts that are not already indexed should be scanned.
                                //If thus is not a repo re-scan, repo itself will be skipped if it is already indexed (in previous condition check)
                                boolean scanArtifact = true;
                                if (AppConfig.isRescanRepos() && (storage.isArtifactPresent(repo, path)
                                        || storage.isErrorPresent(repo, path))) {
                                    scanArtifact = false;
                                }
                                if (scanArtifact) {
                                    log.info(pathIncludedConsoleTag
                                            + "[Adding] Adding POM for artifact information gathering pool");
                                    //Callable<RepoArtifact> callable = () -> {
                                    try {
                                        RepoArtifact repoArtifactInfo = mavenArtifactInfoGenerator.getArtifact(
                                                consoleTag, artifactTempFolder, mavenBuildConfigFile);
                                        repoArtifactInfo.setRepo(repo);
                                        log.info(consoleTag + "Maven ID extracted. Sending for storage.");
                                        //return repoArtifactInfo;
                                        storage.persist(repoArtifactInfo);
                                    } catch (Exception e) {
                                        // Extraction failed: record the error so re-scans can
                                        // skip this path, then continue with the next POM.
                                        RepoError repoError = new RepoError(finalPath, "MavenID not found",
                                                repo, new Date());
                                        storage.persistError(repoError);

                                        if (AppConfig.isVerbose()) {
                                            log.warn(consoleTag
                                                    + "[Skipping] Could not extract Maven ID from Maven executor",
                                                    e);
                                        } else {
                                            log.warn(consoleTag
                                                    + "[Skipping] Could not extract Maven ID from Maven executor");
                                        }
                                        //return null;
                                    }
                                    //};
                                    //callableArrayList.add(callable);
                                } else {
                                    log.warn(pathIncludedConsoleTag
                                            + "[Skipping] Artifact is already present in storage.");
                                }
                            } catch (Exception e) {
                                log.error(consoleTag
                                        + "[!!! CRITICAL ERROR !!!] Exception in extracting artifact information for repository: "
                                        + repo + " config file: " + mavenBuildConfigFile.getAbsolutePath(), e);
                            }
                        });

                        /** artifactWorkerExecutorService.invokeAll(callableArrayList).stream().forEach(artifactFuture -> {
                        RepoArtifact repoArtifact = null;
                        try {
                            repoArtifact = artifactFuture.get();
                            if (repoArtifact != null) {
                                storage.persist(repoArtifact);
                            }
                        } catch (Exception e) {
                            log.error(consoleTag + "[!!! CRITICAL ERROR !!!] Exception in persisting Artifact: " + repoArtifact, e);
                        }
                        }); **/
                    } catch (Exception e) {
                        // Download/scan failure: best-effort removal of this repo's
                        // temp folder before logging the failure.
                        try {
                            FileUtils.deleteDirectory(artifactTempFolder);
                        } catch (IOException e1) {
                            log.warn("Exception in removing temp folder: "
                                    + artifactTempFolder.getAbsolutePath());
                        }
                        log.error(consoleTag + "[!!! CRITICAL ERROR !!!] Git repository scanning failed: "
                                + identifier, e);
                    } finally {
                        //Do cleanup and storage release
                        log.info(consoleTag + "All threads complete. Clean up tasks started.");
                        log.info(consoleTag + "Deleting: " + artifactTempFolder.getAbsolutePath());
                        FileUtils.deleteDirectory(artifactTempFolder);
                    }

                } else {
                    log.warn(newConsoleTag + "[Skipping] Repo is already present in storage.");
                }
            } catch (Exception e) {
                log.error(consoleTag + "[!!! CRITICAL ERROR !!!] Exception in scanning repository: " + repo, e);
            }
        });
    } else {
        log.warn(consoleTag + "[Skipping] SkipScan parameter is set.");
    }

    //Do cleanup and storage release
    log.info(consoleTag + "All threads complete. Clean up tasks started.");
    FileUtils.deleteDirectory(gitTempFolder);
}

From source file:cognitivej.vision.face.scenario.FaceScenarios.java

/**
 * Groups single-face images into clusters of matching faces.
 *
 * @param images the image URLs (as a list).
 * @return a built {@link FaceGroupingSet} combining the images, their detected
 *         faces, and the grouping result.
 */
@NotNull
public FaceGroupingSet groupFaceListOnSingleFace(List<String> images) {
    // Detect exactly one face per image, keeping image and face together.
    List<ImageAndFace> imageAndFaces = new ArrayList<>();
    for (String image : images) {
        imageAndFaces.add(new ImageAndFace<>(image, findSingleFace(image)));
    }

    // Collect the face ids and ask the face service to group them.
    List<String> faceIds = imageAndFaces.stream()
            .map(entry -> entry.getFace().faceId)
            .collect(Collectors.toList());
    FaceGrouping groupings = faceTaskBuilder.groupFaces(faceIds).withResult();

    return new FaceGroupingSet(imageAndFaces, groupings);
}