Example usage for java.util List parallelStream

Introduction

This page collects usage examples for java.util.List#parallelStream().

Prototype

default Stream<E> parallelStream() 

Document

Returns a possibly parallel Stream with this collection as its source.
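
A minimal sketch of the call, using only the standard library: both pipelines below compute the same sum, but the parallel one may split the work across the common ForkJoinPool.

import java.util.List;

public class ParallelStreamDemo {
    public static void main(String[] args) {
        List<Integer> numbers = List.of(1, 2, 3, 4, 5, 6, 7, 8);

        // Same result either way; parallelStream() may fan the work out
        // across the common ForkJoinPool.
        int sequential = numbers.stream().mapToInt(n -> n * n).sum();
        int parallel = numbers.parallelStream().mapToInt(n -> n * n).sum();

        System.out.println(sequential + " == " + parallel); // prints "204 == 204"
    }
}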

Usage

From source file:com.jscriptive.moneyfx.ui.account.AccountFrame.java

private String getAbsSum(List<AccountItem> accountItems) {
    double sum = accountItems.parallelStream()
            .flatMapToDouble(item -> DoubleStream.of(abs(item.getBalance().doubleValue()))).sum();
    return CurrencyFormat.getInstance().format(sum);
}
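
The flatMapToDouble above wraps each balance in a single-element DoubleStream; mapToDouble expresses the same reduction more directly. A sketch under the same assumed AccountItem API:

private String getAbsSumDirect(List<AccountItem> accountItems) {
    // mapToDouble avoids allocating a DoubleStream per element
    double sum = accountItems.parallelStream()
            .mapToDouble(item -> Math.abs(item.getBalance().doubleValue()))
            .sum();
    return CurrencyFormat.getInstance().format(sum);
}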

From source file:no.imr.stox.functions.acoustic.PgNapesIO.java

public static void export2(String cruise, String country, String callSignal, String path, String fileName,
        List<DistanceBO> distances, Double groupThickness, Integer freqFilter, String specFilter,
        boolean withZeros) {
    Set<Integer> freqs = distances.stream().flatMap(dist -> dist.getFrequencies().stream())
            .map(FrequencyBO::getFreq).collect(Collectors.toSet());
    if (freqFilter == null && freqs.size() == 1) {
        freqFilter = freqs.iterator().next();
    }

    if (freqFilter == null) {
        System.out.println("Multiple frequencies, specify frequency filter as parameter");
        return;
    }
    Integer freqFilterF = freqFilter; // effectively final copy for use in the lambdas below
    List<String> acList = distances.parallelStream().flatMap(dist -> dist.getFrequencies().stream())
            .filter(fr -> freqFilterF.equals(fr.getFreq())).map(f -> {
                DistanceBO d = f.getDistanceBO();
                LocalDateTime sdt = LocalDateTime.ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                Double intDist = d.getIntegrator_dist();
                String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                String hour = StringUtils.leftPad(sdt.getHour() + "", 2, "0");
                String minute = StringUtils.leftPad(sdt.getMinute() + "", 2, "0");
                String log = Conversion.formatDoubletoDecimalString(d.getLog_start(), "0.0");
                String acLat = Conversion.formatDoubletoDecimalString(d.getLat_start(), "0.000");
                String acLon = Conversion.formatDoubletoDecimalString(d.getLon_start(), "0.000");
                return Stream
                        .of(d.getNation(), d.getPlatform(), d.getCruise(), log, sdt.getYear(), month, day, hour,
                                minute, acLat, acLon, intDist, f.getFreq(), f.getThreshold())
                        .map(o -> o == null ? "" : o.toString()).collect(Collectors.joining("\t")) + "\t";
            }).collect(Collectors.toList());
    String fil1 = path + "/" + fileName + ".txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Hour", "Min",
            "AcLat", "AcLon", "Logint", "Frequency", "Sv_threshold").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil1), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
    acList.clear();
    // Acoustic values
    distances.stream().filter(d -> d.getPel_ch_thickness() != null)
            .flatMap(dist -> dist.getFrequencies().stream()).filter(fr -> freqFilterF.equals(fr.getFreq()))
            .forEachOrdered(f -> {
                try {
                    Double groupThicknessF = Math.max(f.getDistanceBO().getPel_ch_thickness(), groupThickness);
                    Map<String, Map<Integer, Double>> pivot = f.getSa().stream()
                            .filter(s -> s.getCh_type().equals("P")).map(s -> new SAGroup(s, groupThicknessF))
                            .filter(s -> s.getSpecies() != null
                                    && (specFilter == null || specFilter.equals(s.getSpecies())))
                            // create pivot table: species (dim1) -> depth interval index (dim2) -> sum sa (group aggregator)
                            .collect(Collectors.groupingBy(SAGroup::getSpecies, Collectors.groupingBy(
                                    SAGroup::getDepthGroupIdx, Collectors.summingDouble(SAGroup::sa))));
                    if (pivot.isEmpty() && specFilter != null && withZeros) {
                        pivot.put(specFilter, new HashMap<>());
                    }
                    Integer maxGroupIdx = pivot.entrySet().stream().flatMap(e -> e.getValue().keySet().stream())
                            .max(Integer::compare).orElse(null);
                    if (maxGroupIdx == null) {
                        return;
                    }
                    acList.addAll(pivot.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getKey))
                            .flatMap(e -> {
                                return IntStream.range(0, maxGroupIdx + 1).boxed().map(groupIdx -> {
                                    Double chUpDepth = groupIdx * groupThicknessF;
                                    Double chLowDepth = (groupIdx + 1) * groupThicknessF;
                                    Double sa = e.getValue().get(groupIdx);
                                    if (sa == null) {
                                        sa = 0d;
                                    }
                                    String res = null;
                                    if (withZeros || sa > 0d) {
                                        DistanceBO d = f.getDistanceBO();
                                        String log = Conversion.formatDoubletoDecimalString(d.getLog_start(),
                                                "0.0");
                                        LocalDateTime sdt = LocalDateTime
                                                .ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                                        String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                                        String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                                        //String sas = String.format(Locale.UK, "%11.5f", sa);
                                        res = Stream
                                                .of(d.getNation(), d.getPlatform(), d.getCruise(), log,
                                                        sdt.getYear(), month, day, e.getKey(), chUpDepth,
                                                        chLowDepth, sa)
                                                .map(o -> o == null ? "" : o.toString())
                                                .collect(Collectors.joining("\t"));
                                    }
                                    return res;
                                }).filter(s -> s != null);
                            }).collect(Collectors.toList()));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });

    String fil2 = path + "/" + fileName + "Values.txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Species",
            "ChUppDepth", "ChLowDepth", "SA").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil2), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
}
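
The nested groupingBy above is the heart of this example: it pivots the data into species, then depth-group index, then summed sa. A self-contained sketch of that nesting (the Sample record is hypothetical):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PivotSketch {
    record Sample(String species, int depthIdx, double sa) {}

    public static void main(String[] args) {
        List<Sample> samples = List.of(
                new Sample("HER", 0, 1.5),
                new Sample("HER", 0, 2.0),
                new Sample("COD", 1, 3.0));

        // outer key -> inner key -> aggregated value
        Map<String, Map<Integer, Double>> pivot = samples.stream()
                .collect(Collectors.groupingBy(Sample::species,
                        Collectors.groupingBy(Sample::depthIdx,
                                Collectors.summingDouble(Sample::sa))));

        System.out.println(pivot); // e.g. {COD={1=3.0}, HER={0=3.5}} (map order not guaranteed)
    }
}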

From source file:cognition.pipeline.service.DNCPipelineService.java

/**
 * Anonymise the DNC Work Coordinates (DWC) specified in a view/table in the source DB.
 *
 */
public void startCreateModeWithDBView() {
    logger.info("Retrieving coordinates from DB.");

    List<DNCWorkCoordinate> dncWorkCoordinates = coordinatesDao.getCoordinates();

    dncWorkCoordinates.parallelStream().forEach(this::processSingleCoordinate);
    logger.info("Finished all.");
    dumpFailedCoordinates();
}

From source file:uk.ac.ebi.ep.data.repositories.UniprotEntryRepositoryImpl.java

@Transactional(readOnly = true)
@Override
public List<String> filterEnzymesInAccessions(List<String> accessions) {

    JPAQuery query = new JPAQuery(entityManager);

    BooleanBuilder builder = new BooleanBuilder();
    // BooleanBuilder is not thread-safe, so the per-element mutation must not
    // run concurrently; forEachOrdered serialises the actions.
    accessions.parallelStream().forEachOrdered(accession ->
            builder.or($.accession.equalsIgnoreCase(accession)));
    List<String> enzymes = query.from($).where(builder).list($.accession).stream().distinct()
            .collect(Collectors.toList());

    return enzymes;

}
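
BooleanBuilder is not thread-safe, which is why the snippet above must serialise the mutation (see the comment). An alternative sketch keeps the parallel stream but folds immutable BooleanExpressions instead, assuming the same Querydsl $ root and JPAQuery:

    // Sketch: reduce immutable expressions; no shared builder is mutated.
    Optional<BooleanExpression> predicate = accessions.parallelStream()
            .map($.accession::equalsIgnoreCase)   // one BooleanExpression per accession
            .reduce(BooleanExpression::or);       // associative and immutable, safe in parallel
    List<String> enzymes = predicate
            .map(p -> query.from($).where(p).list($.accession))
            .orElseGet(Collections::emptyList)
            .stream().distinct()
            .collect(Collectors.toList());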

From source file:cognition.pipeline.service.DNCPipelineService.java

/**
 * Anonymise the DNC Work Coordinates (DWC) specified in the JSON file
 * whose path is given as argument.
 * @param filePath File path of the JSON file that contains DNC Work Coordinates.
 */
public void startCreateModeWithFile(String filePath) {
    logger.info("Loading work units from file.");

    List<DNCWorkCoordinate> workCoordinates = jsonHelper.loadListFromFile(new File(filePath));

    workCoordinates.parallelStream().forEach(this::processSingleCoordinate);
    logger.info("Finished all.");
    dumpFailedCoordinates();
}

From source file:pl.edu.icm.comac.vis.server.service.SearchService.java

protected void updateResultsWithTypes(List<SearchResult> searchResults) throws OpenRDFException {
    Set<String> ids = searchResults.stream().map(x -> x.getId()).collect(Collectors.toSet());
    try {
        Map<String, NodeType> types = typeService.identifyTypes(ids);
        searchResults.parallelStream().forEachOrdered(x -> x.setType(types.get(x.getId())));
    } catch (UnknownNodeException ex) {
        log.error("Unexpected inconsistency in data, no type defined for node", ex);
    }
}

From source file:pe.chalk.takoyaki.target.Target.java

public void collect(D document) {
    try {
        this.getFilters().forEach(filter -> {
            List<? extends Data> list = filter.getFreshData(document);
            if (list.isEmpty())
                return;

            final Event event = new Event(filter, list);

            list.forEach(data -> filter.getLogger().info(data.toString()));

            List<EventHandler> handlers = Takoyaki.getInstance().getPlugins().parallelStream()
                    .filter(EventHandler.class::isInstance).map(EventHandler.class::cast)
                    .collect(Collectors.toList());
            if (handlers.parallelStream().allMatch(handler -> handler.checkEvent(event)))
                handlers.parallelStream().forEach(handler -> handler.handleEvent(event));
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.apdplat.superword.extract.ChineseSynonymAntonymExtractor.java

public static void parseSynonymAntonym(List<String> words) {
    LOGGER.info("??" + words.size());
    Set<String> SKIP_WORDS = new ConcurrentSkipListSet<>();
    try {
        if (Files.notExists(CHECKED_WORDS_PATH)) {
            CHECKED_WORDS_PATH.toFile().createNewFile();
        }
        SKIP_WORDS.addAll(Files.readAllLines(CHECKED_WORDS_PATH));
    } catch (Exception e) {
        LOGGER.error("?", e);
    }
    int total = words.size() - SKIP_WORDS.size();
    LOGGER.info("????" + SKIP_WORDS.size());
    LOGGER.info("??" + total);
    String url = "http://www.iciba.com/";
    AtomicInteger i = new AtomicInteger();
    EXECUTOR_SERVICE.submit(() -> {
        while (true) {
            try {
                Thread.sleep(60000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            save();
        }
    });
    words.parallelStream().forEach(word -> {
        if (SKIP_WORDS.contains(word)) {
            return;
        }
        LOGGER.info("Progress: " + total + "/" + i.incrementAndGet() + ", thread: "
                + Thread.currentThread());
        try {
            word = word.trim();
            if ("".equals(word) || isNotChineseChar(word) || word.length() < 2) {
                return;
            }
            String html = getContent(url + word);
            int times = 1;
            while (StringUtils.isBlank(html) && times < 3) {
                times++;
                // rotate to a new proxy IP
                ProxyIp.toNewIp();
                html = getContent(url + word);
            }
            if (StringUtils.isBlank(html)) {
                LOGGER.error("??" + url + word);
                return;
            }
            times = 1;
            //LOGGER.debug("Fetched HTML: " + html);
            // retry while the page reports a blocked-IP notice; the original
            // Chinese marker string is garbled in this excerpt and left as-is
            while (html.contains("??ip?") && times < 3) {
                times++;
                // rotate to a new proxy IP
                ProxyIp.toNewIp();
                html = getContent(url + word);
            }
            SynonymAntonym synonymAntonym = parseSynonymAntonym(html, word);
            if (!synonymAntonym.getSynonym().isEmpty()) {
                SYNONYM_MAP.put(synonymAntonym.getWord(), synonymAntonym.getSynonym());
            }
            if (!synonymAntonym.getAntonym().isEmpty()) {
                StringBuilder str = new StringBuilder();
                synonymAntonym.getAntonym().forEach(w -> str.append(w.getWord()).append(" "));
                ANTONYM.put(word, str.toString().trim());
            }
            CHECKED_WORDS.add(word);
        } catch (Exception e) {
            LOGGER.error("", e);
        }
    });
    save();
    filterSameRecord(CHINESE_SYNONYM);
    filterSameRecord(CHINESE_ANTONYM);
}
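
Both retry loops above share one shape: fetch, and while the response looks bad, rotate the proxy and refetch, up to three attempts. A hypothetical helper capturing that shape (fetchWithRetry and its looksBad parameter are illustrative, not part of the original source):

private static String fetchWithRetry(String url, int maxTries,
        java.util.function.Predicate<String> looksBad) {
    String html = getContent(url);
    for (int tries = 1; tries < maxTries && looksBad.test(html); tries++) {
        // rotate to a new proxy IP before retrying
        ProxyIp.toNewIp();
        html = getContent(url);
    }
    return html;
}

// Usage sketch: String html = fetchWithRetry(url + word, 3, StringUtils::isBlank);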

From source file:com.firewallid.termcloud.TermCloud.java

public void saveTermCloudAll(JavaPairRDD<String, List<Tuple2<String, Double>>> doc, String fileNamePrefix)
        throws IOException {
    List<Tuple2<String, List<Tuple2<String, Double>>>> collectDoc = doc.collect();

    if (collectDoc.isEmpty()) {
        return;
    }

    /* Reduced feature-value list */
    List<Tuple2<String, Double>> featureValueList = collectDoc.parallelStream()
            .map(titleFeatures -> titleFeatures._2).reduce((featureValueList1, featureValueList2) -> {
                List<Tuple2<String, Double>> combineList = FIUtils.combineList(featureValueList1,
                        featureValueList2);

                List<Tuple2<String, Double>> collect = combineList.parallelStream()
                        .collect(Collectors
                                .groupingBy(t -> t._1, Collectors.mapping(t -> t._2, Collectors.toList())))
                        .entrySet().parallelStream()
                        .map(t -> new Tuple2<String, Double>(t.getKey(),
                                t.getValue().parallelStream().mapToDouble(Double::doubleValue).sum()))
                        .collect(Collectors.toList());

                return collect;
            }).get();

    /* Sorting */
    List<Tuple2<String, Double>> featureValueListSorted = FIUtils.sortDescTupleListByValue(featureValueList);

    /* Top N */
    List<Tuple2<String, Double>> featureValueListTopN;
    if (featureValueListSorted.size() <= conf.getInt(TOPN, 100)) {
        featureValueListTopN = new ArrayList<>(featureValueListSorted);
    } else {
        featureValueListTopN = new ArrayList<>(featureValueListSorted.subList(0, conf.getInt(TOPN, 100)));
    }

    /* Text for file. One line, one feature-value pair */
    String featureValueText = featureValueListTopN.parallelStream()
            .map(feature -> feature._1 + StringEscapeUtils.unescapeJava(conf.get(LINE_DELIMITER)) + feature._2)
            .collect(Collectors.joining(System.lineSeparator()));

    /* Save to file */
    FIFile.writeStringToHDFSFile(FIFile.generateFullPath(conf.get(TERMCLOUD_FOLDER),
            createFileNameTermCloud(fileNamePrefix, conf.get(ALLNAME))), featureValueText);
}
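
The pairwise reduce above re-groups the combined list on every merge step; a single flatMap plus groupingBy pass yields the same per-feature sums in one go. A sketch assuming the same Tuple2 fields:

    /* One-pass merge (sketch): flatten all feature-value pairs, then sum per feature */
    Map<String, Double> merged = collectDoc.parallelStream()
            .flatMap(titleFeatures -> titleFeatures._2.stream())
            .collect(Collectors.groupingBy(t -> t._1,
                    Collectors.summingDouble(t -> t._2)));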

From source file:com.blackducksoftware.integration.hub.detect.detector.clang.ClangExtractor.java

public Extraction extract(final ClangLinuxPackageManager pkgMgr, final File givenDir, final int depth,
        final ExtractionId extractionId, final File jsonCompilationDatabaseFile) {
    try {
        logger.info(String.format("Analyzing %s", jsonCompilationDatabaseFile.getAbsolutePath()));
        final File rootDir = fileFinder.findContainingDir(givenDir, depth);
        final File outputDirectory = directoryManager.getExtractionOutputDirectory(extractionId);
        logger.debug(String.format("extract() called; compileCommandsJsonFilePath: %s",
                jsonCompilationDatabaseFile.getAbsolutePath()));
        final Set<File> unManagedDependencyFiles = ConcurrentHashMap.newKeySet(64);
        final List<CompileCommand> compileCommands = CompileCommandsJsonFile
                .parseJsonCompilationDatabaseFile(gson, jsonCompilationDatabaseFile);
        final List<Dependency> bdioComponents = compileCommands.parallelStream()
                .flatMap(compileCommandToDependencyFilePathsConverter(outputDirectory))
                .collect(Collectors.toSet()).parallelStream().filter(StringUtils::isNotBlank).map(File::new)
                .filter(fileIsNewPredicate())
                .flatMap(dependencyFileToLinuxPackagesConverter(rootDir, unManagedDependencyFiles, pkgMgr))
                .collect(Collectors.toSet()).parallelStream()
                .flatMap(linuxPackageToBdioComponentsConverter(pkgMgr)).collect(Collectors.toList());

        final DetectCodeLocation detectCodeLocation = codeLocationAssembler
                .generateCodeLocation(pkgMgr.getDefaultForge(), rootDir, bdioComponents);
        logSummary(bdioComponents, unManagedDependencyFiles);
        return new Extraction.Builder().success(detectCodeLocation).build();
    } catch (final Exception e) {
        return new Extraction.Builder().exception(e).build();
    }
}
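
Each collect(Collectors.toSet()).parallelStream() hop above exists only to deduplicate between stages; Stream.distinct() removes duplicates in-stream. A condensed sketch under that substitution (note that distinct() on an ordered parallel stream can cost more than an unordered Set collect, so this is a readability trade-off, not a drop-in win):

    // Sketch: distinct() replaces the intermediate Set collections.
    final List<Dependency> bdioComponents = compileCommands.parallelStream()
            .flatMap(compileCommandToDependencyFilePathsConverter(outputDirectory))
            .distinct()
            .filter(StringUtils::isNotBlank)
            .map(File::new)
            .filter(fileIsNewPredicate())
            .flatMap(dependencyFileToLinuxPackagesConverter(rootDir, unManagedDependencyFiles, pkgMgr))
            .distinct()
            .flatMap(linuxPackageToBdioComponentsConverter(pkgMgr))
            .collect(Collectors.toList());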