Example usage for java.util Collection parallelStream

List of usage examples for java.util Collection parallelStream

Introduction

On this page you can find example usage for java.util Collection parallelStream.

Prototype

default Stream<E> parallelStream() 

Document

Returns a possibly parallel Stream with this collection as its source.
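
Before the real-world snippets below, here is a minimal, self-contained sketch of the method in action; the word list and the length filter are made up for illustration. parallelStream() returns a stream that may be processed in parallel, but the runtime is free to execute it sequentially.

import java.util.Arrays;
import java.util.List;

public class ParallelStreamExample {
    public static void main(String[] args) {
        // Hypothetical data; any Collection can serve as the stream source.
        List<String> words = Arrays.asList("alpha", "beta", "gamma", "delta");

        // Count the words longer than four characters using a possibly parallel stream.
        long longWords = words.parallelStream()
                .filter(w -> w.length() > 4)
                .count();

        System.out.println(longWords); // prints 3
    }
}

The terminal operation (count() here) produces the same result whether the stream runs in parallel or sequentially; only the execution strategy differs.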

Usage

From source file:com.github.horrorho.inflatabledonkey.cloud.Donkey.java

void processConcurrent(HttpClient httpClient, ForkJoinPool fjp, AssetPool pool, FileAssembler consumer)
        throws IOException {
    logger.trace("<< processConcurrent()");
    try {
        Collection<StorageHostChunkList> containers = pool.authorize(httpClient);
        fjp.submit(
                () -> containers.parallelStream().forEach(u -> processContainer(httpClient, u, pool, consumer)))
                .get();

    } catch (InterruptedException ex) {
        throw new UncheckedInterruptedException(ex);
    } catch (ExecutionException ex) {
        Throwable cause = ex.getCause();
        if (cause instanceof RuntimeException) {
            throw (RuntimeException) cause;
        }
        if (cause instanceof IOException) {
            throw (IOException) cause;
        }
        throw new RuntimeException(cause);
    }
    logger.trace("<< processConcurrent()");
}

From source file:net.ctalkobt.syllogism.Context.java

/************************************************************************
 * Determines if a given syllogism for an equivalence type is valid.
 *
 * @param memeKey
 * @param equivalence
 * @param memeValue
 * @return result if known, otherwise Optional.empty().
 ***********************************************************************/
public Optional<Boolean> interrogate(Term memeKey, Copula equivalence, Term memeValue) {
    Collection<KeyValue<Copula, Term>> memeRelations = (Collection<KeyValue<Copula, Term>>) memeAssociations
            .get(memeKey);
    if (memeRelations == null || memeRelations.isEmpty()) {
        return Optional.empty();
    }

    Optional<KeyValue<Copula, Term>> result = memeRelations.parallelStream()
            .filter((KeyValue<Copula, Term> kv) -> {
                if (kv.getKey().equals(equivalence) && kv.getValue().equals(memeValue)) {
                    return true;
                } else {
                    Optional<Boolean> result1 = interrogate(kv.getValue(), equivalence, memeValue);
                    return result1.isPresent();
                }
            })
            .findFirst();

    if (result.isPresent()) {
        return Optional.of(equivalence.getTruthEquivalency());
    }
    return Optional.empty();
}

From source file:com.fitbur.testify.integration.IntegrationTestVerifier.java

@Override
public void wiring() {
    doPrivileged((PrivilegedAction<Object>) () -> {
        CutDescriptor cutDescriptor = testContext.getCutDescriptor();
        String testClassName = testContext.getTestClassName();
        Collection<ParameterDescriptor> paramDescriptors = testContext.getParamaterDescriptors().values();

        if (cutDescriptor != null) {
            String cutClassName = cutDescriptor.getTypeName();
            paramDescriptors.parallelStream().forEach(p -> {
                Optional instance = p.getInstance();
                if (!instance.isPresent()) {
                    String paramTypeName = p.getTypeName();
                    logger.warn(
                            "Class under test '{}' defined in '{}' has a collaborator "
                                    + "of type '{}' but test class '{}' does not define a field of "
                                    + "type '{}' annotated with @Fake, @Real, or @Inject. The real "
                                    + "instance of the collaborator will be used.",
                            cutClassName, testClassName, paramTypeName, testClassName, paramTypeName);
                }

            });
        }

        return null;
    });
}

From source file:ijfx.ui.filter.DefaultNumberFilter.java

private Long countElementsInRange(Collection<? extends Number> possibleValues) {
    return possibleValues.parallelStream().filter(
            n -> n.doubleValue() >= rangeSlider.getLowValue() && n.doubleValue() <= rangeSlider.getHighValue())
            .count();
}

From source file:de.qaware.chronix.importer.csv.FileImporter.java

/**
 * Reads the given file / folder and calls the bi consumer with the extracted points
 *
 * @param points    map that is filled with the first and last instant of each imported time series
 * @param folder    file or folder to import (csv files, optionally gz compressed)
 * @param databases consumers that receive the imported points together with their attributes
 * @return a pair of the number of imported time series and the number of imported points
 */
public Pair<Integer, Integer> importPoints(Map<Attributes, Pair<Instant, Instant>> points, File folder,
        BiConsumer<List<ImportPoint>, Attributes>... databases) {

    final AtomicInteger pointCounter = new AtomicInteger(0);
    final AtomicInteger tsCounter = new AtomicInteger(0);
    final File metricsFile = new File(METRICS_FILE_PATH);

    LOGGER.info("Writing imported metrics to {}", metricsFile);
    LOGGER.info("Import supports csv files as well as gz compressed csv files.");

    try {
        final FileWriter metricsFileWriter = new FileWriter(metricsFile);

        Collection<File> files = new ArrayList<>();
        if (folder.isFile()) {
            files.add(folder);
        } else {
            files.addAll(FileUtils.listFiles(folder, new String[] { "gz", "csv" }, true));
        }

        AtomicInteger counter = new AtomicInteger(0);

        files.parallelStream().forEach(file -> {
            SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
            NumberFormat nf = DecimalFormat.getInstance(numberLocal);

            InputStream inputStream = null;
            BufferedReader reader = null;
            try {
                inputStream = new FileInputStream(file);

                if (file.getName().endsWith("gz")) {
                    inputStream = new GZIPInputStream(inputStream);
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                //Read the first line
                String headerLine = reader.readLine();

                if (headerLine == null || headerLine.isEmpty()) {
                    boolean deleted = deleteFile(file, inputStream, reader);
                    LOGGER.debug("File is empty {}. File {} removed {}", file.getName(), deleted);
                    return;
                }

                //Extract the attributes from the file name
                //E.g. first_second_third_attribute.csv
                String[] fileNameMetaData = file.getName().split("_");

                String[] metrics = headerLine.split(csvDelimiter);

                Map<Integer, Attributes> attributesPerTimeSeries = new HashMap<>(metrics.length);

                for (int i = 1; i < metrics.length; i++) {
                    String metric = metrics[i];
                    String metricOnlyAscii = Normalizer.normalize(metric, Normalizer.Form.NFD);
                    metricOnlyAscii = metricOnlyAscii.replaceAll("[^\\x00-\\x7F]", "");
                    Attributes attributes = new Attributes(metricOnlyAscii, fileNameMetaData);

                    //Check if meta data is completely set
                    if (isEmpty(attributes)) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes contains empty values {}. File {} deleted {}", attributes,
                                file.getName(), deleted);
                        continue;
                    }

                    if (attributes.getMetric().equals(".*")) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes metric{}. File {} deleted {}", attributes.getMetric(),
                                file.getName(), deleted);
                        continue;
                    }
                    attributesPerTimeSeries.put(i, attributes);
                    tsCounter.incrementAndGet();

                }

                Map<Integer, List<ImportPoint>> dataPoints = new HashMap<>();

                String line;
                while ((line = reader.readLine()) != null) {
                    String[] splits = line.split(csvDelimiter);
                    String date = splits[0];

                    Instant dateObject;
                    if (instantDate) {
                        dateObject = Instant.parse(date);
                    } else if (sdfDate) {
                        dateObject = sdf.parse(date).toInstant();
                    } else {
                        dateObject = Instant.ofEpochMilli(Long.valueOf(date));
                    }

                    for (int column = 1; column < splits.length; column++) {

                        String value = splits[column];
                        double numericValue = nf.parse(value).doubleValue();

                        ImportPoint point = new ImportPoint(dateObject, numericValue);

                        if (!dataPoints.containsKey(column)) {
                            dataPoints.put(column, new ArrayList<>());
                        }
                        dataPoints.get(column).add(point);
                        pointCounter.incrementAndGet();
                    }

                }

                dataPoints.values().forEach(Collections::sort);

                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);

                dataPoints.forEach((key, importPoints) -> {
                    for (BiConsumer<List<ImportPoint>, Attributes> database : databases) {
                        database.accept(importPoints, attributesPerTimeSeries.get(key));
                    }
                    points.put(attributesPerTimeSeries.get(key), Pair.of(importPoints.get(0).getDate(),
                            importPoints.get(importPoints.size() - 1).getDate()));
                    //write the stats to the file
                    Instant start = importPoints.get(0).getDate();
                    Instant end = importPoints.get(importPoints.size() - 1).getDate();

                    try {
                        writeStatsLine(metricsFileWriter, attributesPerTimeSeries.get(key), start, end);
                    } catch (IOException e) {
                        LOGGER.error("Could not write stats line", e);
                    }
                    LOGGER.info("{} of {} time series imported", counter.incrementAndGet(), tsCounter.get());
                });

            } catch (Exception e) {
                LOGGER.info("Exception while reading points.", e);
            } finally {
                //close all streams
                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);
            }

        });
    } catch (Exception e) {
        LOGGER.error("Exception occurred during reading points.");
    }
    return Pair.of(tsCounter.get(), pointCounter.get());
}

From source file:delfos.rs.trustbased.WeightedGraph.java

void validateParameters(Collection<Node> nodes) throws IllegalArgumentException {
    boolean allMatch = nodes.parallelStream().allMatch(node -> this.allNodes().contains(node));
    if (!allMatch) {
        throw new IllegalArgumentException("Specified nodes are not present in the weighted graph");
    }
}

From source file:delfos.rs.trustbased.WeightedGraph.java

private Map<Node, Map<Node, Number>> getSubGraphEdges(Collection<Node> nodes) {
    Map<Node, Map<Node, Number>> edgesOfSubGraph = nodes.parallelStream()
            .collect(Collectors.toMap(node1 -> node1, node1 -> {
                Map<Node, Number> edgesFromThisVertex = nodes.parallelStream()
                        .filter(node2 -> this.connectionWeight(node1, node2).isPresent())
                        .collect(Collectors.toMap(node2 -> node2, node2 -> {
                            return this.connectionWeight(node1, node2).get();
                        }));

                return edgesFromThisVertex;
            }));
    return edgesOfSubGraph;
}

From source file:de.knowwe.jspwiki.KnowWEPlugin.java

/**
 * Loads ALL articles stored in the pageDir (which is specified in jspwiki.properties).
 *
 * @param engine the wiki engine to get the articles from
 * @created 07.06.2010
 */
private void initializeAllArticles(WikiEngine engine) {
    ArticleManager articleManager = getDefaultArticleManager();
    articleManager.open();
    try {
        Collection<?> wikiPages = getAllPages(engine);
        long start = System.currentTimeMillis();
        wikiPages.parallelStream().forEach(o -> {
            WikiPage wp = (WikiPage) o;
            String content = engine.getPureText(wp.getName(), wp.getVersion());
            Article article = Article.createArticle(content, wp.getName(), Environment.DEFAULT_WEB);
            ((DefaultArticleManager) articleManager).queueArticle(article);
        });
        Log.info("Sectionized all articles in " + (System.currentTimeMillis() - start) + "ms");
    } catch (ProviderException e1) {
        Log.warning("Unable to load all articles, maybe some articles won't be initialized!", e1);
    } finally {
        articleManager.commit();
    }

    try {
        // we wait to get an accurate reading on the server startup time
        articleManager.getCompilerManager().awaitTermination();
    } catch (InterruptedException e) {
        Log.warning("Caught InterrupedException while waiting til compilation is finished.", e);
    }
    EventManager.getInstance().fireEvent(new InitializedArticlesEvent(articleManager));
}

From source file:delfos.rs.trustbased.WeightedGraph.java

public Set<PathBetweenNodes<Node>> allEdges() {

    Set<PathBetweenNodes<Node>> allEdges = this.allNodes().parallelStream().flatMap(node1 -> {

        Collection<PathBetweenNodes<Node>> edgesFromNode = this.getEdgesFromNode(node1);

        return edgesFromNode.parallelStream();
    }).filter(path -> path.isEdge()).filter(path -> !path.isSelf()).collect(Collectors.toSet());

    return allEdges;
}

From source file:com.hortonworks.streamline.streams.service.TopologyCatalogResource.java

private List<CatalogResourceUtil.TopologyDetailedResponse> enrichTopologies(Collection<Topology> topologies,
        String asUser, String sortType, Boolean ascending, Integer latencyTopN) {
    LOG.debug("[START] enrichTopologies");
    Stopwatch stopwatch = Stopwatch.createStarted();

    try {
        List<CatalogResourceUtil.TopologyDetailedResponse> responses = ParallelStreamUtil
                .execute(() -> topologies
                        .parallelStream().map(t -> CatalogResourceUtil.enrichTopology(t, asUser, latencyTopN,
                                environmentService, actionsService, metricsService, catalogService))
                        .sorted((c1, c2) -> {
                            int compared;

                            switch (TopologySortType.valueOf(sortType.toUpperCase())) {
                            case NAME:
                                compared = c1.getTopology().getName().compareTo(c2.getTopology().getName());
                                break;
                            case STATUS:
                                compared = c1.getRunning().compareTo(c2.getRunning());
                                break;
                            case LAST_UPDATED:
                                compared = c1.getTopology().getVersionTimestamp()
                                        .compareTo(c2.getTopology().getVersionTimestamp());
                                break;
                            default:
                                throw new IllegalStateException("Not supported SortType: " + sortType);
                            }

                            return ascending ? compared : (compared * -1);
                        }).collect(toList()), forkJoinPool);

        LOG.debug("[END] enrichTopologies - elapsed: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));

        return responses;
    } finally {
        stopwatch.stop();
    }
}