Example usage for java.util Set parallelStream

List of usage examples for java.util Set parallelStream

Introduction

On this page you can find example usages of java.util.Set#parallelStream.

Prototype

default Stream<E> parallelStream() 

Source Link

Document

Returns a possibly parallel Stream with this collection as its source.

Usage

From source file:Main.java

public static void main(String[] args) {
    // Build a small sample set; HashSet has no defined iteration order.
    Set<String> names = new HashSet<>();
    names.add("XML");
    names.add("Java");

    // Print each element via a sequential stream.
    Stream<String> sequentialStream = names.stream();
    sequentialStream.forEach(System.out::println);

    // Print again via a possibly-parallel stream; element order is not guaranteed.
    Stream<String> parallelStream = names.parallelStream();
    parallelStream.forEach(System.out::println);
}

From source file:com.firewallid.util.FIUtils.java

/**
 * Pairs every element of the set with an initial count of 1.
 *
 * @param set input strings; each becomes the first component of a tuple
 * @return a list of (element, 1) tuples, one per set element
 */
public static List<Tuple2<String, Integer>> setToTupleList(Set<String> set) {
    // Sequential stream: the per-element work is a trivial allocation, so the
    // fork/join overhead of parallelStream() would outweigh any possible gain.
    return set.stream()
            .map(s -> new Tuple2<>(s, 1))
            .collect(Collectors.toList());
}

From source file:com.baasbox.service.user.FriendShipService.java

/**
 * Returns the profiles of the users that {@code username} is following.
 *
 * @param username the follower whose "following" list is requested
 * @param criteria query paging/filtering parameters
 * @return profile documents of followed users; empty list for an empty username
 * @throws SqlInjectionException propagated from the profile lookup
 */
public static List<ODocument> getFollowing(String username, QueryParams criteria) throws SqlInjectionException {
    // Guard first: the original performed the user lookup and role scan before
    // this check, doing wasted work (and a lookup on "") for an empty username.
    if (username.isEmpty()) {
        return Collections.emptyList();
    }
    OUser me = UserService.getOUserByUsername(username);
    Set<ORole> roles = me.getRoles();
    // "friends of" roles encode the followed username as a suffix of the role
    // name; strip the FRIENDS_OF_ROLE prefix to recover the bare username.
    List<String> usernames = roles.parallelStream()
            .map(ORole::getName)
            .filter(name -> name.startsWith(RoleDao.FRIENDS_OF_ROLE))
            .map(name -> StringUtils.difference(RoleDao.FRIENDS_OF_ROLE, name))
            .collect(Collectors.toList());
    return UserService.getUserProfileByUsernames(usernames, criteria);
}

From source file:com.civprod.writerstoolbox.testarea.UnsupervisedDiscourseSegmentation.java

/**
 * Cosine similarity between two word-count vectors.
 *
 * @param countA first bag-of-words counter
 * @param countB second bag-of-words counter
 * @return dot(A, B) / (|A| * |B|); NaN when either vector has zero norm —
 *         NOTE(review): confirm callers handle the no-overlap / empty case.
 */
public static double cosineSimilarityStemmedAndFiltered(Counter<String> countA, Counter<String> countB) {
    // Only words present in BOTH counters contribute to the dot product.
    Set<String> sharedWords = countA.keySet().parallelStream()
            .filter(countB.keySet()::contains)
            .collect(Collectors.toSet());
    double dotProduct = sharedWords.parallelStream()
            .mapToDouble(word -> countA.getCount(word) * countB.getCount(word))
            .sum();
    // Euclidean norms; Math.sqrt is clearer and cheaper than Math.pow(x, .5).
    double normA = Math.sqrt(countA.keySet().parallelStream()
            .mapToDouble(countA::getCount)
            .map(v -> v * v)
            .sum());
    double normB = Math.sqrt(countB.keySet().parallelStream()
            .mapToDouble(countB::getCount)
            .map(v -> v * v)
            .sum());
    return dotProduct / (normA * normB);
}

From source file:org.jhk.pulsing.web.service.prod.helper.PulseServiceUtil.java

/**
 * Aggregates serialized trending-pulse subscription counts and returns them
 * sorted by popularity.
 *
 * @param tps       each element is a JSON map of "&lt;id&gt;0x07&lt;value&gt;0x13&lt;timestamp&gt;" -> count
 * @param objMapper Jackson mapper used to deserialize each entry
 * @return insertion-ordered map of id -> value, most subscribed first; empty map when
 *         nothing could be aggregated
 */
public static Map<Long, String> processTrendingPulseSubscribe(Set<String> tps, ObjectMapper objMapper) {

    // Total count per "<id>0x07<value>" key, summed across timestamps and entries.
    final Map<String, Integer> count = new HashMap<>();

    // Sequential on purpose: the original used parallelStream() while mutating the
    // shared, non-thread-safe HashMap `count` from the lambda — a data race.
    tps.stream().forEach(tpsIdValueCounts -> {

        try {
            _LOGGER.debug(
                    "PulseServiceUtil.processTrendingPulseSubscribe: trying to convert " + tpsIdValueCounts);

            Map<String, Integer> converted = objMapper.readValue(tpsIdValueCounts,
                    _TRENDING_PULSE_SUBSCRIPTION_TYPE_REF);

            _LOGGER.debug("PulseServiceUtil.processTrendingPulseSubscribe: sucessfully converted "
                    + converted.size());

            //Structure is <id>0x07<value>0x13<timestamp> -> count; i.e. {"10020x07Mocked 10020x13<timestamp>" -> 1}
            //Strip the timestamp suffix and sum the counts for identical <id>0x07<value>
            //keys. Map.merge replaces the original mutable-identity reduce() misuse.
            converted.forEach((key, value) -> {
                String idValue = key.split(CommonConstants.TIME_INTERVAL_PERSIST_TIMESTAMP_DELIM)[0];
                count.merge(idValue, value, Integer::sum);
            });

        } catch (Exception cException) {
            // Best-effort per entry: a malformed element is logged and skipped so the
            // remaining entries still aggregate (original printed the stack trace).
            _LOGGER.debug("PulseServiceUtil.processTrendingPulseSubscribe: failed to convert "
                    + tpsIdValueCounts, cException);
        }
    });

    if (count.isEmpty()) {
        return Collections.emptyMap();
    }

    // Sort by count, highest first; split "<id>0x07<value>" into id -> value while
    // preserving the sorted order via LinkedHashMap.
    return count.entrySet().stream()
            .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
            .collect(Collectors.toMap(
                    entry -> Long.parseLong(
                            entry.getKey().split(CommonConstants.TIME_INTERVAL_ID_VALUE_DELIM)[0]),
                    entry -> entry.getKey().split(CommonConstants.TIME_INTERVAL_ID_VALUE_DELIM)[1],
                    (x, y) -> {
                        // Keys are unique after aggregation; a collision means a logic error.
                        throw new AssertionError();
                    }, LinkedHashMap::new));
}

From source file:internal.static_util.scorer.TermRelatednessScorer.java

/**
 * Given a word and a set of its related terms, returns a list of terms ranked from most
 * relevant to least.
 *
 * @param original          the word to find ranked related terms for
 * @param relatedTerms      the set of terms related to the original
 * @param minRelevanceRatio the minimum score a term must reach to be returned
 * @return scored terms in descending score order; empty list when the inputs are
 *         null/empty or no term clears the threshold
 */
public static List<ScoredTerm> getRankedTermsWithScores(String original, Set<String> relatedTerms,
        double minRelevanceRatio) {
    // Handle null/empty inputs without raising; emptyList() avoids the raw EMPTY_LIST.
    if (original == null || relatedTerms == null || relatedTerms.isEmpty()) {
        return Collections.emptyList();
    }

    // Score every related term in parallel. map/collect replaces the original's
    // side-effecting forEach into a synchronized list and keeps the pipeline pure.
    List<ScoredTerm> scoredTerms = relatedTerms.parallelStream()
            .map(term -> new ScoredTerm(term, score(original, term)))
            .collect(Collectors.toList());

    // Drop anything below the minimum relevance threshold.
    List<ScoredTerm> relevantTerms = getRelevantTerms(scoredTerms, minRelevanceRatio);

    // ScoredTerm's natural order is ascending; reverse for most-relevant-first.
    relevantTerms.sort(Comparator.reverseOrder());

    // An empty list (rather than null) is returned when nothing was relevant.
    return relevantTerms;
}

From source file:HSqlManager.java

/**
 * For the strain containing the phage named "xkcd", computes the primers common to every
 * phage within each cluster and batch-inserts them into Primerdb.Primers.
 *
 * @param bps        primer length (base pairs), used to select the per-phage CSV files
 * @param connection open database connection; auto-commit is disabled for batching
 * @throws SQLException on database failures outside the per-cluster recovery block
 * @throws IOException  if a per-phage CSV file cannot be read
 */
@SuppressWarnings("Duplicates")
@Deprecated
private static void mycoCommonInitialize(int bps, Connection connection) throws SQLException, IOException {
    long time = System.currentTimeMillis();
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    // Disable transaction logging while bulk-loading; re-enabled at the end.
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    String strain = "";
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
        // The strain of interest is whichever one contains the phage named "xkcd".
        if (r[2].equals("xkcd")) {
            strain = r[0];
        }
    }
    call.close();
    String x = strain;
    Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet());
    // Group phage names by cluster. The original populated a shared HashMap from a
    // parallelStream().forEach — a data race on a non-thread-safe map; a sequential
    // toMap collection is safe and produces the same mapping.
    Map<String, List<String>> clusters = clust.stream()
            .collect(Collectors.toMap(cluster -> cluster,
                    cluster -> phages.stream()
                            .filter(a -> a[0].equals(x) && a[1].equals(cluster))
                            .map(a -> a[2])
                            .collect(Collectors.toList())));
    for (String z : clusters.keySet()) {
        try {
            List<String> clustphages = clusters.get(z);
            // Seed with the first phage's primer set, then intersect with every other
            // phage in the cluster so only primers common to all of them remain.
            Set<String> primers = Collections.synchronizedSet(
                    CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
            clustphages.remove(0);
            for (String phage : clustphages) {
                primers.retainAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
            }
            // Insert the surviving primers in batches of 1000 to bound memory use and
            // commit cost; a short tail batch is flushed after the loop.
            int i = 0;
            for (String a : primers) {
                try {
                    st.setInt(1, bps);
                    st.setString(2, a);
                    st.setString(3, x);
                    st.setString(4, z);
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + z);
                }
                i++;
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
            if (i > 0) {
                st.executeBatch();
                db.commit();
            }
        } catch (SQLException e) {
            // Best-effort per cluster: log and continue with the next cluster.
            e.printStackTrace();
            System.out.println("Error occurred at " + x + " " + z);
        }
        System.out.println(z);
    }
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
    // Elapsed time in minutes.
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
}

From source file:org.wallerlab.yoink.regionizer.partitioner.DensityPartitioner.java

/**
 * Adds to the adaptive-search list every non-QM-core molecule whose density at its
 * center of mass (relative to the QM core) meets the threshold.
 *
 * @param densityThreshold          minimum density for a molecule to qualify
 * @param moleculesInAdaptiveSearch output list; qualifying molecules are appended
 * @param moleculesInNonQmCore      candidate molecules to evaluate
 * @param moleculesInQmCore         QM-core molecules used in the density calculation
 */
private void checkEveryNonQMCoreMolecule(double densityThreshold, List<Molecule> moleculesInAdaptiveSearch,
        Set<Molecule> moleculesInNonQmCore, Set<Molecule> moleculesInQmCore) {

    // Evaluate densities in parallel (the calculation is the expensive step) but
    // collect into a local list first: the original added to the caller-supplied
    // list from a parallel forEach, a data race for non-thread-safe lists.
    List<Molecule> qualifying = moleculesInNonQmCore.parallelStream()
            .filter(molecule -> densityCalculator.calculate(molecule.getCenterOfMass(),
                    moleculesInQmCore) >= densityThreshold)
            .collect(Collectors.toList());
    moleculesInAdaptiveSearch.addAll(qualifying);
}

From source file:ai.grakn.test.engine.tasks.storage.TaskStateInMemoryStoreTest.java

@Test
public void testGetAllTasks() {
    TaskId id = stateStorage.newState(task());
    // No filters: every stored task should be returned, including the one just created.
    Set<TaskState> res = stateStorage.getTasks(null, null, null, null, 0, 0);

    // Exactly one result must carry the new id; count() avoids materializing a
    // throwaway list just to read its size, as the original did.
    assertTrue(res.parallelStream().map(TaskState::getId).filter(x -> x.equals(id)).count() == 1);
}

From source file:ai.grakn.test.engine.tasks.storage.TaskStateInMemoryStoreTest.java

@Test
public void testGetTasksByStatus() {
    TaskId id = stateStorage.newState(task());
    // Filtering by CREATED status must still return the freshly created task.
    Set<TaskState> res = stateStorage.getTasks(CREATED, null, null, null, 0, 0);

    // Exactly one result must carry the new id; count() avoids materializing a
    // throwaway list just to read its size, as the original did.
    assertTrue(res.parallelStream().map(TaskState::getId).filter(x -> x.equals(id)).count() == 1);
}