Example usage for java.util Set forEach

Introduction

On this page you can find example usages of java.util.Set.forEach, collected from real-world source files.

Prototype

default void forEach(Consumer<? super T> action) 

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception. Set inherits this default method from java.lang.Iterable, which is why the description refers to the Iterable rather than to the Set.
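
A minimal, self-contained sketch of the call in isolation, before the real-world examples below (the element values are invented for illustration):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetForEachDemo {
    public static void main(String[] args) {
        // iteration order over a HashSet is unspecified
        Set<String> courseCodes = new HashSet<>(Arrays.asList("1DL301", "1DL300"));

        // the action runs once per element; an exception thrown by the
        // action terminates the iteration early
        courseCodes.forEach(code -> System.out.println("Course code: " + code));
    }
}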

Usage

From source file:se.uu.it.cs.recsys.service.preference.SameCourseFinder.java

/**
 * For example, courses 1DL301 and 1DL300 are both Database Design I, but
 * are planned in different periods.
 *
 * @param courseCodeSet the course collection from which at most one
 *        course may be selected
 * @param firstTaughtYear the first year the course is taught within the
 *        plan years
 * @return non-null collection of course ids
 */
public Set<Integer> getIdCollectionToAvoidMoreThanOneSel(Set<String> courseCodeSet, Integer firstTaughtYear) {

    Set<Integer> totalIdSet = new HashSet<>();

    courseCodeSet.forEach(code -> {
        Set<Integer> partSet = this.courseRepository.findByCode(code).stream()
                .filter(entity -> entity.getTaughtYear() >= firstTaughtYear)
                .map(entity -> entity.getAutoGenId()).collect(Collectors.toSet());

        if (!partSet.isEmpty()) {
            totalIdSet.addAll(partSet);
        }
    });

    return totalIdSet;
}

From source file:com.hortonworks.streamline.streams.security.service.SecurityCatalogResource.java

private Response addOrUpdateRoleUsers(Long roleId, Set<Long> userIds) {
    List<UserRole> userRoles = new ArrayList<>();
    Role roleToQuery = catalogService.getRole(roleId);
    Set<Long> currentUserIds = catalogService.listUsers(roleToQuery).stream().map(User::getId)
            .collect(Collectors.toSet());
    Set<Long> userIdsToAdd = Sets.difference(userIds, currentUserIds);
    Set<Long> userIdsToRemove = Sets.difference(currentUserIds, userIds);
    Sets.intersection(currentUserIds, userIds).forEach(userId -> {
        userRoles.add(new UserRole(userId, roleId));
    });
    userIdsToRemove.forEach(userId -> catalogService.removeUserRole(userId, roleId));
    userIdsToAdd.forEach(userId -> {
        userRoles.add(catalogService.addUserRole(userId, roleId));
    });
    return WSUtils.respondEntities(userRoles, OK);
}

From source file:se.uu.it.cs.recsys.service.resource.impl.RecommendationGenerator.java

Set<Integer> filterOnPlanYear(Set<Integer> courseId) {
    if (courseId.isEmpty()) {
        return Collections.emptySet();
    }

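    // Note: the sorted(...) step below has no lasting effect, because
    // Collectors.toSet() does not preserve encounter order.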
    Set<se.uu.it.cs.recsys.persistence.entity.Course> filteredCourse = this.courseRepository
            .findByAutoGenIds(courseId).stream()
            .filter(ConstraintSolverPreferenceBuilder
                    .inPlanYearPredicate(this.constraintPref.getIndexedScheduleInfo()))
            .sorted((se.uu.it.cs.recsys.persistence.entity.Course one,
                    se.uu.it.cs.recsys.persistence.entity.Course two) -> Integer.compare(one.getAutoGenId(),
                            two.getAutoGenId()))
            .collect(Collectors.toSet());

    filteredCourse.forEach(course -> LOGGER.debug("Filtered course: {}", course));

    return filteredCourse.stream().map(course -> course.getAutoGenId()).collect(Collectors.toSet());
}

From source file:org.jamocha.rating.fraj.RatingProvider.java

private double rateBetaWithExistentials(final StatisticsProvider statisticsProvider,
        final PathNodeFilterSet toRate,
        final Map<Set<PathFilterList>, List<Pair<List<Set<PathFilterList>>, List<PathFilter>>>> componentToJoinOrder,
        final Map<Path, Set<PathFilterList>> pathToPreNetworkComponents) {
    final Set<Path> positiveExistentialPaths = toRate.getPositiveExistentialPaths();
    final Set<Path> negativeExistentialPaths = toRate.getNegativeExistentialPaths();
    final Set<Set<PathFilterList>> positiveExistentialComponents = new HashSet<>(),
            negativeExistentialComponents = new HashSet<>(), regularComponents = new HashSet<>();
    final Set<Set<PathFilterList>> preNetworkComponents = new HashSet<>(pathToPreNetworkComponents.values());
    for (final Set<PathFilterList> preNetworkComponent : preNetworkComponents) {
        final PathCollector<HashSet<Path>> pathCollector = PathCollector.newHashSet();
        preNetworkComponent.forEach(pathCollector::collectAllInLists);
        final HashSet<Path> paths = pathCollector.getPaths();
        if (!Collections.disjoint(paths, positiveExistentialPaths)) {
            positiveExistentialComponents.add(preNetworkComponent);
        } else if (!Collections.disjoint(paths, negativeExistentialPaths)) {
            negativeExistentialComponents.add(preNetworkComponent);
        } else {
            regularComponents.add(preNetworkComponent);
        }
    }
    final Map<Set<PathFilterList>, Data> preNetworkComponentToData = preNetworkComponents.stream()
            .collect(toMap(Function.identity(), statisticsProvider::getData));
    final Map<Set<PathFilterList>, PathFilter> existentialComponentToFilter = componentToJoinOrder.values()
            .iterator().next().stream().filter(p -> !regularComponents.contains(p.getLeft().iterator().next()))
            .collect(toMap(p -> p.getLeft().iterator().next(), p -> p.getRight().iterator().next()));

    final double tupleSize = regularComponents.stream()
            .mapToDouble(c -> preNetworkComponentToData.get(c).getTupleSize()).sum();
    final double tuplesPerPage = statisticsProvider.getPageSize() / tupleSize;
    final double unfilteredRowCount = calcBetaUnfilteredSize(statisticsProvider, componentToJoinOrder,
            pathToPreNetworkComponents, regularComponents);
    final double rowCount = unfilteredRowCount * DoubleStream
            .concat(positiveExistentialComponents.stream()
                    .mapToDouble(component -> 1 - Math.pow(
                            (1 - statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                            preNetworkComponentToData.get(component).getRowCount())),
                    negativeExistentialComponents.stream().mapToDouble(component -> Math.pow(
                            (1 - statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                            preNetworkComponentToData.get(component).getRowCount())))
            .reduce(1.0, (a, b) -> a * b);
    final double xOverUX = rowCount / unfilteredRowCount;
    // the join size is needed twice per component, so pre-calculate it
    final Map<Set<PathFilterList>, Double> regularComponentToJoinSize = regularComponents.stream()
            .collect(toMap(Function.identity(),
                    component -> joinSize(statisticsProvider, component, componentToJoinOrder.get(component),
                            positiveExistentialComponents, negativeExistentialComponents,
                            pathToPreNetworkComponents, xOverUX)));
    // dnrating (30a)
    final double finsert = xOverUX
            * regularComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFinsert()
                            * regularComponentToJoinSize.get(component))
                    .sum()
            + DoubleStream.concat(negativeExistentialComponents.stream().mapToDouble(component -> {
                final double jsf = statisticsProvider.getJSF(regularComponents, component,
                        existentialComponentToFilter.get(component), pathToPreNetworkComponents);
                return preNetworkComponentToData.get(component).getFdelete() * rowCount * (jsf / (1 - jsf));
            }), positiveExistentialComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFinsert() * rowCount
                            * statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)))
                    .sum();
    // dnrating (30b)
    final double fdelete = DoubleStream.concat(
            regularComponents.stream().mapToDouble(c -> preNetworkComponentToData.get(c).getFdelete()),
            DoubleStream.concat(negativeExistentialComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFdelete() * rowCount
                            * statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                    positiveExistentialComponents.stream().mapToDouble(component -> {
                        final double jsf = statisticsProvider.getJSF(regularComponents, component,
                                existentialComponentToFilter.get(component), pathToPreNetworkComponents);
                        return preNetworkComponentToData.get(component).getFinsert() * rowCount
                                * (jsf / (1 - jsf));
                    })))
            .sum();
    // publish information to statistics provider
    {
        final Set<PathFilterList> filters = new HashSet<>();
        componentToJoinOrder.keySet().forEach(filters::addAll);
        filters.add(toRate);
        statisticsProvider.setData(filters, new Data(finsert, fdelete, rowCount, tupleSize));
    }
    final double mUxBeta = m(unfilteredRowCount, tuplesPerPage);
    // dnrating (40)
    final double runtimeCost = DoubleStream.concat(regularComponents.stream().mapToDouble(component -> {
        final Data data = preNetworkComponentToData.get(component);
        return data.getFinsert()
                * costPosInsVarII(statisticsProvider, component, componentToJoinOrder.get(component),
                        regularComponents, pathToPreNetworkComponents)
                + data.getFdelete() * (mUxBeta + cardenas(mUxBeta, regularComponentToJoinSize.get(component)));
    }), Stream.concat(positiveExistentialComponents.stream(), negativeExistentialComponents.stream())
            .mapToDouble(component -> {
                final Data data = preNetworkComponentToData.get(component);
                return data.getFinsert() * 2
                        * jc(statisticsProvider, statisticsProvider.getJSF(regularComponents, component,
                                existentialComponentToFilter.get(component), pathToPreNetworkComponents), data,
                                1)
                        + data.getFdelete() * costNegDelVarII(statisticsProvider, component,
                                componentToJoinOrder.get(component), pathToPreNetworkComponents);
            })).sum();
    final double memoryCost = unfilteredRowCount * (tupleSize
            + 0.15 * (positiveExistentialComponents.size() + negativeExistentialComponents.size()));
    return cpuAndMemCostCombiner.applyAsDouble(runtimeCost, memoryCost);
}

From source file:HSqlManager.java

private static void checkPhage(Connection connection) throws SQLException, IOException {
    List<String[]> all = INSTANCE.readFileAllStrains(INSTANCE.path);
    List<String> clusters = all.stream().map(x -> x[0]).collect(Collectors.toList());
    Set<String> phages = all.stream().map(x -> x[1]).collect(Collectors.toSet());
    List<String> strains = all.stream().map(x -> x[2]).collect(Collectors.toList());
    List<String> phageslist = all.stream().map(x -> x[1]).collect(Collectors.toList());
    Set<String> dbphages = new HashSet<>();
    Statement st = connection.createStatement();
    PreparedStatement insertPhages = connection
            .prepareStatement("INSERT INTO Primerdb.Phages(Name, Cluster, Strain)" + " values(?,?,?);");
    String sql = "SELECT * FROM Primerdb.Phages;";
    ResultSet rs = st.executeQuery(sql);
    while (rs.next()) {
        dbphages.add(rs.getString("Name"));
    }
    phages.removeAll(dbphages);
    List<String[]> phageinfo = new ArrayList<>();
    if (phages.size() > 0) {
        System.out.println("Phages Added:");
        phages.forEach(x -> {
            String[] ar = new String[3];
            System.out.println(x);
            String cluster = clusters.get(phageslist.indexOf(x));
            String strain = strains.get(phageslist.indexOf(x));
            try {
                insertPhages.setString(1, x);
                insertPhages.setString(2, cluster);
                insertPhages.setString(3, strain);
                insertPhages.addBatch();
            } catch (SQLException e) {
                e.printStackTrace();
            }
            ar[0] = x;
            ar[1] = cluster;
            ar[2] = strain;
            phageinfo.add(ar);
        });
        // execute the batch once, after all new phages have been queued
        try {
            insertPhages.executeBatch();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        newPhages = phageinfo;
    } else {
        System.out.println("No Phages added");
    }
    st.close();
    insertPhages.close();
}

From source file:org.onosproject.store.primitives.impl.EventuallyConsistentMapImpl.java

private void handleUpdateRequests(UpdateRequest<K> request) {
    final Set<K> keys = request.keys();
    final NodeId sender = request.sender();
    final List<NodeId> peers = ImmutableList.of(sender);

    keys.forEach(key -> queueUpdate(new UpdateEntry<>(key, items.get(key)), peers));
}

From source file:pt.ist.fenix.ui.struts.action.koha.ExportUserInfoForKoha.java

public ActionForward getTeachersAndResearchers(final ActionMapping mapping, final ActionForm actionForm,
        final HttpServletRequest request, final HttpServletResponse response) throws Exception {
    final Spreadsheet spreadsheet = new Spreadsheet("TeachersAndResearchers");
    spreadsheet.setHeader("IST-ID").setHeader("*departamento").setHeader("nome").setHeader("email")
            .setHeader("telefone").setHeader("cgdCode");

    Set<Person> teachersAndResearchers = new HashSet<>();
    for (Teacher teacher : Bennu.getInstance().getTeachersSet()) {
        if (teacher.isActiveContractedTeacher()) {
            teachersAndResearchers.add(teacher.getPerson());
        }
    }
    for (Researcher researcher : Bennu.getInstance().getResearchersSet()) {
        if (researcher.isActiveContractedResearcher()) {
            teachersAndResearchers.add(researcher.getPerson());
        }
    }
    teachersAndResearchers.forEach(p -> addEmployeeInformation(spreadsheet, p));

    return sendXls(response, spreadsheet);
}

From source file:org.codice.ddf.graphql.transform.GraphQLTransformCommons.java

private GraphQLFieldDefinition getErrorCodesQueryProvider(List<FieldProvider> fieldProviders) {
    Set<String> errorCodes = new TreeSet<>();

    for (FieldProvider fieldProvider : fieldProviders) {
        List<FunctionField> mutations = fieldProvider.getMutationFunctions();
        List<FunctionField> queryFields = fieldProvider.getDiscoveryFunctions();

        for (FunctionField mutation : mutations) {
            errorCodes.addAll(mutation.getErrorCodes());
        }

        for (FunctionField field : queryFields) {
            errorCodes.addAll(field.getErrorCodes());
        }
    }

    if (errorCodes.isEmpty()) {
        return null;
    }

    GraphQLEnumType.Builder enumTypeBuilder = GraphQLEnumType.newEnum().name("ErrorCode")
            .description("All possible error codes.");
    errorCodes.forEach(enumTypeBuilder::value);
    GraphQLEnumType errorCodeEnumType = enumTypeBuilder.build();

    return GraphQLFieldDefinition.newFieldDefinition().name("errorCodes")
            .description("Returns all the possible error codes from the graphQL schema.")
            .type(GraphQLList.list(errorCodeEnumType)).dataFetcher(dataFetchingEnvironment -> errorCodes)
            .build();
}

From source file:de.acosix.alfresco.site.hierarchy.repo.service.SiteHierarchyServiceImpl.java

protected void withAspectInheritedPropertyRemovalGuard(final NodeRef node, final QName baseAspect,
        final Supplier<?> fn) {
    // Workaround for a bug: when an aspect that inherits from another aspect is removed
    // while the base aspect remains, all properties of the base aspect are deleted as well,
    // so back up those properties and restore them after the operation.
    final AspectDefinition aspect = this.dictionaryService.getAspect(baseAspect);
    final Set<QName> propertiesToPreserve = aspect.getProperties().keySet();
    final Map<QName, Serializable> properties = this.nodeService.getProperties(node);
    final Map<QName, Serializable> propertiesBackup = new HashMap<>();
    propertiesToPreserve.forEach(propertyQName -> {
        if (properties.containsKey(propertyQName)) {
            propertiesBackup.put(propertyQName, properties.get(propertyQName));
        }
    });

    fn.get();

    if (!propertiesBackup.isEmpty()) {
        this.nodeService.addProperties(node, propertiesBackup);
    }
}

From source file:ai.susi.mind.SusiSkill.java

/**
 * If no keys are given, compute them from the given phrases.
 * @param phrases the phrases to derive the keys from
 * @return a JSON array of key tokens
 */
private static JSONArray computeKeysFromPhrases(List<SusiPhrase> phrases) {
    Set<String> t = new LinkedHashSet<>();

    // create a list of token sets from the phrases
    List<Set<String>> ptl = new ArrayList<>();
    final AtomicBoolean needsCatchall = new AtomicBoolean(false);
    phrases.forEach(phrase -> {
        Set<String> s = new HashSet<>();
        for (String token : SPACE_PATTERN.split(phrase.getPattern().toString())) {
            String m = SusiPhrase.extractMeat(token.toLowerCase());
            if (m.length() > 1)
                s.add(m);
        }
        // if there is no meat inside, it will not be possible to access the skill without the catchall skill, so remember that
        if (s.size() == 0)
            needsCatchall.set(true);

        ptl.add(s);
    });

    // this is a kind of emergency case where we need a catchall skill because otherwise we cannot access one of the phrases
    JSONArray a = new JSONArray();
    if (needsCatchall.get())
        return a.put(CATCHALL_KEY);

    // collect all tokens
    ptl.forEach(set -> set.forEach(token -> t.add(token)));

    // if no tokens are available, return the catchall key
    if (t.size() == 0)
        return a.put(CATCHALL_KEY);

    // make a copy to make it possible to use the original key set again
    Set<String> tc = new LinkedHashSet<>();
    t.forEach(c -> tc.add(c));

    // remove all tokens that do not appear in all phrases
    ptl.forEach(set -> {
        Iterator<String> i = t.iterator();
        while (i.hasNext())
            if (!set.contains(i.next()))
                i.remove();
    });

    // if no token is left, use the original tc set and add all keys
    if (t.size() == 0) {
        tc.forEach(c -> a.put(c));
        return a;
    }

    // use only the first token, because that appears in all the phrases
    return new JSONArray().put(t.iterator().next());
}