Example usage for java.util Set retainAll

List of usage examples for java.util Set retainAll

Introduction

On this page you can find example usage for java.util Set retainAll.

Prototype

boolean retainAll(Collection<?> c);

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).
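
Before the project examples below, here is a minimal, self-contained sketch of the call; the class name and the example collections are invented for illustration. retainAll keeps only the elements that also appear in the argument collection, so the set becomes the intersection of the two, and the method returns true if the set was modified.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RetainAllExample {
    public static void main(String[] args) {
        // A mutable set of strings.
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue", "yellow"));

        // The argument may be any Collection, not just a Set.
        List<String> wanted = Arrays.asList("blue", "yellow", "purple");

        // Keep only the elements also contained in 'wanted';
        // returns true because the set was modified.
        boolean changed = colors.retainAll(wanted);

        System.out.println(changed); // true
        System.out.println(colors);  // [blue, yellow] (iteration order of a HashSet is unspecified)
    }
}

A pattern that recurs in the examples below is to copy one collection into a fresh HashSet or TreeSet first and call retainAll on the copy, so that the original collections are left untouched.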

Usage

From source file:de.joinout.criztovyl.tools.directory.DirectoryChanges.java

/**
 * Locates all changed files; new and deleted files are not included.<br>
 * First, a map with hash strings as keys and {@link Path}s as values is
 * created/received for both the current and the previous
 * {@link FileList} via {@link FileList#getMappedHashedModifications()}.
 * Then all keys of the previous map are removed from the current map and
 * the remaining values are returned.<br>
 * Only files whose content changed are included.
 *
 * @return a {@link Set} of {@link Path}s
 * @param forceRecalculate whether the changed files should be recalculated
 * @see FileList#getMappedHashedModifications()
 */
public Set<Path> getChangedFiles(boolean forceRecalculate) {

    if (forceRecalculate || changed == null) { //(Re-)calculate if requested or if there is no previous calculation

        // Get all new and deleted files; they are not counted as modified,
        // so add them to the set of ignored files
        final Set<Path> ignore = new HashSet<>();
        ignore.addAll(getDeletedFiles());
        ignore.addAll(getNewFiles());

        if (logger.isDebugEnabled())
            logger.debug("Files ignored: {}", new TreeSet<>(ignore));

        // Create a map of modified files, seeded with the modifications map of the current directory
        final HashMap<String, Path> mod = new HashMap<>(current.getMappedHashedModifications(ignore));

        //Receive modifications from previous directory
        Map<String, Path> mod_p = previous.getMappedHashedModifications(ignore);

        //Intersect map keys
        Set<String> intersection = new HashSet<>(mod.keySet());
        intersection.retainAll(mod_p.keySet());

        if (logger.isDebugEnabled()) {

            if (!(mod_p.size() > 500))
                logger.debug("Modifications map of previous list: {}", new TreeMap<>(mod_p));
            else
                logger.debug("Previous modification map is bigger than 500 elements, will not print out.");

            if (!(mod.size() > 500))
                logger.debug("Modifications map of current list: {}", new TreeMap<>(mod));
            else
                logger.debug("Current modification map is bigger than 500 elements, will not print out.");

            if (!(intersection.size() > 500))
                logger.debug("Intersection of above: {}", intersection);
            else
                logger.debug("Intersection set is bigger than 500 elements, will not print out.");
        }

        //Merge maps
        mod.putAll(mod_p);

        // Remove everything which is in both maps
        mod.keySet().removeAll(new TreeSet<>(intersection));

        //Only files whose content changed stay in the map
        //Iterate over the keys
        for (Iterator<String> i = mod.keySet().iterator(); i.hasNext();) {

            //Get path
            Path path = mod.get(i.next());

            //Check if file has changed (may throw I/O exception)
            try {
                if (contentChanged(path)) {
                    //Remove if the file is not newer than its complement file
                    if (!FileUtils.isFileNewer(path.getFile(), getComplementPath(path).getFile()))
                        i.remove();
                } else {
                    //Content has not changed, remove from map
                    i.remove();
                }
            } catch (IOException e) { //Catch IOException, remove from map to avoid further errors
                i.remove();
                if (logger.isWarnEnabled())
                    logger.warn(
                            "Caught IOException while testing if file is newer: \"{}\". Removing from modifications to prevent further errors.",
                            path);
                if (logger.isDebugEnabled())
                    logger.debug(e);
            }
        }

        //Save for reuse
        changed = new HashSet<>(mod.values());
    }

    //Return changed files
    return changed;
}

From source file:de.micromata.genome.gwiki.page.search.IndexTextFilesContentSearcher.java

private Collection<SearchResult> and(Collection<SearchResult> first, Collection<SearchResult> second) {
    Set<SearchResult> ret = new TreeSet<SearchResult>(new SearchResultIdComparator());
    ret.addAll(first);
    ret.retainAll(second);
    return ret;
}

From source file:org.openmrs.module.facilitydata.web.controller.FacilityDataCompletionAnalysisFormController.java

@RequestMapping("/module/facilitydata/completionAnalysis.form")
public void viewForm(ModelMap map, HttpServletRequest request,
        @ModelAttribute("query") FacilityDataQuery query) {

    FacilityDataService fds = Context.getService(FacilityDataService.class);

    if (query.getSchema() != null) {

        Date validFrom = query.getSchema().getValidFrom();
        if (query.getFromDate() != null && validFrom != null && query.getFromDate().before(validFrom)) {
            query.setFromDate(validFrom);
        }
        if (query.getFromDate() == null) {
            Date minEntryDate = fds.getMinEnteredStartDateForSchema(query.getSchema());
            if (minEntryDate == null) {
                minEntryDate = new Date();
            }
            query.setFromDate(minEntryDate);
        }

        Date validTo = query.getSchema().getValidTo();
        if (query.getToDate() != null && validTo != null && query.getToDate().after(validTo)) {
            query.setToDate(validTo);
        }
        if (query.getToDate() == null) {
            query.setToDate(new Date());
        }

        Calendar cal = Calendar.getInstance();
        Frequency frequency = query.getSchema().getForm().getFrequency();
        if (frequency == Frequency.MONTHLY) {
            cal.setTime(query.getFromDate());
            if (cal.get(Calendar.DATE) > 1) {
                cal.set(Calendar.DATE, 1);
                query.setFromDate(cal.getTime());
            }
            cal.setTime(query.getToDate());
            cal.set(Calendar.DATE, cal.getActualMaximum(Calendar.DATE));
            query.setToDate(cal.getTime());
        }

        int numExpected = 0;
        List<Integer> daysOfWeek = FacilityDataConstants.getDailyReportDaysOfWeek();
        Map<FacilityDataFormQuestion, Integer> numValuesByQuestion = new LinkedHashMap<FacilityDataFormQuestion, Integer>();
        Map<FacilityDataFormQuestion, Double> numericTotals = new HashMap<FacilityDataFormQuestion, Double>();
        Map<FacilityDataFormQuestion, Map<FacilityDataCodedOption, Integer>> codedTotals = new HashMap<FacilityDataFormQuestion, Map<FacilityDataCodedOption, Integer>>();

        cal.setTime(query.getFromDate());
        while (cal.getTime().compareTo(query.getToDate()) <= 0) {
            if (frequency != Frequency.DAILY || daysOfWeek.contains(cal.get(Calendar.DAY_OF_WEEK))) {
                numExpected++;
            }
            cal.add(frequency.getCalendarField(), frequency.getCalendarIncrement());
        }

        List<FacilityDataValue> values = fds.evaluateFacilityDataQuery(query);
        Map<FacilityDataFormQuestion, Map<Location, FacilityDataValue>> latestValuesForQuestion = new HashMap<FacilityDataFormQuestion, Map<Location, FacilityDataValue>>();
        for (FacilityDataValue v : values) {

            Integer numValues = numValuesByQuestion.get(v.getQuestion());
            numValuesByQuestion.put(v.getQuestion(), numValues == null ? 1 : numValues + 1);

            FacilityDataQuestionType type = v.getQuestion().getQuestion().getQuestionType();
            if (type instanceof CodedFacilityDataQuestionType) {
                Map<FacilityDataCodedOption, Integer> m = codedTotals.get(v.getQuestion());
                if (m == null) {
                    m = new HashMap<FacilityDataCodedOption, Integer>();
                    codedTotals.put(v.getQuestion(), m);
                }
                Integer num = m.get(v.getValueCoded());
                num = num == null ? 1 : num + 1;
                m.put(v.getValueCoded(), num);
            } else {
                PeriodApplicability pa = v.getQuestion().getQuestion().getPeriodApplicability();
                if (pa == PeriodApplicability.DURING_PERIOD) {
                    Double num = numericTotals.get(v.getQuestion());
                    if (num == null) {
                        num = new Double(0);
                    }
                    num += v.getValueNumeric();
                    numericTotals.put(v.getQuestion(), num);
                } else {
                    Map<Location, FacilityDataValue> valueForQuestion = latestValuesForQuestion
                            .get(v.getQuestion());
                    if (valueForQuestion == null) {
                        valueForQuestion = new HashMap<Location, FacilityDataValue>();
                        latestValuesForQuestion.put(v.getQuestion(), valueForQuestion);
                    }
                    FacilityDataValue valueForLocation = valueForQuestion.get(v.getFacility());
                    if (valueForLocation == null || valueForLocation.getToDate().before(v.getToDate())) {
                        valueForQuestion.put(v.getFacility(), v);
                    }
                }
            }
        }
        if (!latestValuesForQuestion.isEmpty()) {
            for (FacilityDataFormSection section : query.getSchema().getSections()) {
                for (FacilityDataFormQuestion question : section.getQuestions()) {
                    Double d = new Double(0);
                    Map<Location, FacilityDataValue> m = latestValuesForQuestion.get(question);
                    if (m != null) {
                        for (FacilityDataValue v : m.values()) {
                            if (v != null) {
                                d += v.getValueNumeric();
                            }
                        }
                        numericTotals.put(question, d);
                    }
                }
            }
        }

        if (query.getFacility() == null) {
            numExpected = numExpected * getLocations().size();
        } else {
            Set<Location> allFacilities = FacilityDataUtil.getAllLocationsInHierarchy(query.getFacility());
            List<Location> supportedFacilities = FacilityDataConstants.getSupportedFacilities();
            allFacilities.retainAll(supportedFacilities);
            numExpected = numExpected * allFacilities.size();
        }

        map.addAttribute("numExpected", numExpected);
        map.addAttribute("numValuesByQuestion", numValuesByQuestion);
        map.addAttribute("numericTotals", numericTotals);
        map.addAttribute("codedTotals", codedTotals);
    }
}

From source file:ar.edu.famaf.nlp.alusivo.GraphAlgorithm.java

private boolean matchHelper(Map<Resource, Resource> bijection, // pi
        Set<Resource> neighbors, // Y
        DirectedPseudograph<Resource, Edge> fullGraph, // G
        DirectedPseudograph<Resource, Edge> candidate, // H
        long startTime) throws ReferringExpressionException {

    if (bijection.keySet().size() == candidate.vertexSet().size()) {
        return true;
    }
    if (neighbors.isEmpty()) {
        return false;
    }

    if (System.currentTimeMillis() - startTime > maxTime)
        throw new ReferringExpressionException("Time-out");

    for (Resource toMap : neighbors) { // y
        if (bijection.containsKey(toMap))
            continue;
        // find valid matches Z
        for (Resource extension : fullGraph.vertexSet()) { // z
            if (bijection.values().contains(extension) || bijection.keySet().contains(extension))
                continue;
            if (!containedPropertiesInSubgraph(toMap, candidate, extension, fullGraph))
                continue;
            Set<Resource> toCheck = neighbors(candidate, toMap);
            toCheck.retainAll(bijection.keySet());
            boolean good = true;
            for (Resource other : toCheck) { // h
                // (toMap -> other) y->h
                Set<Edge> outgoing = candidate.getAllEdges(toMap, other);
                if (!fullGraph.getAllEdges(extension, bijection.get(other)).containsAll(outgoing)) {
                    good = false;
                    break;
                }
                // (other -> toMap) y->h
                Set<Edge> incoming = candidate.getAllEdges(other, toMap);
                if (!fullGraph.getAllEdges(bijection.get(other), extension).containsAll(incoming)) {
                    good = false;
                    break;
                }
            }
            if (!good)
                continue;
            Map<Resource, Resource> bijectionRec = new HashMap<>();
            bijectionRec.putAll(bijection);
            bijectionRec.put(toMap, extension);
            Set<Resource> neighborsRec = new HashSet<Resource>(neighbors);
            neighborsRec.remove(toMap); // not in Figure 8
            if (matchHelper(bijectionRec, neighborsRec, fullGraph, candidate, startTime))
                return true;
        }

    }
    return false;
}

From source file:com.aurel.track.admin.customize.lists.systemOption.IssueTypeBL.java

/**
 * Gets the item types by the projects involved in the context and by the person
 * @param involvedProjects
 * @param personID
 * @param locale
 * @param restrict whether to restrict the item types:
 *             true: a subset valid for all involved projects (any issue type change within the subset is valid)
 *             false: a superset valid for any involved project (a certain issue type change may not be possible)
 * @return
 */
public static List<TListTypeBean> getByProjectsAndPerson(Integer[] involvedProjects, Integer personID,
        Locale locale, boolean restrict) {
    List<TListTypeBean> datasource = null;
    if (involvedProjects != null) {
        if (involvedProjects.length == 1) {
            //an explicit project is selected for bulk operation or 
            //all selected issues are from the same project
            datasource = IssueTypeBL.loadByPersonAndProjectAndRight(personID, involvedProjects[0], new int[] {
                    AccessBeans.AccessFlagIndexes.CREATETASK, AccessBeans.AccessFlagIndexes.PROJECTADMIN });
        } else {
            if (involvedProjects.length > 1) {
                //the project field is not selected and the selected issues are from more projects:
                //get the intersection of the issueTypes allowed in all involved projects
                Set<Integer> intersection = null;
                for (int i = 0; i < involvedProjects.length; i++) {
                    List<TListTypeBean> issueTypeBeans = IssueTypeBL.loadByPersonAndProjectAndRight(personID,
                            involvedProjects[i], new int[] { AccessBeans.AccessFlagIndexes.CREATETASK,
                                    AccessBeans.AccessFlagIndexes.PROJECTADMIN });
                    Set<Integer> issueTypesIDsForProject = GeneralUtils
                            .createIntegerSetFromBeanList(issueTypeBeans);
                    if (intersection == null) {
                        intersection = issueTypesIDsForProject;
                    } else {
                        if (restrict) {
                            intersection.retainAll(issueTypesIDsForProject);
                        } else {
                            intersection.addAll(issueTypesIDsForProject);
                        }
                    }
                }
                datasource = IssueTypeBL
                        .loadByIssueTypeIDs(GeneralUtils.createListFromCollection(intersection));
            } else {
                //no project is selected for the bulk operation:
                //fall back to all selectable issue types
                datasource = IssueTypeBL.loadAllSelectable();
            }
        }
    }
    return LocalizeUtil.localizeDropDownList(datasource, locale);
}

From source file:jmemorize.gui.swing.panels.CardCounterPanel.java

private void attachPartialProgressBar() {
    Main.getInstance().addLearnSessionObserver(new LearnSessionObserver() {
        class LearnCardObs implements LearnCardObserver {
            public void nextCardFetched(Card nextCard, boolean flippedMode) {
                if (m_cards == null) {
                    m_cards = new ArrayList<Card>();
                    m_cards.addAll(m_session.getCardsLeft());
                }

                if (!m_cards.contains(nextCard)) {
                    // if the new card is not in m_cards, then a card has been skipped and
                    // this card added.  We have to figure out the skipped card
                    // this is kind of inefficient, but only happens on a skipped card... 
                    // note also that we don't necessarily see the new card immediately 
                    // after the skip, so the card positions may not update immediately.
                    Set<Card> cardsToRemove = new HashSet<Card>();
                    cardsToRemove.addAll(m_session.getSkippedCards());
                    cardsToRemove.retainAll(m_cards);
                    m_cards.removeAll(cardsToRemove);
                    m_cards.add(nextCard);
                }

                PartialProgressBar bar = (PartialProgressBar) m_bar;
                bar.setValues(getValues());
            }
        }

        private LearnCardObserver m_obs;

        public void sessionEnded(LearnSession session) {
            session.removeObserver(m_obs);
        }

        public void sessionStarted(LearnSession session) {
            m_session = session;
            m_cards = null;

            m_obs = new LearnCardObs();
            session.addObserver(m_obs);
        }
    });
}

From source file:hudson.tasks.test.TestObject.java

/**
 * #2988: uniquifies a {@link #getSafeName} amongst children of the parent.
 */
protected final String uniquifyName(Collection<? extends TestObject> siblings, String base) {
    synchronized (UNIQUIFIED_NAMES) {
        String uniquified = base;
        Map<TestObject, Void> taken = UNIQUIFIED_NAMES.get(base);
        if (taken == null) {
            taken = new WeakHashMap<TestObject, Void>();
            UNIQUIFIED_NAMES.put(base, taken);
        } else {
            Set<TestObject> similars = new HashSet<TestObject>(taken.keySet());
            similars.retainAll(new HashSet<TestObject>(siblings));
            if (!similars.isEmpty()) {
                uniquified = base + '_' + (similars.size() + 1);
            }
        }
        taken.put(this, null);
        return uniquified;
    }
}

From source file:ubic.BAMSandAllen.FocusedAnalysis.ExploreRegionNames.java

public void printRelations() {
    log.info("Bnames size:" + bNames.size());
    log.info("Anames size:" + aNames.size());
    log.info(bNames.contains("Nucleus y"));
    log.info(aNames.contains("Nucleus y"));
    for (String matrixAColumn : aNames) {

        Set<String> matrixBColumns = pair.convertANametoB(matrixAColumn);
        // we may still have been mapped to a brain region with no data
        // requires mapping and matrix data!
        int BSizeBefore = matrixBColumns.size();
        Set<String> removed = new HashSet<String>(matrixBColumns);
        removed.removeAll(bNames);
        matrixBColumns.retainAll(bNames);

        int bSizeDiff = BSizeBefore - matrixBColumns.size();
        if (bSizeDiff != 0) {
            // log.info( "B size changed by (no connect info):" + bSizeDiff );
            log.info("No connection info for:   " + matrixAColumn + "->" + removed);
        }

        if (matrixBColumns.size() > 1) {
            log.info("Merging " + matrixBColumns + " into " + matrixAColumn);
        }
    }

}

From source file:org.jax.haplotype.analysis.EMMAAssociationTest.java

/**
 * Calculate the kinship values
 * @param genoData
 *          the genotype data to base it on
 * @param strains
 *          a set of strain names
 * @return
 *          the kinship values
 * @throws IOException
 */
public double[] calculateKinship(GenomeDataSource genoData, Set<String> strains) throws IOException {
    strains = new HashSet<String>(strains);
    strains.retainAll(genoData.getAvailableStrains());
    String[] commonStrains = strains.toArray(new String[0]);
    Arrays.sort(commonStrains);
    int strainCount = commonStrains.length;

    int snpCount = 0;
    for (ChromosomeDataSource currChr : genoData.getChromosomeDataSources().values()) {
        snpCount += currChr.getSnpPositionInputStream().getSnpCount();
    }

    double[] genos = new double[snpCount * strainCount];
    int currStartIndex = 0;
    for (ChromosomeDataSource currChr : genoData.getChromosomeDataSources().values()) {
        SdpInputStream sdpStream = currChr.getSdpInputStream(commonStrains);
        int currSnpCount = (int) currChr.getSnpPositionInputStream().getSnpCount();
        for (int i = 0; i < currSnpCount; i++) {
            int currSnp = currStartIndex + i;
            BitSet currSdp = sdpStream.getNextSdp();
            for (int strainIndex = 0; strainIndex < strainCount; strainIndex++) {
                double currCall = currSdp.get(strainIndex) ? 1.0 : 0.0;
                genos[currSnp * strainCount + strainIndex] = currCall;
            }
        }
    }

    return calculateKinship(strainCount, genos);
}

From source file:org.ohmage.request.campaign.CampaignUpdateRequest.java

/**
 * Services the request.
 */
@Override
public void service() {
    LOGGER.info("Servicing the campaign update request.");

    if (!authenticate(AllowNewAccount.NEW_ACCOUNT_DISALLOWED)) {
        return;
    }

    try {
        boolean isAdmin;
        try {
            LOGGER.info("Checking if the user is an admin.");
            UserServices.instance().verifyUserIsAdmin(getUser().getUsername());

            LOGGER.info("The user is an admin.");
            isAdmin = true;
        } catch (ServiceException e) {
            LOGGER.info("The user is not an admin.");
            isAdmin = false;
        }

        if (isAdmin) {
            LOGGER.info("Verifying that the campaign exists.");
            CampaignServices.instance().checkCampaignExistence(campaignId, true);
        } else {
            LOGGER.info("Verfiying that the campaign exists and that the user belongs.");
            UserCampaignServices.instance().campaignExistsAndUserBelongs(campaignId, getUser().getUsername());
        }

        if (!isAdmin) {
            LOGGER.info("Verifying that the user is allowed to update the campaign.");
            UserCampaignServices.instance().verifyUserCanUpdateCampaign(getUser().getUsername(), campaignId);
        }

        if (xml != null) {
            LOGGER.info("Verifying that the user is allowed to update the campaign.");
            UserCampaignServices.instance().verifyUserCanUpdateCampaignXml(getUser().getUsername(), campaignId,
                    id, name);
        }

        if ((classesToAdd != null) && (classesToRemove != null)) {
            LOGGER.info(
                    "Both a list of classes to add and remove were given, so we are truncating the lists to remove items that are in both.");
            Set<String> intersection = new HashSet<String>(classesToAdd);
            intersection.retainAll(classesToRemove);

            classesToAdd.removeAll(intersection);
            classesToRemove.removeAll(intersection);
        }

        if (classesToAdd != null) {
            if (isAdmin) {
                LOGGER.info("Verifying that all of the classes to add exist.");
                ClassServices.instance().checkClassesExistence(classesToAdd, true);
            } else {
                LOGGER.info("Verifying that all of the classes to add exist and that the user belongs.");
                UserClassServices.instance().classesExistAndUserBelongs(classesToAdd, getUser().getUsername());
            }
        }

        if (classesToRemove != null) {
            if (isAdmin) {
                LOGGER.info("Verifying that all of the classes to remove exist.");
                ClassServices.instance().checkClassesExistence(classesToRemove, true);
            } else {
                LOGGER.info("Verifying that all of the classes to remove exist and that the user belongs.");
                UserClassServices.instance().classesExistAndUserBelongs(classesToRemove,
                        getUser().getUsername());
            }

            LOGGER.info("Verifying that not all of the classes are being disassociated from the campaign.");
            CampaignClassServices.instance().verifyNotDisassocitingAllClassesFromCampaign(campaignId,
                    classesToRemove, classesToAdd);
        }

        if (usersAndRolesToAdd != null) {
            LOGGER.info("Verifying that all of the users to add exist.");
            UserServices.instance().verifyUsersExist(usersAndRolesToAdd.keySet(), true);

            if (!isAdmin) {
                LOGGER.info(
                        "Verifying that the user is allowed to give the permissions they are trying to give.");
                Set<Campaign.Role> roles = new HashSet<Campaign.Role>();
                for (Set<Campaign.Role> currRoles : usersAndRolesToAdd.values()) {
                    roles.addAll(currRoles);
                }
                UserCampaignServices.instance().verifyUserCanGrantOrRevokeRoles(getUser().getUsername(),
                        campaignId, roles);
            }
        }

        if ((usersAndRolesToRemove != null) && (!isAdmin)) {
            LOGGER.info(
                    "Verifying that the user is allowed to revoke the permissions that they are trying to revoke.");
            Set<Campaign.Role> roles = new HashSet<Campaign.Role>();
            for (Set<Campaign.Role> currRoles : usersAndRolesToRemove.values()) {
                roles.addAll(currRoles);
            }
            UserCampaignServices.instance().verifyUserCanGrantOrRevokeRoles(getUser().getUsername(), campaignId,
                    roles);
        }

        LOGGER.info("Updating the campaign.");
        CampaignServices.instance().updateCampaign(campaignId, xml, description, runningState, privacyState,
                classesToAdd, classesToRemove, usersAndRolesToAdd, usersAndRolesToRemove);
    } catch (ServiceException e) {
        e.failRequest(this);
        e.logException(LOGGER);
    }
}