Example usage for java.util Collection removeAll

List of usage examples for java.util Collection removeAll

Introduction

On this page you can find example usage for java.util Collection removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes all of this collection's elements that are also contained in the specified collection (optional operation).
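
For a quick, self-contained illustration of the call (the class name RemoveAllDemo and the sample values below are invented for this sketch), note that removeAll returns true only when the receiving collection was actually modified:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RemoveAllDemo {
    public static void main(String[] args) {
        // Wrap in ArrayList: the fixed-size list returned by Arrays.asList does not support removal
        List<String> names = new ArrayList<>(Arrays.asList("alice", "bob", "carol", "dave"));

        // Remove every element that also appears in the argument collection
        boolean changed = names.removeAll(Arrays.asList("bob", "dave", "eve"));

        System.out.println(changed); // true  (the list was modified)
        System.out.println(names);   // [alice, carol]
    }
}

The real-world examples below follow the same pattern: obtain a mutable collection, then subtract another collection from it in a single call.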

Usage

From source file:com.comphenix.xp.Action.java

/**
 * Inherit traits from the previous action into the current action, returning a new action with the result.
 * @param previous - the previous action to inherit from.
 * @return A new action with the traits of this and the previous action.
 */
public Action inheritAction(Action previous) {

    // Scale the previous action
    Action scaled = previous.multiply(getInheritMultiplier());
    Action current = multiply(1);

    // Include the previous multiplier
    current.setInheritMultiplier(current.getInheritMultiplier() * previous.getInheritMultiplier());

    // Find any rewards that are not overwritten
    Collection<String> rewards = scaled.getRewardNames();
    rewards.removeAll(getRewardNames());

    // Copy over
    for (String reward : rewards) {
        current.addReward(reward, scaled.getReward(reward));
    }

    // And copy the message too, if it hasn't already been set
    if (current.messages == null) {
        current.messages = scaled.messages;
    }

    return current;
}

From source file:gov.bnl.channelfinder.ChannelsResource.java

/**
 * Check if all the tags and properties already exist
 * @return the validated channels
 * @throws IOException 
 * @throws JsonMappingException 
 * @throws JsonParseException 
 */
private List<XmlChannel> validateChannels(List<XmlChannel> channels, Client client)
        throws JsonParseException, JsonMappingException, IOException {
    for (XmlChannel channel : channels) {
        if (channel.getName() == null || channel.getName().isEmpty()) {
            throw new IllegalArgumentException("Invalid channel name ");
        }
        if (channel.getOwner() == null || channel.getOwner().isEmpty()) {
            throw new IllegalArgumentException(
                    "Invalid channel owner (null or empty string) for '" + channel.getName() + "'");
        }
        for (XmlProperty xmlProperty : channel.getProperties()) {
            if (xmlProperty.getValue() == null || xmlProperty.getValue().isEmpty()) {
                throw new IllegalArgumentException(
                        "Invalid property value (missing or null or empty string) for '" + xmlProperty.getName()
                                + "'");
            }
        }
    }
    final Map<String, XmlTag> tags = new HashMap<String, XmlTag>();
    final Map<String, XmlProperty> properties = new HashMap<String, XmlProperty>();

    ObjectMapper mapper = new ObjectMapper();
    mapper.addMixIn(XmlProperty.class, OnlyXmlProperty.class);
    mapper.addMixIn(XmlTag.class, OnlyXmlTag.class);

    SearchResponse response = client.prepareSearch("properties").setTypes("property")
            .setQuery(new MatchAllQueryBuilder()).setSize(1000).execute().actionGet();
    for (SearchHit hit : response.getHits()) {
        XmlProperty prop = mapper.readValue(hit.getSourceAsString(), XmlProperty.class);
        properties.put(prop.getName(), prop);
    }
    response = client.prepareSearch("tags").setTypes("tag").setQuery(new MatchAllQueryBuilder()).setSize(1000)
            .execute().actionGet();
    for (SearchHit hit : response.getHits()) {
        XmlTag tag = mapper.readValue(hit.getSourceAsString(), XmlTag.class);
        tags.put(tag.getName(), tag);
    }
    if (tags.keySet().containsAll(ChannelUtil.getTagNames(channels))
            && properties.keySet().containsAll(ChannelUtil.getPropertyNames(channels))) {
        for (XmlChannel channel : channels) {
            channel.getTags().parallelStream().forEach((tag) -> {
                tag.setOwner(tags.get(tag.getName()).getOwner());
            });
            channel.getProperties().parallelStream().forEach((prop) -> {
                prop.setOwner(properties.get(prop.getName()).getOwner());
            });
        }
        return channels;
    } else {
        StringBuffer errorMsg = new StringBuffer();
        Collection<String> missingTags = ChannelUtil.getTagNames(channels);
        missingTags.removeAll(tags.keySet());
        for (String tag : missingTags) {
            errorMsg.append(tag + "|");
        }
        Collection<String> missingProps = ChannelUtil.getPropertyNames(channels);
        missingProps.removeAll(properties.keySet());
        for (String prop : missingProps) {
            errorMsg.append(prop + "|");
        }
        throw new IllegalArgumentException(
                "The following Tags and/or Properties on the channel don't exist -- " + errorMsg.toString());

    }
}

From source file:org.mskcc.cbio.portal.servlet.ProteinArraySignificanceTestJSON.java

/** 
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    JSONArray table = new JSONArray();

    String cancerStudyStableId = request.getParameter("cancer_study_id");
    String heatMap = request.getParameter("heat_map");
    String gene = request.getParameter("gene");
    String alterationType = request.getParameter("alteration");
    String antibodyType = request.getParameter("antibody");
    String excludeAntibodyType = request.getParameter("exclude_antibody");
    String strDataScale = request.getParameter("data_scale");

    // TODO filtered heat map breaks the parsing, we need the raw parameter
    // (alternatively, we can change the parsing method)
    if (request instanceof XssRequestWrapper) {
        heatMap = ((XssRequestWrapper) request).getRawParameter("heat_map");
    }

    double dataScale = strDataScale == null ? 0 : Double.parseDouble(strDataScale);

    Collection<String> antibodyTypes;
    if (antibodyType == null) {
        if (excludeAntibodyType == null) {
            antibodyTypes = null; // include all
        } else {
            try {
                antibodyTypes = GetProteinArrayData.getProteinArrayTypes();
                antibodyTypes.removeAll(Arrays.asList(excludeAntibodyType.split(" ")));
            } catch (DaoException e) {
                throw new ServletException(e);
            }
        }
    } else {
        antibodyTypes = Arrays.asList(antibodyType.split(" "));
    }

    String[] heatMapLines = heatMap.split("\r?\n");
    String[] genes = heatMapLines[0].split("\t");
    genes[0] = "Any";
    Set<String> allCases = getAllCases(heatMapLines);
    Map<String, Set<String>>[] alteredCases = getAlteredCases(heatMapLines, genes, gene, alterationType);

    Map<String, ProteinArrayInfo> proteinArrays;
    Map<String, Map<String, Double>> proteinArrayData;
    try {
        int cancerStudyId = DaoCancerStudy.getCancerStudyByStableId(cancerStudyStableId).getInternalId();
        proteinArrays = GetProteinArrayData.getProteinArrayInfo(cancerStudyId, null, antibodyTypes);
        proteinArrayData = GetProteinArrayData.getProteinArrayData(cancerStudyId, proteinArrays.keySet(),
                allCases);
    } catch (DaoException e) {
        throw new ServletException(e);
    }

    if (gene == null) {
        for (int i = 0; i < genes.length; i++) {
            export(table, genes[i], alteredCases[i], proteinArrays, proteinArrayData, dataScale);
        }
    } else {
        export(table, gene, alteredCases[0], proteinArrays, proteinArrayData, dataScale);
    }

    response.setContentType("application/json");
    PrintWriter out = response.getWriter();
    try {
        JSONValue.writeJSONString(table, out);
    } finally {
        out.close();
    }

}

From source file:kevin.gvmsgarch.Worker.java

private void archiveAll(String authToken, String rnrse) throws IOException, ParserConfigurationException,
        SAXException, XPathExpressionException, JSONException {
    try {
        Collection<String> msgIds = Collections.EMPTY_LIST;
        int page = 1;
        int processed = 0;
        HashSet<String> alreadyProcessed = new HashSet<String>();
        do {
            int numParsed = 0;
            do {
                String json = App.extractInboxJson(authToken, this.location, page);
                msgIds = getMessageIds(json);
                if (msgIds != null) {
                    numParsed += msgIds.size();
                    msgIds.removeAll(alreadyProcessed);
                    processed += msgIds.size();
                    if (msgIds.removeAll(getFilterIds(json, this.filter))) {
                        this.firePropertyChange("progress", null, processed);
                    }
                    if (msgIds.isEmpty()) {
                        page++;
                    }
                }
            } while (msgIds != null && msgIds.isEmpty() && !pm.isCanceled());

            if (!pm.isCanceled() && msgIds != null && msgIds.size() > 0) {
                archiveThese(authToken, rnrse, msgIds, mode);
                alreadyProcessed.addAll(msgIds);
                this.firePropertyChange("progress", null, processed);
            }
        } while (msgIds != null && msgIds.size() > 0 && !pm.isCanceled());
        this.firePropertyChange("finish", null, null);
    } catch (Exception ex) {
        this.firePropertyChange("error", null, ex);
    }
}

From source file:org.metasyntactic.providers.DataProvider.java

private void addMissingData(final LookupResult result, final Location location,
        final Iterable<Movie> currentMovies, final List<Theater> currentTheaters) {
    // Ok. so if:
    // a) the user is doing their main search
    // b) we do not find data for a theater that should be showing up
    // c) they're close enough to their last search
    // then we want to give them the old information we have for that
    // theater *as well as* a warning to let them know that it may be
    // out of date.
    //
    // This is to deal with the case where the user is confused because
    // a theater they care about has been filtered out because it didn't
    // report showtimes.
    final Collection<String> existingMovieTitles = new LinkedHashSet<String>();
    for (final Movie movie : result.getMovies()) {
        existingMovieTitles.add(movie.getCanonicalTitle());
    }
    final Collection<Theater> missingTheaters = new LinkedHashSet<Theater>(currentTheaters);
    missingTheaters.removeAll(result.getTheaters());
    for (final Theater theater : missingTheaters) {
        if (theater.getLocation().distanceTo(location) > 50) {
            // Not close enough. Consider this a brand new search in a new
            // location. Don't include these old theaters.
            continue;
        }
        // no showtime information available. fallback to anything we've
        // stored (but warn the user).
        final Map<String, List<Performance>> oldPerformances = lookupTheaterPerformances(theater);
        if (isEmpty(oldPerformances)) {
            continue;
        }
        final Date syncDate = synchronizationDateForTheater(theater);
        if (syncDate == null) {
            continue;
        }
        if (Math.abs(syncDate.getTime() - new Date().getTime()) > Constants.FOUR_WEEKS) {
            continue;
        }
        result.getPerformances().put(theater.getName(), oldPerformances);
        result.getSynchronizationData().put(theater.getName(), syncDate);
        result.getTheaters().add(theater);
        addMissingMovies(oldPerformances, result, existingMovieTitles, currentMovies);
    }
}

From source file:com.adaptris.core.lifecycle.FilteredSharedComponentStart.java

private Collection<AdaptrisConnection> exclude(Collection<AdaptrisConnection> conns) {
    if (getExcludes().size() == 0) {
        return conns;
    }
    initialisePatterns();
    List<AdaptrisConnection> toBeRemoved = new ArrayList<>();
    for (AdaptrisConnection element : conns) {
        for (Pattern pattern : excludePatterns) {
            if (pattern.matcher(element.getUniqueId()).find()) {
                toBeRemoved.add(element);
                break;
            }
        }
    }
    conns.removeAll(toBeRemoved);
    return conns;
}

From source file:com.bluexml.side.Framework.alfresco.dataGenerator.dictionary.AlfrescoModelDictionary.java

/**
 * Removes from the model's types the types that can't be instantiated, i.e. abstract types
 * @param types
 * @return the non-abstract types, i.e. types that can be instantiated under Alfresco
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws IOException
 */
private Collection<TypeDefinition> removeAbstractTypes(Collection<TypeDefinition> types)
        throws ParserConfigurationException, SAXException, IOException {
    Collection<TypeDefinition> tempTypes = new ArrayList<TypeDefinition>();
    Collection<QName> notAbstractTypes = getNotAbstractTypes();
    for (TypeDefinition type : types) {
        QName qnamedType = type.getName();
        if (!notAbstractTypes.contains(qnamedType)) {
            tempTypes.add(type);
        }
    }
    types.removeAll(tempTypes);
    return types;
}

From source file:org.phenotips.data.similarity.internal.DefaultPatientSimilarityView.java

/**
 * Find, remove, and return all terms with given ancestor.
 *
 * @param terms the terms, modified by removing terms with given ancestor
 * @param ancestor the ancestor to search for
 * @return the terms with the given ancestor (removed from given terms)
 */
private Collection<VocabularyTerm> popTermsWithAncestor(Collection<VocabularyTerm> terms,
        VocabularyTerm ancestor) {
    Collection<VocabularyTerm> matched = new HashSet<VocabularyTerm>();
    for (VocabularyTerm term : terms) {
        if (term.getAncestorsAndSelf().contains(ancestor)) {
            matched.add(term);
        }
    }
    terms.removeAll(matched);
    return matched;
}

From source file:graph.inference.module.DisjointWithWorker.java

private void findDisjoint(QueryObject queryObj, Collection<Edge> disjointWithEdges) {
    if (queryObj.getAtomic() == null)
        return;
    VariableNode varNode = (queryObj.isProof()) ? VariableNode.DEFAULT : queryObj.getVariable();
    QueryObject genlResults1 = new QueryObject(CommonConcepts.GENLS.getNode(dag_), queryObj.getAtomic(),
            varNode);
    querier_.applyModule("genls", genlResults1);
    QueryObject genlResults2 = null;

    Collection<DAGNode> genlResults = genlResults1.getCompleted();
    Collection<DAGNode> larger = null;
    boolean swapped = false;

    // For proofs, work out the smaller set of the two.
    if (queryObj.isProof()) {
        genlResults2 = new QueryObject(CommonConcepts.GENLS.getNode(dag_), queryObj.getNode(2), varNode);
        querier_.applyModule("genls", genlResults2);

        genlResults = new ArrayList<>(genlResults1.getCompleted());
        genlResults.removeAll(genlResults2.getCompleted());
        larger = new ArrayList<>(genlResults2.getCompleted());
        larger.removeAll(genlResults1.getCompleted());
        if (genlResults.size() > larger.size()) {
            Collection<DAGNode> temp = genlResults;
            genlResults = larger;
            larger = temp;
            swapped = true;
        }

        // Not disjoint
        if (genlResults.isEmpty())
            return;
        larger = new HashSet<>(larger);
    }

    for (DAGNode nodeA : genlResults) {
        // Check if the node has any disjointWith assertions that are in the
        // other genls
        Collection<Edge> node1Edges = relatedModule_.execute(nodeA);
        node1Edges = CollectionUtils.retainAll(node1Edges, disjointWithEdges);
        for (Edge e : node1Edges) {
            Node[] edgeNodes = e.getNodes();
            Node thisNode, otherNode = null;
            if (edgeNodes[1].equals(nodeA)) {
                thisNode = edgeNodes[1];
                otherNode = edgeNodes[2];
            } else if (edgeNodes[2].equals(nodeA)) {
                thisNode = edgeNodes[2];
                otherNode = edgeNodes[1];
            } else
                continue;

            if (queryObj.isProof() && larger.contains(otherNode)) {
                // Disjoint found!
                // Add genl justifications either side.
                if (swapped) {
                    Node temp = thisNode;
                    thisNode = otherNode;
                    otherNode = temp;
                }
                queryObj.getJustification()
                        .addAll(alterGenlJustification(genlResults1, (DAGNode) thisNode, false));
                queryObj.addResult(new Substitution(), edgeNodes);
                queryObj.getJustification()
                        .addAll(alterGenlJustification(genlResults2, (DAGNode) otherNode, true));

                return;
            } else if (!queryObj.isProof())
                queryObj.addResult(new Substitution(varNode, (DAGNode) otherNode), edgeNodes);
        }
    }

    checkSiblingDisjoint(queryObj);
}

From source file:norbert.mynemo.core.selection.RecommenderSelector.java

/**
 * Removes from the given collection all evaluations that are significantly worse. The remaining
 * evaluations are not significantly different from one another.
 */
private void retainBestEvaluations(Collection<RecommenderEvaluation> evaluations) {
    checkNotNull(evaluations);

    EvaluationComparator comparator = new EvaluationComparator(metric);
    Collection<RecommenderEvaluation> rejectedEvaluations = new ArrayList<>();

    for (RecommenderEvaluation evalA : evaluations) {
        for (RecommenderEvaluation evalB : evaluations) {
            if (areSignificantlyDifferent(evalA, evalB)) {
                rejectedEvaluations.add(Collections.max(newArrayList(evalA, evalB), comparator));
            }
        }
    }

    evaluations.removeAll(rejectedEvaluations);
}