Example usage for java.util LinkedHashSet addAll

Introduction

On this page you can find usage examples for java.util.LinkedHashSet.addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
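
Because LinkedHashSet maintains insertion order, addAll is a convenient way to merge collections while dropping duplicates and keeping the order in which elements were first seen. The method returns true if the set changed as a result of the call. A minimal, self-contained sketch (not taken from any of the projects below; the class name is illustrative):

import java.util.Arrays;
import java.util.LinkedHashSet;

public class LinkedHashSetAddAllDemo {
    public static void main(String[] args) {
        // Illustrative sketch of the addAll contract, using only JDK classes.
        LinkedHashSet<String> set = new LinkedHashSet<>(Arrays.asList("a", "b"));

        // Returns true because the set changed: "c" is new, the duplicate "b" is skipped.
        boolean changed = set.addAll(Arrays.asList("b", "c"));

        System.out.println(changed); // true
        System.out.println(set);     // [a, b, c] -- first-insertion order is preserved
    }
}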

Usage

From source file:com.act.biointerpretation.cofactorremoval.CofactorRemover.java

/**
 * This function is the meat of the cofactor removal process.  It extracts all cofactors based on their ids and
 * places them in the appropriate collection within the reaction.  Note that because this is executed by
 * BiointerpretationProcessor's `runSpecializedReactionProcessing` hook, the chemical ids have already been updated
 * to reference the chemical entries in the WriteDB.
 * @param reaction The reaction to update.
 * @param component Update substrates or products.
 */
private void updateReactionProductOrSubstrate(Reaction reaction, ReactionComponent component) {
    Long[] chemIds, originalCofactorIds;
    if (component == SUBSTRATE) {
        chemIds = reaction.getSubstrates();
        originalCofactorIds = reaction.getSubstrateCofactors();
    } else {
        chemIds = reaction.getProducts();
        originalCofactorIds = reaction.getProductCofactors();
    }

    Map<Boolean, List<Long>> partitionedIds = Arrays.asList(chemIds).stream()
            .collect(Collectors.partitioningBy(knownCofactorWriteDBIds::contains));

    List<Long> cofactorIds = partitionedIds.containsKey(true) ? partitionedIds.get(true)
            : Collections.EMPTY_LIST;
    List<Long> nonCofactorIds = partitionedIds.containsKey(false) ? partitionedIds.get(false)
            : Collections.EMPTY_LIST;

    // Retain previously partitioned cofactors if any exist.
    if (originalCofactorIds != null && originalCofactorIds.length > 0) {
        // Use an ordered set to de-duplicate the partitioned and previously specified cofactors.  Original cofactors go first.
        LinkedHashSet<Long> uniqueCofactorIds = new LinkedHashSet<>(Arrays.asList(originalCofactorIds));
        uniqueCofactorIds.addAll(cofactorIds);
        /* We do this potentially expensive de-duplication step only in the presumably rare case that we find a reaction
         * that already has cofactors set.  A reaction that has not already undergone cofactor removal is very unlikely to
         * have cofactors partitioned from substrates/products. */
        cofactorIds = new ArrayList<>(uniqueCofactorIds);
    }

    // Coefficients for cofactors should automatically fall out when we update the substrate/product list.
    if (component == SUBSTRATE) {
        reaction.setSubstrateCofactors(cofactorIds.toArray(new Long[cofactorIds.size()]));
        reaction.setSubstrates(nonCofactorIds.toArray(new Long[nonCofactorIds.size()]));
        /* Coefficients should already have been set when the reaction was migrated to the new DB, so no need to update.
         * Note that this assumption depends strongly on the current coefficient implementation in the Reaction model. */
    } else {
        reaction.setProductCofactors(cofactorIds.toArray(new Long[cofactorIds.size()]));
        reaction.setProducts(nonCofactorIds.toArray(new Long[nonCofactorIds.size()]));
    }
}
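
The LinkedHashSet here exists only to merge two id lists while dropping duplicates, with the original cofactor ids kept first. Stripped of the reaction bookkeeping, the idiom looks like the following standalone sketch (mergePreservingOrder and MergeIdsDemo are hypothetical names, not part of the CofactorRemover code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class MergeIdsDemo {
    // Merges two id lists, keeping the first list's order and dropping duplicates.
    static List<Long> mergePreservingOrder(List<Long> original, List<Long> extra) {
        LinkedHashSet<Long> unique = new LinkedHashSet<>(original); // originals go first
        unique.addAll(extra);                                       // duplicates are ignored
        return new ArrayList<>(unique);
    }

    public static void main(String[] args) {
        List<Long> merged = mergePreservingOrder(Arrays.asList(1L, 2L), Arrays.asList(2L, 3L));
        System.out.println(merged); // [1, 2, 3]
    }
}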

From source file:org.osaf.cosmo.atom.provider.DetachedItemCollectionAdapter.java

private NoteItem detachOccurrence(NoteItem master, NoteItem copy, NoteItem occurrence) {

    ThisAndFutureHelper tafHelper = new ThisAndFutureHelper();
    LinkedHashSet<ContentItem> updates = new LinkedHashSet<ContentItem>();

    // need to update master, create new master
    updates.add(master);
    updates.add(copy);

    // get all modifications to remove/add
    updates.addAll(tafHelper.breakRecurringEvent(master, copy, occurrence));

    // This service call will update/remove/create items in one transaction
    // Any new items will be added to all specified parents.
    getContentService().updateContentItems(master.getParents(), updates);

    return copy;
}

From source file:com.geewhiz.pacify.managers.FilterManager.java

private LinkedHashSet<Defect> filterPFile(PFile pFile) {
    logger.info("      Customize File [{}]", pFile.getRelativePath());
    logger.debug("          Filtering [{}] using encoding [{}] and filter [{}]",
            pMarker.getAbsoluteFileFor(pFile).getAbsolutePath(), pFile.getEncoding(), pFile.getFilterClass());

    File fileToFilter = pMarker.getAbsoluteFileFor(pFile);
    PacifyFilter pacifyFilter = getFilterForPFile(pFile);

    Map<String, String> propertyValues = new HashMap<String, String>();
    LinkedHashSet<Defect> defects = fillPropertyValuesFor(propertyValues, pFile);

    String beginToken = pMarker.getBeginTokenFor(pFile);
    String endToken = pMarker.getEndTokenFor(pFile);
    String encoding = pFile.getEncoding();

    defects.addAll(pacifyFilter.filter(propertyValues, beginToken, endToken, fileToFilter, encoding));
    logger.info("          [{}] placeholders replaced.", pFile.getPProperties().size());

    return defects;
}

From source file:org.loklak.api.server.SuggestServlet.java

@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    RemoteAccess.Post post = RemoteAccess.evaluate(request);

    // manage DoS
    if (post.isDoS_blackout()) {
        response.sendError(503, "your request frequency is too high");
        return;
    }

    String callback = post.get("callback", "");
    boolean jsonp = callback != null && callback.length() > 0;
    boolean minified = post.get("minified", false);

    int requestkey = post.hashCode();
    Map<String, Object> m = post.isDoS_servicereduction() ? cache.get(requestkey) : null;
    if (m == null) {
        boolean local = post.isLocalhostAccess();
        boolean delete = post.get("delete", false);
        int count = post.get("count", 10); // number of queries
        String query = post.get("q", ""); // to get a list of queries which match; to get all latest: leave q empty
        String source = post.get("source", "all"); // values: all,query,geo
        String orders = post.get("order", query.length() == 0 ? "desc" : "asc").toUpperCase();
        SortOrder order = SortOrder.valueOf(orders);
        String orderby = post.get("orderby", query.length() == 0 ? "retrieval_next" : "query_count");
        int timezoneOffset = post.get("timezoneOffset", 0);
        Date since = post.get("since", "").equals("now") ? new Date()
                : post.get("since", (Date) null, timezoneOffset);
        Date until = post.get("until", "").equals("now") ? new Date()
                : post.get("until", (Date) null, timezoneOffset);
        String selectby = post.get("selectby", "retrieval_next");
        ResultList<QueryEntry> queryList = new ResultList<>();

        if ((source.equals("all") || source.equals("query")) && query.length() >= 0) {
            long start = System.currentTimeMillis();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localqueries_time", System.currentTimeMillis() - start);
        }

        if (delete && local && queryList.size() > 0) {
            long start = System.currentTimeMillis();
            for (QueryEntry qe : queryList)
                DAO.deleteQuery(qe.getQuery(), qe.getSourceType());
            queryList.clear();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localquerydelete_time", System.currentTimeMillis() - start);
        }

        if (source.equals("all") || source.equals("geo")) {
            long start = System.currentTimeMillis();
            LinkedHashSet<String> suggestions = DAO.geoNames.suggest(query, count, 0);
            if (suggestions.size() < count && query.length() > 2)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 1));
            if (suggestions.size() < count && query.length() > 5)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 2));
            for (String s : suggestions) {
                QueryEntry qe = new QueryEntry(s, 0, Long.MAX_VALUE, SourceType.IMPORT, false);
                queryList.add(qe);
            }
            post.recordEvent("suggestionsquery_time", System.currentTimeMillis() - start);
        }

        long start = System.currentTimeMillis();
        post.setResponse(response, "application/javascript");

        List<Object> queries = new ArrayList<>();
        if (queryList != null)
            for (QueryEntry t : queryList)
                queries.add(t.toMap());

        int random = post.get("random", -1);
        if (random > 0 && random < queries.size()) {
            // take the given number from the result list and use random to choose
            List<Object> random_queries = new ArrayList<>();
            Random r = new Random(System.currentTimeMillis());
            while (random-- > 0) {
                random_queries.add(queries.remove(r.nextInt(queries.size())));
                int shrink = Math.max(queries.size() / 2, random * 10);
                while (queries.size() > shrink)
                    queries.remove(queries.size() - 1); // prefer from top
            }
            queries = random_queries;
        }

        // generate json
        m = new LinkedHashMap<String, Object>();
        Map<String, Object> metadata = new LinkedHashMap<String, Object>();
        metadata.put("count", queryList == null ? "0" : Integer.toString(queries.size()));
        metadata.put("hits", queryList.getHits());
        metadata.put("query", query);
        metadata.put("order", orders);
        metadata.put("orderby", orderby);
        if (since != null)
            metadata.put("since", AbstractIndexEntry.utcFormatter.print(since.getTime()));
        if (until != null)
            metadata.put("until", AbstractIndexEntry.utcFormatter.print(until.getTime()));
        if (since != null || until != null)
            metadata.put("selectby", selectby);
        metadata.put("client", post.getClientHost());
        m.put("search_metadata", metadata);

        m.put("queries", queries);
        post.recordEvent("postprocessing_time", System.currentTimeMillis() - start);
    }

    // write json
    response.setCharacterEncoding("UTF-8");
    PrintWriter sos = response.getWriter();
    if (jsonp)
        sos.print(callback + "(");
    sos.print(minified ? new ObjectMapper().writer().writeValueAsString(m)
            : new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(m));
    if (jsonp)
        sos.println(");");
    sos.println();
    post.finalize();
}

From source file:org.loklak.api.search.SuggestServlet.java

@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    Query post = RemoteAccess.evaluate(request);

    // manage DoS
    if (post.isDoS_blackout()) {
        response.sendError(503, "your request frequency is too high");
        return;
    }

    String callback = post.get("callback", "");
    boolean jsonp = callback != null && callback.length() > 0;
    boolean minified = post.get("minified", false);

    int requestkey = post.hashCode();
    JSONObject m = post.isDoS_servicereduction() ? cache.get(requestkey) : null;
    if (m == null) {
        boolean local = post.isLocalhostAccess();
        boolean delete = post.get("delete", false);
        int count = post.get("count", 10); // number of queries
        String query = post.get("q", ""); // to get a list of queries which match; to get all latest: leave q empty
        String source = post.get("source", "all"); // values: all,query,geo
        String orders = post.get("order", query.length() == 0 ? "desc" : "asc").toUpperCase();
        SortOrder order = SortOrder.valueOf(orders);
        String orderby = post.get("orderby", query.length() == 0 ? "retrieval_next" : "query_count");
        int timezoneOffset = post.get("timezoneOffset", 0);
        Date since = post.get("since", "").equals("now") ? new Date()
                : post.get("since", (Date) null, timezoneOffset);
        Date until = post.get("until", "").equals("now") ? new Date()
                : post.get("until", (Date) null, timezoneOffset);
        String selectby = post.get("selectby", "retrieval_next");
        ResultList<QueryEntry> queryList = new ResultList<>();

        if ((source.equals("all") || source.equals("query")) && query.length() >= 0) {
            long start = System.currentTimeMillis();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localqueries_time", System.currentTimeMillis() - start);
        }

        if (delete && local && queryList.size() > 0) {
            long start = System.currentTimeMillis();
            for (QueryEntry qe : queryList)
                DAO.deleteQuery(qe.getQuery(), qe.getSourceType());
            queryList.clear();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localquerydelete_time", System.currentTimeMillis() - start);
        }

        if (source.equals("all") || source.equals("geo")) {
            long start = System.currentTimeMillis();
            LinkedHashSet<String> suggestions = DAO.geoNames.suggest(query, count, 0);
            if (suggestions.size() < count && query.length() > 2)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 1));
            if (suggestions.size() < count && query.length() > 5)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 2));
            for (String s : suggestions) {
                QueryEntry qe = new QueryEntry(s, 0, Long.MAX_VALUE, SourceType.TWITTER, false);
                queryList.add(qe);
            }
            post.recordEvent("suggestionsquery_time", System.currentTimeMillis() - start);
        }

        long start = System.currentTimeMillis();
        post.setResponse(response, "application/javascript");

        List<Object> queries = new ArrayList<>();
        if (queryList != null)
            for (QueryEntry t : queryList)
                queries.add(t.toJSON().toMap());

        int random = post.get("random", -1);
        if (random > 0 && random < queries.size()) {
            // take the given number from the result list and use random to choose
            List<Object> random_queries = new ArrayList<>();
            Random r = new Random(System.currentTimeMillis());
            while (random-- > 0) {
                random_queries.add(queries.remove(r.nextInt(queries.size())));
                int shrink = Math.max(queries.size() / 2, random * 10);
                while (queries.size() > shrink)
                    queries.remove(queries.size() - 1); // prefer from top
            }
            queries = random_queries;
        }

        // generate json
        m = new JSONObject(true);
        JSONObject metadata = new JSONObject(true);
        metadata.put("count", queryList == null ? "0" : Integer.toString(queries.size()));
        metadata.put("hits", queryList.getHits());
        metadata.put("query", query);
        metadata.put("order", orders);
        metadata.put("orderby", orderby);
        if (since != null)
            metadata.put("since", AbstractObjectEntry.utcFormatter.print(since.getTime()));
        if (until != null)
            metadata.put("until", AbstractObjectEntry.utcFormatter.print(until.getTime()));
        if (since != null || until != null)
            metadata.put("selectby", selectby);
        metadata.put("client", post.getClientHost());
        m.put("search_metadata", metadata);

        m.put("queries", queries);
        post.recordEvent("postprocessing_time", System.currentTimeMillis() - start);
    }

    // write json
    response.setCharacterEncoding("UTF-8");
    PrintWriter sos = response.getWriter();
    if (jsonp)
        sos.print(callback + "(");
    sos.print(m.toString(minified ? 0 : 2));
    if (jsonp)
        sos.println(");");
    sos.println();
    post.finalize();
}

From source file:org.rapidcontext.core.type.WebService.java

/**
 * Returns the HTTP methods supported for the specified request.
 * The OPTIONS method is always supported and the HEAD method is
 * automatically added if GET is supported.
 *
 * @param request        the request to check
 *
 * @return the array of HTTP method names supported
 */
public String[] methods(Request request) {
    LinkedHashSet set = new LinkedHashSet();
    set.add(METHOD.OPTIONS);
    set.addAll(Arrays.asList(methodsImpl(request)));
    for (int i = 0; i < matchers.size(); i++) {
        WebMatcher m = (WebMatcher) matchers.get(i);
        if (m.method() != null && m.match(request) > 0) {
            set.add(m.method());
        }
    }
    if (set.contains(METHOD.GET)) {
        set.add(METHOD.HEAD);
    }
    return (String[]) set.toArray(new String[set.size()]);
}
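
The example above uses a raw LinkedHashSet, but the pattern itself is simple: seed the set with fixed entries, pour in a whole collection with addAll, then convert to an array. A generic, self-contained sketch of that pattern follows; SupportedMethodsDemo and the plain string method names stand in for the RapidContext METHOD constants, WebMatcher and Request types, which are not reproduced here:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class SupportedMethodsDemo {
    // Builds an ordered, duplicate-free array of HTTP method names (illustrative sketch).
    static String[] methods(List<String> implemented) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.add("OPTIONS");          // always supported
        set.addAll(implemented);     // each method is added once, in order
        if (set.contains("GET")) {
            set.add("HEAD");         // HEAD follows automatically from GET
        }
        return set.toArray(new String[0]);
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(methods(Arrays.asList("GET", "POST", "GET"))));
        // [OPTIONS, GET, POST, HEAD]
    }
}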

From source file:com.geewhiz.pacify.Replacer.java

public LinkedHashSet<Defect> doReplacement(EntityManager entityManager) {
    LinkedHashSet<Defect> defects = new LinkedHashSet<Defect>();
    for (PMarker pMarker : entityManager.getPMarkers()) {
        logger.info("   Processing Marker File [{}],", pMarker.getFile().getAbsolutePath());
        FilterManager filterManager = new FilterManager(propertyResolveManager, pMarker);
        defects.addAll(filterManager.doFilter());
    }
    return defects;
}

From source file:org.search.niem.uml.qvt.NiemQvtLibrary.java

@Operation(contextual = true, kind = Operation.Kind.QUERY)
public static LinkedHashSet<Package> getPimConstraintModels(final Package self) {
    final LinkedHashSet<Package> pimConstraintModels = new LinkedHashSet<>();
    for (final Dependency d : self.getClientDependencies()) {
        if (UMLPackage.Literals.REALIZATION.isInstance(d) && getAppliedReferencesStereotype(d) != null) {
            pimConstraintModels
                    .addAll(EcoreUtil.<Package>getObjectsByType(d.getClients(), UMLPackage.Literals.PACKAGE));
        }
    }
    return pimConstraintModels;
}

From source file:de.hybris.platform.classification.impl.DefaultClassificationClassesResolverStrategy.java

@Override
public List<ClassAttributeAssignmentModel> getAllClassAttributeAssignments(
        final Set<ClassificationClassModel> classificationClasses) {
    if (classificationClasses != null && !classificationClasses.isEmpty()) {
        final LinkedHashSet<ClassAttributeAssignmentModel> list = new LinkedHashSet<ClassAttributeAssignmentModel>();
        for (final ClassificationClassModel ccl : classificationClasses) {
            list.addAll(ccl.getAllClassificationAttributeAssignments());
        }
        return new ArrayList(list);
    } else {
        return Collections.EMPTY_LIST;
    }
}

From source file:org.openflexo.foundation.sg.implmodel.TechnologyModuleDefinition.java

/**
 * Recursively retrieve all modules required by this module, including itself.
 *
 * @return the retrieved required modules.
 */
public Set<TechnologyModuleDefinition> getAllRequiredModules() {

    Map<Integer, LinkedHashSet<TechnologyModuleDefinition>> requiredModules = getAllRequiredModulesByLevel();

    LinkedHashSet<TechnologyModuleDefinition> result = new LinkedHashSet<TechnologyModuleDefinition>();
    for (LinkedHashSet<TechnologyModuleDefinition> set : requiredModules.values()) {
        result.addAll(set);
    }

    return result;
}