Example usage for java.util Set retainAll

List of usage examples for java.util Set retainAll

Introduction

On this page you can find examples showing how to use java.util.Set.retainAll.

Prototype

boolean retainAll(Collection<?> c);

Source Link

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).

Usage

From source file:HSqlManager.java

/**
 * Computes the primers common to every phage in each cluster of the strain
 * that contains the phage named "xkcd", and batch-inserts them into
 * Primerdb.Primers.
 *
 * <p>For each cluster, the primer CSV of the first member phage seeds a set
 * which is then intersected with every other member's CSV via
 * {@link java.util.Set#retainAll(java.util.Collection)}; only primers common
 * to all members survive and get inserted (in batches of 1000).
 *
 * @param bps        primer length in base pairs; selects which CSV files to read
 * @param connection open HSQLDB connection; auto-commit is disabled here and
 *                   commits are issued per batch
 * @throws SQLException on database errors outside the per-cluster handler
 * @throws IOException  if a primer CSV file cannot be read
 * @deprecated kept for reference; superseded by newer initialization code
 */
@SuppressWarnings("Duplicates")
@Deprecated
private static void mycoCommonInitialize(int bps, Connection connection) throws SQLException, IOException {
    long time = System.currentTimeMillis();
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    // Disable transaction logging while bulk-loading for speed.
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    String strain = "";
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
        // The strain of interest is the one containing the phage named "xkcd".
        if (r[2].equals("xkcd")) {
            strain = r[0];
        }
    }
    call.close();
    final String x = strain;
    Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet());
    // FIX: the original populated a plain HashMap from parallelStream().forEach(),
    // which is a data race — HashMap is not thread-safe and concurrent puts can
    // corrupt its internal structure. Populate the map sequentially instead.
    Map<String, List<String>> clusters = new HashMap<>();
    for (String cluster : clust) {
        clusters.put(cluster, phages.stream()
                .filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2])
                .collect(Collectors.toList()));
    }
    for (String z : clusters.keySet()) {
        try {
            List<String> clustphages = clusters.get(z);
            // Seed with the first phage's primers, then intersect with each
            // remaining phage so only the common primers survive.
            Set<String> primers = Collections.synchronizedSet(
                    CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
            clustphages.remove(0);
            for (String phage : clustphages) {
                primers.retainAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
            }
            int i = 0;
            for (String a : primers) {
                try {
                    st.setInt(1, bps);
                    st.setString(2, a);
                    st.setString(3, x);
                    st.setString(4, z);
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + z);
                }
                i++;
                // Flush every 1000 rows to bound batch memory.
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
            // Flush the final, partial batch.
            if (i > 0) {
                st.executeBatch();
                db.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("Error occurred at " + x + " " + z);
        }
        System.out.println(z);
    }
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
    // Elapsed time in minutes.
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
}

From source file:netdecoder.NetDecoder.java

/**
 * Runs the differential edge analysis between a control network and a
 * disease/condition network: builds a per-edge flow matrix, writes it for the
 * R-side key-edge selection, saves the full networks and flow summaries, then
 * prioritizes key targets (sinks), network routers, and paths, and finally
 * assembles and launches the combined R script.
 *
 * <p>Command-line options consumed: {@code out}, {@code f}, {@code ncp},
 * {@code ndp}, {@code control}, {@code condition}, {@code corThreshold},
 * {@code ratioThreshold}, {@code top}, {@code g}, and the flag
 * {@code overlap}.
 *
 * @throws IOException if a serialized network or output file cannot be
 *                     read or written
 */
public void edgeAnalysis() throws IOException, NoSuchFieldException, SecurityException,
        IllegalArgumentException, IllegalAccessException {
    String dir = cmd.getOptionValue("out");
    String filename = cmd.getOptionValue("f");
    String ncp = cmd.getOptionValue("ncp");
    String ndp = cmd.getOptionValue("ndp");
    String control = cmd.getOptionValue("control");
    String condition = cmd.getOptionValue("condition");
    Double corThreshold = Double.valueOf(cmd.getOptionValue("corThreshold"));
    Double ratioThreshold = Double.valueOf(cmd.getOptionValue("ratioThreshold"));
    Integer top = Integer.valueOf(cmd.getOptionValue("top"));
    RJava rJava = new RJava();

    String geneListFile = cmd.getOptionValue("g");
    Set<String> genesPaths = null;
    // FIX: guard against a missing -g option (getOptionValue returns null),
    // which previously threw a NullPointerException on the equals call.
    if (geneListFile != null && !geneListFile.equals("none")) {
        genesPaths = new LinkedHashSet<>(NetworkFlow.getGenes(geneListFile));
    }
    List<String> controlNetworkPaths = Serialization.deserialize(ncp, List.class);
    List<String> diseaseNetworkPaths = Serialization.deserialize(ndp, List.class);
    Map<String, Node> controlNetwork = createNetworkFromPaths(controlNetworkPaths);
    Map<String, Node> diseaseNetwork = createNetworkFromPaths(diseaseNetworkPaths);

    Map<String, Map<String, Double>> flowInNetworks = getFlowInNetworks(controlNetwork, diseaseNetwork);
    Map<String, Double> totalFlowsDisease = NetDecoderUtils.getTotalFlow(diseaseNetwork);
    Map<String, Double> totalFlowsControl = NetDecoderUtils.getTotalFlow(controlNetwork);

    Set<Edge> cEdges = getAllEdges(controlNetwork);
    Set<Edge> dEdges = getAllEdges(diseaseNetwork);
    Map<String, Map<String, Double>> flowMatrix = new LinkedHashMap<>();

    // With -overlap, restrict the analysis to edges present in BOTH networks
    // (intersection); otherwise analyze the union of all edges.
    if (!cmd.hasOption("overlap")) {
        dEdges.addAll(cEdges);
    } else {
        dEdges.retainAll(cEdges);
    }
    Map<String, Double> xxControl = new LinkedHashMap<>();
    Map<String, Double> xxDisease = new LinkedHashMap<>();
    for (Edge e : dEdges) {
        Edge eControl = NetDecoderUtils.getEdge(controlNetwork, e);
        Edge eDisease = NetDecoderUtils.getEdge(diseaseNetwork, e);
        // An edge missing from one network contributes zero flow on that side.
        if (eControl != null && eDisease != null) {
            xxControl.put(e.toString(), eControl.getFlow());
            xxDisease.put(e.toString(), eDisease.getFlow());
        } else if (eControl != null) {
            xxControl.put(e.toString(), eControl.getFlow());
            xxDisease.put(e.toString(), 0.0);
        } else if (eDisease != null) {
            xxControl.put(e.toString(), 0.0);
            xxDisease.put(e.toString(), eDisease.getFlow());
        }
    }
    flowMatrix.put(control, xxControl);
    flowMatrix.put(condition, xxDisease);

    // Create the per-condition output folder.
    String path2create = dir + condition + "/";
    new File(path2create).mkdirs();

    Map<String, Map<String, Double>> aux = changeMapping(flowMatrix);
    String name = path2create + filename + "_flowMatrix";
    saveFlowMatrix(aux, control, condition, name + ".txt");

    List<String> Rscripts = new ArrayList<>();
    // Kept for its side effect of writing the barplot script; the returned
    // path is intentionally not added to Rscripts.
    String barplot = rJava.createBarplotScript(path2create, name, condition, corThreshold, ratioThreshold,
            filename);

    // Give the R side time to materialize the key-edges file before loading it.
    try {
        TimeUnit.SECONDS.sleep(5);
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
    }
    // Load key edges flow matrix for differential edge-centered network enrichment.
    String keyEdgesFile = path2create + filename + "_keyEdges.txt";
    System.out.println(keyEdgesFile);
    Set<Edge> keyEdges = loadKeyEdges(keyEdgesFile);

    // ========== save full networks
    NetDecoderUtils.savePaths(NetDecoderUtils.convertString2Paths(diseaseNetworkPaths),
            condition + "/" + filename + "_ALL_paths_disease.txt");
    NetDecoderUtils.savePaths(NetDecoderUtils.convertString2Paths(controlNetworkPaths),
            condition + "/" + filename + "_ALL_paths_control.txt");
    Map<String, Double> controlDFNdiff = getFlowDifference(flowInNetworks, controlNetwork.keySet());
    Map<String, Double> diseaseDFNdiff = getFlowDifference(flowInNetworks, diseaseNetwork.keySet());
    Map<String, Double> controlDFNtotal = getTotalFlowInProteins(totalFlowsControl, controlNetwork.keySet());
    Map<String, Double> diseaseDFNtotal = getTotalFlowInProteins(totalFlowsDisease, diseaseNetwork.keySet());
    // NOTE(review): this write is prefixed with "dir" while every sibling
    // write below is relative to the working directory — confirm intended.
    NetDecoderUtils.saveFlows(controlDFNtotal,
            dir + "/" + condition + "/" + filename + "_FULL_totalFlow_Control.txt");
    NetDecoderUtils.saveFlows(diseaseDFNtotal, condition + "/" + filename + "_FULL_totalFlow_Disease.txt");
    NetDecoderUtils.saveFlows(controlDFNdiff, condition + "/" + filename + "_FULL_flowDifference_Control.txt");
    NetDecoderUtils.saveFlows(diseaseDFNdiff, condition + "/" + filename + "_FULL_flowDifference_Disease.txt");
    NetDecoderUtils.saveGeneList(controlNetwork, condition + "/" + filename + "_FULL_GENES_Control.txt");
    NetDecoderUtils.saveGeneList(diseaseNetwork, condition + "/" + filename + "_FULL_GENES_Disease.txt");
    (new NetworkFlow()).saveSubNet(controlNetwork, condition + "/" + filename + "_FULL_Control.txt");
    (new NetworkFlow()).saveSubNet(diseaseNetwork, condition + "/" + filename + "_FULL_Disease.txt");

    String gmlControl = rJava.createScriptExportGML(condition + "/" + filename + "_FULL_Control",
            condition + "/" + filename + "_FULL_totalFlow_Control",
            condition + "/" + filename + "_FULL_flowDifference_Control");
    String gmlDisease = rJava.createScriptExportGML(condition + "/" + filename + "_FULL_Disease",
            condition + "/" + filename + "_FULL_totalFlow_Disease",
            condition + "/" + filename + "_FULL_flowDifference_Disease");
    Rscripts.add(gmlControl);
    Rscripts.add(gmlDisease);
    // ===========
    // Extract the paths that contain the key edges.
    Map<Edge, List<Path>> diseaseEdges2Path = NetDecoderUtils.getPathsFromEdges(diseaseNetworkPaths, keyEdges);
    Map<Edge, List<Path>> controlEdges2Path = NetDecoderUtils.getPathsFromEdges(controlNetworkPaths, keyEdges);

    String name_2 = path2create + filename;
    NetDecoderUtils.comparePaths(diseaseEdges2Path, controlEdges2Path, totalFlowsDisease, totalFlowsControl,
            flowInNetworks, rJava, condition, genesPaths, name_2, path2create, Rscripts);

    System.out.println("Saving key targets");
    Set<String> sinks = prioritizeSinks(controlNetwork, diseaseNetwork, condition, top, rJava, name_2,
            Rscripts);
    System.out.println("Saving Network routers");
    Set<String> hidden = prioritizeHiddenProteins(controlNetwork, diseaseNetwork, condition, top, rJava, name_2,
            Rscripts);
    Set<String> IPgenes = computeImpactScore(controlNetwork, diseaseNetwork).keySet();
    prioritizePaths(controlNetworkPaths, diseaseNetworkPaths, totalFlowsDisease, flowInNetworks, sinks, hidden,
            IPgenes, rJava, name_2, Rscripts);

    String scriptName = filename + "_NETWORK_ROUTERS_combinedScript" + ".R";
    NetDecoderUtils.combineFiles(Rscripts, scriptName);
    try {
        // FIX: Runtime.exec(String) tokenizes the command naively; an explicit
        // argument list via ProcessBuilder is the safe, recommended form.
        new ProcessBuilder("R", "--slave", "--no-save", "CMD", "BATCH", scriptName).start();
    } catch (IOException io) {
        System.out.println(io.getMessage());
    }
    // Compute and rank genes by impact score.
    impactScore(path2create);
}

From source file:com.aurel.track.accessControl.AccessBeans.java

/**
 * Gets the item type limitations for a project, set either directly on the
 * project or inherited from its ancestor projects.
 *
 * <p>A {@code null} element inside a limitation set means at least one role
 * has no item type limitation at all; in that case the set is returned
 * immediately without intersecting it with the selected item types.
 *
 * @param projectID the project whose limitations are resolved; may be {@code null}
 * @param selectedItemTypeIDsSet the item types selected in the filter; may be
 *        {@code null} or empty (then no intersection is applied)
 * @param projectIssueTypeMap maps a project to its role-restricted item type
 *        IDs; may be {@code null}
 * @param childToParentProjectIDMap maps each child project to its parent,
 *        used to recurse up the project hierarchy
 * @return the restricted item type IDs for the project (possibly merged with
 *         inherited ones), or {@code null} when neither the project nor any
 *         ancestor has a role
 */
public static Set<Integer> getItemTypeLimitations(Integer projectID, Set<Integer> selectedItemTypeIDsSet,
        Map<Integer, Set<Integer>> projectIssueTypeMap, Map<Integer, Integer> childToParentProjectIDMap) {
    if (projectIssueTypeMap == null || projectID == null) {
        return null;
    }
    String projectLabel = null;
    // Resolve the label only for debug logging; the lookup is not free.
    if (LOGGER.isDebugEnabled()) {
        projectLabel = LookupContainer.getNotLocalizedLabelBeanLabel(SystemFields.INTEGER_PROJECT, projectID);
    }
    // get the Set for list-types for project:
    Set<Integer> roleRestrictedItemTypeIDSet = projectIssueTypeMap.get(projectID);
    if (roleRestrictedItemTypeIDSet != null) {
        if (roleRestrictedItemTypeIDSet.contains(null)) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(theProject + projectLabel + " has role without item type limitation");
            }
            // no limitation on item types not needed to look at the parent
            // projects' roles
            return roleRestrictedItemTypeIDSet;
        } else {
            Set<Integer> parentItemTypeLimitations = null;
            Integer parentProjectID = childToParentProjectIDMap.get(projectID);
            String parentProjectLabel = null;
            if (parentProjectID != null) {
                // Recurse into the parent first: an unlimited parent role
                // short-circuits any local limitation.
                parentItemTypeLimitations = getItemTypeLimitations(parentProjectID, selectedItemTypeIDsSet,
                        projectIssueTypeMap, childToParentProjectIDMap);
                if (LOGGER.isDebugEnabled()) {
                    parentProjectLabel = LookupContainer
                            .getNotLocalizedLabelBeanLabel(SystemFields.INTEGER_PROJECT, parentProjectID);
                }
                if (parentItemTypeLimitations != null && parentItemTypeLimitations.contains(null)) {
                    // NOTE(review): this debug call is not guarded by
                    // isDebugEnabled(), so parentProjectLabel may print as
                    // "null" when debug is off at the check above.
                    LOGGER.debug("The parent project " + parentProjectLabel
                            + " has role without item type limitation");
                    return parentItemTypeLimitations;
                }
            }
            if (selectedItemTypeIDsSet == null || selectedItemTypeIDsSet.isEmpty()) {
                LOGGER.debug(theProject + projectLabel
                        + " has role with item type limitation (no selected item type in filter)");
                if (parentItemTypeLimitations != null) {
                    // No filter selection: merge the inherited limitations in.
                    roleRestrictedItemTypeIDSet.addAll(parentItemTypeLimitations);
                    LOGGER.debug(theProject + projectLabel
                            + " inherits role with item type limitation (no selected item type in filter) from parent project "
                            + parentProjectLabel);
                }
                return roleRestrictedItemTypeIDSet;
            } else {
                // clone the roleRestrictedItemTypeIDSet because retainAll
                // modifies the set
                Set<Integer> intersection = new HashSet<Integer>(roleRestrictedItemTypeIDSet);
                LOGGER.debug(theProject + projectLabel
                        + " has role with item type limitation (with selected item type in filter)");
                intersection.retainAll(selectedItemTypeIDsSet);
                if (parentItemTypeLimitations != null) {
                    LOGGER.debug(theProject + projectLabel
                            + " inherits role with item type limitation (with selected item type in filter) from parent project "
                            + parentProjectLabel);
                    intersection.addAll(parentItemTypeLimitations);
                }
                return intersection;
            }
        }
    } else {
        LOGGER.debug(theProject + projectLabel + " has no direct role ");
        // No direct role on this project: fall back entirely to the parent.
        Integer parentProjectID = childToParentProjectIDMap.get(projectID);
        if (parentProjectID != null) {
            Set<Integer> parentItemTypeLimitations = getItemTypeLimitations(parentProjectID,
                    selectedItemTypeIDsSet, projectIssueTypeMap, childToParentProjectIDMap);
            if (parentItemTypeLimitations != null) {
                String parentProjectLabel = null;
                if (LOGGER.isDebugEnabled()) {
                    parentProjectLabel = LookupContainer
                            .getNotLocalizedLabelBeanLabel(SystemFields.INTEGER_PROJECT, parentProjectID);
                }
                if (parentItemTypeLimitations.contains(null)) {
                    LOGGER.debug("The parent project " + parentProjectLabel
                            + " has role without item type limitation");
                    return parentItemTypeLimitations;
                } else {
                    LOGGER.debug(theProject + projectLabel
                            + " inherits role  with item type limitation from parent project "
                            + parentProjectLabel);
                    return parentItemTypeLimitations;
                }
            }
        }
        return null;
    }
}

From source file:org.jahia.services.query.QueryModifierAndOptimizerVisitor.java

/**
 * Determines the primary node type shared by all children of the given
 * parent node, and reduces {@code commonNodeTypes} (via
 * {@link java.util.Set#retainAll(java.util.Collection)}) to the node types
 * common to every child.
 *
 * <p>Parents with 100 or more children are not inspected (too expensive);
 * in that case {@code null} is returned and the set is left as passed in.
 *
 * @param parentPath      path of the parent node to inspect
 * @param commonNodeTypes out-parameter: intersected in place with each
 *                        distinct child's node types
 * @return the primary node type name common to all children, or {@code null}
 *         when the children differ (or there are none / too many)
 * @throws RepositoryException if the parent node cannot be read
 */
private String getCommonChildNodeTypes(String parentPath, Set<String> commonNodeTypes)
        throws RepositoryException {
    String commonPrimaryType = null;
    JCRNodeWrapper node = session.getNode(parentPath);
    Set<String> checkedPrimaryTypes = new HashSet<String>();
    if (node.hasNodes()) {
        NodeIterator children = node.getNodes();
        if (children.getSize() < 100) {
            while (children.hasNext()) {

                JCRNodeWrapper child = (JCRNodeWrapper) children.nextNode();
                if (commonPrimaryType == null && commonNodeTypes.isEmpty()) {
                    // First child seeds both the candidate primary type and
                    // the working set of common node types.
                    commonPrimaryType = child.getPrimaryNodeType().getName();
                    commonNodeTypes.addAll(child.getNodeTypes());
                } else if (commonPrimaryType != null
                        && !child.getPrimaryNodeType().getName().equals(commonPrimaryType)) {
                    // FIX: the condition was inverted — a child with the SAME
                    // primary type used to clear the candidate. Only a child
                    // with a DIFFERENT primary type disqualifies the common
                    // type.
                    commonPrimaryType = null;
                }
                // Intersect the node types once per distinct primary type;
                // repeated types cannot shrink the set further.
                if (!checkedPrimaryTypes.contains(child.getPrimaryNodeType().getName())) {
                    checkedPrimaryTypes.add(child.getPrimaryNodeType().getName());
                    commonNodeTypes.retainAll(child.getNodeTypes());
                }
            }
        }
    }
    return commonPrimaryType;
}

From source file:ezbake.data.mongo.EzMongoHandler.java

/**
 * Verifies that the caller's Platform object authorizations permit inserting
 * the new mongo document.
 *
 * <p>The document's "write" platform visibilities are intersected with the
 * user's platform auths via {@link java.util.Set#retainAll(java.util.Collection)};
 * if the document declares write visibilities and none survive the
 * intersection, the insert is rejected.
 *
 * <p>NOTE(review): on the non-driver path, {@code retainAll} mutates the set
 * held inside {@code vis} in place — confirm no caller relies on that set
 * after this call.
 *
 * @param security   the caller's security token (source of platform auths)
 * @param vis        the visibility markings (read when {@code fromDriver} is false)
 * @param newObject  the document being inserted (read when {@code fromDriver} is true)
 * @param fromDriver whether the write visibility is taken from the raw DBObject
 * @throws EzMongoBaseException if the user lacks the required platform auths
 */
private void validatePlatformVisibilitiesForInsertingNewDBObject(EzSecurityToken security, Visibility vis,
        DBObject newObject, boolean fromDriver) throws EzMongoBaseException {
    // to further check if we can insert the new mongo doc,
    // get the intersection of user's Platform auths with the doc's "Write" visibilities
    Set<Long> platformAuths = security.getAuthorizations().getPlatformObjectAuthorizations();
    Set<Long> platformViz = null;
    if (fromDriver) {
        platformViz = (Set<Long>) newObject.get(RedactHelper.PLATFORM_OBJECT_WRITE_VISIBILITY_FIELD);
    } else {
        if (vis.isSetAdvancedMarkings() && vis.getAdvancedMarkings().isSetPlatformObjectVisibility()) {
            platformViz = vis.getAdvancedMarkings().getPlatformObjectVisibility()
                    .getPlatformObjectWriteVisibility();
        }
    }

    // Keep a copy of the declared visibilities for the error message below,
    // because retainAll destroys the original contents.
    Set<Long> origPlatformViz = null;
    if (platformAuths != null && platformViz != null) {
        origPlatformViz = new HashSet<Long>(platformViz);
        platformViz.retainAll(platformAuths);
    }

    // No declared visibilities means the insert is unrestricted; otherwise at
    // least one declared visibility must match the user's auths.
    final boolean canInsertViz = platformViz == null || platformViz.size() > 0;
    if (!canInsertViz) {
        // reset the platformViz to the original value for logging purposes
        newObject.put(RedactHelper.PLATFORM_OBJECT_WRITE_VISIBILITY_FIELD, origPlatformViz);
        final String message = "User does not have all the required Platform auths to insert: " + platformAuths
                + ", platformViz needed: " + origPlatformViz;
        appLog.error(message);
        throw new EzMongoBaseException(message);
    }
}

From source file:de.uni_leipzig.simba.memorymanagement.Index.graphclustering.EdgeGreedyClustering.java

/**
 * Greedily groups the graph's edges into clusters whose total node weight
 * does not exceed {@code maxSize}, growing each cluster along edges that
 * emanate from its seed edge.
 *
 * @param g       the graph to cluster
 * @param maxSize maximum total node weight per cluster; automatically raised
 *                when a single edge's endpoints already exceed it
 * @return map from cluster id to cluster
 */
@Override
public Map<Integer, Cluster> cluster(Graph g, int maxSize) {

    // FIX: "e1.getWeight() - e2.getWeight()" can overflow int for large
    // magnitudes; Integer.compare yields the same ascending order safely.
    // NOTE(review): the ascending sort means sortedEdges.get(0) below is the
    // LIGHTEST edge, although the original comments said "largest" — order
    // preserved as-is; confirm which is intended.
    Comparator<Edge> comparator = new Comparator<Edge>() {
        public int compare(Edge e1, Edge e2) {
            return Integer.compare(e1.getWeight(), e2.getWeight());
        }
    };

    // All edges of the graph are initially available for clustering.
    Set<Edge> availableEdges = new HashSet<>();
    int count = 0;
    for (Edge e : g.getAllEdges()) {
        availableEdges.add(e);
    }
    Map<Integer, Cluster> result = new HashMap<>();
    Cluster cluster;
    while (!availableEdges.isEmpty()) {
        int clusterSize;
        Set<Edge> emanatingEdges = new HashSet<>();
        // Pick the first edge in sort order to seed a new cluster.
        List<Edge> sortedEdges = new ArrayList<>(availableEdges);
        Collections.sort(sortedEdges, comparator);
        Edge e = sortedEdges.get(0);

        cluster = new Cluster(count);
        result.put(count, cluster);
        count++;

        cluster.nodes.add(e.source);
        cluster.nodes.add(e.target);
        cluster.edges.add(e);
        clusterSize = e.source.getWeight() + e.target.getWeight();
        // A single edge heavier than maxSize would make scheduling
        // impossible; relax the bound instead of failing.
        if (clusterSize > maxSize) {
            System.err.println("Basic condition for scheduling broken. Updating max size to " + clusterSize);
            maxSize = clusterSize;
        }
        availableEdges.remove(e);

        // Absorb every remaining edge whose endpoints are both already in the
        // cluster, so it is not considered again.
        Set<Edge> toRemove = new HashSet<>();
        for (Edge e2 : availableEdges) {
            if (cluster.nodes.contains(e2.source) && cluster.nodes.contains(e2.target)) {
                cluster.edges.add(e2);
                toRemove.add(e2);
            }
        }
        availableEdges.removeAll(toRemove);

        boolean stop = false;

        // Grow the cluster along emanating edges until the weight budget is
        // reached or no admissible edge remains.
        while (clusterSize < maxSize && !stop) {

            // Edges leaving the seed edge's endpoints that are still available.
            emanatingEdges.addAll(g.getEdges(e.source));
            emanatingEdges.addAll(g.getEdges(e.target));
            emanatingEdges.retainAll(availableEdges);

            Node n;

            if (!emanatingEdges.isEmpty()) {
                stop = true;

                for (Edge em : emanatingEdges) {
                    // Identify the endpoint not yet in the cluster.
                    if (cluster.nodes.contains(em.source)) {
                        n = em.target;
                    } else {
                        n = em.source;
                    }
                    // Admit the edge only if its outside node fits the budget.
                    if (clusterSize + n.getWeight() <= maxSize) {
                        availableEdges.remove(em);
                        cluster.edges.add(em);

                        // Again absorb edges now fully contained in the cluster.
                        toRemove = new HashSet<>();
                        for (Edge e2 : availableEdges) {
                            if (cluster.nodes.contains(e2.source) && cluster.nodes.contains(e2.target)) {
                                cluster.edges.add(e2);
                                toRemove.add(e2);
                            }
                        }
                        availableEdges.removeAll(toRemove);

                        if (!cluster.nodes.contains(n)) {
                            clusterSize = clusterSize + n.getWeight();
                            cluster.nodes.add(n);
                        }
                        stop = false;
                        break;
                    }
                }

            } else {
                stop = true;
                break;
            }
        }
    }
    return result;
}

From source file:org.tdar.core.service.resource.ResourceService.java

/**
 * Given a collection of hibernate-managed beans (the 'current' collection) and another collection of
 * transient beans (the 'incoming' collection), updates the current collection to match the contents of
 * the incoming collection. This method will associate all elements in the incoming collection with the
 * specified resource. Contents of both collections should satisfy the HasResource interface.
 *
 * @param resource the 'owner' of the elements in the incoming collection; this method will associate
 *        all elements of the incoming collection with this resource
 * @param shouldSave if true, this method will persist elements of the incoming collection
 * @param validateMethod determines what validation steps (if any) to perform on each element of the
 *        incoming collection
 * @param incoming_ the incoming collection of HasResource elements; may be {@code null}
 * @param current the current collection of HasResource elements; modified in place to contain the same
 *        elements as the incoming collection
 * @param cls type of the collection elements (used for logging only)
 */
@Transactional
public <H extends HasResource<R>, R extends Resource> void saveHasResources(R resource, boolean shouldSave,
        ErrorHandling validateMethod, Collection<H> incoming_, Set<H> current, Class<H> cls) {
    if (CollectionUtils.isEmpty(incoming_) && CollectionUtils.isEmpty(current)) {
        // skip a complete no-op
        return;
    }

    if (incoming_ == null) {
        incoming_ = new ArrayList<H>();
    }
    Collection<H> incoming = incoming_;
    // there are cases where current and incoming_ are the same object, if that's the case
    // then we need to copy incoming_ before
    if ((incoming_ == current) && !CollectionUtils.isEmpty(incoming_)) {
        incoming = new ArrayList<H>();
        incoming.addAll(incoming_);
        current.clear();
    }

    // assume everything that's incoming is valid or deduped and tied back into tDAR entities/beans
    logger.debug("Current Collection of {}s ({}) : {} ",
            new Object[] { cls.getSimpleName(), current.size(), current });

    /*
     * Because we're using ID for the equality and hashCode, we have no way to avoid deleting everything and re-adding it.
     * This is an issue as what'll end up happening otherwise is something like editing a Date results in no persisted change because the
     * "retainAll" below keeps the older version
     */

    // Drop every current element not present in the incoming collection
    // (equality is by ID, per the note above).
    current.retainAll(incoming);
    Map<Long, H> idMap = PersistableUtils.createIdMap(current);
    if (CollectionUtils.isNotEmpty(incoming)) {
        logger.debug("Incoming Collection of {}s ({})  : {} ",
                new Object[] { cls.getSimpleName(), incoming.size(), incoming });
        Iterator<H> incomingIterator = incoming.iterator();
        while (incomingIterator.hasNext()) {
            H hasResource_ = incomingIterator.next();

            if (hasResource_ != null) {

                // attach the incoming notes to a hibernate session
                logger.trace("adding {} to {} ", hasResource_, current);
                H existing = idMap.get(hasResource_.getId());
                /*
                 * If we're not transient, compare the two beans on all of their local properties (non-recursive) -- if there are differences
                 * copy. otherwise, move on. Question -- it may be more work to compare than to just "copy"... is it worth it?
                 */
                if (PersistableUtils.isNotNullOrTransient(existing)
                        && !EqualsBuilder.reflectionEquals(existing, hasResource_)) {
                    try {
                        logger.trace("copying bean properties for entry in existing set");
                        BeanUtils.copyProperties(existing, hasResource_);
                    } catch (Exception e) {
                        logger.error("exception setting bean property", e);
                    }
                }

                // Optionally validate; invalid elements are skipped (or abort
                // the whole call when VALIDATE_WITH_EXCEPTION is requested).
                if (validateMethod != ErrorHandling.NO_VALIDATION) {
                    boolean isValid = false;
                    if (hasResource_ instanceof ResourceCreator) {
                        isValid = ((ResourceCreator) hasResource_).isValidForResource(resource);
                    } else {
                        isValid = hasResource_.isValid();
                    }

                    if (!isValid) {
                        logger.debug("skipping: {} - INVALID", hasResource_);
                        if (validateMethod == ErrorHandling.VALIDATE_WITH_EXCEPTION) {
                            throw new TdarRecoverableRuntimeException(hasResource_ + " is not valid");
                        }
                        continue;
                    }
                }

                current.add(hasResource_);

                // if (shouldSave) {
                // getGenericDao().saveOrUpdate(hasResource_);
                // }
            }
        }
    }
    logger.debug("Resulting Collection of {}s ({}) : {} ",
            new Object[] { cls.getSimpleName(), current.size(), current });
}

From source file:ezbake.data.mongo.EzMongoHandler.java

/**
 * Verifies that the caller may "Manage" an existing DBObject by intersecting the
 * user's platform object authorizations with the document's "Manage" visibilities.
 * Access is granted when no manage visibility is set, or when the intersection is
 * non-empty; otherwise an exception is thrown.
 *
 * @param security       security token carrying the caller's platform object authorizations
 * @param vis            caller-supplied visibility (consulted only when {@code fromDriver} is false)
 * @param existingObject the DBObject already stored in mongo
 * @param fromDriver     true to read the manage visibilities from the stored object instead of {@code vis}
 * @throws EzMongoBaseException if the caller lacks the required platform auths to manage the object
 */
private void validatePlatformVisibilitiesForManagingExistingDBObject(EzSecurityToken security, Visibility vis,
        DBObject existingObject, boolean fromDriver) throws EzMongoBaseException {
    // To check if we can "Manage", intersect the user's platform auths
    // with the doc's "Manage" visibilities.
    Set<Long> platformAuths = security.getAuthorizations().getPlatformObjectAuthorizations();
    Set<Long> platformViz = null;
    if (fromDriver) {
        // mongodb has this field saved as a List - convert it to Set.
        List manageVizList = (List) existingObject.get(RedactHelper.PLATFORM_OBJECT_MANAGE_VISIBILITY_FIELD);
        if (manageVizList != null) {
            platformViz = new HashSet<Long>(manageVizList);
        }
    } else {
        if (vis.isSetAdvancedMarkings() && vis.getAdvancedMarkings().isSetPlatformObjectVisibility()) {
            platformViz = vis.getAdvancedMarkings().getPlatformObjectVisibility()
                    .getPlatformObjectManageVisibility();
        }
    }

    // Snapshot the original manage visibilities BEFORE the destructive retainAll,
    // so the failure path below can log/restore the true original value even when
    // platformAuths is null (previously origPlatformViz stayed null in that case).
    Set<Long> origPlatformViz = null;
    if (platformViz != null) {
        origPlatformViz = new HashSet<Long>(platformViz);
        if (platformAuths != null) {
            platformViz.retainAll(platformAuths);
        }
    }

    // No manage visibility set means anyone may manage; otherwise the intersection
    // must be non-empty.
    final boolean canManageViz = platformViz == null || platformViz.size() > 0;
    if (!canManageViz) {
        // reset the platformViz to the original value for logging purposes
        existingObject.put(RedactHelper.PLATFORM_OBJECT_MANAGE_VISIBILITY_FIELD, origPlatformViz);
        final String message = "User does not have all the required Platform auths to manage the existing DBObject: "
                + platformAuths + ", platformViz needed: " + origPlatformViz;
        appLog.error(message);
        throw new EzMongoBaseException(message);
    }
}

From source file:org.openmrs.api.impl.AdministrationServiceImpl.java

/**
 * @see AdministrationService#getSearchLocales()
 */
@Override
@Transactional(readOnly = true)
public List<Locale> getSearchLocales() throws APIException {
    // LinkedHashSet preserves insertion order while de-duplicating.
    Set<Locale> searchLocales = new LinkedHashSet<Locale>();

    // Start with the user's current full locale, then its language-only variant.
    Locale contextLocale = Context.getLocale();
    searchLocales.add(contextLocale);
    searchLocales.add(new Locale(contextLocale.getLanguage()));

    // Include any locales the authenticated user is proficient in.
    User authenticatedUser = Context.getAuthenticatedUser();
    if (authenticatedUser != null) {
        List<Locale> proficientLocales = authenticatedUser.getProficientLocales();
        if (proficientLocales != null) {
            searchLocales.addAll(proficientLocales);
        }
    }

    // Restrict the result to the administratively allowed locales; each allowed
    // locale also admits its language-only form.
    List<Locale> allowedLocales = Context.getAdministrationService().getAllowedLocales();
    if (allowedLocales != null) {
        Set<Locale> permitted = new HashSet<Locale>();
        for (Locale allowed : allowedLocales) {
            permitted.add(allowed);
            permitted.add(new Locale(allowed.getLanguage()));
        }
        searchLocales.retainAll(permitted);
    }

    return new ArrayList<Locale>(searchLocales);
}

From source file:org.tolven.config.model.CredentialManager.java

/**
 * Synchronizes the truststore described by {@code trustStoreDetail} with the configured
 * certificate groups: the store file is rewritten only when its current contents differ
 * from the configured set of certificates. Supports PEM, JKS and PKCS12 formats.
 *
 * @param trustStoreDetail configuration describing the truststore file, format and certificates
 * @throws RuntimeException if a referenced certificate group does not exist, the format is
 *                          unrecognized, or any I/O / keystore operation fails
 */
public void processTrustStore(TrustStoreDetail trustStoreDetail) {
    try {
        // Certificates the configuration says the truststore SHOULD contain.
        Set<X509Certificate> newTrustStoreCerts = new HashSet<X509Certificate>();
        // Certificates currently present in the truststore file on disk (if any).
        Set<X509Certificate> previousTrustStoreCerts = new HashSet<X509Certificate>();
        // Working copy of the on-disk certs, reduced below to (previous ∩ new)
        // to decide whether a rewrite is needed.
        Set<X509Certificate> resultingTrustStoreCerts = new HashSet<X509Certificate>();
        for (TrustStoreCertificateDetail trustStoreCertificateDetail : trustStoreDetail.getCertificate()) {
            CertificateGroupDetail certGroup = getTolvenConfigWrapper()
                    .getCredentialGroup(trustStoreCertificateDetail.getRefId());
            if (certGroup == null) {
                throw new RuntimeException("The trusted group " + trustStoreCertificateDetail.getRefId()
                        + " in truststore " + trustStoreDetail.getId() + " does not exist");
            }
            X509Certificate trustStoreX509Certificate = getTolvenConfigWrapper().getX509Certificate(certGroup);
            newTrustStoreCerts.add(trustStoreX509Certificate);
        }
        File trustStoreFile = new File(trustStoreDetail.getSource());
        if (TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_PEM.equals(trustStoreDetail.getFormat())) {
            if (trustStoreFile.exists()) {
                previousTrustStoreCerts = getTolvenConfigWrapper().getX509Certificates(trustStoreFile);
                for (X509Certificate cert : previousTrustStoreCerts) {
                    resultingTrustStoreCerts.add(cert);
                }
            }
            // Set intersection: keep only certs present both on disk and in the new config.
            resultingTrustStoreCerts.retainAll(newTrustStoreCerts);
            // Rewrite only when the on-disk set differs from the configured set.
            if (resultingTrustStoreCerts.size() != newTrustStoreCerts.size()
                    || !resultingTrustStoreCerts.containsAll(newTrustStoreCerts)) {
                FileOutputStream out = null;
                try {
                    // Overwrites the file: only the configured certs survive the rewrite.
                    out = new FileOutputStream(trustStoreFile);
                    for (X509Certificate x509Certificate : newTrustStoreCerts) {
                        out.write(convertToPEMBytes(x509Certificate));
                    }
                } finally {
                    if (out != null) {
                        out.close();
                    }
                }
                logger.info("Created truststore: " + trustStoreDetail.getId());
            }
        } else if (TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_JKS.equals(trustStoreDetail.getFormat())
                || TolvenConfigWrapper.TOLVEN_CREDENTIAL_FORMAT_PKCS12.equals(trustStoreDetail.getFormat())) {
            char[] truststorepass = getPasswordHolder().getPassword(trustStoreDetail.getId());
            if (trustStoreFile.exists()) {
                // Collect every certificate currently stored under any alias.
                KeyStore trustStore = getTolvenConfigWrapper().getKeyStore(truststorepass, trustStoreFile,
                        trustStoreDetail.getFormat());
                Enumeration<String> enumeration = trustStore.aliases();
                while (enumeration.hasMoreElements()) {
                    String alias = enumeration.nextElement();
                    X509Certificate cert = (X509Certificate) trustStore.getCertificate(alias);
                    previousTrustStoreCerts.add(cert);
                    resultingTrustStoreCerts.add(cert);
                }
            }
            // Set intersection: keep only certs present both on disk and in the new config.
            resultingTrustStoreCerts.retainAll(newTrustStoreCerts);
            // Rebuild the keystore from scratch only when the contents differ.
            if (resultingTrustStoreCerts.size() != newTrustStoreCerts.size()
                    || !resultingTrustStoreCerts.containsAll(newTrustStoreCerts)) {
                KeyStore trustStore = KeyStore.getInstance(trustStoreDetail.getFormat());
                // load(null, ...) initializes a new, empty keystore in memory.
                trustStore.load(null, truststorepass);
                for (X509Certificate newCert : newTrustStoreCerts) {
                    // Subject DN serves as the alias; certs sharing a DN would overwrite
                    // each other here — NOTE(review): confirm DNs are unique per group.
                    String alias = newCert.getSubjectDN().getName();
                    trustStore.setCertificateEntry(alias, newCert);
                }
                trustStoreFile.getParentFile().mkdirs();
                write(trustStore, trustStoreFile, truststorepass);
                logger.info("Created truststore: " + trustStoreDetail.getId());
            }
        } else {
            throw new RuntimeException("Unrecognized keystore format: " + trustStoreDetail.getFormat());
        }
    } catch (Exception ex) {
        throw new RuntimeException("Failed to process truststore: " + trustStoreDetail.getId(), ex);
    }
}