Example usage for java.util Set containsAll

List of usage examples for java.util Set containsAll

Introduction

On this page you can find usage examples for java.util Set containsAll.

Prototype

boolean containsAll(Collection<?> c);

Document

Returns true if this set contains all of the elements of the specified collection.
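
For a quick feel of the subset semantics, here is a minimal standalone sketch (class and element names are invented for the demo):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ContainsAllDemo {
    public static void main(String[] args) {
        Set<String> all = new HashSet<>(Arrays.asList("a", "b", "c"));
        Set<String> some = new HashSet<>(Arrays.asList("a", "c"));

        System.out.println(all.containsAll(some)); // true: every element of 'some' is in 'all'
        System.out.println(some.containsAll(all)); // false: 'some' is missing "b"

        // containsAll accepts any Collection, not just a Set:
        System.out.println(all.containsAll(Arrays.asList("b", "b", "c"))); // true: duplicates are irrelevant
    }
}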

Usage

From source file:beast.evolution.tree.SimpleRandomTree.java

public void doTheWork() {
    // find taxon sets we are dealing with
    taxonSets = new ArrayList<>();
    m_bounds = new ArrayList<>();
    distributions = new ArrayList<>();
    taxonSetIDs = new ArrayList<>();
    List<Boolean> onParent = new ArrayList<>();
    lastMonophyletic = 0;

    if (taxaInput.get() != null) {
        sTaxa.addAll(taxaInput.get().getTaxaNames());
    } else {
        sTaxa.addAll(m_taxonset.get().asStringList());
    }

    // pick up constraints from outputs, m_initial input tree and output tree, if any
    List<MRCAPrior> calibrations = new ArrayList<MRCAPrior>();
    calibrations.addAll(calibrationsInput.get());

    // pick up constraints in m_initial tree
    for (final Object plugin : getOutputs()) {
        if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) {
            calibrations.add((MRCAPrior) plugin);
        }
    }

    if (m_initial.get() != null) {
        for (final Object plugin : m_initial.get().getOutputs()) {
            if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) {
                calibrations.add((MRCAPrior) plugin);
            }
        }
    }

    for (final MRCAPrior prior : calibrations) {
        final TaxonSet taxonSet = prior.taxonsetInput.get();
        if (taxonSet != null && !prior.onlyUseTipsInput.get()) {
            final Set<String> bTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            for (final String sTaxonID : taxonSet.asStringList()) {

                if (!sTaxa.contains(sTaxonID)) {
                    throw new IllegalArgumentException(
                            "Taxon <" + sTaxonID + "> could not be found in list of taxa. Choose one of "
                                    + Arrays.toString(sTaxa.toArray(new String[sTaxa.size()])));
                }
                bTaxa.add(sTaxonID);
            }
            final ParametricDistribution distr = prior.distInput.get();
            final Bound bounds = new Bound();
            if (distr != null) {
                List<BEASTInterface> plugins = new ArrayList<>();
                distr.getPredecessors(plugins);
                for (int i = plugins.size() - 1; i >= 0; i--) {
                    plugins.get(i).initAndValidate();
                }
                try {
                    final double offset = distr.offsetInput.get();
                    bounds.lower = Math.max(distr.inverseCumulativeProbability(0.0) + offset, 0.0);
                    bounds.upper = distr.inverseCumulativeProbability(1.0) + offset;
                    assert bounds.lower <= bounds.upper;
                } catch (MathException e) {
                    Log.warning
                            .println("Could not set bounds in SimpleRandomTree::doTheWork : " + e.getMessage());
                }
            }

            if (prior.isMonophyleticInput.get() || bTaxa.size() == 1) {
                // add any monophyletic constraint
                boolean isDuplicate = false;
                for (int k = 0; k < lastMonophyletic; ++k) {
                    // assert prior.useOriginateInput.get().equals(onParent.get(k)) == (prior.useOriginateInput.get() == onParent.get(k));
                    if (bTaxa.size() == taxonSets.get(k).size() && bTaxa.equals(taxonSets.get(k))
                            && prior.useOriginateInput.get().equals(onParent.get(k))) {
                        if (distr != null) {
                            if (distributions.get(k) == null) {
                                distributions.set(k, distr);
                                m_bounds.set(k, bounds);
                                taxonSetIDs.set(k, prior.getID());
                            }
                        }
                        isDuplicate = true;
                    }
                }
                if (!isDuplicate) {
                    taxonSets.add(lastMonophyletic, bTaxa);
                    distributions.add(lastMonophyletic, distr);
                    onParent.add(lastMonophyletic, prior.useOriginateInput.get());
                    m_bounds.add(lastMonophyletic, bounds);
                    taxonSetIDs.add(lastMonophyletic, prior.getID());
                    lastMonophyletic++;
                }
            } else {
                // only calibrations with finite bounds are added
                if (!Double.isInfinite(bounds.lower) || !Double.isInfinite(bounds.upper)) {
                    taxonSets.add(bTaxa);
                    distributions.add(distr);
                    m_bounds.add(bounds);
                    taxonSetIDs.add(prior.getID());
                    onParent.add(prior.useOriginateInput.get());
                }
            }
        }
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean i_in_j = tj.containsAll(ti);
                boolean j_in_i = ti.containsAll(tj);
                if (i_in_j || j_in_i) {
                    boolean ok = true;
                    if (i_in_j && j_in_i) {
                        ok = (boolean) (onParent.get(i)) != (boolean) onParent.get(j);
                    }
                    assert ok : "" + i + ' ' + j + ' ' + ' ' + taxonSetIDs.get(i) + ' ' + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    // assume all calibration constraints are Monophyletic
    // TODO: verify that this is a reasonable assumption
    lastMonophyletic = taxonSets.size();

    // sort constraints in increasing set inclusion order, i.e. such that if taxon set i is subset of taxon set j, then i < j
    for (int i = 0; i < lastMonophyletic; i++) {
        for (int j = i + 1; j < lastMonophyletic; j++) {

            final Set<String> taxai = taxonSets.get(i);
            final Set<String> taxaj = taxonSets.get(j);
            Set<String> intersection = new LinkedHashSet<>(taxai);
            intersection.retainAll(taxaj);

            if (intersection.size() > 0) {
                final boolean bIsSubset = taxai.containsAll(taxaj);
                final boolean bIsSubset2 = taxaj.containsAll(taxai);
                // sanity check: make sure either
                // o taxonset1 is subset of taxonset2 OR
                // o taxonset1 is superset of taxonset2 OR
                // o taxonset1 does not intersect taxonset2
                if (!(bIsSubset || bIsSubset2)) {
                    throw new IllegalArgumentException(
                            "333: Don't know how to generate a Random Tree for taxon sets that intersect, "
                                    + "but are not inclusive. Taxonset "
                                    + (taxonSetIDs.get(i) == null ? taxai : taxonSetIDs.get(i)) + " and "
                                    + (taxonSetIDs.get(j) == null ? taxaj : taxonSetIDs.get(j)));
                }
                // swap i & j if taxon set j is a proper subset of taxon set i; if equal, sub-sort on 'useOriginate'
                if (bIsSubset && (!bIsSubset2 || (onParent.get(i) && !onParent.get(j)))) {
                    swap(taxonSets, i, j);
                    swap(distributions, i, j);
                    swap(m_bounds, i, j);
                    swap(taxonSetIDs, i, j);
                    swap(onParent, i, j);
                }
            }
        }
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean ok = tj.containsAll(ti);
                if (ok) {
                    ok = !tj.equals(ti) || (!onParent.get(i) && onParent.get(j));
                    assert ok : "" + i + ' ' + j + ' ' + tj.equals(ti) + ' ' + taxonSetIDs.get(i) + ' '
                            + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    for (int i = 0; i < lastMonophyletic; i++) {
        if (onParent.get(i)) {
            // make sure it is after constraint on node itself, if such exists
            assert (!(i + 1 < lastMonophyletic && taxonSets.get(i).equals(taxonSets.get(i + 1))
                    && onParent.get(i) && !onParent.get(i + 1)));
            // find something to attach to ....
            // find the enclosing clade, if any. Pick a non-intersecting clade inside it without an onParent constraint,
            // or one whose onParent constraint overlaps.
            final Set<String> iTaxa = taxonSets.get(i);
            int j = i + 1;
            Set<String> enclosingTaxa = sTaxa;
            {
                String someTaxon = iTaxa.iterator().next();
                for (/**/; j < lastMonophyletic; j++) {
                    if (taxonSets.get(j).contains(someTaxon)) {
                        enclosingTaxa = taxonSets.get(j);
                        break;
                    }
                }
            }
            final int enclosingIndex = j; // == lastMonophyletic when no enclosing clade was found
            Set<String> candidates = new HashSet<>(enclosingTaxa);
            candidates.removeAll(iTaxa);
            Set<Integer> candidateClades = new HashSet<>(5);
            List<String> canTaxa = new ArrayList<>();
            for (String c : candidates) {
                for (int k = enclosingIndex - 1; k >= 0; --k) {
                    if (taxonSets.get(k).contains(c)) {
                        if (!candidateClades.contains(k)) {
                            if (onParent.get(k)) {
                                if (!intersecting(m_bounds.get(k), m_bounds.get(i))) {
                                    break;
                                }
                            } else {
                                if (!(m_bounds.get(k).lower <= m_bounds.get(i).lower)) {
                                    break;
                                }
                            }
                            candidateClades.add(k);
                        }
                        break;
                    }
                    if (k == 0) {
                        canTaxa.add(c);
                    }
                }
            }

            final int sz1 = canTaxa.size();
            final int sz2 = candidateClades.size();

            if (sz1 + sz2 == 0 && i + 1 == enclosingIndex) {
                final Bound ebound = m_bounds.get(enclosingIndex);
                ebound.restrict(m_bounds.get(i));
            } else {
                assert sz1 + sz2 > 0;
                // prefer taxa over clades (less chance of clades useOriginate clashing)
                final int k = Randomizer.nextInt(sz1 > 0 ? sz1 : sz2);
                Set<String> connectTo;
                int insertPoint;
                if (k < sz1) {
                    // from taxa
                    connectTo = new HashSet<>(1);
                    connectTo.add(canTaxa.get(k));
                    insertPoint = i + 1;
                } else {
                    // from clade
                    final Iterator<Integer> it = candidateClades.iterator();
                    for (j = 0; j < k - sz1; ++j) { // skip to the chosen clade index (k - sz1)
                        it.next();
                    }
                    insertPoint = it.next();
                    connectTo = new HashSet<>(taxonSets.get(insertPoint));
                    insertPoint = Math.max(insertPoint, i) + 1;
                }

                final HashSet<String> cc = new HashSet<String>(connectTo);

                connectTo.addAll(taxonSets.get(i));
                if (!connectTo.equals(enclosingTaxa) || enclosingTaxa == sTaxa) { // equal when clade already exists

                    taxonSets.add(insertPoint, connectTo);
                    distributions.add(insertPoint, distributions.get(i));
                    onParent.add(insertPoint, false);
                    m_bounds.add(insertPoint, m_bounds.get(i));
                    final String tid = taxonSetIDs.get(i);
                    taxonSetIDs.add(insertPoint, tid);
                    lastMonophyletic += 1;
                } else {
                    // we lose distribution i :(
                    final Bound ebound = m_bounds.get(enclosingIndex);
                    ebound.restrict(m_bounds.get(i));
                }
            }
            // clear out constraint i; it has been folded into the structure above
            taxonSets.set(i, new HashSet<>());
            distributions.set(i, null);
            m_bounds.set(i, new Bound());
            final String tid = taxonSetIDs.get(i);
            if (tid != null) {
                taxonSetIDs.set(i, "was-" + tid);
            }
        }
    }

    {
        int icur = 0;
        for (int i = 0; i < lastMonophyletic; ++i, ++icur) {
            final Set<String> ti = taxonSets.get(i);
            if (ti.isEmpty()) {
                icur -= 1;
            } else {
                if (icur < i) {
                    taxonSets.set(icur, taxonSets.get(i));
                    distributions.set(icur, distributions.get(i));
                    m_bounds.set(icur, m_bounds.get(i));
                    taxonSetIDs.set(icur, taxonSetIDs.get(i));
                    onParent.set(icur, onParent.get(i));
                }
            }
        }
        taxonSets.subList(icur, lastMonophyletic).clear();
        distributions.subList(icur, lastMonophyletic).clear();
        m_bounds.subList(icur, lastMonophyletic).clear();
        taxonSetIDs.subList(icur, lastMonophyletic).clear();
        onParent.subList(icur, lastMonophyletic).clear();

        lastMonophyletic = icur;
    }

    if (ICC) {
        for (int i = 0; i < lastMonophyletic; i++) {
            final Set<String> ti = taxonSets.get(i);
            for (int j = i + 1; j < lastMonophyletic; j++) {
                final Set<String> tj = taxonSets.get(j);
                boolean ok = tj.containsAll(ti);
                if (ok) {
                    ok = !tj.equals(ti) || (!onParent.get(i) && onParent.get(j));
                    assert ok : "" + i + ' ' + j + ' ' + taxonSetIDs.get(i) + ' ' + taxonSetIDs.get(j);
                } else {
                    Set<String> tmp = new HashSet<>(tj);
                    tmp.retainAll(ti);
                    assert tmp.isEmpty();
                }
            }
        }
    }

    // map parent child relationships between mono clades. nParent[i] is the immediate parent clade of i, if any. An immediate parent is the
    // smallest superset of i, children[i] is a list of all clades which have i as a parent.
    // The last one, standing for the virtual "root" of all monophyletic clades is not associated with any actual clade
    final int[] nParent = new int[lastMonophyletic];
    children = new List[lastMonophyletic + 1];
    for (int i = 0; i < lastMonophyletic + 1; i++) {
        children[i] = new ArrayList<>();
    }
    for (int i = 0; i < lastMonophyletic; i++) {
        int j = i + 1;
        while (j < lastMonophyletic && !taxonSets.get(j).containsAll(taxonSets.get(i))) {
            j++;
        }
        nParent[i] = j;
        children[j].add(i);
    }

    // make sure the upper bound of a child does not exceed the upper bound of its parent
    for (int i = lastMonophyletic - 1; i >= 0; --i) {
        if (nParent[i] < lastMonophyletic) {
            if (m_bounds.get(i).upper > m_bounds.get(nParent[i]).upper) {
                m_bounds.get(i).upper = m_bounds.get(nParent[i]).upper - 1e-100;
                assert m_bounds.get(i).lower <= m_bounds.get(i).upper : i;
            }
        }
    }

    nodeCount = 2 * sTaxa.size() - 1;
    boundPerNode = new Bound[nodeCount];
    distPerNode = new ParametricDistribution[nodeCount];

    buildTree(sTaxa);
    assert nextNodeNr == nodeCount : "" + nextNodeNr + ' ' + nodeCount;

    double bm = branchMeanInput.get();

    if (bm < 0) {
        double maxMean = 0;

        for (ParametricDistribution distr : distPerNode) {
            if (distr != null) {
                double m = distr.getMean();
                if (maxMean < m)
                    maxMean = m;
            }
        }
        if (maxMean > 0) {
            double s = 0;
            for (int i = 2; i <= nodeCount; ++i) {
                s += 1.0 / i;
            }
            bm = s / maxMean;
        }
    }

    double rate = 1 / (bm < 0 ? 1 : bm);
    boolean succ = false;
    int ntries = 6;
    final double epsi = 0.01 / rate;
    double clamp = 1 - clampInput.get();
    while (!succ && ntries > 0) {
        try {
            succ = setHeights(rate, false, epsi, clamp);
        } catch (ConstraintViolatedException e) {
            throw new RuntimeException("Constraint failed: " + e.getMessage());
        }
        --ntries;
        rate *= 2;
        clamp /= 2;
    }
    if (!succ) {
        try {
            succ = setHeights(rate, true, 0, 0);
        } catch (ConstraintViolatedException e) {
            throw new RuntimeException("Constraint failed: " + e.getMessage());
        }
    }
    assert succ;

    internalNodeCount = sTaxa.size() - 1;
    leafNodeCount = sTaxa.size();

    HashMap<String, Integer> taxonToNR = null;
    // preserve node numbers where possible
    if (m_initial.get() != null) {
        taxonToNR = new HashMap<>();
        for (Node n : m_initial.get().getExternalNodes()) {
            taxonToNR.put(n.getID(), n.getNr());
        }
    }
    // re-assign node numbers
    setNodesNrs(root, 0, new int[1], taxonToNR);

    initArrays();
}
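
The ICC consistency blocks above reduce to one invariant: any two taxon sets must be either nested or disjoint, and containsAll in both directions is the subset test that decides it. A standalone sketch of that invariant (names invented for the demo):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class InclusionCheck {
    /** True if a and b are nested (one contains the other) or disjoint. */
    static boolean nestedOrDisjoint(Set<String> a, Set<String> b) {
        if (a.containsAll(b) || b.containsAll(a)) {
            return true; // nested (or equal)
        }
        Set<String> overlap = new HashSet<>(a);
        overlap.retainAll(b);
        return overlap.isEmpty(); // disjoint
    }

    public static void main(String[] args) {
        Set<String> s1 = new HashSet<>(Arrays.asList("human", "chimp"));
        Set<String> s2 = new HashSet<>(Arrays.asList("human", "chimp", "gorilla"));
        Set<String> s3 = new HashSet<>(Arrays.asList("chimp", "gorilla"));
        System.out.println(nestedOrDisjoint(s1, s2)); // true: s1 is a subset of s2
        System.out.println(nestedOrDisjoint(s1, s3)); // false: they overlap but neither contains the other
    }
}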

From source file:net.yacy.document.TextParser.java

/**
 * find a parser for a given url and mime type
 * because mime types returned by web servers are sometimes wrong, we also compute the mime type again
 * from the extension that can be extracted from the url path. That means that there are 3 criteria
 * that can be used to select a parser:
 * - the given mime type (1.)
 * - the extension of the url (2.)
 * - the mime type computed from the extension (3.)
 * finally the generic parser is added as a backup if all of the above fail
 * @param url the given url
 * @param mimeType1 the given mime type
 * @return a list of Idiom parsers that may be appropriate for the given criteria
 * @throws Parser.Failure when the file extension or the MIME type is denied
 */
private static Set<Parser> parsers(final MultiProtocolURL url, String mimeType1) throws Parser.Failure {
    final Set<Parser> idioms = new LinkedHashSet<Parser>(2); // LinkedSet to maintain order (genericParser should be last)

    // check given mime type, place this first because this is the most likely to work and the best fit to the supplied mime
    Set<Parser> idiom;
    if (mimeType1 != null) {
        mimeType1 = normalizeMimeType(mimeType1);
        if (denyMime.containsKey(mimeType1))
            throw new Parser.Failure("mime type '" + mimeType1 + "' is denied (1)", url);
        idiom = mime2parser.get(mimeType1);
        if (idiom != null)
            idioms.addAll(idiom);
    }

    // check extension and add as backup (in case no, wrong or unknown/unsupported mime was supplied)
    String ext = MultiProtocolURL.getFileExtension(url.getFileName());
    if (ext != null && ext.length() > 0) {
        /* We do not throw an exception here when the media type is provided but inconsistent with the extension (if it is not supported, an exception has already been thrown).
         * Otherwise we would reject URLs with an apparently unsupported extension but whose actual Media Type is supported (for example text/html).
         * Notable example : wikimedia commons pages, such as https://commons.wikimedia.org/wiki/File:YaCy_logo.png */
        if (denyExtensionx.containsKey(ext) && (mimeType1 == null || mimeType1.equals(mimeOf(ext)))) {
            throw new Parser.Failure("file extension '" + ext + "' is denied (1)", url);
        }
        idiom = ext2parser.get(ext);
        if (idiom != null && !idioms.containsAll(idiom)) { // use containsAll -> idiom is a Set of parsers
            idioms.addAll(idiom);
        }
    }

    // check mime type computed from extension
    final String mimeType2 = ext2mime.get(ext);
    if (mimeType2 != null && (idiom = mime2parser.get(mimeType2)) != null && !idioms.containsAll(idiom)) { // use containsAll -> idiom is a Set of parsers
        idioms.addAll(idiom);
    }

    /* No matching idiom has been found : let's check if the media type ends with the "+xml" suffix so we can handle it with a generic XML parser 
     * (see RFC 7303 - Using '+xml' when Registering XML-Based Media Types : https://tools.ietf.org/html/rfc7303#section-4.2) */
    if (idioms.isEmpty() && mimeType1 != null && mimeType1.endsWith("+xml")) {
        idioms.add(genericXMLIdiom);
    }

    // always add the generic parser (make sure it is the last in access order)
    idioms.add(genericIdiom);
    //if (idioms.isEmpty()) throw new Parser.Failure("no parser found for extension '" + ext + "' and mime type '" + mimeType1 + "'", url);

    return idioms;
}
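
The !idioms.containsAll(idiom) guard in this example is a general "add only if something new would be contributed" idiom. Since adding a duplicate to a LinkedHashSet neither duplicates the element nor changes its position, the guard is essentially a micro-optimization. A reduced sketch of the pattern (parser names invented):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class AddIfNewDemo {
    public static void main(String[] args) {
        Set<String> idioms = new LinkedHashSet<>(Arrays.asList("htmlParser", "pdfParser"));
        Set<String> byExtension = new LinkedHashSet<>(Arrays.asList("htmlParser")); // already covered

        if (!idioms.containsAll(byExtension)) { // skip the addAll when it would add nothing
            idioms.addAll(byExtension);
        }
        System.out.println(idioms); // [htmlParser, pdfParser] -- unchanged, insertion order preserved
    }
}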

From source file:org.kuali.student.enrollment.class2.courseoffering.service.facade.CourseOfferingServiceFacadeImpl.java

@Override
public CourseOfferingAutogenIssue findAutogenIssuesByCourseOffering(String courseOfferingId,
        ContextInfo context) throws PermissionDeniedException, MissingParameterException,
        InvalidParameterException, OperationFailedException, DoesNotExistException {
    List<FormatOfferingInfo> fos = coService.getFormatOfferingsByCourseOffering(courseOfferingId, context);
    CourseOfferingAutogenIssue coIssue = new CourseOfferingAutogenIssue(courseOfferingId);
    for (FormatOfferingInfo fo : fos) {
        String foId = fo.getId();
        FormatOfferingAutogenIssue foIssue = new FormatOfferingAutogenIssue(foId);
        List<ActivityOfferingInfo> aoInfos = coService.getActivityOfferingsByFormatOffering(fo.getId(),
                context);
        Set<String> aoIdSet = new HashSet<String>();
        for (ActivityOfferingInfo ao : aoInfos) {
            aoIdSet.add(ao.getId());
        }
        // Find AOs without clusters
        List<ActivityOfferingInfo> aosWoClusters = coService
                .getActivityOfferingsWithoutClusterByFormatOffering(foId, context);
        // Gather only the IDs
        Set<String> aoIdsWoClusters = new HashSet<String>();
        for (ActivityOfferingInfo aoInfo : aosWoClusters) {
            aoIdsWoClusters.add(aoInfo.getId());
        }
        // Then create issues associated with it
        if (!aoIdsWoClusters.isEmpty()) {
            ActivityOfferingNotInAocSubissue aoNotInAoc = new ActivityOfferingNotInAocSubissue(courseOfferingId,
                    fo.getId());
            aoNotInAoc.getActivityOfferingIds().addAll(aoIdsWoClusters);
            foIssue.getSubIssues().add(aoNotInAoc); // Add the issue
        }
        // --------------------
        // Now verify RGs have correct AOs
        // First create a map
        List<Set<String>> aocAoIdList = new ArrayList<Set<String>>();
        List<ActivityOfferingClusterInfo> clusters = coService.getActivityOfferingClustersByFormatOffering(foId,
                context);
        Set<Set<String>> possibleRgAOIds = new HashSet<Set<String>>();
        Set<Set<String>> actualRgAOIds = new HashSet<Set<String>>();
        for (ActivityOfferingClusterInfo cluster : clusters) {
            Set<String> clusterAoIds = new HashSet<String>();
            for (ActivityOfferingSetInfo set : cluster.getActivityOfferingSets()) {
                clusterAoIds.addAll(set.getActivityOfferingIds());
            }
            aocAoIdList.add(clusterAoIds);
            Set<Set<String>> possibleRgAoIdsByAoc = _generatePotentialRgAoIdSets(cluster);
            // Add to list of possible RgAOIds for this format offering
            possibleRgAOIds.addAll(possibleRgAoIdsByAoc);
        }
        // Now go through the RGs to check for invalid ones
        List<RegistrationGroupInfo> rgInfos = coService.getRegistrationGroupsByFormatOffering(fo.getId(),
                context);
        for (RegistrationGroupInfo rg : rgInfos) {
            boolean found = false;
            List<String> rgAoIds = rg.getActivityOfferingIds();
            for (Set<String> aocAoIds : aocAoIdList) {
                if (aocAoIds.containsAll(rgAoIds)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                // No AOC contains this RG
                InvalidRegGroupSubissue rgIssue = new InvalidRegGroupSubissue(courseOfferingId, foId);
                foIssue.getSubIssues().add(rgIssue); // Add the issue
            } else {
                // Valid RG, store that info
                Set<String> rgAOIds = new HashSet<String>(rg.getActivityOfferingIds());
                actualRgAOIds.add(rgAOIds);
            }
        }
        // Now find RGs that should have been created, but weren't
        possibleRgAOIds.removeAll(actualRgAOIds);
        Set<Set<String>> missingRgAOIds // renaming to make it easier to see what's going on
                = new HashSet<Set<String>>(possibleRgAOIds);
        for (Set<String> rgAoIdSet : missingRgAOIds) {
            // Create an issue
            RegGroupNotGeneratedByAocSubissue subissue = new RegGroupNotGeneratedByAocSubissue(courseOfferingId,
                    foId);
            subissue.getActivityOfferingIds().addAll(rgAoIdSet);
            foIssue.getSubIssues().add(subissue);
        }
        if (!foIssue.getSubIssues().isEmpty()) {
            coIssue.getFormatOfferingIssues().add(foIssue);
        }
    }
    if (coIssue.getFormatOfferingIssues().isEmpty()) {
        return null;
    } else {
        return coIssue;
    }
}
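
The key check here is whether some cluster's AO-id set covers all of a registration group's activity offering ids; note that containsAll accepts any Collection, which is why the List rgAoIds can be passed directly. A reduced sketch (ids invented):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ClusterCoverDemo {
    /** True if at least one candidate set contains every id in 'required'. */
    static boolean coveredBySome(List<Set<String>> candidates, List<String> required) {
        for (Set<String> candidate : candidates) {
            if (candidate.containsAll(required)) { // containsAll takes any Collection, so a List is fine
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Set<String> cluster1 = new HashSet<>(Arrays.asList("ao1", "ao2"));
        Set<String> cluster2 = new HashSet<>(Arrays.asList("ao3"));
        List<Set<String>> clusters = Arrays.asList(cluster1, cluster2);
        System.out.println(coveredBySome(clusters, Arrays.asList("ao1", "ao2"))); // true
        System.out.println(coveredBySome(clusters, Arrays.asList("ao1", "ao3"))); // false: split across clusters
    }
}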

From source file:lu.lippmann.cdb.graph.GraphUtil.java

/**
 * Reorganizes the picked vertices of the given layout into a new sublayout.
 * @param layout the current layout
 * @param picked the set of picked vertices to reorganize
 */
public static Layout<CNode, CEdge> reorganize(Layout<CNode, CEdge> layout, Set<CNode> picked) {

    Layout<CNode, CEdge> subLayout = null;

    //create map that copy the transformer of existing layout
    final HashMap<CNode, CPoint> mapTransform1 = new LinkedHashMap<CNode, CPoint>();
    for (CNode v : layout.getGraph().getVertices()) {
        final Point2D tmp = layout.transform(v);
        mapTransform1.put(v, new CPoint(tmp.getX(), tmp.getY()));
    }
    //

    final Graph<CNode, CEdge> graph = layout.getGraph();

    final AggregateLayout<CNode, CEdge> clusteringLayout = new AggregateLayout<CNode, CEdge>(layout);

    // put the picked vertices into a new sublayout

    //final Set<CNode> picked = vv.getPickedVertexState().getPicked();
    if (picked != null && picked.size() >= 1) {

        final Point2D initCenter = GraphUtil.getCenter(picked, layout);

        final Graph<CNode, CEdge> subGraph = new DirectedSparseGraph<CNode, CEdge>();
        try {
            for (CNode vertex : picked) {
                subGraph.addVertex(vertex);
                final Collection<CEdge> incidentEdges = graph.getIncidentEdges(vertex);
                if (incidentEdges != null) {
                    for (final CEdge edge : incidentEdges) {
                        final Pair<CNode> endpoints = graph.getEndpoints(edge);
                        if (picked.containsAll(endpoints)) {
                            subGraph.addEdge(edge, endpoints.getFirst(), endpoints.getSecond());
                        }
                    }
                }
            }

            subLayout = buildMinimumSpanningForestLayout(subGraph);

            subLayout.setInitializer(layout);

            if (!(subLayout instanceof TreeLayout)) {
                subLayout.setSize(clusteringLayout.getSize());
            }

            final Point2D subGraphCenter = GraphUtil.getCenter(picked, subLayout);
            final Point2D subLayoutCenter = new Point2D.Double(subLayout.getSize().getWidth() / 2,
                    subLayout.getSize().getHeight() / 2);
            //System.out.println("Initial init center : " + initCenter);
            initCenter.setLocation(initCenter.getX() + (subLayoutCenter.getX() - subGraphCenter.getX()),
                    initCenter.getY() + (subLayoutCenter.getY() - subGraphCenter.getY()));
            //System.out.println("Corrected init center : " + initCenter);

            clusteringLayout.put(subLayout, initCenter);

            //create map that copy the transformer of new layout
            final HashMap<CNode, CPoint> mapTransform2 = new LinkedHashMap<CNode, CPoint>();
            for (CNode v : layout.getGraph().getVertices()) {
                final Point2D tmp = clusteringLayout.transform(v);
                mapTransform2.put(v, new CPoint(tmp.getX(), tmp.getY()));
            }
            //
            /** save the new layout and historize it !! */
            ((GraphWithOperations) layout.getGraph()).changeLayout(mapTransform1, mapTransform2);

        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return subLayout;
}
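
The picked.containsAll(endpoints) test keeps an edge only when both of its endpoints are picked, i.e. it selects the subgraph induced by the picked vertices; JUNG's Pair can be passed to containsAll directly because it implements Collection. The same filter in isolation, with types simplified (names invented):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class InducedEdgesDemo {
    public static void main(String[] args) {
        Set<String> picked = new HashSet<>(Arrays.asList("n1", "n2"));
        List<List<String>> edges = Arrays.asList(
                Arrays.asList("n1", "n2"),
                Arrays.asList("n2", "n3"));

        List<List<String>> induced = new ArrayList<>();
        for (List<String> endpoints : edges) {
            if (picked.containsAll(endpoints)) { // keep the edge only when both endpoints are picked
                induced.add(endpoints);
            }
        }
        System.out.println(induced); // [[n1, n2]]
    }
}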

From source file:org.apache.helix.manager.zk.ZKHelixAdmin.java

@Override
public void resetPartition(String clusterName, String instanceName, String resourceName,
        List<String> partitionNames) {
    HelixDataAccessor accessor = new ZKHelixDataAccessor(clusterName,
            new ZkBaseDataAccessor<ZNRecord>(_zkClient));
    Builder keyBuilder = accessor.keyBuilder();

    // check the instance is alive
    LiveInstance liveInstance = accessor.getProperty(keyBuilder.liveInstance(instanceName));
    if (liveInstance == null) {
        throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                + instanceName + ", because " + instanceName + " is not alive");
    }

    // check resource group exists
    IdealState idealState = accessor.getProperty(keyBuilder.idealStates(resourceName));
    if (idealState == null) {
        throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                + instanceName + ", because " + resourceName + " is not added");
    }

    // check partition exists in resource group
    Set<String> resetPartitionNames = new HashSet<String>(partitionNames);
    if (idealState.getRebalanceMode() == RebalanceMode.CUSTOMIZED) {
        Set<String> partitions = new HashSet<String>(idealState.getRecord().getMapFields().keySet());
        if (!partitions.containsAll(resetPartitionNames)) {
            throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                    + instanceName + ", because not all " + partitionNames + " exist");
        }
    } else {
        Set<String> partitions = new HashSet<String>(idealState.getRecord().getListFields().keySet());
        if (!partitions.containsAll(resetPartitionNames)) {
            throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                    + instanceName + ", because not all " + partitionNames + " exist");
        }
    }

    // check partition is in ERROR state
    String sessionId = liveInstance.getSessionId();
    CurrentState curState = accessor
            .getProperty(keyBuilder.currentState(instanceName, sessionId, resourceName));
    for (String partitionName : resetPartitionNames) {
        if (!curState.getState(partitionName).equals(HelixDefinedState.ERROR.toString())) {
            throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                    + instanceName + ", because not all " + partitionNames + " are in ERROR state");
        }
    }

    // check stateModelDef exists and get initial state
    String stateModelDef = idealState.getStateModelDefRef();
    StateModelDefinition stateModel = accessor.getProperty(keyBuilder.stateModelDef(stateModelDef));
    if (stateModel == null) {
        throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                + instanceName + ", because " + stateModelDef + " is NOT found");
    }

    // check that there are no pending messages for the partitions
    List<Message> messages = accessor.getChildValues(keyBuilder.messages(instanceName));
    for (Message message : messages) {
        if (!MessageType.STATE_TRANSITION.name().equalsIgnoreCase(message.getMsgType())
                || !sessionId.equals(message.getTgtSessionId())
                || !resourceName.equals(message.getResourceName())
                || !resetPartitionNames.contains(message.getPartitionName())) {
            continue;
        }

        throw new HelixException("Can't reset state for " + resourceName + "/" + partitionNames + " on "
                + instanceName + ", because a pending message exists: " + message);
    }

    String adminName = null;
    try {
        adminName = InetAddress.getLocalHost().getCanonicalHostName() + "-ADMIN";
    } catch (UnknownHostException e) {
        // can ignore it
        logger.info("Unable to get host name. Will set it to UNKNOWN, mostly ignorable", e);
        adminName = "UNKNOWN";
    }

    List<Message> resetMessages = new ArrayList<Message>();
    List<PropertyKey> messageKeys = new ArrayList<PropertyKey>();
    for (String partitionName : resetPartitionNames) {
        // send ERROR to initialState message
        String msgId = UUID.randomUUID().toString();
        Message message = new Message(MessageType.STATE_TRANSITION, msgId);
        message.setSrcName(adminName);
        message.setTgtName(instanceName);
        message.setMsgState(MessageState.NEW);
        message.setPartitionName(partitionName);
        message.setResourceName(resourceName);
        message.setTgtSessionId(sessionId);
        message.setStateModelDef(stateModelDef);
        message.setFromState(HelixDefinedState.ERROR.toString());
        message.setToState(stateModel.getInitialState());
        message.setStateModelFactoryName(idealState.getStateModelFactoryName());

        if (idealState.getResourceGroupName() != null) {
            message.setResourceGroupName(idealState.getResourceGroupName());
        }
        if (idealState.getInstanceGroupTag() != null) {
            message.setResourceTag(idealState.getInstanceGroupTag());
        }

        resetMessages.add(message);
        messageKeys.add(keyBuilder.message(instanceName, message.getId()));
    }

    accessor.setChildren(messageKeys, resetMessages);
}

From source file:com.github.venkateshamurthy.designpatterns.builders.FluentBuilders.java

private Set<Method> getWritableNormalMethods(final Class<?> thisPojoClass) throws NotFoundException {
    final CtClass ctClass = ctPool.get(thisPojoClass.getName());
    final Set<CtMethod> ctMethodSet = new LinkedHashSet<>(); // gets collected below
    final Set<Method> methodSet = new LinkedHashSet<>(); // gets collected below

    final Set<Class<?>> propTypes = getPropertyClassTypes(thisPojoClass, ctClass, ctMethodSet);

    for (Method method : thisPojoClass.getDeclaredMethods()) {
        if (method.isSynthetic()) {
            LOGGER.warning(method.getName() + " is synthetically added, so ignoring");
            continue;
        }
        if (Modifier.isPublic(method.getModifiers())
                && setMethodNamePattern.matcher(method.getName()).matches()) {
            methodSet.add(method);
        }
        final CtMethod ctMethod = ctClass.getDeclaredMethod(method.getName());
        if (Modifier.isPublic(method.getModifiers()) && setMethodNamePattern.matcher(method.getName()).matches()
                && !ctMethodSet.contains(ctMethod)) {

            // make sure the parameter types taken from the method really are property (field) types
            boolean isAdded = propTypes.containsAll(Arrays.asList(method.getParameterTypes()))
                    && ctMethodSet.add(ctMethod);
            if (!isAdded) {
                LOGGER.warning(method.getName() + " is not added");
            }
        }
    }
    return methodSet;
}

From source file:ddf.test.itests.catalog.TestFederation.java

private void verifyEvents(Set<String> metacardIdsExpected, Set<String> metacardIdsNotExpected,
        Set<String> subscriptionIds) {
    long millis = 0;

    boolean isAllEventsReceived = false;
    boolean isUnexpectedEventReceived = false;

    while (!isAllEventsReceived && !isUnexpectedEventReceived && millis < TimeUnit.MINUTES.toMillis(2)) {

        Set<String> foundIds = null;

        try {
            Thread.sleep(EVENT_UPDATE_WAIT_INTERVAL);
            millis += EVENT_UPDATE_WAIT_INTERVAL;
        } catch (InterruptedException e) {
            LOGGER.info("Interrupted exception while trying to sleep for events", e);
        }
        if ((millis % 1000) == 0) {
            LOGGER.info("Waiting for events to be received...{}ms", millis);
        }
        for (String id : subscriptionIds) {
            foundIds = getEvents(id);
            isAllEventsReceived = foundIds.containsAll(metacardIdsExpected);

            isUnexpectedEventReceived = foundIds.removeAll(metacardIdsNotExpected);
        }
    }
    assertTrue(isAllEventsReceived);
    assertFalse(isUnexpectedEventReceived);
}
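
Note the pairing in the loop above: containsAll is a pure query that leaves the receiver untouched, while removeAll mutates the set and returns whether anything was removed, which is how the test flags an unexpected event. In isolation (ids invented):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class QueryVsMutateDemo {
    public static void main(String[] args) {
        Set<String> found = new HashSet<>(Arrays.asList("id1", "id2", "id3"));
        Set<String> expected = new HashSet<>(Arrays.asList("id1", "id2"));
        Set<String> notExpected = new HashSet<>(Arrays.asList("id3"));

        System.out.println(found.containsAll(expected)); // true; 'found' is not modified
        boolean sawUnexpected = found.removeAll(notExpected); // true: the set changed
        System.out.println(sawUnexpected + " " + found); // true [id1, id2] (iteration order may vary)
    }
}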

From source file:org.openmrs.EncounterTest.java

/**
 * @see Encounter#getProvidersByRole(EncounterRole)
 * @verifies return providers for role
 */
@Test
public void getProvidersByRole_shouldReturnProvidersForRole() throws Exception {
    //given
    Encounter encounter = new Encounter();
    EncounterRole role = new EncounterRole();

    Provider provider = new Provider();
    encounter.addProvider(role, provider);

    Provider provider2 = new Provider();
    encounter.addProvider(role, provider2);

    EncounterRole role2 = new EncounterRole();
    Provider provider3 = new Provider();
    encounter.addProvider(role2, provider3);

    //when
    Set<Provider> providers = encounter.getProvidersByRole(role);

    //then
    Assert.assertEquals(2, providers.size());
    Assert.assertTrue(providers.containsAll(Arrays.asList(provider, provider2)));
}

From source file:dk.netarkivet.harvester.indexserver.distribute.IndexRequestClient.java

/**
 * Check that the reply message is valid.
 * @param jobSet The requested set of jobs
 * @param msg The message received
 * @throws ArgumentNotValid On wrong parameters in replied message.
 * @throws IOFailure on trouble in communication or invalid reply types.
 * @throws IllegalState if message is not OK.
 */
private void checkMessageValid(Set<Long> jobSet, NetarkivetMessage msg)
        throws IllegalState, IOFailure, ArgumentNotValid {
    //Read and check reply
    if (msg == null) {
        throw new IOFailure(
                "Timeout waiting for reply of index request " + "for jobs " + StringUtils.conjoin(",", jobSet));
    }
    if (!msg.isOk()) {
        throw new IllegalState("Reply message not ok. Message is: '" + msg.getErrMsg()
                + "' in index request for jobs " + StringUtils.conjoin(",", jobSet));
    }
    if (!(msg instanceof IndexRequestMessage)) {
        throw new IOFailure("Unexpected type of reply message: '" + msg.getClass().getName()
                + "' in index request for jobs " + StringUtils.conjoin(",", jobSet));
    }
    IndexRequestMessage reply = (IndexRequestMessage) msg;
    Set<Long> foundJobs = reply.getFoundJobs();
    if (foundJobs == null) {
        throw new ArgumentNotValid("Missing parameter foundjobs in reply to" + " index request for jobs "
                + StringUtils.conjoin(",", jobSet));
    }

    //FoundJobs should always be a subset
    if (!jobSet.containsAll(foundJobs)) {
        throw new ArgumentNotValid("foundJobs is not a subset of requested " + "jobs. Requested: "
                + StringUtils.conjoin(",", jobSet) + ". Found: " + StringUtils.conjoin(",", foundJobs));
    }

    if (jobSet.equals(foundJobs)) {
        //Files should only be present if jobSet=foundJobs
        if (reply.isIndexIsStoredInDirectory()) {
            List<RemoteFile> files;
            files = reply.getResultFiles();
            if (files == null) {
                throw new ArgumentNotValid("Missing files in reply to" + " index request for jobs "
                        + StringUtils.conjoin(",", jobSet));
            }
        } else {
            RemoteFile file = reply.getResultFile();
            if (file == null) {
                throw new ArgumentNotValid("Missing file in reply to" + " index request for jobs "
                        + StringUtils.conjoin(",", jobSet));
            }
        }
    }
}

From source file:org.apache.rave.opensocial.service.impl.DefaultAppDataService.java

/**
 * Updates app data for the specified user and group with the new values.
 *
 * @param userId  The user
 * @param groupId The group
 * @param appId   The application ID
 * @param fields  The fields to update.  Empty set implies that all fields that should be persisted have been
 *                provided in the values map (completely replace current appData with new data).  A key in the
 *                fields set without a corresponding key in the values map implies a delete of that field.
 *                A key in the values map not present in the fields set is a bad request.
 * @param values  The values to set
 * @param token   The security token
 * @return an error if one occurs
 */
@Override
public Future<Void> updatePersonData(UserId userId, GroupId groupId, String appId, Set<String> fields,
        Map<String, Object> values, SecurityToken token) throws ProtocolException {
    //make sure the request conforms to the OpenSocial visibility rules
    String personId = validateWriteRequest(userId, groupId, appId, token);

    //lock on this user and this application to avoid any potential concurrency issues
    Lock lock = getApplicationDataLock(personId, appId);
    try {
        lock.lock();
        //get the application data for this user and application
        ApplicationData applicationData = applicationDataRepository.getApplicationData(personId, appId);

        //if there is no data, create an empty object to store the data in that we'll save when we're done
        if (applicationData == null) {
            applicationData = new ApplicationDataImpl(null, personId, appId, new HashMap<String, Object>());
        }

        //if the fields parameter is empty, we can just use the values map directly since this is a full update
        if (fields == null || fields.size() == 0) {
            applicationData.setData(values);
        }
        //if there are keys in the values map that aren't in the fields set, it's a bad request
        else if (!fields.containsAll(values.keySet())) {
            throw new ProtocolException(HttpServletResponse.SC_BAD_REQUEST,
                    "Fields parameter must either be empty or contain keys "
                            + "for all name value pairs sent in request.");
        }
        //we have a partial update - we know that the fields set contains keys for all the entries in the values
        //map (due to the check above), so we can just enumerate over it now to finish our work.  So we want to remove
        //any fields found in the fields set that are not found in the values map and update the rest.
        else {
            Map<String, Object> data = applicationData.getData();
            for (String field : fields) {
                //if this field is not in the values map, it's a delete
                if (!values.containsKey(field)) {
                    data.remove(field);
                } else {
                    //it's an update
                    data.put(field, values.get(field));
                }
            }
        }

        //save our changes and return
        applicationDataRepository.save(applicationData);
    } finally {
        lock.unlock();
        lockService.returnLock(lock);
    }
    return Futures.immediateFuture(null);
}
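
The fields.containsAll(values.keySet()) check enforces the contract spelled out in the javadoc: every key in values must also appear in fields. A reduced sketch of the three cases, update, delete, and bad request (field names invented):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class PartialUpdateDemo {
    public static void main(String[] args) {
        Set<String> fields = new HashSet<>(Arrays.asList("color", "size"));
        Map<String, Object> values = new HashMap<>();
        values.put("color", "blue"); // present in both fields and values -> update
        // "size" is in fields but absent from values -> delete
        // a key in values that is missing from fields would fail the check below

        if (!fields.containsAll(values.keySet())) {
            throw new IllegalArgumentException("values contains keys not listed in fields");
        }

        Map<String, Object> data = new HashMap<>();
        data.put("color", "red");
        data.put("size", "L");
        for (String field : fields) {
            if (values.containsKey(field)) {
                data.put(field, values.get(field)); // update
            } else {
                data.remove(field); // delete
            }
        }
        System.out.println(data); // {color=blue}
    }
}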