Example usage for java.util BitSet clone

List of usage examples for java.util BitSet clone

Introduction

In this page you can find the example usage for java.util BitSet clone.

Prototype

public Object clone() 

Source Link

Document

Cloning this BitSet produces a new BitSet that is equal to it.

Usage

From source file:edu.udo.scaffoldhunter.model.db.StringProperty.java

/**
 * This will generate a {@link PropertyType}.BitFingerprint.
 * /*w w  w. j  a  va2s . c  o m*/
 * Precondition: Only for PropertyType BitFingerprint!
 * 
 * @param bits
 *            the bits in form of a {@link BitSet}
 * @param length
 *            the length as a {@link BitSet} does not save the exact length.
 */
public void setBitFingerprint(BitSet bits, short length) {
    checkBitFingerprint();
    Preconditions.checkArgument(length < Math.pow(2, 16), "length exceeds range");
    Preconditions.checkArgument(length > 0, "A length of zero is not supported");

    // + 1 not full block + 2 size block
    byte[] bitFingerprint = new byte[length / sizeofbyte + ((length % sizeofbyte) > 0 ? 1 : 0) + lengthbytes];

    // convert short to two bytes
    lenghtToBitFingerprint(length, bitFingerprint);

    // bits.lenght is faster because its the position of the highest on bit
    for (int i = 0; i < bits.length(); i++) {
        if (bits.get(i)) {
            // + 1 because the first char contains the size
            bitFingerprint[i / sizeofbyte + lengthbytes] |= 1 << (i % sizeofbyte);
        }
    }

    value = new String(Base64.encodeBase64String(bitFingerprint));
    lengthCache = length;
    bitsCache = (BitSet) bits.clone();
}

From source file:model.DecomposableModel.java

/**
 * Computes the tree-width-related size the model would have if an edge
 * between vertices a and b were added: the cardinality of the separator
 * of a and b enlarged by a and b themselves (i.e. the size of the clique
 * that adding the edge would create).
 * 
 * @param a
 *            the first vertex of the candidate edge
 * @param b
 *            the second vertex of the candidate edge
 * @return the number of variables in the clique {Sab, a, b}
 */
public int treeWidthIfAdding(Integer a, Integer b) {
    // System.out.println("computing actual entropy");
    BitSet Sab = graph.getSeparator(a, b);
    // clone so the separator held by the graph is not mutated
    BitSet Sabuaub = (BitSet) Sab.clone();
    Sabuaub.set(a);
    Sabuaub.set(b);
    return Sabuaub.cardinality();

}

From source file:model.DecomposableModel.java

/**
 * Computes the message-length difference that adding the edge (a, b) to
 * this model would cause, and stores it in {@code encodingLength}.
 *
 * @param a        first vertex of the candidate edge
 * @param b        second vertex of the candidate edge
 * @param computer supplies cached log/factorial values and data-length terms
 * @param verbose  if true, print the intermediate terms
 * @return the difference in encoding length
 */
public double messageLengthDiffIfAdding(Integer a, Integer b, MessageLengthFactorialComputer computer,
        boolean verbose) {

    // The separator of a and b, plus the three enlargements used below.
    BitSet separator = graph.getSeparator(a, b);
    BitSet sepWithA = (BitSet) separator.clone();
    BitSet sepWithB = (BitSet) separator.clone();
    BitSet sepWithBoth = (BitSet) separator.clone();
    sepWithA.set(a);
    sepWithB.set(b);
    sepWithBoth.set(a);
    sepWithBoth.set(b);

    // Model part: all graphs equally likely, so only the extra free
    // parameters contribute.
    double lengthFrequencies = this.nbParametersDiffIfAdding(a, b)
            * computer.getLogFromTable(computer.getNbInstances() + 1);

    // Data part: inclusion-exclusion over the four cliques.
    double lengthPositionData = 0.0;
    lengthPositionData += computer.computeLengthData(separator);
    lengthPositionData -= computer.computeLengthData(sepWithA);
    lengthPositionData -= computer.computeLengthData(sepWithB);
    lengthPositionData += computer.computeLengthData(sepWithBoth);
    assert lengthPositionData >= 0;

    if (verbose) {
        System.out.println("adding (" + a + "," + b + ")");
        System.out.println("#param diff =  " + this.nbParametersDiffIfAdding(a, b));
        System.out.println("diff length frequencies=" + lengthFrequencies);
        System.out.println("diff length data|freq=" + lengthPositionData);
    }
    encodingLength = lengthFrequencies + lengthPositionData;
    return encodingLength;
}

From source file:model.DecomposableModel.java

/**
 * Computes the difference in the number of free parameters of the model
 * if an edge between vertices a and b were added.
 * <p>
 * The previous version duplicated the entire body of
 * {@code nbParametersDiffIfAdding(Integer, Integer, BitSet)}; it now
 * delegates to that overload, passing the separator of a and b taken
 * from the current clique graph.
 *
 * @param a first vertex of the candidate edge
 * @param b second vertex of the candidate edge
 * @return the signed difference in the number of free parameters
 */
public long nbParametersDiffIfAdding(Integer a, Integer b) {
    return nbParametersDiffIfAdding(a, b, graph.getSeparator(a, b));
}

From source file:model.DecomposableModel.java

/**
 * Computes the entropy difference caused by adding the edge (a, b) given
 * the separator Sab of a and b:
 * -H(Sab) + H(Sab+a) + H(Sab+b) - H(Sab+a+b).
 *
 * @param a        first vertex of the candidate edge
 * @param b        second vertex of the candidate edge
 * @param Sab      separator of a and b (not modified)
 * @param computer caches entropy computations over cliques
 * @return the entropy difference, or null if any term cannot be computed
 */
public Double entropyDiffIfAdding(int a, int b, BitSet Sab, EntropyComputer computer) {
    // Enlargements of the separator; clones keep the caller's set intact.
    BitSet withA = (BitSet) Sab.clone();
    BitSet withB = (BitSet) Sab.clone();
    BitSet withBoth = (BitSet) Sab.clone();
    withA.set(a);
    withB.set(b);
    withBoth.set(a);
    withBoth.set(b);

    // Signed terms, evaluated in the same order as before.
    BitSet[] cliques = { Sab, withA, withB, withBoth };
    int[] signs = { -1, +1, +1, -1 };

    double entropy = 0.0;
    for (int i = 0; i < cliques.length; i++) {
        Double term = computer.computeEntropy(cliques[i]);
        if (term == null) {
            // entropy not computable for this clique: propagate null
            return null;
        }
        entropy += signs[i] * term;
    }
    return entropy;
}

From source file:model.DecomposableModel.java

/**
 * Computes the difference in entropy between this model and the model
 * obtained by adding an edge between vertices a and b:
 * -H(Sab) + H(Sab+a) + H(Sab+b) - H(Sab+a+b).
 *
 * @param a        first vertex of the candidate edge
 * @param b        second vertex of the candidate edge
 * @param computer caches entropy computations over cliques
 * @param verbose  if true, print each signed entropy term
 * @return the entropy difference, or null if any term cannot be computed
 */
public Double entropyDiffIfAdding(Integer a, Integer b, EntropyComputer computer, boolean verbose) {
    // Separator of a and b plus its three enlargements; clones keep the
    // graph's separator intact.
    BitSet Sab = graph.getSeparator(a, b);
    BitSet withA = (BitSet) Sab.clone();
    BitSet withB = (BitSet) Sab.clone();
    BitSet withBoth = (BitSet) Sab.clone();
    withA.set(a);
    withB.set(b);
    withBoth.set(a);
    withBoth.set(b);

    // Signed terms, evaluated in the same order as before.
    BitSet[] cliques = { Sab, withA, withB, withBoth };
    int[] signs = { -1, +1, +1, -1 };
    String[] labels = { "-", "+", "+", "-" };

    double entropy = 0.0;
    for (int i = 0; i < cliques.length; i++) {
        Double term = computer.computeEntropy(cliques[i]);
        if (term == null) {
            // entropy not computable for this clique: propagate null
            return null;
        }
        entropy += signs[i] * term;
        if (verbose)
            System.out.println(labels[i] + cliques[i] + ":" + term);
    }
    return entropy;
}

From source file:model.DecomposableModel.java

/**
 * Computes the difference in the number of free parameters of the model
 * if an edge between vertices a and b were added, given their separator
 * Sab. Uses inclusion-exclusion over the four cliques:
 * +params(Sab) - params(Sab+a) - params(Sab+b) + params(Sab+a+b).
 *
 * @param a   first vertex of the candidate edge
 * @param b   second vertex of the candidate edge
 * @param Sab separator of a and b (not modified)
 * @return the signed difference in the number of free parameters
 */
public long nbParametersDiffIfAdding(Integer a, Integer b, BitSet Sab) {
    BitSet Sabua = (BitSet) Sab.clone();
    BitSet Sabub = (BitSet) Sab.clone();
    BitSet Sabuaub = (BitSet) Sab.clone();
    Sabua.set(a);
    Sabub.set(b);
    Sabuaub.set(a);
    Sabuaub.set(b);

    return nbFreeParameters(Sab) - nbFreeParameters(Sabua) - nbFreeParameters(Sabub)
            + nbFreeParameters(Sabuaub);
}

/**
 * Number of free parameters of a clique: the product of the dimensions
 * of its variables, minus one. Accumulates in a long because the
 * previous int accumulator could silently overflow for cliques with
 * many or high-dimensional variables.
 */
private long nbFreeParameters(BitSet clique) {
    long nb = 1;
    for (int var = clique.nextSetBit(0); var >= 0; var = clique.nextSetBit(var + 1)) {
        nb *= dimensionsForVariables[var];
    }
    return nb - 1;
}

From source file:model.DecomposableModel.java

/**
 * Export a dot file representing the clique graph.
 * 
 * @see {@linkplain http://www.graphviz.org/content/dot-language}
 * @param file
 *            the file to save the representation to
 * @param variableNames
 *            the names of the variables; if null, cliques are printed
 *            with BitSet.toString() instead of variable names
 */
public void exportDOTCG(File file, String[] variableNames) {
    // try-with-resources guarantees the writer is flushed and closed even
    // if writing fails part-way; the previous version leaked the stream
    // whenever an exception escaped before out.close().
    try (PrintWriter out = new PrintWriter(new FileOutputStream(file), true)) {
        out.println("graph G{");

        for (CliqueGraphEdge edge : graph.cg.edgeSet()) {
            BitSet source = graph.cg.getEdgeSource(edge);
            BitSet target = graph.cg.getEdgeTarget(edge);
            // edge label: intersection of the two cliques (clone so the
            // graph's clique sets are not mutated by and())
            BitSet inter = (BitSet) source.clone();
            inter.and(target);
            if (variableNames == null) {
                out.println(graph.cg.getEdgeSource(edge) + "--" + graph.cg.getEdgeTarget(edge));
            } else {
                out.print("\"");
                for (int v = source.nextSetBit(0); v >= 0; v = source.nextSetBit(v + 1)) {
                    out.print(variableNames[v] + ";");
                }
                out.print("\"--\"");
                for (int v = target.nextSetBit(0); v >= 0; v = target.nextSetBit(v + 1)) {
                    out.print(variableNames[v] + ";");
                }
                out.print("\" [label = \"");
                for (int v = inter.nextSetBit(0); v >= 0; v = inter.nextSetBit(v + 1)) {
                    out.print(variableNames[v] + ";");
                }
                out.println("\"]");
            }
        }

        out.println("}");
    } catch (FileNotFoundException e1) {
        // best-effort export: report and continue, as before
        e1.printStackTrace();
    }

}

From source file:itemsetmining.itemset.ItemsetTree.java

/**
 * Pearson's chi-squared test for itemset independence. This tests the
 * empirical itemset distribution against the independence model.
 *
 * <p>
 * N.B. the chi-squared distribution has one degree of freedom.
 *
 * <p>
 * Recursion: item n of sortedItems is either added to the current cell
 * (bit n set) or left out; once n reaches sortedItems.length the cell is
 * complete and its chi-squared contribution is computed, so the sum runs
 * over all 2^|sortedItems| contingency-table cells.
 *
 * @param n           index of the next item to include/exclude
 * @param cell        bitmask over sortedItems selecting the items "in" this cell
 * @param sortedItems the itemset under test
 * @param singletons  support counts of the individual items
 * @see S. Brin et al. Beyond Market Baskets: Generalizing Association Rules
 *      to Correlations
 */
private double recursiveChiSquared(final int n, final BitSet cell, final int[] sortedItems,
        final Multiset<Integer> singletons) {
    double chiSquared = 0.;
    if (n == sortedItems.length) {
        // Base case: pInd starts at noTransactions and is scaled by each
        // item's (non-)occurrence probability — i.e. the expected count of
        // this cell under independence.
        double pInd = noTransactions;
        final int[] inItems = new int[cell.cardinality()];
        final int[] outItems = new int[n - cell.cardinality()];
        int i = 0, j = 0;
        for (int k = 0; k < n; k++) {
            if (cell.get(k)) {
                inItems[i] = sortedItems[k];
                i++;
                pInd *= singletons.count(sortedItems[k]) / (double) noTransactions;
            } else {
                outItems[j] = sortedItems[k];
                j++;
                pInd *= (noTransactions - singletons.count(sortedItems[k])) / (double) noTransactions;
            }
        }
        // pEmp: empirical statistic for this cell — presumably the observed
        // count from the itemset tree; verify against countEmpirical.
        final double pEmp = countEmpirical(inItems, outItems, root, new int[0]);
        // standard (observed - expected)^2 / expected term
        chiSquared = ((pEmp - pInd) * (pEmp - pInd)) / pInd;
    } else {
        // Recurse twice: once with item n in the cell (clone so the sibling
        // branch still sees the original cell), once without it.
        final BitSet celln = (BitSet) cell.clone();
        celln.set(n);
        chiSquared += recursiveChiSquared(n + 1, celln, sortedItems, singletons);
        chiSquared += recursiveChiSquared(n + 1, cell, sortedItems, singletons);
    }
    return chiSquared;
}

From source file:dendroscope.autumn.hybridnetwork.ComputeHybridizationNetwork.java

/**
 * Recursively computes the hybridization number of the two rooted trees
 * and collects the resulting hybridization networks.
 * <p>
 * Strategy: first try a subtree reduction, then a cluster reduction; if
 * neither applies, branch on each leaf taxon of tree 1 as the next hybrid
 * node (branch-and-bound with bound k).
 *
 * @param root1                    root of the first tree
 * @param root2                    root of the second tree
 * @param isReduced                true if reductions were already applied at this level
 * @param candidateHybridsOriginal taxa that may still become hybrid nodes;
 *                                 temporarily mutated during recursion and then restored
 * @param k                        remaining budget of hybrid nodes
 * @param totalResults             collects the resulting networks
 * @param depth                    indentation prefix for verbose logging
 * @return the hybrid number for this subproblem (LARGE if none within k)
 */
private int computeRec(Root root1, Root root2, boolean isReduced, BitSet candidateHybridsOriginal, int k,
        Collection<Root> totalResults, String depth) throws IOException, CanceledException {
    if (verbose) {
        System.err.println(depth + "---------- ComputeRec:");
        System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
        System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
    }

    // Periodically tick the progress bar (with exponential back-off) and
    // otherwise allow the user to cancel.
    if (System.currentTimeMillis() > nextTime) {
        progressListener.incrementProgress();
        nextTime += waitTime;
        waitTime *= 1.5;
    } else
        progressListener.checkForCancel();

    // root1.reorderSubTree();
    //  root2.reorderSubTree();
    // Sanity checks: both trees must be valid and over the same taxon set.
    if (checking) {
        root1.checkTree();
        root2.checkTree();
        if (!root2.getTaxa().equals(root1.getTaxa()))
            throw new RuntimeException("Unequal taxon sets: X=" + Basic.toString(root1.getTaxa()) + " vs "
                    + Basic.toString(root2.getTaxa()));
    }

    if (!isReduced) {
        // 1. try to perform a subtree reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            List<Pair<Root, Root>> reducedSubtreePairs = new LinkedList<Pair<Root, Root>>();

            switch (SubtreeReduction.apply(root1, root2, reducedSubtreePairs, placeHolderTaxon)) {
            case ISOMORPHIC:
                Root isomorphicTree = MergeIsomorphicInducedTrees.apply(root1, root2);
                if (verbose) {
                    System.err.println(depth + "Trees are isomorphic");
                    System.err.println(depth + "Isomorphic tree: " + isomorphicTree.toStringFullTreeX());
                }
                totalResults.add(isomorphicTree);
                return 0; // two trees are isomorphic, no hybrid node needed
            case REDUCED: // a reduction was performed, cannot maintain lexicographical ordering in removal loop below
                List<Root> subTrees = new LinkedList<Root>();
                for (Pair<Root, Root> pair : reducedSubtreePairs) {
                    subTrees.add(MergeIsomorphicInducedTrees.apply(pair.getFirst(), pair.getSecond()));
                }
                if (verbose) {
                    System.err.println(depth + "Trees are reducible:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    for (Root root : subTrees) {
                        System.err.println(depth + "Merged reduced subtree: " + root.toStringFullTreeX());
                    }
                }

                // NOTE(review): 'if (false)' makes the first branch dead code —
                // only the clone-and-extend branch ever runs; consider removing.
                BitSet candidateHybrids;
                if (false)
                    candidateHybrids = getAllAliveTaxa(root1, root2); // need to reconsider all possible hybrids
                else {
                    // clone so the caller's candidate set is not modified
                    candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                    candidateHybrids.set(placeHolderTaxon.get(), true);
                }

                Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());

                int h = cacheComputeRec(root1, root2, false, candidateHybrids, k, currentResults, depth + " >");
                List<Root> merged = MergeNetworks.apply(currentResults, subTrees);
                if (verbose) {
                    for (Root r : merged) {
                        System.err.println(depth + "Result-merged: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(fixOrdering(merged));
                return h;
            case IRREDUCIBLE:
                if (verbose)
                    System.err.println(depth + "Trees are subtree-irreducible");
                break;
            }
        }

        // 2. try to perform a cluster reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            Pair<Root, Root> clusterTrees = ClusterReduction.apply(root1, root2, placeHolderTaxon);

            if (clusterTrees != null) {
                // solve the extracted cluster (bottom pair) first
                Set<Root> resultBottomPair = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(clusterTrees.getFirst(), clusterTrees.getSecond(), true,
                        candidateHybridsOriginal, k, resultBottomPair, depth + " >");

                // for the top pair, we should reconsider the place holder in the top pair as a possible place holder
                BitSet candidateHybrids = (BitSet) candidateHybridsOriginal.clone();

                candidateHybrids.set(placeHolderTaxon.get(), true);

                // then solve the remaining trees (top pair) with the reduced budget k - h
                Set<Root> resultTopPair = new TreeSet<Root>(new NetworkComparator());
                h += cacheComputeRec(root1, root2, false, candidateHybrids, k - h, resultTopPair, depth + " >");

                // combine every bottom solution with the top solutions
                Set<Root> currentResults = new TreeSet<Root>(new NetworkComparator());

                for (Root r : resultBottomPair) {
                    currentResults.addAll(MergeNetworks.apply(resultTopPair, Arrays.asList(r)));
                }
                if (verbose) {
                    System.err.println(depth + "Cluster reduction applied::");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    System.err.println(depth + "Subtree-1:     " + clusterTrees.getFirst().toStringFullTreeX());
                    System.err
                            .println(depth + "Subtree-2:     " + clusterTrees.getSecond().toStringFullTreeX());

                    for (Root r : resultBottomPair) {
                        System.err.println(depth + "Results for reduced-trees: " + r.toStringNetworkFull());
                    }

                    for (Root r : resultTopPair) {
                        System.err.println(depth + "Results for sub-trees: " + r.toStringNetworkFull());
                    }

                    for (Root r : currentResults) {
                        System.err
                                .println(depth + "Merged cluster-reduced networks: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(currentResults);
                // free the temporary cluster trees
                clusterTrees.getFirst().deleteSubTree();
                clusterTrees.getSecond().deleteSubTree();

                return h;
            }
        }
    } else {
        if (verbose)
            System.err.println(depth + "Trees are already reduced");
    }

    if (k <= 0) // 1, if only interested in number or in finding only one network, 0 else
        return LARGE;

    // Branch-and-bound: try each alive leaf taxon of tree 1 as the next hybrid.
    int hBest = LARGE;
    List<Root> leaves1 = getAllAliveLeaves(root1);

    /*
    if (leaves1.size() <= 2) // try 2 rather than one...
    {
    totalResults.add(MergeNetworks.apply(root1,root2)); // todo: this needs to be fixed
    return 0;
    }
    */

    for (Root leaf2remove : leaves1) {
        BitSet taxa2remove = leaf2remove.getTaxa();
        if (taxa2remove.cardinality() != 1)
            throw new IOException(depth + "Leaf taxa cardinality: " + taxa2remove.cardinality());

        int hybridTaxon = taxa2remove.nextSetBit(0);

        if (candidateHybridsOriginal.get(hybridTaxon)) {
            if (verbose) {
                System.err.println(depth + "Removing: " + hybridTaxon);
                System.err.println(depth + "candidateHybrids: " + Basic.toString(candidateHybridsOriginal));
                System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
                System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
            }

            // work on copies so this branch does not disturb the input trees
            Root root1x = root1.copySubNetwork();
            Root root2x = root2.copySubNetwork();
            RemoveTaxon.apply(root1x, 1, hybridTaxon);
            RemoveTaxon.apply(root2x, 2, hybridTaxon); // now we keep removed taxa as separate sets

            if (verbose) {
                System.err.println(depth + "Tree1-x: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x: " + root2x.toStringFullTreeX());
            }

            // contract degree-2 nodes etc. after the removal
            Refine.apply(root1x, root2x);

            if (verbose) {
                System.err.println(depth + "Tree1-x-refined: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x-refined: " + root2x.toStringFullTreeX());
            }

            // Backtracking: temporarily withdraw this taxon from the candidate
            // set for the recursive call, then restore it below.
            Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
            candidateHybridsOriginal.set(hybridTaxon, false);

            int h = cacheComputeRec(root1x, root2x, false, candidateHybridsOriginal, k - 1, currentResults,
                    depth + " >") + 1;
            candidateHybridsOriginal.set(hybridTaxon, true);

            // tighten the bound for the remaining branches
            if (h < k)
                k = h;

            // System.err.println("Subproblem with " + Basic.toString(taxa2remove) + " removed, h=" + h);

            // a strictly better solution discards all previous results
            if (h < hBest && h <= k) {
                hBest = h;
                totalResults.clear();
            }
            if (h == hBest && h <= k) {
                if (verbose) {
                    for (Root r : currentResults) {
                        System.err.println(depth + "Result: " + r.toStringNetworkFull());
                    }
                }

                // add the hybrid node:
                currentResults = copyAll(currentResults);
                AddHybridNode.apply(currentResults, hybridTaxon);
                totalResults.addAll(fixOrdering(currentResults));
            }
            // free the per-branch tree copies
            root1x.deleteSubTree();
            root2x.deleteSubTree();
        }
    }
    return hBest;
}