List of usage examples for java.util.BitSet.cardinality()

public int cardinality()
Returns the number of bits set to true in this BitSet.
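For orientation before the examples, here is a minimal self-contained sketch of what cardinality() reports; the class name and bit indices are illustrative only.

import java.util.BitSet;

public class CardinalityDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet(16); // capacity hint only; does not affect cardinality
        bits.set(2);
        bits.set(5);
        bits.set(5);                  // setting an already-set bit changes nothing
        bits.set(9);
        System.out.println(bits.cardinality()); // 3: number of bits set to true
        System.out.println(bits.size());        // allocated capacity in bits, not the count of set bits
        bits.clear(2);
        System.out.println(bits.cardinality()); // 2
    }
}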
From source file:com.joliciel.jochre.graphics.VectorizerImpl.java
private List<LineSegment> combineSegments(Shape shape, List<LineSegment> lineSegments) {
    // get rid of overlapping segments
    List<LineSegment> lineSegmentsToDelete = new ArrayList<LineSegment>();
    for (int i = 0; i < lineSegments.size() - 1; i++) {
        for (int j = i + 1; j < lineSegments.size(); j++) {
            LineSegment lineSegment1 = lineSegments.get(i);
            LineSegment lineSegment2 = lineSegments.get(j);

            // check for overlap: compute tolerance-padded bounding boxes for both segments
            int tolerance = 3;
            int line1left, line1top, line1right, line1bottom;
            int line2left, line2top, line2right, line2bottom;
            if (lineSegment1.getStartX() <= lineSegment1.getEndX()) {
                line1left = lineSegment1.getStartX() - tolerance;
                line1right = lineSegment1.getEndX() + tolerance;
            } else {
                line1left = lineSegment1.getEndX() - tolerance;
                line1right = lineSegment1.getStartX() + tolerance;
            }
            if (lineSegment2.getStartX() <= lineSegment2.getEndX()) {
                line2left = lineSegment2.getStartX() - tolerance;
                line2right = lineSegment2.getEndX() + tolerance;
            } else {
                line2left = lineSegment2.getEndX() - tolerance;
                line2right = lineSegment2.getStartX() + tolerance;
            }
            if (lineSegment1.getStartY() <= lineSegment1.getEndY()) {
                line1top = lineSegment1.getStartY() - tolerance;
                line1bottom = lineSegment1.getEndY() + tolerance;
            } else {
                line1top = lineSegment1.getEndY() - tolerance;
                line1bottom = lineSegment1.getStartY() + tolerance;
            }
            if (lineSegment2.getStartY() <= lineSegment2.getEndY()) {
                line2top = lineSegment2.getStartY() - tolerance;
                line2bottom = lineSegment2.getEndY() + tolerance;
            } else {
                line2top = lineSegment2.getEndY() - tolerance;
                line2bottom = lineSegment2.getStartY() + tolerance;
            }

            // is overlap possible?
            if (line1left <= line2right && line1right >= line2left && line1top <= line2bottom
                    && line1bottom >= line2top) {
                // note: line1 is guaranteed to be longer than or of equal length to line 2
                BitSet rect2 = lineSegment2.getEnclosingRectangle(tolerance);
                BitSet intersection = lineSegment1.getEnclosingRectangleIntersection(lineSegment2, tolerance);
                int area2 = rect2.cardinality();
                int interMultiplied = intersection.cardinality() * 2;
                // delete segment 2 when the intersection covers more than half of its enclosing rectangle
                if (interMultiplied > area2) {
                    lineSegmentsToDelete.add(lineSegment2);
                }
            }
        }
    }
    lineSegments.removeAll(lineSegmentsToDelete);
    // TODO: combine lines that are "more or less" in the same location & direction
    return lineSegments;
}
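The pattern above treats each BitSet as a rasterized rectangle and compares pixel counts. A minimal sketch of the same half-overlap test, assuming rect1 and rect2 are bitmaps built over the same width-by-height grid with one bit per pixel (the getEnclosingRectangle* methods above are Jochre-specific):

BitSet intersection = (BitSet) rect1.clone();
intersection.and(rect2);                      // pixels covered by both rectangles
int area2 = rect2.cardinality();              // pixel area of rectangle 2
if (intersection.cardinality() * 2 > area2) {
    // more than half of rectangle 2 overlaps rectangle 1: treat it as redundant
}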
From source file:org.apache.hadoop.mapred.TestCombineSequenceFileInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);
    Reporter reporter = Reporter.NULL;

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create a file with various lengths
    createFiles(length, numFiles, random);

    // create a combine split for the files
    InputFormat<IntWritable, BytesWritable> format =
            new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
    IntWritable key = new IntWritable();
    BytesWritable value = new BytesWritable();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got = " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check each split
        BitSet bits = new BitSet(length);
        RecordReader<IntWritable, BytesWritable> reader = format.getRecordReader(split, job, reporter);
        try {
            while (reader.next(key, value)) {
                assertFalse("Key in multiple partitions.", bits.get(key.get()));
                bits.set(key.get());
            }
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
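The test uses a common BitSet idiom for verifying a partition of keys: flag each key as it is read, fail if a key repeats, then confirm via cardinality() that every expected key appeared. A minimal sketch of that check, assuming keys fall in the range [0, expected):

import java.util.BitSet;

// a minimal sketch; the helper name and AssertionError messages are illustrative
static void assertExactlyOnce(int[] keys, int expected) {
    BitSet seen = new BitSet(expected);
    for (int key : keys) {
        if (seen.get(key)) {
            throw new AssertionError("Key in multiple partitions: " + key);
        }
        seen.set(key);
    }
    // duplicates are excluded above, so cardinality() == expected
    // means each key appeared exactly once
    if (seen.cardinality() != expected) {
        throw new AssertionError("Some keys in no partition.");
    }
}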
From source file:org.apache.hadoop.hive.ql.optimizer.SharedWorkOptimizer.java
private static boolean areMergeable(ParseContext pctx, SharedWorkOptimizerCache optimizerCache,
        TableScanOperator tsOp1, TableScanOperator tsOp2) throws SemanticException {
    // First we check if the two table scan operators can actually be merged
    // If schemas do not match, we currently do not merge
    List<String> prevTsOpNeededColumns = tsOp1.getNeededColumns();
    List<String> tsOpNeededColumns = tsOp2.getNeededColumns();
    if (prevTsOpNeededColumns.size() != tsOpNeededColumns.size()) {
        return false;
    }
    boolean notEqual = false;
    for (int i = 0; i < prevTsOpNeededColumns.size(); i++) {
        if (!prevTsOpNeededColumns.get(i).equals(tsOpNeededColumns.get(i))) {
            notEqual = true;
            break;
        }
    }
    if (notEqual) {
        return false;
    }
    // If row limit does not match, we currently do not merge
    if (tsOp1.getConf().getRowLimit() != tsOp2.getConf().getRowLimit()) {
        return false;
    }
    // If partitions do not match, we currently do not merge
    PrunedPartitionList prevTsOpPPList = pctx.getPrunedPartitions(tsOp1);
    PrunedPartitionList tsOpPPList = pctx.getPrunedPartitions(tsOp2);
    if (!prevTsOpPPList.getPartitions().equals(tsOpPPList.getPartitions())) {
        return false;
    }
    // If is a DPP, check if actually it refers to same target, column, etc.
    // Further, the DPP value needs to be generated from same subtree
    List<Operator<?>> dppsOp1 = new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp1));
    List<Operator<?>> dppsOp2 = new ArrayList<>(optimizerCache.tableScanToDPPSource.get(tsOp2));
    if (dppsOp1.isEmpty() && dppsOp2.isEmpty()) {
        return true;
    }
    for (int i = 0; i < dppsOp1.size(); i++) {
        Operator<?> op = dppsOp1.get(i);
        if (op instanceof ReduceSinkOperator) {
            Set<Operator<?>> ascendants = findAscendantWorkOperators(pctx, optimizerCache, op);
            if (ascendants.contains(tsOp2)) {
                dppsOp1.remove(i);
                i--;
            }
        }
    }
    for (int i = 0; i < dppsOp2.size(); i++) {
        Operator<?> op = dppsOp2.get(i);
        if (op instanceof ReduceSinkOperator) {
            Set<Operator<?>> ascendants = findAscendantWorkOperators(pctx, optimizerCache, op);
            if (ascendants.contains(tsOp1)) {
                dppsOp2.remove(i);
                i--;
            }
        }
    }
    if (dppsOp1.size() != dppsOp2.size()) {
        // Only first or second operator contains DPP pruning
        return false;
    }
    // Check if DPP branches are equal
    BitSet bs = new BitSet();
    for (int i = 0; i < dppsOp1.size(); i++) {
        Operator<?> dppOp1 = dppsOp1.get(i);
        for (int j = 0; j < dppsOp2.size(); j++) {
            if (!bs.get(j)) {
                // If not visited yet
                Operator<?> dppOp2 = dppsOp2.get(j);
                if (compareAndGatherOps(pctx, dppOp1, dppOp2) != null) {
                    // The DPP operator/branch are equal
                    bs.set(j);
                    break;
                }
            }
        }
        // bs accumulates the matched branches of dppsOp2; if no new bit was set,
        // no partner was found for dppOp1 and the cardinality is still i
        if (bs.cardinality() == i) {
            return false;
        }
    }
    return true;
}
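Here cardinality() verifies that a greedy matching of DPP branches made progress: set bits mark which elements of the second list have been claimed, so after processing i + 1 elements of the first list at least i + 1 bits must be set. The same bookkeeping in isolation, as a minimal sketch where an equals-based matcher stands in for compareAndGatherOps and the helper name is illustrative:

import java.util.BitSet;
import java.util.List;

// greedily match each element of list1 to an unclaimed, equal element of list2,
// using a BitSet to mark claimed indices of list2
static <T> boolean canMatchAll(List<T> list1, List<T> list2) {
    if (list1.size() != list2.size()) {
        return false;
    }
    BitSet claimed = new BitSet(list2.size());
    for (int i = 0; i < list1.size(); i++) {
        for (int j = 0; j < list2.size(); j++) {
            if (!claimed.get(j) && list1.get(i).equals(list2.get(j))) {
                claimed.set(j);
                break;
            }
        }
        if (claimed.cardinality() < i + 1) {
            return false; // no unclaimed partner found for element i
        }
    }
    return true;
}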
From source file:org.ala.spatial.analysis.layers.SitesBySpeciesTabulated.java
/**
 * Generate and write the sites by species list.
 * <p/>
 * Output files have both .csv and .json versions:
 * decades, tabulation by decades;
 * decadecounts, tabulation by (species in) sequential decades;
 * bioregionName, tabulation by bioregions (from ssf or grid & gridColumns).
 *
 * @param records all occurrence records for this density grid as Records.
 * @param outputDirectory path to the output directory.
 * @param region area restriction, or null for everywhere the occurrences appear, as SimpleRegion.
 * @param envelopeGrid area restriction as an envelope grid, or null for everywhere the occurrences appear, as Grid.
 * @param bioregionName null or output bioregion name.
 * @param ssf null or bioregion as shape file with a single column as SimpleRegion.
 * @param grid null or bioregion as Grid. Must also have gridColumns.
 * @param gridColumns null or grid bioregion category lookup values as String[].
 * @param decade true to generate decades and decadecounts output tabulations.
 * @throws IOException
 */
public void write(Records records, String outputDirectory, SimpleRegion region, Grid envelopeGrid,
        String bioregionName, SimpleShapeFile ssf, Grid grid, String[] gridColumns, boolean decade)
        throws IOException {
    String[] columns = null;
    int[] gridIntersections = null;
    int numberOfBioregions = 0;

    // get columns for bioregion categories from ssf or gridColumns.
    if (ssf != null) {
        columns = ssf.getColumnLookup();
    } else if (grid != null) {
        columns = gridColumns;
        gridIntersections = new int[records.getRecordsSize()];
        double[][] points = new double[records.getRecordsSize()][2];
        for (int i = 0; i < records.getRecordsSize(); i++) {
            points[i][0] = records.getLongitude(i);
            points[i][1] = records.getLatitude(i);
        }
        float[] f = grid.getValues(points);
        for (int i = 0; i < f.length; i++) {
            gridIntersections[i] = (int) f[i];
            if (gridIntersections[i] < 0 || gridIntersections[i] >= gridColumns.length + 1) {
                gridIntersections[i] = -1;
            }
        }
        f = null;
        points = null;
    }
    if (columns != null) {
        numberOfBioregions = columns.length + 1;
    }

    int uniqueSpeciesCount = records.getSpeciesSize();

    short[] decadeIdx = getDecadeIdx(records);
    int numberOfDecades = decadeIdx[decadeIdx.length - 1] + 1;

    HashMap<Integer, Integer>[] bioMap = new HashMap[numberOfBioregions];
    HashMap<Integer, Integer>[] decMap = new HashMap[numberOfDecades];
    HashMap<Integer, Integer>[] decCountMap = new HashMap[numberOfDecades + 1];
    for (int i = 0; i < bioMap.length; i++) {
        bioMap[i] = new HashMap<Integer, Integer>();
    }
    for (int i = 0; i < decMap.length; i++) {
        decMap[i] = new HashMap<Integer, Integer>();
    }
    for (int i = 0; i < decCountMap.length; i++) {
        decCountMap[i] = new HashMap<Integer, Integer>();
    }

    records.sortedStarts(bbox[1], bbox[0], resolution);

    // one presence BitSet per bioregion and per decade, one bit per species
    BitSet[] bsDecades = new BitSet[numberOfDecades];
    BitSet[] bsBioregions = new BitSet[numberOfBioregions];
    for (int j = 0; j < numberOfBioregions; j++) {
        bsBioregions[j] = new BitSet(uniqueSpeciesCount);
    }
    for (int j = 0; j < numberOfDecades; j++) {
        bsDecades[j] = new BitSet(uniqueSpeciesCount);
    }
    int[] decContinuousCounts = new int[records.getSpeciesSize()];

    for (int pos = 0; pos < records.getRecordsSize();) {
        // find end pos: all records falling into the same grid cell
        int x = (int) ((records.getLongitude(pos) - bbox[0]) / resolution);
        int y = (int) ((records.getLatitude(pos) - bbox[1]) / resolution);
        int endPos = pos + 1;
        while (endPos < records.getRecordsSize()
                && x == (int) ((records.getLongitude(endPos) - bbox[0]) / resolution)
                && y == (int) ((records.getLatitude(endPos) - bbox[1]) / resolution)) {
            endPos++;
        }

        double longitude = (x + 0.5) * resolution;
        double latitude = (y + 0.5) * resolution;
        if ((region == null || region.isWithin_EPSG900913(longitude, latitude)) && (envelopeGrid == null
                || envelopeGrid.getValues2(new double[][] { { longitude, latitude } })[0] > 0)) {
            // process this cell
            getNextIntArrayRow(records, pos, endPos, bsBioregions, bsDecades, ssf, gridIntersections, decadeIdx);

            for (int j = 0; j < numberOfBioregions; j++) {
                // number of species present in this cell for bioregion j
                int group = bsBioregions[j].cardinality();
                if (group > 0) {
                    Integer count = bioMap[j].get(group);
                    bioMap[j].put(group, count == null ? 1 : count + 1);
                }
            }
            for (int j = 0; j < numberOfDecades; j++) {
                int group = bsDecades[j].cardinality();
                if (group > 0) {
                    Integer count = decMap[j].get(group);
                    decMap[j].put(group, count == null ? 1 : count + 1);
                }
            }

            // reset
            for (int j = 0; j < decContinuousCounts.length; j++) {
                decContinuousCounts[j] = 0;
            }
            // sum: for each species, the number of decades it appears in
            for (int j = 0; j < numberOfDecades; j++) {
                BitSet bs = bsDecades[j];
                if (bs.cardinality() > 0) {
                    for (int k = 0; k < bs.length(); k++) {
                        if (bs.get(k)) {
                            decContinuousCounts[k]++;
                        }
                    }
                }
            }
            // count
            java.util.Arrays.sort(decContinuousCounts);
            int count = 1;
            for (int j = 1; j < decContinuousCounts.length; j++) {
                if (decContinuousCounts[j] == decContinuousCounts[j - 1]) {
                    count++;
                } else {
                    Integer c = decCountMap[decContinuousCounts[j - 1]].get(count);
                    decCountMap[decContinuousCounts[j - 1]].put(count, c == null ? 1 : c + 1);
                    count = 1;
                }
            }
            Integer c = decCountMap[decContinuousCounts[decContinuousCounts.length - 1]].get(count);
            decCountMap[decContinuousCounts[decContinuousCounts.length - 1]].put(count, c == null ? 1 : c + 1);
        }
        pos = endPos;
    }

    if (numberOfBioregions > 0) {
        writeBioregions(bioregionName, outputDirectory, columns, bioMap);
    }
    writeDecades(outputDirectory, decadeIdx, decMap);
    writeDecadeCounts(outputDirectory, decCountMap);
}
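In this tabulation each BitSet is a per-cell species presence vector: bit k is set when species k occurs in the cell, so cardinality() is the species richness of that cell for a given bioregion or decade. Stripped to its core, with hypothetical species indices:

// a minimal sketch: one presence bit per species, cardinality() = richness
int uniqueSpeciesCount = 100;
BitSet speciesInCell = new BitSet(uniqueSpeciesCount);
speciesInCell.set(3);   // species 3 observed in this grid cell
speciesInCell.set(17);
speciesInCell.set(42);
int richness = speciesInCell.cardinality(); // 3 distinct species in this cell
speciesInCell.clear();  // reset before moving to the next cell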
From source file:dendroscope.autumn.hybridnumber.ComputeHybridNumber.java
/**
 * Run the algorithm. This can be reentered by rerootings of the same two trees.
 *
 * @param tree1
 * @param tree2
 * @return the hybridization number of the two reduced trees
 */
int run(PhyloTree tree1, PhyloTree tree2, Taxa allTaxa) throws IOException, CanceledException {
    if (!initialized) {
        initialized = true;
        progressListener.setMaximum(20);
        progressListener.setProgress(0);
        startTime = System.currentTimeMillis();
        nextTime = this.startTime + waitTime;
    }

    if (bestScore.get() == LARGE) { // no upper bound given, use cluster network
        System.err.print("Computing upper bound using cluster network: ");
        int upperBound = Utilities.getNumberOfReticulationsInClusterNetwork(tree1, tree2, progressListener);
        System.err.println(upperBound);
        bestScore.set(upperBound);
    }

    Pair<Root, Root> roots = PreProcess.apply(tree1, tree2, allTaxa);
    Root root1 = roots.getFirst();
    Root root2 = roots.getSecond();

    BitSet onlyTree1 = Cluster.setminus(root1.getTaxa(), root2.getTaxa());
    BitSet onlyTree2 = Cluster.setminus(root2.getTaxa(), root1.getTaxa());

    if (root1.getTaxa().cardinality() == onlyTree1.cardinality())
        throw new IOException("None of the taxa in tree2 are contained in tree1");
    if (root2.getTaxa().cardinality() == onlyTree2.cardinality())
        throw new IOException("None of the taxa in tree1 are contained in tree2");

    if (onlyTree1.cardinality() > 0) {
        if (!silent)
            System.err.println("Killing all taxa only present in tree1: " + onlyTree1.cardinality());
        for (int t = onlyTree1.nextSetBit(0); t != -1; t = onlyTree1.nextSetBit(t + 1)) {
            BitSet one = new BitSet();
            one.set(t);
            root1 = CopyWithTaxaRemoved.apply(root1, one);
        }
    }

    if (onlyTree2.cardinality() > 0) {
        if (!silent)
            System.err.println("Killing all taxa only present in tree2: " + onlyTree2.cardinality());
        for (int t = onlyTree2.nextSetBit(0); t != -1; t = onlyTree2.nextSetBit(t + 1)) {
            BitSet one = new BitSet();
            one.set(t);
            root2 = CopyWithTaxaRemoved.apply(root2, one);
        }
    }

    if (!root1.getTaxa().equals(root2.getTaxa()))
        throw new IOException("Trees have unequal taxon sets (even after killing)");

    // run the refine algorithm
    if (!silent)
        System.err.println("Computing common refinement of both trees");
    Refine.apply(root1, root2);

    if (true) {
        System.err.println(root1.toStringTree());
        System.err.println(root2.toStringTree());
    }

    if (tree1.getRoot() == null || tree2.getRoot() == null) {
        throw new IOException("Can't compute hybrid number, at least one of the trees is empty or unrooted");
    }

    // we maintain both trees in lexicographic order for ease of comparison
    root1.reorderSubTree();
    root2.reorderSubTree();

    if (!silent)
        System.err.println("Computing hybridization number using Autumn algorithm...");
    if (!silent)
        System.err.println("(Number of worker threads: " + (additionalThreads + 1) + ")");

    int result = computeHybridNumberRec(root1, root2, false, null, null, true, 0, new ValuesList());
    if (!silent)
        System.err.println("(Result: " + result + ")");

    if (!silent)
        System.err.println("Hybridization number: " + bestScore.get());
    if (bestScore.get() > result)
        throw new IOException("bestScore > result: " + bestScore.get() + " " + result);

    return bestScore.get();
}
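Cluster.setminus above yields the taxa private to one tree, and comparing its cardinality against the full taxon set detects disjoint inputs. java.util.BitSet expresses the same set algebra directly; a minimal sketch with illustrative taxon numbering:

// set difference via andNot(), disjointness test via cardinality()
BitSet taxa1 = new BitSet();
taxa1.set(1, 5);                  // tree1 contains taxa 1..4 (toIndex is exclusive)
BitSet taxa2 = new BitSet();
taxa2.set(3, 8);                  // tree2 contains taxa 3..7

BitSet onlyTree1 = (BitSet) taxa1.clone();
onlyTree1.andNot(taxa2);          // taxa present in tree1 but not in tree2

// if nothing is shared, the difference is as large as the whole set
boolean disjoint = taxa1.cardinality() == onlyTree1.cardinality();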
From source file:com.joliciel.jochre.graphics.LineSegmentImplTest.java
@Test
public void testGetEnclosingRectangle(@NonStrict final Shape shape) {
    LineDefinitionImpl lineDef = new LineDefinitionImpl(0, 0);
    List<Integer> steps = new ArrayList<Integer>();
    steps.add(2);
    steps.add(3);
    lineDef.setSteps(steps);

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(8);
            shape.getWidth();
            returns(8);
        }
    };

    LineSegmentImpl lineSegment = new LineSegmentImpl(shape, lineDef, 5, 2, 1, 3);
    lineSegment.setLength(4);
    BitSet rectangle = lineSegment.getEnclosingRectangle(1);

    int[] bitsetPixels = {
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 1, 1, 0, 0,
            0, 1, 1, 1, 1, 1, 0, 0,
            0, 1, 1, 1, 1, 1, 0, 0,
            0, 1, 1, 1, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0 };

    for (int x = 0; x < 8; x++)
        for (int y = 0; y < 8; y++) {
            assertEquals("x = " + x + ", y = " + y, bitsetPixels[y * 8 + x] == 1, rectangle.get(y * 8 + x));
        }

    assertEquals(3 * (lineSegment.getLength() + 1), rectangle.cardinality());
}
From source file:org.elasticsearch.hadoop.rest.RestRepository.java
public void flush() {
    BitSet bulk = tryFlush();
    if (!bulk.isEmpty()) {
        throw new EsHadoopException(
                String.format("Could not write all entries [%s/%s] (maybe ES was overloaded?). Bailing out...",
                        bulk.cardinality(), bulk.size()));
    }
}
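Here tryFlush() appears to return a bit set with one bit per rejected document, so cardinality()/size() yields the failed/attempted ratio in the error message. A minimal sketch of the same reporting pattern with java.util.BitSet, where the counts are fabricated for illustration; note that java.util.BitSet.size() reports allocated capacity, so the number of attempted documents is carried separately here:

int attempted = 250;
BitSet failures = new BitSet(attempted);
failures.set(7);    // documents 7 and 133 were rejected
failures.set(133);
if (!failures.isEmpty()) {
    throw new IllegalStateException(String.format(
            "Could not write all entries [%s/%s]", failures.cardinality(), attempted));
}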
From source file:dendroscope.autumn.hybridnetwork.ComputeHybridizationNetwork.java
/**
 * Recursively compute the hybrid number.
 *
 * @param root1
 * @param root2
 * @param isReduced
 * @param candidateHybridsOriginal
 * @param k
 * @param totalResults
 * @return hybrid number
 */
private int computeRec(Root root1, Root root2, boolean isReduced, BitSet candidateHybridsOriginal, int k,
        Collection<Root> totalResults, String depth) throws IOException, CanceledException {
    if (verbose) {
        System.err.println(depth + "---------- ComputeRec:");
        System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
        System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
    }

    if (System.currentTimeMillis() > nextTime) {
        progressListener.incrementProgress();
        nextTime += waitTime;
        waitTime *= 1.5;
    } else
        progressListener.checkForCancel();

    // root1.reorderSubTree();
    // root2.reorderSubTree();
    if (checking) {
        root1.checkTree();
        root2.checkTree();
        if (!root2.getTaxa().equals(root1.getTaxa()))
            throw new RuntimeException("Unequal taxon sets: X=" + Basic.toString(root1.getTaxa()) + " vs "
                    + Basic.toString(root2.getTaxa()));
    }

    if (!isReduced) {
        // 1. try to perform a subtree reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            List<Pair<Root, Root>> reducedSubtreePairs = new LinkedList<Pair<Root, Root>>();

            switch (SubtreeReduction.apply(root1, root2, reducedSubtreePairs, placeHolderTaxon)) {
            case ISOMORPHIC:
                Root isomorphicTree = MergeIsomorphicInducedTrees.apply(root1, root2);
                if (verbose) {
                    System.err.println(depth + "Trees are isomorphic");
                    System.err.println(depth + "Isomorphic tree: " + isomorphicTree.toStringFullTreeX());
                }
                totalResults.add(isomorphicTree);
                return 0; // two trees are isomorphic, no hybrid node needed
            case REDUCED: // a reduction was performed, cannot maintain lexicographical ordering in removal loop below
                List<Root> subTrees = new LinkedList<Root>();
                for (Pair<Root, Root> pair : reducedSubtreePairs) {
                    subTrees.add(MergeIsomorphicInducedTrees.apply(pair.getFirst(), pair.getSecond()));
                }
                if (verbose) {
                    System.err.println(depth + "Trees are reducible:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    for (Root root : subTrees) {
                        System.err.println(depth + "Merged reduced subtree: " + root.toStringFullTreeX());
                    }
                }

                BitSet candidateHybrids;
                if (false)
                    candidateHybrids = getAllAliveTaxa(root1, root2); // need to reconsider all possible hybrids
                else {
                    candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                    candidateHybrids.set(placeHolderTaxon.get(), true);
                }

                Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(root1, root2, false, candidateHybrids, k, currentResults, depth + " >");

                List<Root> merged = MergeNetworks.apply(currentResults, subTrees);
                if (verbose) {
                    for (Root r : merged) {
                        System.err.println(depth + "Result-merged: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(fixOrdering(merged));
                return h;
            case IRREDUCIBLE:
                if (verbose)
                    System.err.println(depth + "Trees are subtree-irreducible");
                break;
            }
        }

        // 2. try to perform a cluster reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            Pair<Root, Root> clusterTrees = ClusterReduction.apply(root1, root2, placeHolderTaxon);
            if (clusterTrees != null) {
                Set<Root> resultBottomPair = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(clusterTrees.getFirst(), clusterTrees.getSecond(), true,
                        candidateHybridsOriginal, k, resultBottomPair, depth + " >");

                // for the top pair, we should reconsider the place holder in the top pair as a possible place holder
                BitSet candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                candidateHybrids.set(placeHolderTaxon.get(), true);

                Set<Root> resultTopPair = new TreeSet<Root>(new NetworkComparator());
                h += cacheComputeRec(root1, root2, false, candidateHybrids, k - h, resultTopPair, depth + " >");

                Set<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                for (Root r : resultBottomPair) {
                    currentResults.addAll(MergeNetworks.apply(resultTopPair, Arrays.asList(r)));
                }

                if (verbose) {
                    System.err.println(depth + "Cluster reduction applied:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    System.err.println(depth + "Subtree-1: " + clusterTrees.getFirst().toStringFullTreeX());
                    System.err.println(depth + "Subtree-2: " + clusterTrees.getSecond().toStringFullTreeX());
                    for (Root r : resultBottomPair) {
                        System.err.println(depth + "Results for reduced-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : resultTopPair) {
                        System.err.println(depth + "Results for sub-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : currentResults) {
                        System.err.println(depth + "Merged cluster-reduced networks: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(currentResults);
                clusterTrees.getFirst().deleteSubTree();
                clusterTrees.getSecond().deleteSubTree();
                return h;
            }
        }
    } else {
        if (verbose)
            System.err.println(depth + "Trees are already reduced");
    }

    if (k <= 0) // 1, if only interested in number or in finding only one network, 0 else
        return LARGE;

    int hBest = LARGE;
    List<Root> leaves1 = getAllAliveLeaves(root1);

    /*
    if (leaves1.size() <= 2) // try 2 rather than one...
    {
        totalResults.add(MergeNetworks.apply(root1, root2)); // todo: this needs to be fixed
        return 0;
    }
    */

    for (Root leaf2remove : leaves1) {
        BitSet taxa2remove = leaf2remove.getTaxa();

        if (taxa2remove.cardinality() != 1)
            throw new IOException(depth + "Leaf taxa cardinality: " + taxa2remove.cardinality());

        int hybridTaxon = taxa2remove.nextSetBit(0);

        if (candidateHybridsOriginal.get(hybridTaxon)) {
            if (verbose) {
                System.err.println(depth + "Removing: " + hybridTaxon);
                System.err.println(depth + "candidateHybrids: " + Basic.toString(candidateHybridsOriginal));
                System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
                System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
            }

            Root root1x = root1.copySubNetwork();
            Root root2x = root2.copySubNetwork();
            RemoveTaxon.apply(root1x, 1, hybridTaxon);
            RemoveTaxon.apply(root2x, 2, hybridTaxon); // now we keep removed taxa as separate sets

            if (verbose) {
                System.err.println(depth + "Tree1-x: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x: " + root2x.toStringFullTreeX());
            }

            Refine.apply(root1x, root2x);

            if (verbose) {
                System.err.println(depth + "Tree1-x-refined: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x-refined: " + root2x.toStringFullTreeX());
            }

            Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
            candidateHybridsOriginal.set(hybridTaxon, false);

            int h = cacheComputeRec(root1x, root2x, false, candidateHybridsOriginal, k - 1, currentResults,
                    depth + " >") + 1;

            candidateHybridsOriginal.set(hybridTaxon, true);

            if (h < k)
                k = h;

            // System.err.println("Subproblem with " + Basic.toString(taxa2remove) + " removed, h=" + h);

            if (h < hBest && h <= k) {
                hBest = h;
                totalResults.clear();
            }
            if (h == hBest && h <= k) {
                if (verbose) {
                    for (Root r : currentResults) {
                        System.err.println(depth + "Result: " + r.toStringNetworkFull());
                    }
                }

                // add the hybrid node:
                currentResults = copyAll(currentResults);
                AddHybridNode.apply(currentResults, hybridTaxon);
                totalResults.addAll(fixOrdering(currentResults));
            }
            root1x.deleteSubTree();
            root2x.deleteSubTree();
        }
    }
    return hBest;
}
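Two BitSet idioms recur in this method: the cardinality() == 1 sanity check that a leaf carries exactly one taxon, and the nextSetBit() loop for walking set bits (also used in run() above). In isolation, as a minimal sketch with illustrative taxon numbers:

// singleton check plus the canonical set-bit iteration loop
BitSet taxa = new BitSet();
taxa.set(6);
if (taxa.cardinality() != 1) {
    throw new IllegalStateException("Leaf taxa cardinality: " + taxa.cardinality());
}
int taxon = taxa.nextSetBit(0); // index of the single set bit, here 6

BitSet many = new BitSet();
many.set(2);
many.set(4);
many.set(9);
for (int t = many.nextSetBit(0); t != -1; t = many.nextSetBit(t + 1)) {
    System.out.println("taxon " + t); // visits 2, 4, 9 in order
}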
From source file:com.joliciel.jochre.graphics.LineSegmentImplTest.java
@Test
public void testGetEnclosingRectangleDoubleDiagonal(@NonStrict final Shape shape) {
    LineDefinitionImpl lineDef = new LineDefinitionImpl(1, 0);
    List<Integer> steps = new ArrayList<Integer>();
    steps.add(2);
    lineDef.setSteps(steps);

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(8);
            shape.getWidth();
            returns(8);
        }
    };

    LineSegmentImpl lineSegment = new LineSegmentImpl(shape, lineDef, 5, 2, 3, 6);
    lineSegment.setLength(4);
    BitSet rectangle = lineSegment.getEnclosingRectangle(1);

    int[] bitsetPixels = {
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 1, 1, 1, 0,
            0, 0, 0, 1, 1, 1, 0, 0,
            0, 0, 0, 1, 1, 1, 0, 0,
            0, 0, 1, 1, 1, 0, 0, 0,
            0, 0, 1, 1, 1, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0 };

    for (int x = 0; x < 8; x++)
        for (int y = 0; y < 8; y++) {
            assertEquals("failure at x=" + x + ",y=" + y, bitsetPixels[y * 8 + x] == 1,
                    rectangle.get(y * 8 + x));
        }

    assertEquals(3 * (lineSegment.getLength() + 1), rectangle.cardinality());
}
From source file:com.joliciel.jochre.graphics.LineSegmentImplTest.java
@Test
public void testGetEnclosingRectangleDiagonal(@NonStrict final Shape shape) {
    LineDefinitionImpl lineDef = new LineDefinitionImpl(0, 0);
    List<Integer> steps = new ArrayList<Integer>();
    steps.add(1);
    steps.add(2);
    lineDef.setSteps(steps);

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(8);
            shape.getWidth();
            returns(8);
        }
    };

    LineSegmentImpl lineSegment = new LineSegmentImpl(shape, lineDef, 5, 2, 1, 5);
    lineSegment.setLength(4);
    BitSet rectangle = lineSegment.getEnclosingRectangle(1);

    int[] bitsetPixels = {
            0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 1, 0, 0,
            0, 0, 0, 1, 1, 1, 0, 0,
            0, 0, 1, 1, 1, 1, 0, 0,
            0, 1, 1, 1, 1, 0, 0, 0,
            0, 1, 1, 0, 0, 0, 0, 0,
            0, 1, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0 };

    for (int y = 0; y < 8; y++) {
        for (int x = 0; x < 8; x++) {
            assertEquals("failure at x=" + x + ",y=" + y, bitsetPixels[y * 8 + x] == 1,
                    rectangle.get(y * 8 + x));
        }
    }

    assertEquals(3 * (lineSegment.getLength() + 1), rectangle.cardinality());
}