List of usage examples for java.util.BitSet.set
public void set(int bitIndex, boolean value)
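All of the examples on this page exercise the two-argument overload, BitSet.set(int bitIndex, boolean value). As a quick orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) showing that overload next to the single-index and range overloads:

import java.util.BitSet;

public class BitSetSetExample {
    public static void main(String[] args) {
        BitSet bits = new BitSet(16);
        bits.set(3);              // set bit 3 to true
        bits.set(5, true);        // set bit 5 to an explicit boolean value
        bits.set(5, false);       // clear bit 5 again
        bits.set(8, 12);          // set bits 8..11 (toIndex is exclusive)
        System.out.println(bits); // prints {3, 8, 9, 10, 11}
    }
}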
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileMTimes(final DataOutput header) throws IOException {
    int numLastModifiedDates = 0;
    for (final SevenZArchiveEntry entry : files) {
        if (entry.getHasLastModifiedDate()) {
            ++numLastModifiedDates;
        }
    }
    if (numLastModifiedDates > 0) {
        header.write(NID.kMTime);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        if (numLastModifiedDates != files.size()) {
            out.write(0);
            final BitSet mTimes = new BitSet(files.size());
            for (int i = 0; i < files.size(); i++) {
                mTimes.set(i, files.get(i).getHasLastModifiedDate());
            }
            writeBits(out, mTimes, files.size());
        } else {
            out.write(1); // "allAreDefined" == true
        }
        out.write(0);
        for (final SevenZArchiveEntry entry : files) {
            if (entry.getHasLastModifiedDate()) {
                out.writeLong(Long.reverseBytes(
                        SevenZArchiveEntry.javaTimeToNtfsTime(entry.getLastModifiedDate())));
            }
        }
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}
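A side note on the output format used above: DataOutputStream.writeLong always writes big-endian, so the 7z writer reverses the bytes first to get the little-endian layout the archive format expects. A tiny standalone sketch of that trick (the timestamp value is made up; javaTimeToNtfsTime is not reproduced here):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class LittleEndianLongSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baos);

        long ntfsTime = 0x0102030405060708L;         // hypothetical NTFS timestamp
        out.writeLong(Long.reverseBytes(ntfsTime));  // writeLong is big-endian, so reverse first

        byte[] bytes = baos.toByteArray();
        System.out.printf("%02x %02x ... %02x%n", bytes[0], bytes[1], bytes[7]); // 08 07 ... 01
    }
}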
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileWindowsAttributes(final DataOutput header) throws IOException {
    int numWindowsAttributes = 0;
    for (final SevenZArchiveEntry entry : files) {
        if (entry.getHasWindowsAttributes()) {
            ++numWindowsAttributes;
        }
    }
    if (numWindowsAttributes > 0) {
        header.write(NID.kWinAttributes);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        if (numWindowsAttributes != files.size()) {
            out.write(0);
            final BitSet attributes = new BitSet(files.size());
            for (int i = 0; i < files.size(); i++) {
                attributes.set(i, files.get(i).getHasWindowsAttributes());
            }
            writeBits(out, attributes, files.size());
        } else {
            out.write(1); // "allAreDefined" == true
        }
        out.write(0);
        for (final SevenZArchiveEntry entry : files) {
            if (entry.getHasWindowsAttributes()) {
                out.writeInt(Integer.reverseBytes(entry.getWindowsAttributes()));
            }
        }
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}
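Both SevenZOutputFile methods above follow the same pattern: build a BitSet recording which entries define a property via set(i, flag), then either emit an "allAreDefined" marker or serialize the bit vector. The following condensed sketch illustrates that pattern in isolation; the bit packing is loosely modeled on the writeBits helper (not shown in these excerpts) and does not claim to reproduce the exact 7z layout:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;

public class DefinedBitsSketch {
    /** Writes 1 if every flag is set, otherwise 0 followed by the flags packed MSB-first. */
    static void writeDefinedFlags(DataOutputStream out, List<Boolean> flags) throws IOException {
        BitSet defined = new BitSet(flags.size());
        for (int i = 0; i < flags.size(); i++) {
            defined.set(i, flags.get(i));            // BitSet.set(int, boolean)
        }
        if (defined.cardinality() == flags.size()) {
            out.write(1);                            // "allAreDefined" == true
        } else {
            out.write(0);
            int cache = 0, shift = 7;
            for (int i = 0; i < flags.size(); i++) {
                cache |= (defined.get(i) ? 1 : 0) << shift;
                if (--shift < 0) { out.write(cache); cache = 0; shift = 7; }
            }
            if (shift != 7) { out.write(cache); }    // flush the last partial byte
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writeDefinedFlags(new DataOutputStream(baos), Arrays.asList(true, false, true));
        System.out.println(baos.toByteArray().length); // 2 bytes: marker + one packed byte
    }
}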
From source file:org.wso2.andes.kernel.router.TopicRoutingMatcher.java
/**
 * Removes a storageQueue from the structure.
 *
 * @param storageQueue The storageQueue to remove
 */
public void removeStorageQueue(StorageQueue storageQueue) {
    int queueIndex = storageQueueList.indexOf(storageQueue);
    if (queueIndex > -1) {
        for (Map<String, BitSet> constituentTable : constituentTables) {
            for (Map.Entry<String, BitSet> constituentRow : constituentTable.entrySet()) {
                // For every row create a new BitSet with the values for the removed storageQueue removed
                String constituent = constituentRow.getKey();
                BitSet bitSet = constituentRow.getValue();
                BitSet newBitSet = new BitSet();
                int bitIndex = 0;
                for (int i = 0; i < bitSet.size(); i++) {
                    if (bitIndex == queueIndex) {
                        // If this is the index to remove then skip this round
                        bitIndex++;
                    }
                    newBitSet.set(i, bitSet.get(bitIndex));
                    bitIndex++;
                }
                constituentTable.put(constituent, newBitSet);
            }
        }
        // Remove the storageQueue from the storageQueue list
        storageQueueList.remove(queueIndex);
    } else {
        log.warn("Storage queue with name : " + storageQueue.getName() + " is not found to remove");
    }
}
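The copy loop above (which reappears almost verbatim in the two subscription-store examples further down) drops one position from a BitSet by copying with a skipped index. The same effect can be written more directly with nextSetBit, as in this standalone sketch; the helper name removeBitAt is mine, not part of the Andes code:

import java.util.BitSet;

public class RemoveBitSketch {
    /** Returns a copy of bits with the bit at removeIndex dropped and higher bits shifted down by one. */
    static BitSet removeBitAt(BitSet bits, int removeIndex) {
        BitSet result = new BitSet();
        for (int i = bits.nextSetBit(0); i >= 0; i = bits.nextSetBit(i + 1)) {
            if (i < removeIndex) {
                result.set(i);
            } else if (i > removeIndex) {
                result.set(i - 1);       // shift down past the removed position
            }                            // i == removeIndex is simply dropped
        }
        return result;
    }

    public static void main(String[] args) {
        BitSet bits = new BitSet();
        bits.set(0, true);
        bits.set(2, true);
        bits.set(3, true);
        System.out.println(removeBitAt(bits, 2)); // prints {0, 2} (old bit 3 became bit 2)
    }
}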
From source file:MSUmpire.SpectrumParser.mzXMLParser.java
@Override
public ScanCollection GetAllScanCollectionByMSLabel(boolean MS1Included, boolean MS2Included,
        boolean MS1Peak, boolean MS2Peak, float startTime, float endTime) {
    ScanCollection scanCollection = InitializeScanCollection();
    Logger.getRootLogger().debug("Memory usage before loading scans:"
            + Math.round((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
            + "MB (" + NoCPUs + " threads)");
    // ArrayList<Integer> IncludedScans = new ArrayList<>();
    final IntArrayList IncludedScans = new IntArrayList();
    for (int ScanNum : MsLevelList.keySet()) {
        if (MsLevelList.get(ScanNum) == 1 && MS1Included) {
            IncludedScans.add(ScanNum);
        }
        if (MsLevelList.get(ScanNum) == 2 && MS2Included) {
            IncludedScans.add(ScanNum);
        }
    }
    List<MzXMLthreadUnit> ScanList = null;
    int StartScanNo = 0;
    int EndScanNo = 0;
    StartScanNo = GetStartScan(startTime);
    EndScanNo = GetEndScan(endTime);
    // ArrayList<Integer> temp = new ArrayList<>();
    final BitSet temp = new BitSet();
    // for (int scannum : IncludedScans) {
    for (int i = 0; i < IncludedScans.size(); ++i) {
        final int scannum = IncludedScans.get(i);
        if (scannum >= StartScanNo && scannum <= EndScanNo) {
            temp.set(scannum, true);
        }
    }
    ScanList = ParseScans(temp);
    for (MzXMLthreadUnit result : ScanList) {
        scanCollection.AddScan(result.scan);
    }
    ScanList.clear();
    ScanList = null;
    System.gc();
    Logger.getRootLogger().debug("Memory usage after loading scans:"
            + Math.round((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
            + "MB");
    return scanCollection;
}
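Here the BitSet replaces the commented-out ArrayList<Integer> as a membership mask keyed directly by scan number: set(scanNum, true) marks a scan as selected, and the consumer only needs get/nextSetBit to walk the selection. A minimal sketch of that idea with made-up scan numbers and window bounds:

import java.util.BitSet;

public class ScanMaskSketch {
    public static void main(String[] args) {
        int[] includedScans = {3, 7, 12, 25, 40};   // hypothetical scan numbers
        int startScanNo = 5, endScanNo = 30;        // hypothetical retention-time window

        BitSet mask = new BitSet();
        for (int scanNum : includedScans) {
            if (scanNum >= startScanNo && scanNum <= endScanNo) {
                mask.set(scanNum, true);            // mark this scan number as selected
            }
        }

        // Consumers can iterate the selected scan numbers directly from the mask.
        for (int s = mask.nextSetBit(0); s >= 0; s = mask.nextSetBit(s + 1)) {
            System.out.println("parse scan " + s);  // prints 7, 12, 25
        }
    }
}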
From source file:org.wso2.andes.subscription.TopicSubscriptionBitMapStore.java
/**
 * Removes a subscription from the structure.
 *
 * @param subscription The subscription to remove
 */
@Override
public void removeSubscription(AndesSubscription subscription) {
    int subscriptionIndex = subscriptionList.indexOf(subscription);
    if (subscriptionIndex > -1) {
        for (Map<String, BitSet> constituentTable : constituentTables) {
            for (Map.Entry<String, BitSet> constituentRow : constituentTable.entrySet()) {
                // For every row create a new BitSet with the values for the removed subscription removed
                String constituent = constituentRow.getKey();
                BitSet bitSet = constituentRow.getValue();
                BitSet newBitSet = new BitSet();
                int bitIndex = 0;
                for (int i = 0; i < bitSet.size(); i++) {
                    if (bitIndex == subscriptionIndex) {
                        // If this is the index to remove then skip this round
                        bitIndex++;
                    }
                    newBitSet.set(i, bitSet.get(bitIndex));
                    bitIndex++;
                }
                constituentTable.put(constituent, newBitSet);
            }
        }
        // Remove the subscription from the subscription list
        subscriptionList.remove(subscriptionIndex);
    } else {
        log.warn("Subscription for destination : " + subscription.getSubscribedDestination()
                + " is not found to remove");
    }
}
From source file:org.apache.tez.runtime.library.common.sort.impl.dflt.TestDefaultSorter.java
public void testEmptyCaseFileLengthsHelper(int numPartitions, String[] keys, String[] values)
        throws IOException {
    OutputContext context = createTezOutputContext();
    MemoryUpdateCallbackHandler handler = new MemoryUpdateCallbackHandler();
    conf.setInt(TezRuntimeConfiguration.TEZ_RUNTIME_IO_SORT_MB, 1);
    context.requestInitialMemory(
            ExternalSorter.getInitialMemoryRequirement(conf, context.getTotalMemoryAvailableToTask()), handler);
    String auxService = conf.get(TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID,
            TezConfiguration.TEZ_AM_SHUFFLE_AUXILIARY_SERVICE_ID_DEFAULT);
    SorterWrapper sorterWrapper = new SorterWrapper(context, conf, numPartitions, handler.getMemoryAssigned());
    DefaultSorter sorter = sorterWrapper.getSorter();

    assertEquals("Key and Values must have the same number of elements", keys.length, values.length);
    BitSet keyRLEs = new BitSet(keys.length);
    for (int i = 0; i < keys.length; i++) {
        boolean isRLE = sorterWrapper.writeKeyValue(new Text(keys[i]), new Text(values[i]));
        keyRLEs.set(i, isRLE);
    }
    sorterWrapper.close();

    List<Event> events = new ArrayList<>();
    String pathComponent = (context.getUniqueIdentifier() + "_" + 0);
    ShuffleUtils.generateEventOnSpill(events, true, true, context, 0, sorter.indexCacheList.get(0), 0, true,
            pathComponent, sorter.getPartitionStats(), sorter.reportDetailedPartitionStats(), auxService,
            TezCommonUtils.newBestCompressionDeflater());

    CompositeDataMovementEvent compositeDataMovementEvent = (CompositeDataMovementEvent) events.get(1);
    ByteBuffer bb = compositeDataMovementEvent.getUserPayload();
    ShuffleUserPayloads.DataMovementEventPayloadProto shufflePayload =
            ShuffleUserPayloads.DataMovementEventPayloadProto.parseFrom(ByteString.copyFrom(bb));

    if (shufflePayload.hasEmptyPartitions()) {
        byte[] emptyPartitionsBytesString =
                TezCommonUtils.decompressByteStringToByteArray(shufflePayload.getEmptyPartitions());
        BitSet emptyPartitionBitSet = TezUtilsInternal.fromByteArray(emptyPartitionsBytesString);
        Assert.assertEquals("Number of empty partitions did not match!", emptyPartitionBitSet.cardinality(),
                sorterWrapper.getEmptyPartitionsCount());
    } else {
        Assert.assertEquals(sorterWrapper.getEmptyPartitionsCount(), 0);
    }

    // Each non-empty partition adds 4 bytes for header, 2 bytes for EOF_MARKER, 4 bytes for checksum
    int expectedFileOutLength = sorterWrapper.getNonEmptyPartitionsCount() * 10;
    for (int i = 0; i < keys.length; i++) {
        // Each record adds 1 byte for key length, 1 byte Text overhead (length), key.length bytes for key
        expectedFileOutLength += keys[i].length() + 2;
        // Each record adds 1 byte for value length, 1 byte Text overhead (length), value.length bytes for value
        expectedFileOutLength += values[i].length() + 2;
    }
    assertEquals("Unexpected Output File Size!",
            localFs.getFileStatus(sorter.getFinalOutputFile()).getLen(), expectedFileOutLength);
    assertEquals(sorter.getNumSpills(), 1);
    verifyCounters(sorter, context);
}
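The test above rebuilds an empty-partition BitSet from bytes with Tez-internal helpers (TezUtilsInternal.fromByteArray). For reference, the JDK itself can round-trip a BitSet through a byte array via toByteArray/valueOf, as in the sketch below; note the JDK packs bits little-endian, which is not necessarily the layout Tez's helpers use:

import java.util.BitSet;

public class EmptyPartitionsRoundTrip {
    public static void main(String[] args) {
        BitSet emptyPartitions = new BitSet(8);
        emptyPartitions.set(1, true);   // partitions 1 and 5 produced no records
        emptyPartitions.set(5, true);

        byte[] wire = emptyPartitions.toByteArray();  // serialize for the event payload
        BitSet decoded = BitSet.valueOf(wire);        // decode on the consumer side

        System.out.println(decoded);                  // prints {1, 5}
        System.out.println(decoded.cardinality());    // prints 2
    }
}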
From source file:org.wso2.andes.subscription.ClusterSubscriptionBitMapHandler.java
/**
 * Removes a subscription from the structure.
 *
 * @param subscription The subscription to remove
 */
@Override
public void removeWildCardSubscription(AndesSubscription subscription) {
    int subscriptionIndex = wildCardSubscriptionList.indexOf(subscription);
    if (subscriptionIndex > -1) {
        for (Map<String, BitSet> constituentTable : constituentTables) {
            for (Map.Entry<String, BitSet> constituentRow : constituentTable.entrySet()) {
                // For every row create a new BitSet with the values for the removed subscription removed
                String constituent = constituentRow.getKey();
                BitSet bitSet = constituentRow.getValue();
                BitSet newBitSet = new BitSet();
                int bitIndex = 0;
                for (int i = 0; i < bitSet.size(); i++) {
                    if (bitIndex == subscriptionIndex) {
                        // If this is the index to remove then skip this round
                        bitIndex++;
                    }
                    newBitSet.set(i, bitSet.get(bitIndex));
                    bitIndex++;
                }
                constituentTable.put(constituent, newBitSet);
            }
        }
        // Remove the subscription from the wildcard subscription list
        wildCardSubscriptionList.remove(subscriptionIndex);
    } else {
        log.warn("Subscription for destination : " + subscription.getSubscribedDestination()
                + " is not found to remove");
    }
}
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileEmptyFiles(final DataOutput header) throws IOException {
    boolean hasEmptyFiles = false;
    int emptyStreamCounter = 0;
    final BitSet emptyFiles = new BitSet(0);
    for (final SevenZArchiveEntry file1 : files) {
        if (!file1.hasStream()) {
            final boolean isDir = file1.isDirectory();
            emptyFiles.set(emptyStreamCounter++, !isDir);
            hasEmptyFiles |= !isDir;
        }
    }
    if (hasEmptyFiles) {
        header.write(NID.kEmptyFile);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        writeBits(out, emptyFiles, emptyStreamCounter);
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileAntiItems(final DataOutput header) throws IOException {
    boolean hasAntiItems = false;
    final BitSet antiItems = new BitSet(0);
    int antiItemCounter = 0;
    for (final SevenZArchiveEntry file1 : files) {
        if (!file1.hasStream()) {
            final boolean isAnti = file1.isAntiItem();
            antiItems.set(antiItemCounter++, isAnti);
            hasAntiItems |= isAnti;
        }
    }
    if (hasAntiItems) {
        header.write(NID.kAnti);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        writeBits(out, antiItems, antiItemCounter);
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}
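Note that writeFileEmptyFiles and writeFileAntiItems both start from new BitSet(0): the constructor argument is only a sizing hint, and set() grows the bit vector on demand, so flags can simply be appended with a running counter. A tiny sketch of that behaviour:

import java.util.BitSet;

public class GrowableBitSetSketch {
    public static void main(String[] args) {
        BitSet flags = new BitSet(0);                // initial size hint of zero
        int counter = 0;
        for (boolean isEmptyFile : new boolean[] { true, false, true }) {
            flags.set(counter++, isEmptyFile);       // BitSet grows automatically as needed
        }
        System.out.println(flags);                   // prints {0, 2}
        System.out.println(flags.length());          // prints 3 (highest set bit + 1)
    }
}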
From source file:dendroscope.autumn.hybridnetwork.ComputeHybridizationNetwork.java
/**
 * Recursively computes the hybrid number.
 *
 * @param root1
 * @param root2
 * @param isReduced
 * @param candidateHybridsOriginal
 * @param k
 * @param totalResults
 * @param depth
 * @return hybrid number
 */
private int computeRec(Root root1, Root root2, boolean isReduced, BitSet candidateHybridsOriginal, int k,
        Collection<Root> totalResults, String depth) throws IOException, CanceledException {
    if (verbose) {
        System.err.println(depth + "---------- ComputeRec:");
        System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
        System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
    }
    if (System.currentTimeMillis() > nextTime) {
        progressListener.incrementProgress();
        nextTime += waitTime;
        waitTime *= 1.5;
    } else
        progressListener.checkForCancel();
    // root1.reorderSubTree();
    // root2.reorderSubTree();
    if (checking) {
        root1.checkTree();
        root2.checkTree();
        if (!root2.getTaxa().equals(root1.getTaxa()))
            throw new RuntimeException("Unequal taxon sets: X=" + Basic.toString(root1.getTaxa()) + " vs "
                    + Basic.toString(root2.getTaxa()));
    }
    if (!isReduced) {
        // 1. try to perform a subtree reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            List<Pair<Root, Root>> reducedSubtreePairs = new LinkedList<Pair<Root, Root>>();
            switch (SubtreeReduction.apply(root1, root2, reducedSubtreePairs, placeHolderTaxon)) {
            case ISOMORPHIC:
                Root isomorphicTree = MergeIsomorphicInducedTrees.apply(root1, root2);
                if (verbose) {
                    System.err.println(depth + "Trees are isomorphic");
                    System.err.println(depth + "Isomorphic tree: " + isomorphicTree.toStringFullTreeX());
                }
                totalResults.add(isomorphicTree);
                return 0; // two trees are isomorphic, no hybrid node needed
            case REDUCED: // a reduction was performed, cannot maintain lexicographical ordering in removal loop below
                List<Root> subTrees = new LinkedList<Root>();
                for (Pair<Root, Root> pair : reducedSubtreePairs) {
                    subTrees.add(MergeIsomorphicInducedTrees.apply(pair.getFirst(), pair.getSecond()));
                }
                if (verbose) {
                    System.err.println(depth + "Trees are reducible:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    for (Root root : subTrees) {
                        System.err.println(depth + "Merged reduced subtree: " + root.toStringFullTreeX());
                    }
                }
                BitSet candidateHybrids;
                if (false)
                    candidateHybrids = getAllAliveTaxa(root1, root2); // need to reconsider all possible hybrids
                else {
                    candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                    candidateHybrids.set(placeHolderTaxon.get(), true);
                }
                Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(root1, root2, false, candidateHybrids, k, currentResults, depth + " >");
                List<Root> merged = MergeNetworks.apply(currentResults, subTrees);
                if (verbose) {
                    for (Root r : merged) {
                        System.err.println(depth + "Result-merged: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(fixOrdering(merged));
                return h;
            case IRREDUCIBLE:
                if (verbose)
                    System.err.println(depth + "Trees are subtree-irreducible");
                break;
            }
        }
        // 2. try to perform a cluster reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            Pair<Root, Root> clusterTrees = ClusterReduction.apply(root1, root2, placeHolderTaxon);
            if (clusterTrees != null) {
                Set<Root> resultBottomPair = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(clusterTrees.getFirst(), clusterTrees.getSecond(), true,
                        candidateHybridsOriginal, k, resultBottomPair, depth + " >");
                // for the top pair, we should reconsider the place holder in the top pair as a possible place holder
                BitSet candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                candidateHybrids.set(placeHolderTaxon.get(), true);
                Set<Root> resultTopPair = new TreeSet<Root>(new NetworkComparator());
                h += cacheComputeRec(root1, root2, false, candidateHybrids, k - h, resultTopPair, depth + " >");
                Set<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                for (Root r : resultBottomPair) {
                    currentResults.addAll(MergeNetworks.apply(resultTopPair, Arrays.asList(r)));
                }
                if (verbose) {
                    System.err.println(depth + "Cluster reduction applied:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    System.err.println(depth + "Subtree-1: " + clusterTrees.getFirst().toStringFullTreeX());
                    System.err.println(depth + "Subtree-2: " + clusterTrees.getSecond().toStringFullTreeX());
                    for (Root r : resultBottomPair) {
                        System.err.println(depth + "Results for reduced-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : resultTopPair) {
                        System.err.println(depth + "Results for sub-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : currentResults) {
                        System.err.println(depth + "Merged cluster-reduced networks: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(currentResults);
                clusterTrees.getFirst().deleteSubTree();
                clusterTrees.getSecond().deleteSubTree();
                return h;
            }
        }
    } else {
        if (verbose)
            System.err.println(depth + "Trees are already reduced");
    }
    if (k <= 0) // 1, if only interested in number or in finding only one network, 0 else
        return LARGE;
    int hBest = LARGE;
    List<Root> leaves1 = getAllAliveLeaves(root1);
    /*
    if (leaves1.size() <= 2) // try 2 rather than one...
    {
        totalResults.add(MergeNetworks.apply(root1, root2)); // todo: this needs to be fixed
        return 0;
    }
    */
    for (Root leaf2remove : leaves1) {
        BitSet taxa2remove = leaf2remove.getTaxa();
        if (taxa2remove.cardinality() != 1)
            throw new IOException(depth + "Leaf taxa cardinality: " + taxa2remove.cardinality());
        int hybridTaxon = taxa2remove.nextSetBit(0);
        if (candidateHybridsOriginal.get(hybridTaxon)) {
            if (verbose) {
                System.err.println(depth + "Removing: " + hybridTaxon);
                System.err.println(depth + "candidateHybrids: " + Basic.toString(candidateHybridsOriginal));
                System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
                System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
            }
            Root root1x = root1.copySubNetwork();
            Root root2x = root2.copySubNetwork();
            RemoveTaxon.apply(root1x, 1, hybridTaxon);
            RemoveTaxon.apply(root2x, 2, hybridTaxon); // now we keep removed taxa as separate sets
            if (verbose) {
                System.err.println(depth + "Tree1-x: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x: " + root2x.toStringFullTreeX());
            }
            Refine.apply(root1x, root2x);
            if (verbose) {
                System.err.println(depth + "Tree1-x-refined: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x-refined: " + root2x.toStringFullTreeX());
            }
            Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
            candidateHybridsOriginal.set(hybridTaxon, false);
            int h = cacheComputeRec(root1x, root2x, false, candidateHybridsOriginal, k - 1, currentResults,
                    depth + " >") + 1;
            candidateHybridsOriginal.set(hybridTaxon, true);
            if (h < k)
                k = h;
            // System.err.println("Subproblem with " + Basic.toString(taxa2remove) + " removed, h=" + h);
            if (h < hBest && h <= k) {
                hBest = h;
                totalResults.clear();
            }
            if (h == hBest && h <= k) {
                if (verbose) {
                    for (Root r : currentResults) {
                        System.err.println(depth + "Result: " + r.toStringNetworkFull());
                    }
                }
                // add the hybrid node:
                currentResults = copyAll(currentResults);
                AddHybridNode.apply(currentResults, hybridTaxon);
                totalResults.addAll(fixOrdering(currentResults));
            }
            root1x.deleteSubTree();
            root2x.deleteSubTree();
        }
    }
    return hBest;
}
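One BitSet.set idiom worth isolating from computeRec above: the shared candidateHybridsOriginal set is flipped off with set(hybridTaxon, false) before the recursive call and flipped back on with set(hybridTaxon, true) immediately afterwards, which avoids cloning the set at every level of the search. The following toy recursion (entirely hypothetical, not taken from Dendroscope) shows the same save/restore pattern:

import java.util.BitSet;

public class CandidateBitSketch {
    /** Toy recursion: counts paths explored while temporarily excluding one candidate at a time. */
    static int explore(BitSet candidates, int depth) {
        if (depth == 0 || candidates.isEmpty()) {
            return 1;
        }
        int count = 0;
        for (int c = candidates.nextSetBit(0); c >= 0; c = candidates.nextSetBit(c + 1)) {
            candidates.set(c, false);            // exclude this candidate for the recursive call
            count += explore(candidates, depth - 1);
            candidates.set(c, true);             // restore it so sibling branches see the original set
        }
        return count;
    }

    public static void main(String[] args) {
        BitSet candidates = new BitSet();
        candidates.set(0, 3);                    // candidates {0, 1, 2}
        System.out.println(explore(candidates, 2)); // prints 6 (3 choices, then 2 remaining each)
    }
}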