List of usage examples for java.util.BitSet
public BitSet(int nbits)
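Before the project examples below, here is a minimal sketch of the basic java.util.BitSet operations they all build on. The BitSet calls are standard JDK API; the class name and variable names in this sketch are illustrative only.

import java.util.BitSet;

public class BitSetBasics {
    public static void main(String[] args) {
        // A BitSet sized for 256 flags; it grows automatically if a higher index is set.
        BitSet flags = new BitSet(256);
        flags.set(10);            // turn on bit 10
        flags.set(20, true);      // explicit boolean form
        flags.clear(10);          // turn bit 10 back off

        System.out.println(flags.get(20));        // true
        System.out.println(flags.cardinality());  // number of set bits: 1

        // Iterate over the set bits
        for (int i = flags.nextSetBit(0); i >= 0; i = flags.nextSetBit(i + 1)) {
            System.out.println("bit " + i + " is set");
        }
    }
}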
From source file:org.apache.fop.fonts.truetype.TTFFile.java
private boolean readUnicodeCmap             // CSOK: MethodLength
        (long cmapUniOffset, int encodingID) throws IOException {
    //Read CMAP table and correct mtxTab.index
    int mtxPtr = 0;

    // Read unicode cmap
    seekTab(fontFile, TTFTableName.CMAP, cmapUniOffset);
    int cmapFormat = fontFile.readTTFUShort();
    /*int cmap_length =*/ fontFile.readTTFUShort(); //skip cmap length

    if (log.isDebugEnabled()) {
        log.debug("CMAP format: " + cmapFormat);
    }

    if (cmapFormat == 4) {
        fontFile.skip(2); // Skip version number
        int cmapSegCountX2 = fontFile.readTTFUShort();
        int cmapSearchRange = fontFile.readTTFUShort();
        int cmapEntrySelector = fontFile.readTTFUShort();
        int cmapRangeShift = fontFile.readTTFUShort();

        if (log.isDebugEnabled()) {
            log.debug("segCountX2 : " + cmapSegCountX2);
            log.debug("searchRange : " + cmapSearchRange);
            log.debug("entrySelector: " + cmapEntrySelector);
            log.debug("rangeShift : " + cmapRangeShift);
        }

        int[] cmapEndCounts = new int[cmapSegCountX2 / 2];
        int[] cmapStartCounts = new int[cmapSegCountX2 / 2];
        int[] cmapDeltas = new int[cmapSegCountX2 / 2];
        int[] cmapRangeOffsets = new int[cmapSegCountX2 / 2];

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapEndCounts[i] = fontFile.readTTFUShort();
        }

        fontFile.skip(2); // Skip reservedPad

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapStartCounts[i] = fontFile.readTTFUShort();
        }

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapDeltas[i] = fontFile.readTTFShort();
        }

        //int startRangeOffset = in.getCurrentPos();

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapRangeOffsets[i] = fontFile.readTTFUShort();
        }

        int glyphIdArrayOffset = fontFile.getCurrentPos();

        BitSet eightBitGlyphs = new BitSet(256);

        // Insert the unicode id for the glyphs in mtxTab
        // and fill in the cmaps ArrayList
        for (int i = 0; i < cmapStartCounts.length; i++) {

            if (log.isTraceEnabled()) {
                log.trace(i + ": " + cmapStartCounts[i] + " - " + cmapEndCounts[i]);
            }
            if (log.isDebugEnabled()) {
                if (isInPrivateUseArea(cmapStartCounts[i], cmapEndCounts[i])) {
                    log.debug("Font contains glyphs in the Unicode private use area: "
                            + Integer.toHexString(cmapStartCounts[i]) + " - "
                            + Integer.toHexString(cmapEndCounts[i]));
                }
            }

            for (int j = cmapStartCounts[i]; j <= cmapEndCounts[i]; j++) {

                // Update lastChar
                if (j < 256 && j > lastChar) {
                    lastChar = (short) j;
                }

                if (j < 256) {
                    eightBitGlyphs.set(j);
                }

                if (mtxPtr < mtxTab.length) {
                    int glyphIdx;
                    // the last character 65535 = .notdef
                    // may have a range offset
                    if (cmapRangeOffsets[i] != 0 && j != 65535) {
                        int glyphOffset = glyphIdArrayOffset
                                + ((cmapRangeOffsets[i] / 2)
                                + (j - cmapStartCounts[i])
                                + (i)
                                - cmapSegCountX2 / 2) * 2;
                        fontFile.seekSet(glyphOffset);
                        glyphIdx = (fontFile.readTTFUShort() + cmapDeltas[i]) & 0xffff;

                        unicodeMappings.add(new UnicodeMapping(glyphIdx, j));
                        mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));

                        // Also add winAnsiWidth
                        List<Integer> v = ansiIndex.get(new Integer(j));
                        if (v != null) {
                            for (Integer aIdx : v) {
                                ansiWidth[aIdx.intValue()] = mtxTab[glyphIdx].getWx();

                                if (log.isTraceEnabled()) {
                                    log.trace("Added width " + mtxTab[glyphIdx].getWx()
                                            + " uni: " + j
                                            + " ansi: " + aIdx.intValue());
                                }
                            }
                        }

                        if (log.isTraceEnabled()) {
                            log.trace("Idx: " + glyphIdx
                                    + " Delta: " + cmapDeltas[i]
                                    + " Unicode: " + j
                                    + " name: " + mtxTab[glyphIdx].getName());
                        }
                    } else {
                        glyphIdx = (j + cmapDeltas[i]) & 0xffff;

                        if (glyphIdx < mtxTab.length) {
                            mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));
                        } else {
                            log.debug("Glyph " + glyphIdx + " out of range: " + mtxTab.length);
                        }

                        unicodeMappings.add(new UnicodeMapping(glyphIdx, j));
                        if (glyphIdx < mtxTab.length) {
                            mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));
                        } else {
                            log.debug("Glyph " + glyphIdx + " out of range: " + mtxTab.length);
                        }

                        // Also add winAnsiWidth
                        List<Integer> v = ansiIndex.get(new Integer(j));
                        if (v != null) {
                            for (Integer aIdx : v) {
                                ansiWidth[aIdx.intValue()] = mtxTab[glyphIdx].getWx();
                            }
                        }

                        //getLogger().debug("IIdx: " +
                        //    mtxPtr +
                        //    " Delta: " + cmap_deltas[i] +
                        //    " Unicode: " + j +
                        //    " name: " +
                        //    mtxTab[(j+cmap_deltas[i]) & 0xffff].name);
                    }

                    if (glyphIdx < mtxTab.length) {
                        if (mtxTab[glyphIdx].getUnicodeIndex().size() < 2) {
                            mtxPtr++;
                        }
                    }
                }
            }
        }
    } else {
        log.error("Cmap format not supported: " + cmapFormat);
        return false;
    }
    return true;
}
From source file:org.apache.fop.fonts.truetype.OpenFont.java
private boolean readUnicodeCmap(long cmapUniOffset, int encodingID) throws IOException {
    //Read CMAP table and correct mtxTab.index
    int mtxPtr = 0;

    // Read unicode cmap
    seekTab(fontFile, OFTableName.CMAP, cmapUniOffset);
    int cmapFormat = fontFile.readTTFUShort();
    /*int cmap_length =*/ fontFile.readTTFUShort(); //skip cmap length

    if (log.isDebugEnabled()) {
        log.debug("CMAP format: " + cmapFormat);
    }

    if (cmapFormat == 4) {
        fontFile.skip(2); // Skip version number
        int cmapSegCountX2 = fontFile.readTTFUShort();
        int cmapSearchRange = fontFile.readTTFUShort();
        int cmapEntrySelector = fontFile.readTTFUShort();
        int cmapRangeShift = fontFile.readTTFUShort();

        if (log.isDebugEnabled()) {
            log.debug("segCountX2 : " + cmapSegCountX2);
            log.debug("searchRange : " + cmapSearchRange);
            log.debug("entrySelector: " + cmapEntrySelector);
            log.debug("rangeShift : " + cmapRangeShift);
        }

        int[] cmapEndCounts = new int[cmapSegCountX2 / 2];
        int[] cmapStartCounts = new int[cmapSegCountX2 / 2];
        int[] cmapDeltas = new int[cmapSegCountX2 / 2];
        int[] cmapRangeOffsets = new int[cmapSegCountX2 / 2];

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapEndCounts[i] = fontFile.readTTFUShort();
        }

        fontFile.skip(2); // Skip reservedPad

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapStartCounts[i] = fontFile.readTTFUShort();
        }

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapDeltas[i] = fontFile.readTTFShort();
        }

        //int startRangeOffset = in.getCurrentPos();

        for (int i = 0; i < (cmapSegCountX2 / 2); i++) {
            cmapRangeOffsets[i] = fontFile.readTTFUShort();
        }

        int glyphIdArrayOffset = fontFile.getCurrentPos();

        BitSet eightBitGlyphs = new BitSet(256);

        // Insert the unicode id for the glyphs in mtxTab
        // and fill in the cmaps ArrayList
        for (int i = 0; i < cmapStartCounts.length; i++) {

            if (log.isTraceEnabled()) {
                log.trace(i + ": " + cmapStartCounts[i] + " - " + cmapEndCounts[i]);
            }
            if (log.isDebugEnabled()) {
                if (isInPrivateUseArea(cmapStartCounts[i], cmapEndCounts[i])) {
                    log.debug("Font contains glyphs in the Unicode private use area: "
                            + Integer.toHexString(cmapStartCounts[i]) + " - "
                            + Integer.toHexString(cmapEndCounts[i]));
                }
            }

            for (int j = cmapStartCounts[i]; j <= cmapEndCounts[i]; j++) {

                // Update lastChar
                if (j < 256 && j > lastChar) {
                    lastChar = (short) j;
                }

                if (j < 256) {
                    eightBitGlyphs.set(j);
                }

                if (mtxPtr < mtxTab.length) {
                    int glyphIdx;
                    // the last character 65535 = .notdef
                    // may have a range offset
                    if (cmapRangeOffsets[i] != 0 && j != 65535) {
                        int glyphOffset = glyphIdArrayOffset
                                + ((cmapRangeOffsets[i] / 2)
                                + (j - cmapStartCounts[i])
                                + (i)
                                - cmapSegCountX2 / 2) * 2;
                        fontFile.seekSet(glyphOffset);
                        glyphIdx = (fontFile.readTTFUShort() + cmapDeltas[i]) & 0xffff;

                        //mtxTab[glyphIdx].setName(mtxTab[glyphIdx].getName() + " - "+(char)j);
                        unicodeMappings.add(new UnicodeMapping(this, glyphIdx, j));
                        mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));

                        if (encodingID == 0 && j >= 0xF020 && j <= 0xF0FF) {
                            //Experimental: Mapping 0xF020-0xF0FF to 0x0020-0x00FF
                            //Tested with Wingdings and Symbol TTF fonts which map their
                            //glyphs in the region 0xF020-0xF0FF.
                            int mapped = j - 0xF000;
                            if (!eightBitGlyphs.get(mapped)) {
                                //Only map if Unicode code point hasn't been mapped before
                                unicodeMappings.add(new UnicodeMapping(this, glyphIdx, mapped));
                                mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(mapped));
                            }
                        }

                        // Also add winAnsiWidth
                        List<Integer> v = ansiIndex.get(new Integer(j));
                        if (v != null) {
                            for (Integer aIdx : v) {
                                ansiWidth[aIdx.intValue()] = mtxTab[glyphIdx].getWx();

                                if (log.isTraceEnabled()) {
                                    log.trace("Added width " + mtxTab[glyphIdx].getWx()
                                            + " uni: " + j
                                            + " ansi: " + aIdx.intValue());
                                }
                            }
                        }

                        if (log.isTraceEnabled()) {
                            log.trace("Idx: " + glyphIdx
                                    + " Delta: " + cmapDeltas[i]
                                    + " Unicode: " + j
                                    + " name: " + mtxTab[glyphIdx].getName());
                        }
                    } else {
                        glyphIdx = (j + cmapDeltas[i]) & 0xffff;

                        if (glyphIdx < mtxTab.length) {
                            mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));
                        } else {
                            log.debug("Glyph " + glyphIdx + " out of range: " + mtxTab.length);
                        }

                        unicodeMappings.add(new UnicodeMapping(this, glyphIdx, j));
                        if (glyphIdx < mtxTab.length) {
                            mtxTab[glyphIdx].getUnicodeIndex().add(new Integer(j));
                        } else {
                            log.debug("Glyph " + glyphIdx + " out of range: " + mtxTab.length);
                        }

                        // Also add winAnsiWidth
                        List<Integer> v = ansiIndex.get(new Integer(j));
                        if (v != null) {
                            for (Integer aIdx : v) {
                                ansiWidth[aIdx.intValue()] = mtxTab[glyphIdx].getWx();
                            }
                        }

                        //getLogger().debug("IIdx: " +
                        //    mtxPtr +
                        //    " Delta: " + cmap_deltas[i] +
                        //    " Unicode: " + j +
                        //    " name: " +
                        //    mtxTab[(j+cmap_deltas[i]) & 0xffff].name);
                    }

                    if (glyphIdx < mtxTab.length) {
                        if (mtxTab[glyphIdx].getUnicodeIndex().size() < 2) {
                            mtxPtr++;
                        }
                    }
                }
            }
        }
    } else {
        log.error("Cmap format not supported: " + cmapFormat);
        return false;
    }
    return true;
}
From source file:org.apache.hyracks.algebricks.rewriter.rules.ExtractCommonOperatorsRule.java
private void candidatesGrow(List<Mutable<ILogicalOperator>> opList,
        List<Mutable<ILogicalOperator>> candidates) {
    List<Mutable<ILogicalOperator>> previousCandidates = new ArrayList<Mutable<ILogicalOperator>>();
    previousCandidates.addAll(candidates);
    candidates.clear();
    boolean validCandidate = false;
    for (Mutable<ILogicalOperator> op : opList) {
        List<Mutable<ILogicalOperator>> inputs = op.getValue().getInputs();
        for (int i = 0; i < inputs.size(); i++) {
            Mutable<ILogicalOperator> inputRef = inputs.get(i);
            validCandidate = false;
            for (Mutable<ILogicalOperator> candidate : previousCandidates) {
                // if current input is in candidates
                if (inputRef.getValue().equals(candidate.getValue())) {
                    if (inputs.size() == 1) {
                        validCandidate = true;
                    } else {
                        BitSet candidateInputBitMap = opToCandidateInputs.get(op);
                        if (candidateInputBitMap == null) {
                            candidateInputBitMap = new BitSet(inputs.size());
                            opToCandidateInputs.put(op, candidateInputBitMap);
                        }
                        candidateInputBitMap.set(i);
                        if (candidateInputBitMap.cardinality() == inputs.size()) {
                            validCandidate = true;
                        }
                    }
                    break;
                }
            }
        }
        if (!validCandidate) {
            continue;
        }
        if (!candidates.contains(op)) {
            candidates.add(op);
        }
    }
}
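The pattern above, one bit per operator input with cardinality() compared against the input count, can be shown in isolation. The following is a minimal sketch, not Hyracks code; the InputTracker class and markSatisfied method are invented for the illustration.

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

class InputTracker {
    private final Map<String, BitSet> satisfied = new HashMap<>();

    // Mark input index i of the given operator as satisfied and report
    // whether all of its inputs are now covered.
    boolean markSatisfied(String opId, int inputIndex, int inputCount) {
        BitSet bits = satisfied.computeIfAbsent(opId, k -> new BitSet(inputCount));
        bits.set(inputIndex);
        return bits.cardinality() == inputCount;
    }
}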
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileAntiItems(final DataOutput header) throws IOException {
    boolean hasAntiItems = false;
    final BitSet antiItems = new BitSet(0);
    int antiItemCounter = 0;
    for (final SevenZArchiveEntry file1 : files) {
        if (!file1.hasStream()) {
            final boolean isAnti = file1.isAntiItem();
            antiItems.set(antiItemCounter++, isAnti);
            hasAntiItems |= isAnti;
        }
    }
    if (hasAntiItems) {
        header.write(NID.kAnti);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        writeBits(out, antiItems, antiItemCounter);
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}
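A reduced version of the same idea, accumulating one boolean per entry into a BitSet and skipping the write when no bit is set, can be sketched independently of the 7z format. FlagCollector and packAntiFlags are invented names; note that BitSet.toByteArray() uses its own little-endian bit layout, which is not necessarily what a given file format expects, so the original code relies on its writeBits helper instead.

import java.util.BitSet;
import java.util.List;

class FlagCollector {
    // Returns the packed flags, or null if no entry had the flag set.
    static byte[] packAntiFlags(List<Boolean> isAnti) {
        BitSet bits = new BitSet(isAnti.size());
        boolean any = false;
        for (int i = 0; i < isAnti.size(); i++) {
            bits.set(i, isAnti.get(i));   // set(index, value) records the flag per entry
            any |= isAnti.get(i);
        }
        return any ? bits.toByteArray() : null;
    }
}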
From source file:org.springframework.kafka.listener.KafkaMessageListenerContainerTests.java
@Test
public void testSlowConsumerWithSlowThenExceptionThenGood() throws Exception {
    logger.info("Start " + this.testName.getMethodName());
    Map<String, Object> props = KafkaTestUtils.consumerProps("slow4", "false", embeddedKafka);
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic4);
    final CountDownLatch latch = new CountDownLatch(18);
    final BitSet bitSet = new BitSet(6);
    final Map<String, AtomicInteger> faults = new HashMap<>();
    RetryingMessageListenerAdapter<Integer, String> adapter = new RetryingMessageListenerAdapter<>(
            new MessageListener<Integer, String>() {

                @Override
                public void onMessage(ConsumerRecord<Integer, String> message) {
                    logger.info("slow4: " + message);
                    bitSet.set((int) (message.partition() * 4 + message.offset()));
                    String key = message.topic() + message.partition() + message.offset();
                    if (faults.get(key) == null) {
                        faults.put(key, new AtomicInteger(1));
                    } else {
                        faults.get(key).incrementAndGet();
                    }
                    latch.countDown(); // 3 per = 18
                    if (faults.get(key).get() == 1) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    }
                    if (faults.get(key).get() < 3) { // succeed on the third attempt
                        throw new FooEx();
                    }
                }

            }, buildRetry(), null);
    containerProps.setMessageListener(adapter);
    containerProps.setPauseAfter(100);
    KafkaMessageListenerContainer<Integer, String> container = new KafkaMessageListenerContainer<>(cf,
            containerProps);
    container.setBeanName("testSlow4");
    container.start();
    Consumer<?, ?> consumer = spyOnConsumer(container);
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic4);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Thread.sleep(300);
    template.sendDefault(0, "fiz");
    template.sendDefault(2, "buz");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(bitSet.cardinality()).isEqualTo(6);
    verify(consumer, atLeastOnce()).pause(anyObject());
    verify(consumer, atLeastOnce()).resume(anyObject());
    container.stop();
    logger.info("Stop " + this.testName.getMethodName());
}
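The test above uses a BitSet as a compact record of which partition/offset slots have been seen, then asserts on cardinality(). The same idiom stripped of Kafka might look like the sketch below; the slot numbering and the DeliveryRecorder class are illustrative, and since BitSet itself is not thread-safe the methods here are synchronized.

import java.util.BitSet;

class DeliveryRecorder {
    private final BitSet seen = new BitSet();

    // e.g. slot = partition * recordsPerPartition + offset
    synchronized void record(int slot) {
        seen.set(slot);
    }

    synchronized boolean allDelivered(int expectedCount) {
        // cardinality() counts distinct slots, so redeliveries are not double-counted
        return seen.cardinality() == expectedCount;
    }
}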
From source file:srebrinb.compress.sevenzip.SevenZFile.java
private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException {
    for (final Folder folder : archive.folders) {
        folder.numUnpackSubStreams = 1;
    }
    int totalUnpackStreams = archive.folders.length;

    int nid = getUnsignedByte(header);
    if (nid == NID.kNumUnpackStream) {
        totalUnpackStreams = 0;
        for (final Folder folder : archive.folders) {
            final long numStreams = readUint64(header);
            folder.numUnpackSubStreams = (int) numStreams;
            totalUnpackStreams += numStreams;
        }
        nid = getUnsignedByte(header);
    }

    final SubStreamsInfo subStreamsInfo = new SubStreamsInfo();
    subStreamsInfo.unpackSizes = new long[totalUnpackStreams];
    subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams);
    subStreamsInfo.crcs = new long[totalUnpackStreams];

    int nextUnpackStream = 0;
    for (final Folder folder : archive.folders) {
        if (folder.numUnpackSubStreams == 0) {
            continue;
        }
        long sum = 0;
        if (nid == NID.kSize) {
            for (int i = 0; i < folder.numUnpackSubStreams - 1; i++) {
                final long size = readUint64(header);
                subStreamsInfo.unpackSizes[nextUnpackStream++] = size;
                sum += size;
            }
        }
        subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum;
    }
    if (nid == NID.kSize) {
        nid = getUnsignedByte(header);
    }

    int numDigests = 0;
    for (final Folder folder : archive.folders) {
        if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) {
            numDigests += folder.numUnpackSubStreams;
        }
    }

    if (nid == NID.kCRC) {
        final BitSet hasMissingCrc = readAllOrBits(header, numDigests);
        final long[] missingCrcs = new long[numDigests];
        for (int i = 0; i < numDigests; i++) {
            if (hasMissingCrc.get(i)) {
                missingCrcs[i] = 0xffffFFFFL & header.getInt();
            }
        }
        int nextCrc = 0;
        int nextMissingCrc = 0;
        for (final Folder folder : archive.folders) {
            if (folder.numUnpackSubStreams == 1 && folder.hasCrc) {
                subStreamsInfo.hasCrc.set(nextCrc, true);
                subStreamsInfo.crcs[nextCrc] = folder.crc;
                ++nextCrc;
            } else {
                for (int i = 0; i < folder.numUnpackSubStreams; i++) {
                    subStreamsInfo.hasCrc.set(nextCrc, hasMissingCrc.get(nextMissingCrc));
                    subStreamsInfo.crcs[nextCrc] = missingCrcs[nextMissingCrc];
                    ++nextCrc;
                    ++nextMissingCrc;
                }
            }
        }

        nid = getUnsignedByte(header);
    }

    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated SubStreamsInfo");
    }

    archive.subStreamsInfo = subStreamsInfo;
}
From source file:edu.iu.subgraph.colorcount_HJ.java
/**
 * @brief compute color counting in N iterations
 *
 * @param template
 * @param N
 *
 * @return
 */
public double do_full_count(Graph template, int N) {

    this.t = template;
    this.num_iter = N;
    this.labels_t = t.labels;

    // --------------------------- creating subtemplates and comb number index system ---------------------------
    if (this.verbose) {
        LOG.info("Begining partition...");
    }

    //partition the template into subtemplates
    this.part = new partitioner(this.t, this.labeled, this.labels_t);
    this.part.sort_subtemplates();

    if (this.verbose) {
        LOG.info("done partitioning");
    }

    //colors equals the num of vertices
    this.num_colors = this.t.num_vertices();
    //get array of subtemplates
    this.subtemplates = this.part.get_subtemplates();
    //subtemplates num
    this.subtemplate_count = this.part.get_subtemplate_count();

    //obtain the hash values table for each subtemplate and a combination of color sets
    create_tables();
    //initialize dynamic prog table, with subtemplate-vertices-color array
    this.dt.init(this.subtemplates, this.subtemplate_count, this.num_verts_graph, this.num_colors,
            this.max_abs_id);

    //vertice num of the full graph, huge
    this.chunks = divide_chunks(this.num_verts_graph, this.thread_num);
    // in pipeline regroup-update, thread 0 is doing communication
    this.chunks_pipeline = divide_chunks(this.num_verts_graph, this.thread_num - 1);

    //triggering vtune profiling add command option for vtune
    // java.nio.file.Path vtune_file = java.nio.file.Paths.get("vtune-flag.txt");
    // String flag_trigger = "Start training process and trigger vtune profiling.";
    // try{
    //     java.nio.file.Files.write(vtune_file, flag_trigger.getBytes());
    // }catch (IOException e)
    // {
    //     LOG.info("Failed to create vtune trigger flag");
    // }

    if (this.verbose) {
        LOG.info("Starting Multi-threading Counting Iterations");
    }

    launchHabaneroApp(() -> forallChunked(0, this.thread_num - 1, (threadIdx) -> {

        //set Java threads affinity
        BitSet bitSet = new BitSet(this.core_num);
        int thread_mask = 0;

        if (this.verbose && threadIdx == 0) {
            LOG.info("Set up threads affinity: Core Num: " + this.core_num + "; Total Threads: "
                    + this.thread_num + "; thd per core: " + this.tpc + "; affinity: " + this.affinity);
        }

        if (this.affinity == "scatter") {
            //implement threads bind by core round-robin
            thread_mask = threadIdx % this.core_num;
        } else {
            //default affinity compact
            //implement a compact bind, 2 threads a core
            int tpn = this.tpc * this.core_num;
            thread_mask = threadIdx % tpn;
            thread_mask /= this.tpc;
        }

        bitSet.set(thread_mask);
        Affinity.setAffinity(bitSet);

        try {

            // start the main loop of iterations
            for (int cur_itr = 0; cur_itr < this.num_iter; cur_itr++) {

                if (threadIdx == 0)
                    this.cur_iter = cur_itr;

                //start sampling colors
                this.barrier.await();

                if (verbose && threadIdx == 0) {
                    LOG.info("Start Sampling Graph for Itr: " + cur_itr);
                    // this.start_comp = System.currentTimeMillis();
                    this.start_misc = System.currentTimeMillis();
                }

                Random rand = new Random(System.currentTimeMillis());

                //sampling the vertices of full graph g
                for (int i = this.chunks[threadIdx]; i < this.chunks[threadIdx + 1]; ++i) {
                    this.colors_g[i] = rand.nextInt(this.num_colors);
                }

                this.barrier.await();

                if (this.verbose && threadIdx == 0) {
                    LOG.info("Finish Sampling Graph for Itr: " + cur_itr + "; use time: "
                            + (System.currentTimeMillis() - this.start_misc) + "ms");
                }

                // start doing counting
                for (int s = this.subtemplate_count - 1; s > 0; --s) {

                    if (threadIdx == 0) {
                        //get num_vert of subtemplate s
                        this.num_verts_sub_ato = this.num_verts_table[s];

                        if (this.verbose)
                            LOG.info("Initing Subtemplate " + s + ", t verts: " + num_verts_sub_ato);

                        int a = this.part.get_active_index(s);
                        int p = this.part.get_passive_index(s);

                        if (this.verbose)
                            LOG.info("Subtemplate: " + s + "; active_idx: " + a + "; passive_idx: " + p);

                        this.dt.init_sub(s, a, p);
                    }

                    this.barrier.await();

                    if (this.verbose && threadIdx == 0)
                        LOG.info("Start Counting Local Graph Subtemplate " + s);

                    //hit the bottom of subtemplate chain, dangling template node
                    if (this.num_verts_sub_ato == 1) {
                        if (s == this.subtemplate_count - 1) {
                            init_table_node_HJ(s, threadIdx, this.chunks);
                        } else {
                            if (threadIdx == 0)
                                dt.set_to_table(this.subtemplate_count - 1, s);
                        }
                    } else {
                        colorful_count_HJ(s, threadIdx, this.chunks);
                    }

                    this.barrier.await();

                    if (this.verbose && threadIdx == 0)
                        LOG.info("Finish Counting Local Graph Subtemplate " + s);

                    //start communication part single thread
                    // only for subtemplates size > 1, having neighbours on other mappers
                    // only if more than one mapper, otherwise all g verts are local
                    if (this.mapper_num > 1 && this.num_verts_sub_ato > 1) {
                        if (this.rotation_pipeline)
                            regroup_update_pipeline(s, threadIdx);
                        else
                            regroup_update_all(s, threadIdx, this.chunks);
                    }

                    // printout results for sub s
                    this.barrier.await();

                    if (threadIdx == 0) {

                        if (this.verbose)
                            LOG.info("JVM Memory Used in subtemplate: " + s + " is: "
                                    + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()));

                        int a = this.part.get_active_index(s);
                        int p = this.part.get_passive_index(s);

                        //release the template with size > 1
                        if (a != SCConstants.NULL_VAL) {
                            // do not free the dangling template node
                            if (this.num_verts_table[a] > 1)
                                this.dt.clear_sub(a);
                        }

                        if (p != SCConstants.NULL_VAL) {
                            if (this.num_verts_table[p] > 1)
                                this.dt.clear_sub(p);
                        }
                    }

                    this.barrier.await();

                    //print out counts for this subtemplate
                    if (this.verbose) {
                        print_counts(s, threadIdx, this.chunks);
                        this.barrier.await();

                        if (threadIdx == 0) {
                            double sub_total_counts = 0;
                            for (int x = 0; x < this.thread_num; x++)
                                sub_total_counts += cc_ato[x];

                            LOG.info("Total counts for sub-temp: " + s + " is: " + sub_total_counts);
                        }

                        this.barrier.await();
                    }

                    if (threadIdx == 0)
                        this.context.progress();

                } // end of a subtemplate

                if (verbose && threadIdx == 0)
                    LOG.info("Done with initialization. Doing full count");

                // do the count for the full template
                if (threadIdx == 0) {

                    this.num_verts_sub_ato = this.num_verts_table[0];
                    int a = this.part.get_active_index(0);
                    int p = this.part.get_passive_index(0);

                    if (this.verbose)
                        LOG.info("Subtemplate 0 ; active_idx: " + a + "; passive_idx: " + p);

                    dt.init_sub(0, a, p);
                }

                this.barrier.await();
                colorful_count_HJ(0, threadIdx, chunks);
                this.barrier.await();

                //comm and add the communicated counts to full_count_ato
                // only for subtemplates size > 1, having neighbours on other mappers
                // only if more than one mapper, otherwise all g verts are local
                if (this.num_verts_sub_ato > 1 && this.mapper_num > 1) {
                    if (this.rotation_pipeline)
                        regroup_update_pipeline(0, threadIdx);
                    else
                        regroup_update_all(0, threadIdx, this.chunks);
                }

                this.barrier.await();

                // printout results for last sub
                if (threadIdx == 0) {

                    double sum_count = 0.0;
                    for (int k = 0; k < this.thread_num; k++) {
                        sum_count += this.count_local_root[k];
                        sum_count += this.count_comm_root[k];
                    }

                    this.full_count_ato = sum_count;
                    LOG.info("Finish update comm counts for last subtemplate: " + "; total counts: " + sum_count);
                }

                this.barrier.await();

                if (threadIdx == 0) {

                    if (this.verbose)
                        LOG.info("JVM Memory Used in Last subtemplate is: "
                                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()));

                    int a = this.part.get_active_index(0);
                    int p = this.part.get_passive_index(0);

                    if (a != SCConstants.NULL_VAL)
                        this.dt.clear_sub(a);
                    if (p != SCConstants.NULL_VAL)
                        this.dt.clear_sub(p);

                    //free the first dangling template node
                    this.dt.clear_sub(subtemplate_count - 1);
                }

                //add counts from every iteration
                if (threadIdx == 0) {
                    this.cumulate_count_ato += this.full_count_ato;
                }

                // free comm data
                if (threadIdx == 0) {
                    if (this.mapper_num > 1) {
                        ResourcePool.get().clean();
                        ConnPool.get().clean();
                    }
                    System.gc();
                    this.context.progress();
                }

            } // end of an iteration

            this.barrier.await();

        } catch (InterruptedException | BrokenBarrierException e) {
            LOG.info("Catch barrier exception in itr: " + this.cur_iter);
            e.printStackTrace();
        }

    }));

    //----------------------- end of color_counting -----------------
    double final_count = cumulate_count_ato / (double) this.num_iter;

    //free memory
    this.send_vertex_table = null;
    this.comm_vertex_table = null;
    this.update_map = null;
    this.colors_g = null;
    this.compress_cache_array = null;
    this.compress_cache_index = null;
    this.map_ids_cache_pip = null;
    this.chunk_ids_cache_pip = null;
    this.chunk_internal_offsets_cache_pip = null;

    delete_tables();
    this.part.clear_temparrays();

    return final_count;
}
From source file:org.wso2.andes.kernel.router.TopicRoutingMatcher.java
/**
 * Get storage queues matching to routing key
 *
 * @param routingKey routing key to match queues
 * @return set of storage queues
 */
public Set<StorageQueue> getMatchingStorageQueues(String routingKey) {
    Set<StorageQueue> matchingQueues = new HashSet<>();

    if (StringUtils.isNotEmpty(routingKey)) {

        // constituentDelimiter is quoted to avoid making the delimiter a regex symbol
        String[] constituents = routingKey.split(Pattern.quote(constituentsDelimiter), -1);

        int noOfCurrentMaxConstituents = constituentTables.size();

        // If given routingKey has more constituents than any subscriber has, then create constituent tables
        // for those before collecting matching subscribers
        if (constituents.length > noOfCurrentMaxConstituents) {
            for (int i = noOfCurrentMaxConstituents; i < constituents.length; i++) {
                addEmptyConstituentTable();
            }
        }

        // Keeps the results of 'AND' operations between each bit sets
        BitSet andBitSet = new BitSet(storageQueueList.size());

        // Since BitSet is initialized with false for each element we need to flip
        andBitSet.flip(0, storageQueueList.size());

        // Get corresponding bit set for each constituent in the routingKey and operate bitwise AND operation
        for (int constituentIndex = 0; constituentIndex < constituents.length; constituentIndex++) {
            String constituent = constituents[constituentIndex];
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);

            BitSet bitSetForAnd = constituentTable.get(constituent);

            if (null == bitSetForAnd) {
                // The constituent is not found in the table, hence matching with 'other' constituent
                bitSetForAnd = constituentTable.get(OTHER_CONSTITUENT);
            }

            andBitSet.and(bitSetForAnd);
        }

        // If there are more constituent tables, get the null constituent in each of them and operate bitwise AND
        for (int constituentIndex = constituents.length; constituentIndex < constituentTables
                .size(); constituentIndex++) {
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);
            andBitSet.and(constituentTable.get(NULL_CONSTITUENT));
        }

        // Valid queues are filtered, need to pick from queue pool
        int nextSetBitIndex = andBitSet.nextSetBit(0);
        while (nextSetBitIndex > -1) {
            matchingQueues.add(storageQueueList.get(nextSetBitIndex));
            nextSetBitIndex = andBitSet.nextSetBit(nextSetBitIndex + 1);
        }

    } else {
        log.warn("Cannot retrieve storage queues via bitmap handler since routingKey to match is empty");
    }

    return matchingQueues;
}
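The core BitSet moves in the matcher above are: start from an all-ones set, and() it with one per-constituent set at a time, then walk the surviving indices with nextSetBit(). A minimal standalone sketch of that intersection step follows; BitmapIntersector and its methods are invented names, and the per-constituent bit sets are assumed to be built elsewhere, as in the original class.

import java.util.BitSet;
import java.util.List;

class BitmapIntersector {
    // Intersect per-constituent bit sets and return the indices that survive.
    static BitSet intersect(List<BitSet> constituentMatches, int universeSize) {
        BitSet result = new BitSet(universeSize);
        result.set(0, universeSize);          // start with every candidate allowed
        for (BitSet matches : constituentMatches) {
            result.and(matches);              // keep only candidates that also match this constituent
        }
        return result;
    }

    static void printMatches(BitSet result) {
        for (int i = result.nextSetBit(0); i >= 0; i = result.nextSetBit(i + 1)) {
            System.out.println("candidate index " + i + " matches");
        }
    }
}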
From source file:aprofplot.jfreechart.SamplingXYLineAndShapeRenderer.java
/**
 * Initialises the renderer.
 * <P>
 * This method will be called before the first item is rendered, giving the
 * renderer an opportunity to initialise any state information it wants to
 * maintain. The renderer can do nothing if it chooses.
 *
 * @param g2 the graphics device.
 * @param dataArea the area inside the axes.
 * @param plot the plot.
 * @param dataset the dataset.
 * @param info an optional info collection object to return data back to
 *             the caller.
 *
 * @return The renderer state.
 */
@Override
public XYItemRendererState initialise(Graphics2D g2, Rectangle2D dataArea, XYPlot plot, XYDataset dataset,
        PlotRenderingInfo info) {

    State state = new State(info, sampleDataset, plot, dataset, dataArea, shapeSize, lineWidth);
    state.series2Sample = new BitSet(dataset.getSeriesCount());
    for (int series = 0; series < dataset.getSeriesCount(); series++) {
        if ((getItemVisible(series, 0) && getItemShapeVisible(series, 0)) || getItemCreateEntity(series, 0)) {
            state.series2Sample.set(series);
        }
    }

    double dpi = 72;
    // Integer dpiVal = (Integer) g2.getRenderingHint(HintKey.DPI);
    // if (dpiVal != null) {
    //     dpi = dpiVal.intValue();
    // }
    state.seriesPath = new GeneralPath();
    state.intervalPath = new GeneralPath();
    state.findVisiblePointsInDataset(plot, dataset);
    //state.dX = 72.0 / dpi;
    return state;
}
From source file:bobs.is.compress.sevenzip.SevenZOutputFile.java
private void writeFileCTimes(final DataOutput header) throws IOException {
    int numCreationDates = 0;
    for (final SevenZArchiveEntry entry : files) {
        if (entry.getHasCreationDate()) {
            ++numCreationDates;
        }
    }
    if (numCreationDates > 0) {
        header.write(NID.kCTime);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final DataOutputStream out = new DataOutputStream(baos);
        if (numCreationDates != files.size()) {
            out.write(0);
            final BitSet cTimes = new BitSet(files.size());
            for (int i = 0; i < files.size(); i++) {
                cTimes.set(i, files.get(i).getHasCreationDate());
            }
            writeBits(out, cTimes, files.size());
        } else {
            out.write(1); // "allAreDefined" == true
        }
        out.write(0);
        for (final SevenZArchiveEntry entry : files) {
            if (entry.getHasCreationDate()) {
                out.writeLong(Long.reverseBytes(SevenZArchiveEntry.javaTimeToNtfsTime(entry.getCreationDate())));
            }
        }
        out.flush();
        final byte[] contents = baos.toByteArray();
        writeUint64(header, contents.length);
        header.write(contents);
    }
}