Example usage for java.util BitSet nextSetBit

Introduction

This page collects example usages of java.util.BitSet.nextSetBit(int).

Prototype

public int nextSetBit(int fromIndex) 

Document

Returns the index of the first bit that is set to true that occurs on or after the specified starting index. If no such bit exists then -1 is returned.
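
Before the examples below, here is a minimal, self-contained sketch of the canonical iteration idiom recommended in the JDK documentation (the class name and bit positions are illustrative):

import java.util.BitSet;

public class NextSetBitDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet();
        bits.set(2);
        bits.set(5);
        bits.set(9);

        // nextSetBit returns -1 once no set bit exists at or after the given
        // index, which terminates the loop.
        for (int i = bits.nextSetBit(0); i >= 0; i = bits.nextSetBit(i + 1)) {
            System.out.println("bit " + i + " is set");
            if (i == Integer.MAX_VALUE) {
                break; // guard against overflow of i + 1
            }
        }
    }
}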

Usage

From source file:org.apache.nutch.tools.PruneIndexTool.java

/**
 * For each query, find all matching documents and delete them from all input
 * indexes. Optionally, an additional check can be performed by using {@link PruneChecker}
 * implementations.
 */
public void run() {
    BitSet bits = new BitSet(reader.maxDoc());
    AllHitsCollector ahc = new AllHitsCollector(bits);
    boolean doDelete = false;
    for (int i = 0; i < queries.length; i++) {
        if (LOG.isInfoEnabled()) {
            LOG.info(dr + "Processing query: " + queries[i].toString());
        }
        bits.clear();
        try {
            searcher.search(queries[i], ahc);
        } catch (IOException e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(dr + " - failed: " + e.getMessage());
            }
            continue;
        }
        if (bits.cardinality() == 0) {
            if (LOG.isInfoEnabled()) {
                LOG.info(dr + " - no matching documents.");
            }
            continue;
        }
        if (LOG.isInfoEnabled()) {
            LOG.info(dr + " - found " + bits.cardinality() + " document(s).");
        }
        // Now delete all matching documents
        int docNum = -1, start = 0, cnt = 0;
        // probably faster than looping sequentially through all index values?
        while ((docNum = bits.nextSetBit(start)) != -1) {
            // don't delete the same document multiple times
            if (reader.isDeleted(docNum)) {
                // still advance the cursor, otherwise nextSetBit(start) would return this index forever
                start = docNum + 1;
                continue;
            }
            try {
                if (checkers != null && checkers.length > 0) {
                    boolean check = true;
                    for (int k = 0; k < checkers.length; k++) {
                        // fail if any checker returns false
                        check &= checkers[k].isPrunable(queries[i], reader, docNum);
                    }
                    doDelete = check;
                } else
                    doDelete = true;
                if (doDelete) {
                    if (!dryrun)
                        reader.deleteDocument(docNum);
                    cnt++;
                }
            } catch (Exception e) {
                if (LOG.isWarnEnabled()) {
                    LOG.warn(dr + " - failed to delete doc #" + docNum);
                }
            }
            start = docNum + 1;
        }
        if (LOG.isInfoEnabled()) {
            LOG.info(dr + " - deleted " + cnt + " document(s).");
        }
    }
    // close checkers
    if (checkers != null) {
        for (int i = 0; i < checkers.length; i++) {
            checkers[i].close();
        }
    }
    try {
        reader.close();
    } catch (IOException e) {
        if (LOG.isWarnEnabled()) {
            LOG.warn(dr + "Exception when closing reader(s): " + e.getMessage());
        }
    }
}
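
The loop above keeps an explicit start cursor rather than using the for-loop idiom; a small sketch of that pattern on its own (names and bit positions are made up), showing that the cursor has to advance on every path through the body, including a continue:

import java.util.BitSet;

public class WhileCursorDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet();
        bits.set(1);
        bits.set(4);
        bits.set(7);

        int index;
        int start = 0;
        while ((index = bits.nextSetBit(start)) != -1) {
            // Advance the cursor before any early exit from this iteration;
            // otherwise nextSetBit(start) would keep returning the same index.
            start = index + 1;
            if (index == 4) {
                continue; // skip this one, the cursor has already moved on
            }
            System.out.println("processing bit " + index);
        }
    }
}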

From source file:edu.umich.flowfence.service.SandboxManager.java

private void dumpSandboxes() {
    if (localLOGV) {
        BitSet seenSandboxes = new BitSet(SANDBOX_COUNT);
        Log.v(TAG, ">>> Dumping current sandbox state:");
        Log.v(TAG, "Running: " + mRunningSandboxes.size() + " sandboxes");
        for (Sandbox sb : mRunningSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Idle: " + mIdleSandboxes.size() + " sandboxes (LRU order)");
        for (Sandbox sb : mIdleSandboxes.keySet()) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Stopped: " + mStoppedSandboxes.size() + " sandboxes");
        for (Sandbox sb : mStoppedSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Hot spares: " + mHotSpares.size() + " sandboxes");
        for (Sandbox sb : mHotSpares) {
            dumpSandbox(sb, seenSandboxes);
        }
        seenSandboxes.flip(0, SANDBOX_COUNT); // true = unseen
        if (!seenSandboxes.isEmpty()) {
            Log.w(TAG, "WARNING: leaked " + seenSandboxes.cardinality() + " sandboxes");
            int leaked = -1;
            while ((leaked = seenSandboxes.nextSetBit(leaked + 1)) >= 0) {
                dumpSandbox(Sandbox.get(leaked), null);
            }
        } else {
            Log.v(TAG, "No leaks detected");
        }
        Log.v(TAG, "<<< End of state dump");
    }
}
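
The leak check above flips the range of "seen" bits so that the remaining true bits mark sandbox slots that were never dumped; a minimal sketch of that flip-and-scan pattern, with an invented slot count:

import java.util.BitSet;

public class LeakScanDemo {
    private static final int SLOT_COUNT = 8; // illustrative fixed capacity

    public static void main(String[] args) {
        BitSet seen = new BitSet(SLOT_COUNT);
        seen.set(0);
        seen.set(3);
        seen.set(5);

        // After flipping the range, a set bit now means "never seen".
        seen.flip(0, SLOT_COUNT);

        int missing = -1;
        while ((missing = seen.nextSetBit(missing + 1)) >= 0) {
            System.out.println("slot " + missing + " was never visited");
        }
    }
}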

From source file:com.turn.ttorrent.client.TorrentHandler.java

/**
 * Build this torrent's pieces array.
 *
 * <p>
 * Hash and verify any potentially present local data and create this
 * torrent's pieces array from their respective hash provided in the
 * torrent meta-info.
 * </p>
 *
 * <p>
 * This function should be called soon after the constructor to initialize
 * the pieces array.
 * </p>
 */
@VisibleForTesting
/* pp */ void init() throws InterruptedException, IOException {
    {
        State s = getState();
        if (s != State.WAITING) {
            LOG.info("Restarting torrent from state " + s);
            return;
        }
    }
    setState(State.VALIDATING);

    try {
        int npieces = torrent.getPieceCount();

        long size = getSize();
        // Store in a local so we can update with minimal synchronization.
        BitSet completedPieces = new BitSet(npieces);
        long completedSize = 0;

        ThreadPoolExecutor executor = client.getEnvironment().getExecutorService();
        // TorrentCreator.newExecutor("TorrentHandlerInit");
        try {
            LOG.info("{}: Analyzing local data for {} ({} pieces)...",
                    new Object[] { getLocalPeerName(), getName(), npieces });

            int step = 10;
            CountDownLatch latch = new CountDownLatch(npieces);
            for (int index = 0; index < npieces; index++) {
                // TODO: Read the file sequentially and pass it to the validator.
                // Otherwise we thrash the disk on validation.
                ByteBuffer buffer = ByteBuffer.allocate(getPieceLength(index));
                bucket.read(buffer, getPieceOffset(index));
                buffer.flip();
                executor.execute(new PieceValidator(torrent, index, buffer, completedPieces, latch));

                if (index / (float) npieces * 100f > step) {
                    LOG.info("{}:  ... {}% complete", getLocalPeerName(), step);
                    step += 10;
                }
            }
            latch.await();

            for (int i = completedPieces.nextSetBit(0); i >= 0; i = completedPieces.nextSetBit(i + 1)) {
                completedSize += getPieceLength(i);
            }
        } finally {
            // Request orderly executor shutdown and wait for hashing tasks to
            // complete.
            // executor.shutdown();
            // executor.awaitTermination(1, TimeUnit.SECONDS);
        }

        LOG.debug("{}: {}: we have {}/{} bytes ({}%) [{}/{} pieces].",
                new Object[] { getLocalPeerName(), getName(), completedSize, size,
                        String.format("%.1f", (100f * (completedSize / (float) size))),
                        completedPieces.cardinality(), getPieceCount() });

        synchronized (lock) {
            this.completedPieces = completedPieces;
        }

        if (isComplete())
            setState(State.SEEDING);
        else
            setState(State.SHARING);
    } catch (Exception e) {
        setState(State.ERROR);
        Throwables.propagateIfPossible(e, InterruptedException.class, IOException.class);
        throw Throwables.propagate(e);
    }
}

From source file:org.wso2.andes.kernel.router.TopicRoutingMatcher.java

/**
 * Get storage queues matching to routing key
 * @param routingKey routing key to match queues
 * @return set of storage queues
 */
public Set<StorageQueue> getMatchingStorageQueues(String routingKey) {
    Set<StorageQueue> matchingQueues = new HashSet<>();

    if (StringUtils.isNotEmpty(routingKey)) {

        // constituentDelimiter is quoted to avoid making the delimiter a regex symbol
        String[] constituents = routingKey.split(Pattern.quote(constituentsDelimiter), -1);

        int noOfCurrentMaxConstituents = constituentTables.size();

        // If given routingKey has more constituents than any subscriber has, then create constituent tables
        // for those before collecting matching subscribers
        if (constituents.length > noOfCurrentMaxConstituents) {
            for (int i = noOfCurrentMaxConstituents; i < constituents.length; i++) {
                addEmptyConstituentTable();
            }
        }

        // Keeps the results of 'AND' operations between each bit sets
        BitSet andBitSet = new BitSet(storageQueueList.size());

        // Since BitSet is initialized with false for each element we need to flip
        andBitSet.flip(0, storageQueueList.size());

        // Get corresponding bit set for each constituent in the routingKey and operate bitwise AND operation
        for (int constituentIndex = 0; constituentIndex < constituents.length; constituentIndex++) {
            String constituent = constituents[constituentIndex];
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);

            BitSet bitSetForAnd = constituentTable.get(constituent);

            if (null == bitSetForAnd) {
                // The constituent is not found in the table, hence matching with 'other' constituent
                bitSetForAnd = constituentTable.get(OTHER_CONSTITUENT);
            }

            andBitSet.and(bitSetForAnd);
        }

        // If there are more constituent tables, get the null constituent in each of them and operate bitwise AND
        for (int constituentIndex = constituents.length; constituentIndex < constituentTables
                .size(); constituentIndex++) {
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);
            andBitSet.and(constituentTable.get(NULL_CONSTITUENT));
        }

        // Valid queues are filtered, need to pick from queue pool
        int nextSetBitIndex = andBitSet.nextSetBit(0);
        while (nextSetBitIndex > -1) {
            matchingQueues.add(storageQueueList.get(nextSetBitIndex));
            nextSetBitIndex = andBitSet.nextSetBit(nextSetBitIndex + 1);
        }

    } else {
        log.warn("Cannot retrieve storage queues via bitmap handler since routingKey to match is empty");
    }

    return matchingQueues;
}
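
The routing match above is a bitwise AND over per-constituent BitSets followed by a nextSetBit scan; a minimal, self-contained sketch of that idea (the table contents and names here are invented for illustration):

import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BitmapMatchDemo {
    public static void main(String[] args) {
        // Candidate names indexed by position; each constituent table maps a
        // token to the BitSet of candidates that accept it at that position.
        List<String> candidates = List.of("queueA", "queueB", "queueC");

        Map<String, BitSet> firstToken = new HashMap<>();
        firstToken.put("news", bits(0, 1));    // queueA and queueB accept "news" first
        Map<String, BitSet> secondToken = new HashMap<>();
        secondToken.put("sports", bits(1, 2)); // queueB and queueC accept "sports" second

        // AND the per-token BitSets; surviving bits are the matches.
        BitSet result = new BitSet(candidates.size());
        result.set(0, candidates.size()); // start with all candidates eligible
        result.and(firstToken.get("news"));
        result.and(secondToken.get("sports"));

        for (int i = result.nextSetBit(0); i >= 0; i = result.nextSetBit(i + 1)) {
            System.out.println("matched " + candidates.get(i)); // prints queueB
        }
    }

    private static BitSet bits(int... indices) {
        BitSet bs = new BitSet();
        for (int i : indices) {
            bs.set(i);
        }
        return bs;
    }
}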

From source file:org.apache.pig.tools.pigstats.mapreduce.MRScriptState.java

public String getPigFeature(MapReduceOper mro) {
    if (featureMap == null) {
        featureMap = new HashMap<MapReduceOper, String>();
    }

    String retStr = featureMap.get(mro);
    if (retStr == null) {
        BitSet feature = new BitSet();
        feature.clear();
        if (mro.isSkewedJoin()) {
            feature.set(PIG_FEATURE.SKEWED_JOIN.ordinal());
        }
        if (mro.isGlobalSort()) {
            feature.set(PIG_FEATURE.ORDER_BY.ordinal());
        }
        if (mro.isSampler()) {
            feature.set(PIG_FEATURE.SAMPLER.ordinal());
        }
        if (mro.isIndexer()) {
            feature.set(PIG_FEATURE.INDEXER.ordinal());
        }
        if (mro.isCogroup()) {
            feature.set(PIG_FEATURE.COGROUP.ordinal());
        }
        if (mro.isGroupBy()) {
            feature.set(PIG_FEATURE.GROUP_BY.ordinal());
        }
        if (mro.isRegularJoin()) {
            feature.set(PIG_FEATURE.HASH_JOIN.ordinal());
        }
        if (mro.needsDistinctCombiner()) {
            feature.set(PIG_FEATURE.DISTINCT.ordinal());
        }
        if (!mro.combinePlan.isEmpty()) {
            feature.set(PIG_FEATURE.COMBINER.ordinal());
        }
        if (mro instanceof NativeMapReduceOper) {
            feature.set(PIG_FEATURE.NATIVE.ordinal());
        } else { // if it is NATIVE MR, don't explore its plans
            try {
                new FeatureVisitor(mro.mapPlan, feature).visit();
                if (mro.reducePlan.isEmpty()) {
                    feature.set(PIG_FEATURE.MAP_ONLY.ordinal());
                } else {
                    new FeatureVisitor(mro.reducePlan, feature).visit();
                }
            } catch (VisitorException e) {
                LOG.warn("Feature visitor failed", e);
            }
        }
        StringBuilder sb = new StringBuilder();
        for (int i = feature.nextSetBit(0); i >= 0; i = feature.nextSetBit(i + 1)) {
            if (sb.length() > 0)
                sb.append(",");
            sb.append(PIG_FEATURE.values()[i].name());
        }
        retStr = sb.toString();
        featureMap.put(mro, retStr);
    }
    return retStr;
}

From source file:org.wso2.andes.subscription.TopicSubscriptionBitMapStore.java

/**
 * {@inheritDoc}
 */
@Override
public Set<AndesSubscription> getMatchingSubscriptions(String destination, DestinationType destinationType) {
    Set<AndesSubscription> subscriptions = new HashSet<>();

    if (StringUtils.isNotEmpty(destination)) {

        // constituentDelimiter is quoted to avoid making the delimiter a regex symbol
        String[] constituents = destination.split(Pattern.quote(constituentsDelimiter), -1);

        int noOfCurrentMaxConstituents = constituentTables.size();

        // If given destination has more constituents than any subscriber has, then create constituent tables
        // for those before collecting matching subscribers
        if (constituents.length > noOfCurrentMaxConstituents) {
            for (int i = noOfCurrentMaxConstituents; i < constituents.length; i++) {
                addEmptyConstituentTable();
            }
        }

        // Keeps the results of 'AND' operations between each bit sets
        BitSet andBitSet = new BitSet(subscriptionList.size());

        // Since BitSet is initialized with false for each element we need to flip
        andBitSet.flip(0, subscriptionList.size());

        // Get corresponding bit set for each constituent in the destination and operate bitwise AND operation
        for (int constituentIndex = 0; constituentIndex < constituents.length; constituentIndex++) {
            String constituent = constituents[constituentIndex];
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);

            BitSet bitSetForAnd = constituentTable.get(constituent);

            if (null == bitSetForAnd) {
                // The constituent is not found in the table, hence matching with 'other' constituent
                bitSetForAnd = constituentTable.get(OTHER_CONSTITUENT);
            }

            andBitSet.and(bitSetForAnd);
        }

        // If there are more constituent tables, get the null constituent in each of them and operate bitwise AND
        for (int constituentIndex = constituents.length; constituentIndex < constituentTables
                .size(); constituentIndex++) {
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);
            andBitSet.and(constituentTable.get(NULL_CONSTITUENT));
        }

        // Valid subscriptions are filtered, need to pick from subscription pool
        int nextSetBitIndex = andBitSet.nextSetBit(0);
        while (nextSetBitIndex > -1) {
            subscriptions.add(subscriptionList.get(nextSetBitIndex));
            nextSetBitIndex = andBitSet.nextSetBit(nextSetBitIndex + 1);
        }

    } else {
        log.warn("Cannot retrieve subscriptions via bitmap handler since destination to match is empty");
    }

    return subscriptions;
}

From source file:org.wso2.andes.subscription.ClusterSubscriptionBitMapHandler.java

/**
 * Get matching subscribers for a given non-wildcard destination.
 *
 * @param destination The destination without wildcard
 * @return Set of matching subscriptions
 */
@Override
public Set<AndesSubscription> getMatchingWildCardSubscriptions(String destination) {
    Set<AndesSubscription> subscriptions = new HashSet<AndesSubscription>();

    if (StringUtils.isNotEmpty(destination)) {

        // constituentDelimiter is quoted to avoid making the delimiter a regex symbol
        String[] constituents = destination.split(Pattern.quote(constituentsDelimiter));

        int noOfCurrentMaxConstituents = constituentTables.size();

        // If given destination has more constituents than any subscriber has, then create constituent tables
        // for those before collecting matching subscribers
        if (constituents.length > noOfCurrentMaxConstituents) {
            for (int i = noOfCurrentMaxConstituents; i < constituents.length; i++) {
                addEmptyConstituentTable();
            }
        }

        // Keeps the results of 'AND' operations between each bit sets
        BitSet andBitSet = new BitSet(wildCardSubscriptionList.size());

        // Since BitSet is initialized with false for each element we need to flip
        andBitSet.flip(0, wildCardSubscriptionList.size());

        // Get corresponding bit set for each constituent in the destination and operate bitwise AND operation
        for (int constituentIndex = 0; constituentIndex < constituents.length; constituentIndex++) {
            String constituent = constituents[constituentIndex];
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);

            BitSet bitSetForAnd = constituentTable.get(constituent);

            if (null == bitSetForAnd) {
                // The constituent is not found in the table, hence matching with 'other' constituent
                bitSetForAnd = constituentTable.get(OTHER_CONSTITUENT);
            }

            andBitSet.and(bitSetForAnd);
        }

        // If there are more constituent tables, get the null constituent in each of them and operate bitwise AND
        for (int constituentIndex = constituents.length; constituentIndex < constituentTables
                .size(); constituentIndex++) {
            Map<String, BitSet> constituentTable = constituentTables.get(constituentIndex);
            andBitSet.and(constituentTable.get(NULL_CONSTITUENT));
        }

        // Valid subscriptions are filtered, need to pick from subscription pool
        int nextSetBitIndex = andBitSet.nextSetBit(0);
        while (nextSetBitIndex > -1) {
            subscriptions.add(wildCardSubscriptionList.get(nextSetBitIndex));
            nextSetBitIndex = andBitSet.nextSetBit(nextSetBitIndex + 1);
        }

    } else {
        log.warn("Cannot retrieve subscriptions via bitmap handler since destination to match is empty");
    }

    return subscriptions;
}

From source file:dr.app.tools.AntigenicPlotter.java

public AntigenicPlotter(int burnin, boolean tabFormat, boolean discreteModel, final String inputFileName,
        final String treeFileName, final String outputFileName) throws IOException {

    double[][] reference = null;
    List<String> tipLabels = null;

    if (treeFileName != null) {
        System.out.println("Reading tree file...");

        NexusImporter importer = new NexusImporter(new FileReader(treeFileName));
        try {
            Tree tree = importer.importNextTree();

            reference = new double[tree.getExternalNodeCount()][2];
            tipLabels = new ArrayList<String>();

            for (int i = 0; i < tree.getExternalNodeCount(); i++) {
                NodeRef tip = tree.getExternalNode(i);
                tipLabels.add(tree.getNodeTaxon(tip).getId());

                reference[i][0] = (Double) tree.getNodeAttribute(tip, "antigenic1");
                reference[i][1] = (Double) tree.getNodeAttribute(tip, "antigenic2");
            }
        } catch (Importer.ImportException e) {
            e.printStackTrace();
            return;
        }
    }

    System.out.println("Reading log file...");

    FileReader fileReader = new FileReader(inputFileName);
    try {
        File file = new File(inputFileName);

        LogFileTraces traces = new LogFileTraces(inputFileName, file);
        traces.loadTraces();

        if (burnin == -1) {
            burnin = (int) (traces.getMaxState() / 10);
        }

        traces.setBurnIn(burnin);

        System.out.println();
        System.out.println("burnIn   <= " + burnin);
        System.out.println("maxState  = " + traces.getMaxState());
        System.out.println();

        int traceCount = traces.getTraceCount();
        if (discreteModel) {
            // for the discrete model, there are 4 sets of traces: pairs of coordinates, cluster allocations, and cluster sizes
            traceCount /= 4;
        } else {
            // for continuous, just pairs of coordinates
            traceCount /= 2;
        }

        int stateCount = traces.getStateCount();

        double[][][] data;
        String[] labels = new String[traceCount];

        if (tipLabels != null) {
            data = new double[stateCount][tipLabels.size()][2];
        } else {
            data = new double[stateCount][traceCount][2];
        }

        for (int i = 0; i < traceCount; i++) {
            String name = traces.getTraceName(i * 2);
            name = name.substring(0, name.length() - 1);

            if (tipLabels != null) {
                int index = tipLabels.indexOf(name);
                if (index != -1) {
                    for (int j = 0; j < stateCount; j++) {
                        data[j][index][0] = traces.getStateValue(i * 2, j);
                        data[j][index][1] = traces.getStateValue((i * 2) + 1, j);
                    }
                }
            } else {
                for (int j = 0; j < stateCount; j++) {
                    data[j][i][0] = traces.getStateValue(i * 2, j);
                    data[j][i][1] = traces.getStateValue((i * 2) + 1, j);
                }
                labels[i] = name;
            }
        }

        int[][] clusterIndices = null;
        int[][] clusterSizes = null;

        if (discreteModel) {
            clusterIndices = new int[stateCount][traceCount];
            clusterSizes = new int[stateCount][traceCount];

            for (int i = 0; i < traceCount; i++) {
                for (int j = 0; j < stateCount; j++) {
                    clusterIndices[j][i] = (int) traces.getStateValue((traceCount * 2) + i, j);
                    clusterSizes[j][i] = (int) traces.getStateValue((traceCount * 3) + i, j);
                }
            }

            Map<BitSet, Integer> clusterMap = new HashMap<BitSet, Integer>();

            for (int i = 0; i < stateCount; i++) {
                BitSet[] clusters = new BitSet[clusterIndices[i].length];
                for (int j = 0; j < clusterIndices[i].length; j++) {
                    BitSet bits = clusters[clusterIndices[i][j]];

                    if (bits == null) {
                        bits = new BitSet();
                        clusters[clusterIndices[i][j]] = bits;
                    }
                    bits.set(j);

                    Integer count = clusterMap.get(bits);
                    if (count == null) {
                        count = 0;
                    }
                    clusterMap.put(bits, count + 1);
                }

                Arrays.sort(clusters, new Comparator<BitSet>() {
                    public int compare(BitSet bitSet1, BitSet bitSet2) {
                        if (bitSet1 == null) {
                            return -1;
                        }
                        if (bitSet2 == null) {
                            return 1;
                        }
                        return bitSet2.cardinality() - bitSet1.cardinality();
                    }
                });
            }

            for (BitSet bits : clusterMap.keySet()) {
                int count = clusterMap.get(bits);
                if (count > 1) {
                    System.out.print(count);
                    for (int i = bits.nextSetBit(0); i >= 0; i = bits.nextSetBit(i + 1)) {
                        System.out.print("\t" + labels[i]);
                    }
                    System.out.println();
                }
            }
        }

        if (tipLabels != null) {
            labels = new String[tipLabels.size()];
            tipLabels.toArray(labels);
        }

        if (reference != null) {
            procrustinate(data, reference);
        } else {
            procrustinate(data);
        }

        if (tabFormat) {
            writeTabformat(outputFileName, labels, data);
        } else {
            if (discreteModel) {
                writeKML(outputFileName, labels, data, clusterIndices, clusterSizes);
            } else {
                writeKML(outputFileName, labels, data);
            }
        }

    } catch (Exception e) {
        System.err.println("Error Parsing Input File: " + e.getMessage());

        e.printStackTrace(System.err);
        return;
    }
    fileReader.close();

}

From source file:org.roaringbitmap.TestRoaringBitmap.java

public static boolean equals(BitSet bs, RoaringBitmap rr) {
    final int[] a = new int[bs.cardinality()];
    int pos = 0;
    for (int x = bs.nextSetBit(0); x >= 0; x = bs.nextSetBit(x + 1))
        a[pos++] = x;
    return Arrays.equals(rr.toArray(), a);
}
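
The same conversion from a BitSet to a sorted int[] can also be done with the stream API on Java 8+; a small sketch for comparison:

import java.util.Arrays;
import java.util.BitSet;

public class BitSetToArrayDemo {
    public static void main(String[] args) {
        BitSet bs = new BitSet();
        bs.set(3);
        bs.set(10);
        bs.set(42);

        // Explicit nextSetBit loop, as in the test above.
        int[] viaLoop = new int[bs.cardinality()];
        int pos = 0;
        for (int x = bs.nextSetBit(0); x >= 0; x = bs.nextSetBit(x + 1)) {
            viaLoop[pos++] = x;
        }

        // Equivalent one-liner: stream() yields set-bit indices in ascending order.
        int[] viaStream = bs.stream().toArray();

        System.out.println(Arrays.equals(viaLoop, viaStream)); // true
    }
}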

From source file:org.apache.hyracks.control.cc.executor.ActivityClusterPlanner.java

private Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> computeTaskConnectivity(JobRun jobRun,
        Map<ActivityId, ActivityPlan> activityPlanMap, Set<ActivityId> activities) {
    Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = new HashMap<>();
    ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    BitSet targetBitmap = new BitSet();
    for (ActivityId ac1 : activities) {
        ActivityCluster ac = acg.getActivityMap().get(ac1);
        Task[] ac1TaskStates = activityPlanMap.get(ac1).getTasks();
        int nProducers = ac1TaskStates.length;
        List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(ac1);
        if (outputConns == null) {
            continue;
        }
        for (IConnectorDescriptor c : outputConns) {
            ConnectorDescriptorId cdId = c.getConnectorId();
            ActivityId ac2 = ac.getConsumerActivity(cdId);
            Task[] ac2TaskStates = activityPlanMap.get(ac2).getTasks();
            int nConsumers = ac2TaskStates.length;
            if (c.allProducersToAllConsumers()) {
                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = new ArrayList<>();
                for (int j = 0; j < nConsumers; j++) {
                    TaskId targetTID = ac2TaskStates[j].getTaskId();
                    cInfoList.add(Pair.of(targetTID, cdId));
                }
                for (int i = 0; i < nProducers; ++i) {
                    taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                }
                continue;
            }
            for (int i = 0; i < nProducers; ++i) {
                c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
                List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity
                        .get(ac1TaskStates[i].getTaskId());
                if (cInfoList == null) {
                    cInfoList = new ArrayList<>();
                    taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
                }
                for (int j = targetBitmap.nextSetBit(0); j >= 0; j = targetBitmap.nextSetBit(j + 1)) {
                    TaskId targetTID = ac2TaskStates[j].getTaskId();
                    cInfoList.add(Pair.of(targetTID, cdId));
                }
            }
        }
    }
    return taskConnectivity;
}