Example usage for org.apache.commons.collections CollectionUtils intersection

List of usage examples for org.apache.commons.collections CollectionUtils intersection

Introduction

On this page you can find example usage for org.apache.commons.collections CollectionUtils.intersection.

Prototype

public static Collection intersection(final Collection a, final Collection b) 

Document

Returns a Collection containing the intersection of the given Collections.
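
As a quick orientation, here is a minimal standalone sketch of the call (the lists and values are illustrative, not taken from the examples below). The result is cardinality-aware: an element occurring m times in a and n times in b occurs min(m, n) times in the intersection, and in commons-collections 3.x the return type is the raw Collection, so callers typically cast.

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class IntersectionDemo {
    public static void main(String[] args) {
        List<String> a = Arrays.asList("x", "x", "y", "z");
        List<String> b = Arrays.asList("x", "y", "y");

        // The 3.x API returns a raw Collection, so this assignment is unchecked.
        @SuppressWarnings("unchecked")
        Collection<String> common = CollectionUtils.intersection(a, b);

        // Contains "x" once (min(2, 1) occurrences) and "y" once (min(1, 2));
        // iteration order is not guaranteed.
        System.out.println(common);
    }
}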

Usage

From source file:org.alfresco.repo.workflow.PackageManager.java

@SuppressWarnings("unchecked")
private void checkPackageItems(NodeRef packageRef) {
    List<NodeRef> currentItems = getCurrentItems(packageRef);
    Collection<NodeRef> intersection = CollectionUtils.intersection(addItems, removeItems);
    addItems.removeAll(intersection);
    removeItems.removeAll(intersection);
    for (NodeRef node : intersection) {
        if (logger.isDebugEnabled())
            logger.debug("Item was added and removed from package! Ignoring item: " + node);
    }
    checkAddedItems(currentItems);
    checkRemovedItems(currentItems);
}
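
The intersection above identifies items that appear on both the add and remove lists so they cancel out instead of being processed twice. A reduced sketch of the same pattern with plain lists (the names are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class CancelOverlapDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> addItems = new ArrayList<String>(Arrays.asList("a", "b", "c"));
        List<String> removeItems = new ArrayList<String>(Arrays.asList("c", "d"));

        // "c" was both added and removed, so it is dropped from both lists.
        Collection<String> overlap = CollectionUtils.intersection(addItems, removeItems);
        addItems.removeAll(overlap);
        removeItems.removeAll(overlap);

        System.out.println(addItems + " " + removeItems); // [a, b] [d]
    }
}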

From source file:org.amanzi.neo.models.impl.network.NetworkModel.java

@SuppressWarnings("unchecked")
@Override
public IDataElement findSector(final String sectorName, final Integer ci, final Integer lac)
        throws ModelException {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(getStartLogStatement("findSector", sectorName, ci, lac));
    }

    // validate input
    if (StringUtils.isEmpty(sectorName) && ci == null && lac == null) {
        throw new ParameterInconsistencyException(getGeneralNodeProperties().getNodeNameProperty(), sectorName);
    }
    if (ci == null && StringUtils.isEmpty(sectorName)) {
        throw new ParameterInconsistencyException(networkNodeProperties.getCIProperty(), ci);
    }

    IDataElement result = null;

    if (!StringUtils.isEmpty(sectorName)) {
        result = findElement(NetworkElementType.SECTOR, sectorName);
    }
    if (result == null && ci != null) {
        final List<Node> ciList = getNodeListFromIndex(NetworkElementType.SECTOR,
                networkNodeProperties.getCIProperty(), ci);
        List<Node> resultList = null;

        if (lac != null) {
            final List<Node> lacNodes = getNodeListFromIndex(NetworkElementType.SECTOR,
                    networkNodeProperties.getLACProperty(), lac);

            resultList = new ArrayList<Node>(CollectionUtils.intersection(ciList, lacNodes));
        }

        if (resultList != null && !resultList.isEmpty()) {
            result = new DataElement(resultList.get(0));
        } else if (!ciList.isEmpty() && lac == null) {
            result = new DataElement(ciList.get(0));
        }
    }

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(getFinishLogStatement("findSector"));
    }
    return result;
}
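
In findSector, the intersection acts as a logical AND over two index lookups: only nodes returned by both the CI query and the LAC query survive. A reduced sketch of that pattern, with numeric IDs standing in for graph nodes:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class AndLookupsDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<Long> byCi = Arrays.asList(1L, 2L, 3L);  // matches for the CI criterion
        List<Long> byLac = Arrays.asList(3L, 4L);     // matches for the LAC criterion

        // Materialize the intersection as a list so the first match can be indexed.
        List<Long> both = new ArrayList<Long>(CollectionUtils.intersection(byCi, byLac));
        if (!both.isEmpty()) {
            System.out.println("first match: " + both.get(0)); // first match: 3
        }
    }
}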

From source file:org.ankus.mapreduce.algorithms.correlation.booleanset.CalculationBooleanSetReducer.java

@Override
protected void reduce(TextTwoWritableComparable key, Iterable<TextIntegerTwoPairsWritableComparable> values,
        Context context) throws IOException, InterruptedException {

    if (algorithmOption.equals(Constants.HAMMING_DISTACNE_FOR_BOOLEAN)) {
        int hammingDistance = 0;

        Map<String, Integer> itemID1Map = new HashMap<String, Integer>();
        Map<String, Integer> itemID2Map = new HashMap<String, Integer>();

        for (TextIntegerTwoPairsWritableComparable textIntegerPairsWritable : values) {
            itemID1Map.put(textIntegerPairsWritable.getText1().toString(),
                    textIntegerPairsWritable.getNumber1());
            itemID2Map.put(textIntegerPairsWritable.getText2().toString(),
                    textIntegerPairsWritable.getNumber2());
        }

        // Compare the string forms of the two value maps character by character.
        String item1String = itemID1Map.toString();
        String item2String = itemID2Map.toString();

        if (item1String.length() != item2String.length()) {
            hammingDistance = -1;
        } else {
            for (int i = 0; i < item1String.length(); ++i) {
                if (item1String.charAt(i) != item2String.charAt(i)) {
                    ++hammingDistance;
                }
            }
        }
        context.write(key, new DoubleWritable(hammingDistance));

    } else if (algorithmOption.equals(Constants.DICE_COEFFICIENT)) {
        double diceCoefficient = 0.0d;
        int size1 = 0;
        int size2 = 0;

        Map<String, Integer> itemID1Map = new HashMap<String, Integer>();
        Map<String, Integer> itemID2Map = new HashMap<String, Integer>();

        for (TextIntegerTwoPairsWritableComparable textIntegerPairsWritable : values) {
            itemID1Map.put(textIntegerPairsWritable.getText1().toString(),
                    textIntegerPairsWritable.getNumber1());
            itemID2Map.put(textIntegerPairsWritable.getText2().toString(),
                    textIntegerPairsWritable.getNumber2());

            size1 += textIntegerPairsWritable.getNumber1();
            size2 += textIntegerPairsWritable.getNumber2();
        }

        // Find the intersection, and get the number of elements in that set.
        Collection<String> intersection = CollectionUtils.intersection(itemID1Map.entrySet(),
                itemID2Map.entrySet());

        diceCoefficient = (2.0 * (float) intersection.size()) / ((float) (size1 + size2));
        context.write(key, new DoubleWritable(Double.parseDouble(String.format("%.3f%n", diceCoefficient))));

    } else if (algorithmOption.equals(Constants.JACCARD_COEFFICIENT)) {
        double jaccardCoefficient = 0.0d;
        int unionSize = 0;

        Map<String, Integer> itemID1Map = new HashMap<String, Integer>();
        Map<String, Integer> itemID2Map = new HashMap<String, Integer>();

        for (TextIntegerTwoPairsWritableComparable textIntegerPairsWritable : values) {
            itemID1Map.put(textIntegerPairsWritable.getText1().toString(),
                    textIntegerPairsWritable.getNumber1());
            itemID2Map.put(textIntegerPairsWritable.getText2().toString(),
                    textIntegerPairsWritable.getNumber2());

            if ((textIntegerPairsWritable.getNumber1() + textIntegerPairsWritable.getNumber2()) >= 1) {
                unionSize += 1;
            }
        }

        Collection<String> intersection = CollectionUtils.intersection(itemID1Map.entrySet(),
                itemID2Map.entrySet());

        jaccardCoefficient = (float) intersection.size() / (float) unionSize;
        context.write(key, new DoubleWritable(Double.parseDouble(String.format("%.3f%n", jaccardCoefficient))));
    }
}
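
Both set-based branches above derive their score from the size of the intersection: Dice is 2|A∩B| / (|A| + |B|) and Jaccard is |A∩B| / |A∪B|. Note that the reducer intersects entry sets, so a position only counts as overlap when key and value both match. A standalone sketch over plain sets:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.collections.CollectionUtils;

public class DiceJaccardDemo {
    public static void main(String[] args) {
        Set<String> a = new HashSet<String>(Arrays.asList("i1", "i2", "i3"));
        Set<String> b = new HashSet<String>(Arrays.asList("i2", "i3", "i4"));

        int intersectionSize = CollectionUtils.intersection(a, b).size(); // 2
        int unionSize = CollectionUtils.union(a, b).size();               // 4

        double dice = 2.0 * intersectionSize / (a.size() + b.size()); // 4/6 = 0.666...
        double jaccard = (double) intersectionSize / unionSize;       // 2/4 = 0.5
        System.out.println(dice + " " + jaccard);
    }
}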

From source file:org.ankus.mapreduce.algorithms.correlation.numericset.CalculationNumericSetReducer.java

@Override
protected void reduce(TextTwoWritableComparable key, Iterable<TextDoubleTwoPairsWritableComparable> values,
        Context context) throws IOException, InterruptedException {

    if (algorithmOption.equals(Constants.COSINE_COEFFICIENT)) {
        // Accumulate in doubles: the pair values are doubles, and compound
        // assignment into an int would silently truncate each term.
        double dotProduct = 0.0d;
        double normItemID1 = 0.0d;
        double normItemID2 = 0.0d;

        for (TextDoubleTwoPairsWritableComparable textDoublePairsWritableComparable : values) {
            dotProduct += textDoublePairsWritableComparable.getNumber1()
                    * textDoublePairsWritableComparable.getNumber2();

            normItemID1 += Math.pow(textDoublePairsWritableComparable.getNumber1(), 2);
            normItemID2 += Math.pow(textDoublePairsWritableComparable.getNumber2(), 2);
        }

        double cosineCoefficient = dotProduct / (Math.sqrt(normItemID1) * Math.sqrt(normItemID2));
        context.write(key, new DoubleWritable(Double.parseDouble(String.format("%.3f%n", cosineCoefficient))));

    } else if (algorithmOption.equals(Constants.PEARSON_COEFFICIENT)) {
        double sumID1 = 0.0d;
        double sumID2 = 0.0d;
        double squareSumID1 = 0.0d;
        double squareSumID2 = 0.0d;
        double totalSumIDs = 0.0d;
        // PCC(Pearson Correlation Coefficient) variable 
        double r = 0.0d;
        int n = 0;

        for (TextDoubleTwoPairsWritableComparable textDoublePairsWritable : values) {

            // Count values for sigma(standard deviation)
            n++;

            //  Sum of item values for users
            sumID1 += textDoublePairsWritable.getNumber1();
            sumID2 += textDoublePairsWritable.getNumber2();

            // Sum of squares for users
            squareSumID1 += Math.pow(textDoublePairsWritable.getNumber1(), 2);
            squareSumID2 += Math.pow(textDoublePairsWritable.getNumber2(), 2);

            // Calculate sum of times for users
            totalSumIDs += (textDoublePairsWritable.getNumber1() * textDoublePairsWritable.getNumber2());
        }

        // 1. Calculate numerator
        double numerator = totalSumIDs - ((sumID1 * sumID2) / n);

        // 2. Calculate each of the denominator user1 and denominator user2
        double denominatorUserId1 = squareSumID1 - ((Math.pow(sumID1, 2)) / n);
        double denominatorUserId2 = squareSumID2 - ((Math.pow(sumID2, 2)) / n);

        // 3. Calculate denominator
        double denominator = Math.sqrt(denominatorUserId1 * denominatorUserId2);

        // 4. Calculate PCC(Pearson Correlation Coefficient)
        if (denominator == 0) {
            r = 0.0d;
        } else {
            r = numerator / denominator;
        }

        context.write(key, new DoubleWritable(Double.parseDouble(String.format("%.3f%n", r))));

    } else if (algorithmOption.equals(Constants.TANIMOTO_COEFFICIENT)) {
        double tanimotoCoefficient = 0.0d;

        Map<String, Double> itemID1Map = new HashMap<String, Double>();
        Map<String, Double> itemID2Map = new HashMap<String, Double>();

        for (TextDoubleTwoPairsWritableComparable textDoubleTwoPairsWritableComparable : values) {
            itemID1Map.put(textDoubleTwoPairsWritableComparable.getText1().toString(),
                    textDoubleTwoPairsWritableComparable.getNumber1());
            itemID2Map.put(textDoubleTwoPairsWritableComparable.getText2().toString(),
                    textDoubleTwoPairsWritableComparable.getNumber2());
        }

        Collection<String> intersection = CollectionUtils.intersection(itemID1Map.entrySet(),
                itemID2Map.entrySet());
        double sumItemsSize = itemID1Map.size() + itemID2Map.size();

        tanimotoCoefficient = ((float) intersection.size()) / ((float) (sumItemsSize - intersection.size()));
        context.write(key,
                new DoubleWritable(Double.parseDouble(String.format("%.3f%n", tanimotoCoefficient))));

    } else if (algorithmOption.equals(Constants.MANHATTAN_DISTANCE)) {
        double manhattanDistance = 0.0d;

        for (TextDoubleTwoPairsWritableComparable textDoublePairsWritable : values) {
            manhattanDistance += Math
                    .abs(textDoublePairsWritable.getNumber1() - textDoublePairsWritable.getNumber2());
        }
        context.write(key, new DoubleWritable(manhattanDistance));

    } else if (algorithmOption.equals(Constants.UCLIDEAN_DISTANCE)) { // constant spelled this way upstream
        double sum = 0.0d;
        double euclideanDistance = 0.0d;

        for (TextDoubleTwoPairsWritableComparable textDoublePairsWritable : values) {
            sum += Math.pow((textDoublePairsWritable.getNumber1() - textDoublePairsWritable.getNumber2()), 2);
        }

        euclideanDistance = Math.sqrt(sum);
        context.write(key, new DoubleWritable(Double.parseDouble(String.format("%.3f%n", euclideanDistance))));
    }
}
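
The Tanimoto branch is the same intersection-counting idea with a different denominator: |A∩B| / (|A| + |B| - |A∩B|), which for plain sets coincides with the Jaccard coefficient. A minimal sketch:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class TanimotoDemo {
    public static void main(String[] args) {
        List<String> a = Arrays.asList("i1", "i2", "i3");
        List<String> b = Arrays.asList("i2", "i3", "i4");

        int inter = CollectionUtils.intersection(a, b).size(); // 2
        // |A∩B| / (|A| + |B| - |A∩B|) = 2 / (3 + 3 - 2) = 0.5
        double tanimoto = (double) inter / (a.size() + b.size() - inter);
        System.out.println(tanimoto);
    }
}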

From source file:org.apache.ambari.server.state.ConfigMergeHelper.java

@SuppressWarnings("unchecked")
public Map<String, Map<String, ThreeWayValue>> getConflicts(String clusterName, StackId targetStack)
        throws AmbariException {
    Cluster cluster = m_clusters.get().getCluster(clusterName);
    StackId oldStack = cluster.getCurrentStackVersion();

    Map<String, Map<String, String>> oldMap = new HashMap<String, Map<String, String>>();
    Map<String, Map<String, String>> newMap = new HashMap<String, Map<String, String>>();

    // Add service properties for old and new stack
    for (String serviceName : cluster.getServices().keySet()) {
        Set<PropertyInfo> oldStackProperties = m_ambariMetaInfo.get()
                .getServiceProperties(oldStack.getStackName(), oldStack.getStackVersion(), serviceName);
        addToMap(oldMap, oldStackProperties);

        Set<PropertyInfo> newStackProperties = m_ambariMetaInfo.get()
                .getServiceProperties(targetStack.getStackName(), targetStack.getStackVersion(), serviceName);
        addToMap(newMap, newStackProperties);
    }

    // Add stack properties for old and new stack
    Set<PropertyInfo> set = m_ambariMetaInfo.get().getStackProperties(oldStack.getStackName(),
            oldStack.getStackVersion());
    addToMap(oldMap, set);

    set = m_ambariMetaInfo.get().getStackProperties(targetStack.getStackName(), targetStack.getStackVersion());
    addToMap(newMap, set);

    // Final result after merging.
    Map<String, Map<String, ThreeWayValue>> result = new HashMap<String, Map<String, ThreeWayValue>>();

    for (Entry<String, Map<String, String>> entry : oldMap.entrySet()) {
        if (!newMap.containsKey(entry.getKey())) {
            LOG.info("Stack {} does not have an equivalent config type {} in {}", oldStack.getStackId(),
                    entry.getKey(), targetStack.getStackId());
            continue;
        }

        Map<String, String> oldPairs = entry.getValue();
        Map<String, String> newPairs = newMap.get(entry.getKey());
        Collection<String> customValueKeys = null;

        Config config = cluster.getDesiredConfigByType(entry.getKey());
        if (null != config) {
            Set<String> valueKeys = config.getProperties().keySet();

            customValueKeys = CollectionUtils.subtract(valueKeys, oldPairs.keySet());
        }

        // Keep properties with custom values (i.e., changed from default value in old stack)
        if (null != customValueKeys) {
            for (String prop : customValueKeys) {
                String newVal = newPairs.get(prop);
                String savedVal = config.getProperties().get(prop);
                if (null != newVal && null != savedVal && !newVal.equals(savedVal)) {
                    ThreeWayValue twv = new ThreeWayValue();
                    twv.oldStackValue = null;
                    twv.newStackValue = normalizeValue(savedVal, newVal.trim());
                    twv.savedValue = savedVal.trim();

                    if (!result.containsKey(entry.getKey())) {
                        result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
                    }

                    result.get(entry.getKey()).put(prop, twv);
                }
            }
        }

        Collection<String> common = CollectionUtils.intersection(newPairs.keySet(), oldPairs.keySet());

        for (String prop : common) {
            String oldStackVal = oldPairs.get(prop);
            String newStackVal = newPairs.get(prop);
            String savedVal = "";
            if (null != config) {
                savedVal = config.getProperties().get(prop);
            }

            // Skip properties that are not defined in either stack (both null).
            // Otherwise flag the property when the new stack value differs from the
            // saved value and the old stack value differs from either of them.
            if (!(newStackVal == null && oldStackVal == null) && !newStackVal.equals(savedVal)
                    && (!oldStackVal.equals(newStackVal) || !oldStackVal.equals(savedVal))) {
                ThreeWayValue twv = new ThreeWayValue();
                twv.oldStackValue = normalizeValue(savedVal, oldStackVal.trim());
                twv.newStackValue = normalizeValue(savedVal, newStackVal.trim());
                twv.savedValue = (null == savedVal) ? null : savedVal.trim();

                if (!result.containsKey(entry.getKey())) {
                    result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
                }

                result.get(entry.getKey()).put(prop, twv);
            }
        }
    }

    return result;
}

From source file:org.apache.carbondata.hadoop.api.CarbonInputFormat.java

private List<ExtendedBlocklet> intersectFilteredBlocklets(CarbonTable carbonTable,
        List<ExtendedBlocklet> previousDataMapPrunedBlocklets,
        List<ExtendedBlocklet> otherDataMapPrunedBlocklets) {
    List<ExtendedBlocklet> prunedBlocklets = null;
    if (BlockletDataMapUtil.isCacheLevelBlock(carbonTable)) {
        prunedBlocklets = new ArrayList<>();
        for (ExtendedBlocklet otherBlocklet : otherDataMapPrunedBlocklets) {
            if (previousDataMapPrunedBlocklets.contains(otherBlocklet)) {
                prunedBlocklets.add(otherBlocklet);
            }
        }
    } else {
        prunedBlocklets = (List) CollectionUtils.intersection(otherDataMapPrunedBlocklets,
                previousDataMapPrunedBlocklets);
    }
    return prunedBlocklets;
}

From source file:org.apache.eagle.alert.coordinator.trigger.DynamicPolicyLoader.java

/**
 * When this is run for the first time, cachedPolicies is empty, so all existing policies are
 * reported as addedPolicies.
 */
@SuppressWarnings("unchecked")
@Override
public void run() {
    // we should catch every exception to avoid a zombie thread
    try {
        final Stopwatch watch = Stopwatch.createStarted();
        LOG.info("Starting to load policies");
        List<PolicyDefinition> current = client.listPolicies();
        Map<String, PolicyDefinition> currPolicies = new HashMap<>();
        current.forEach(pe -> currPolicies.put(pe.getName(), pe));

        Collection<String> addedPolicies = CollectionUtils.subtract(currPolicies.keySet(),
                cachedPolicies.keySet());
        Collection<String> removedPolicies = CollectionUtils.subtract(cachedPolicies.keySet(),
                currPolicies.keySet());
        Collection<String> potentiallyModifiedPolicies = CollectionUtils.intersection(currPolicies.keySet(),
                cachedPolicies.keySet());

        List<String> reallyModifiedPolicies = new ArrayList<>();
        for (String updatedPolicy : potentiallyModifiedPolicies) {
            if (currPolicies.get(updatedPolicy) != null
                    && !currPolicies.get(updatedPolicy).equals(cachedPolicies.get(updatedPolicy))) {
                reallyModifiedPolicies.add(updatedPolicy);
            }
        }

        boolean policyChanged = false;
        if (addedPolicies.size() != 0 || removedPolicies.size() != 0 || reallyModifiedPolicies.size() != 0) {
            policyChanged = true;
        }

        if (!policyChanged) {
            LOG.info("No policy (totally {}) changed since last round", current.size());
            return;
        }

        synchronized (this) {
            for (PolicyChangeListener listener : listeners) {
                listener.onPolicyChange(current, addedPolicies, removedPolicies, reallyModifiedPolicies);
            }
        }

        watch.stop();

        LOG.info("Finished loading {} policies, added: {}, removed: {}, modified: {}, taken: {} ms",
                current.size(), addedPolicies.size(), removedPolicies.size(),
                potentiallyModifiedPolicies.size(), watch.elapsed(TimeUnit.MILLISECONDS));
        // reset cached policies
        cachedPolicies = currPolicies;
    } catch (Throwable t) {
        LOG.warn("Error loading policy, but continue to run", t);
    }
}

From source file:org.apache.eagle.alert.engine.runner.AlertBolt.java

@SuppressWarnings("unchecked")
@Override
public synchronized void onAlertBoltSpecChange(AlertBoltSpec spec, Map<String, StreamDefinition> sds) {
    List<PolicyDefinition> newPolicies = spec.getBoltPoliciesMap().get(boltId);
    if (newPolicies == null) {
        LOG.info("no new policy with AlertBoltSpec {} for this bolt {}", spec, boltId);
        return;
    }

    Map<String, PolicyDefinition> newPoliciesMap = new HashMap<>();
    newPolicies.forEach(p -> newPoliciesMap.put(p.getName(), p));
    MapComparator<String, PolicyDefinition> comparator = new MapComparator<>(newPoliciesMap, cachedPolicies);
    comparator.compare();

    MapComparator<String, StreamDefinition> streamComparator = new MapComparator<>(sds, sdf);
    streamComparator.compare();

    List<StreamDefinition> addOrUpdatedStreams = streamComparator.getAdded();
    addOrUpdatedStreams.addAll(streamComparator.getModified());
    List<PolicyDefinition> cachedPoliciesTemp = new ArrayList<>(cachedPolicies.values());
    addOrUpdatedStreams.forEach(s -> {
        cachedPoliciesTemp.stream().filter(p -> p.getInputStreams().contains(s.getStreamId())
                || p.getOutputStreams().contains(s.getStreamId())).forEach(p -> {
                    if (comparator.getModified().stream().filter(x -> x.getName().equals(p.getName()))
                            .count() <= 0
                            && comparator.getAdded().stream().filter(x -> x.getName().equals(p.getName()))
                                    .count() <= 0) {
                        comparator.getModified().add(p);
                    }
                });
    });

    policyGroupEvaluator.onPolicyChange(spec.getVersion(), comparator.getAdded(), comparator.getRemoved(),
            comparator.getModified(), sds);

    // update alert output collector
    Set<PublishPartition> newPublishPartitions = new HashSet<>();
    spec.getPublishPartitions().forEach(p -> {
        if (newPolicies.stream().filter(o -> o.getName().equals(p.getPolicyId())).count() > 0) {
            newPublishPartitions.add(p);
        }
    });

    Collection<PublishPartition> addedPublishPartitions = CollectionUtils.subtract(newPublishPartitions,
            cachedPublishPartitions);
    Collection<PublishPartition> removedPublishPartitions = CollectionUtils.subtract(cachedPublishPartitions,
            newPublishPartitions);
    Collection<PublishPartition> modifiedPublishPartitions = CollectionUtils.intersection(newPublishPartitions,
            cachedPublishPartitions);

    LOG.debug("added PublishPartition " + addedPublishPartitions);
    LOG.debug("removed PublishPartition " + removedPublishPartitions);
    LOG.debug("modified PublishPartition " + modifiedPublishPartitions);

    alertOutputCollector.onAlertBoltSpecChange(addedPublishPartitions, removedPublishPartitions,
            modifiedPublishPartitions);

    // switch
    cachedPolicies = newPoliciesMap;
    cachedPublishPartitions = newPublishPartitions;
    sdf = sds;
    specVersion = spec.getVersion();
    this.spec = spec;
}

From source file:org.apache.eagle.alert.engine.runner.MapComparator.java

@SuppressWarnings("unchecked")
public void compare() {
    Set<K> keys1 = map1.keySet();
    Set<K> keys2 = map2.keySet();
    Collection<K> addedKeys = CollectionUtils.subtract(keys1, keys2);
    Collection<K> removedKeys = CollectionUtils.subtract(keys2, keys1);
    Collection<K> modifiedKeys = CollectionUtils.intersection(keys1, keys2);

    addedKeys.forEach(k -> added.add(map1.get(k)));
    removedKeys.forEach(k -> removed.add(map2.get(k)));
    modifiedKeys.forEach(k -> {
        if (!map1.get(k).equals(map2.get(k))) {
            modified.add(map1.get(k));
        }
    });
}
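
compare() distills the diff idiom used throughout these bolts: subtracting one key set from the other yields additions, subtracting in the reverse direction yields removals, and the intersection yields modification candidates that still need an equals() check. A standalone sketch of the idiom over two maps (keys and values are illustrative):

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;

public class MapDiffDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Map<String, String> current = new HashMap<String, String>();
        current.put("p1", "v1");
        current.put("p2", "v2-new");
        Map<String, String> cached = new HashMap<String, String>();
        cached.put("p2", "v2-old");
        cached.put("p3", "v3");

        Collection<String> added = CollectionUtils.subtract(current.keySet(), cached.keySet());   // [p1]
        Collection<String> removed = CollectionUtils.subtract(cached.keySet(), current.keySet()); // [p3]
        Collection<String> candidates = CollectionUtils.intersection(current.keySet(), cached.keySet());

        // Keys on both sides are only candidates; compare values to confirm a change.
        for (String key : candidates) {
            if (!current.get(key).equals(cached.get(key))) {
                System.out.println("modified: " + key); // modified: p2
            }
        }
        System.out.println("added: " + added + ", removed: " + removed);
    }
}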

From source file:org.apache.eagle.alert.engine.runner.StreamRouterBolt.java

/**
 * Compare with the metadata snapshot cache to generate a diff (added, removed and modified)
 * between versions.
 *
 * @param spec
 */
@SuppressWarnings("unchecked")
@Override
public synchronized void onStreamRouteBoltSpecChange(RouterSpec spec, Map<String, StreamDefinition> sds) {
    sanityCheck(spec);

    // figure out added, removed, modified StreamSortSpec
    Map<StreamPartition, StreamSortSpec> newSSS = spec.makeSSS();

    Set<StreamPartition> newStreamIds = newSSS.keySet();
    Set<StreamPartition> cachedStreamIds = cachedSSS.keySet();
    Collection<StreamPartition> addedStreamIds = CollectionUtils.subtract(newStreamIds, cachedStreamIds);
    Collection<StreamPartition> removedStreamIds = CollectionUtils.subtract(cachedStreamIds, newStreamIds);
    Collection<StreamPartition> modifiedStreamIds = CollectionUtils.intersection(newStreamIds, cachedStreamIds);

    Map<StreamPartition, StreamSortSpec> added = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> removed = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> modified = new HashMap<>();
    addedStreamIds.forEach(s -> added.put(s, newSSS.get(s)));
    removedStreamIds.forEach(s -> removed.put(s, cachedSSS.get(s)));
    modifiedStreamIds.forEach(s -> {
        if (!newSSS.get(s).equals(cachedSSS.get(s))) { // this means StreamSortSpec is changed for one specific streamId
            modified.put(s, newSSS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamSortSpec " + added);
        LOG.debug("removed StreamSortSpec " + removed);
        LOG.debug("modified StreamSortSpec " + modified);
    }
    router.onStreamSortSpecChange(added, removed, modified);
    // switch cache
    cachedSSS = newSSS;

    // figure out added, removed, modified StreamRouterSpec
    Map<StreamPartition, List<StreamRouterSpec>> newSRS = spec.makeSRS();

    Set<StreamPartition> newStreamPartitions = newSRS.keySet();
    Set<StreamPartition> cachedStreamPartitions = cachedSRS.keySet();

    Collection<StreamPartition> addedStreamPartitions = CollectionUtils.subtract(newStreamPartitions,
            cachedStreamPartitions);
    Collection<StreamPartition> removedStreamPartitions = CollectionUtils.subtract(cachedStreamPartitions,
            newStreamPartitions);
    Collection<StreamPartition> modifiedStreamPartitions = CollectionUtils.intersection(newStreamPartitions,
            cachedStreamPartitions);

    Collection<StreamRouterSpec> addedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> removedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> modifiedRouterSpecs = new ArrayList<>();
    addedStreamPartitions.forEach(s -> addedRouterSpecs.addAll(newSRS.get(s)));
    removedStreamPartitions.forEach(s -> removedRouterSpecs.addAll(cachedSRS.get(s)));
    modifiedStreamPartitions.forEach(s -> {
        if (!CollectionUtils.isEqualCollection(newSRS.get(s), cachedSRS.get(s))) { // this means StreamRouterSpec is changed for one specific StreamPartition
            modifiedRouterSpecs.addAll(newSRS.get(s));
        }
    });

    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamRouterSpec " + addedRouterSpecs);
        LOG.debug("removed StreamRouterSpec " + removedRouterSpecs);
        LOG.debug("modified StreamRouterSpec " + modifiedRouterSpecs);
    }

    routeCollector.onStreamRouterSpecChange(addedRouterSpecs, removedRouterSpecs, modifiedRouterSpecs, sds);
    // switch cache
    cachedSRS = newSRS;
    sdf = sds;
    specVersion = spec.getVersion();
}