Example usage for org.apache.commons.collections CollectionUtils intersection

Introduction

This page collects example usages of org.apache.commons.collections.CollectionUtils.intersection from open-source projects.

Prototype

public static Collection intersection(final Collection a, final Collection b) 

Document

Returns a Collection containing the intersection of the given Collections.
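
For a quick feel of the method before the longer excerpts below, here is a minimal, self-contained sketch (the values are made up for illustration). Note that the commons-collections 3.x signature is raw-typed, so assigning the result to a generic collection produces an unchecked warning:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class IntersectionExample {
    @SuppressWarnings("unchecked") // intersection() returns a raw Collection
    public static void main(String[] args) {
        List<String> a = Arrays.asList("alpha", "beta", "beta", "gamma");
        List<String> b = Arrays.asList("beta", "beta", "gamma", "delta");

        // Cardinality is respected: "beta" occurs twice in both inputs,
        // so it occurs twice in the result. Iteration order is unspecified.
        Collection<String> common = CollectionUtils.intersection(a, b);
        System.out.println(common); // e.g. [beta, beta, gamma]
    }
}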

Usage

From source file:org.apache.eagle.alert.engine.spark.function.AlertBoltFunction.java

private void onAlertBoltSpecChange(String boltId, AlertBoltSpec spec, Map<String, StreamDefinition> sds,
        PolicyGroupEvaluatorImpl policyGroupEvaluator, AlertBoltOutputCollectorWrapper alertOutputCollector,
        PolicyState policyState, PublishState publishState) {
    List<PolicyDefinition> newPolicies = spec.getBoltPoliciesMap().get(boltId);
    LOG.debug("newPolicies {}", newPolicies);
    if (newPolicies == null) {
        LOG.info("no new policy with AlertBoltSpec {} for this bolt {}", spec, boltId);
        return;
    }

    Map<String, PolicyDefinition> newPoliciesMap = new HashMap<>();
    newPolicies.forEach(p -> newPoliciesMap.put(p.getName(), p));
    LOG.debug("newPoliciesMap {}", newPoliciesMap);
    MapComparator<String, PolicyDefinition> comparator = new MapComparator<>(newPoliciesMap,
            policyState.getCachedPolicyByBoltId(boltId));
    comparator.compare();
    LOG.debug("getAdded {}", comparator.getAdded());
    LOG.debug("getRemoved {}", comparator.getRemoved());
    LOG.debug("getModified {}", comparator.getModified());
    policyGroupEvaluator.onPolicyChange(null, comparator.getAdded(), comparator.getRemoved(),
            comparator.getModified(), sds);

    policyState.store(boltId, newPoliciesMap, policyGroupEvaluator.getPolicyDefinitionMap(),
            policyGroupEvaluator.getPolicyStreamHandlerMap());

    // update alert output collector
    Set<PublishPartition> newPublishPartitions = new HashSet<>();
    spec.getPublishPartitions().forEach(p -> {
        if (newPolicies.stream().anyMatch(o -> o.getName().equals(p.getPolicyId()))) {
            newPublishPartitions.add(p);
        }
    });

    Set<PublishPartition> cachedPublishPartitions = publishState.getCachedPublishPartitionsByBoltId(boltId);
    Collection<PublishPartition> addedPublishPartitions = CollectionUtils.subtract(newPublishPartitions,
            cachedPublishPartitions);
    Collection<PublishPartition> removedPublishPartitions = CollectionUtils.subtract(cachedPublishPartitions,
            newPublishPartitions);
    Collection<PublishPartition> modifiedPublishPartitions = CollectionUtils.intersection(newPublishPartitions,
            cachedPublishPartitions);

    LOG.debug("added PublishPartition " + addedPublishPartitions);
    LOG.debug("removed PublishPartition " + removedPublishPartitions);
    LOG.debug("modified PublishPartition " + modifiedPublishPartitions);

    alertOutputCollector.onAlertBoltSpecChange(addedPublishPartitions, removedPublishPartitions,
            modifiedPublishPartitions);

    publishState.storePublishPartitions(boltId, alertOutputCollector.getPublishPartitions());

}
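
Several examples on this page use the same diff idiom: subtract in each direction for the added and removed elements, and intersection for the elements present in both the new and cached sets. Distilled into a standalone sketch (names and values here are illustrative, not taken from the excerpt above):

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class DiffIdiomSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> cached = Arrays.asList("p1", "p2", "p3");
        List<String> latest = Arrays.asList("p2", "p3", "p4");

        Collection<String> added = CollectionUtils.subtract(latest, cached);   // [p4]
        Collection<String> removed = CollectionUtils.subtract(cached, latest); // [p1]
        // Elements in both sets are only *candidates* for "modified"; some
        // callers (see StreamRouteBoltFunction below) compare old and new
        // values afterwards to confirm an actual change.
        Collection<String> common = CollectionUtils.intersection(latest, cached); // [p2, p3]

        System.out.println("added=" + added + ", removed=" + removed + ", common=" + common);
    }
}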

From source file:org.apache.eagle.alert.engine.spark.function.StreamRouteBoltFunction.java

public void onStreamRouteBoltSpecChange(RouterSpec spec, Map<String, StreamDefinition> sds,
        StreamRouterImpl router, StreamRouterBoltOutputCollector routeCollector,
        final Map<StreamPartition, StreamSortSpec> cachedSSS,
        final Map<StreamPartition, List<StreamRouterSpec>> cachedSRS, int partitionNum) {
    //sanityCheck(spec);

    // figure out added, removed, modified StreamSortSpec
    Map<StreamPartition, StreamSortSpec> newSSS = spec.makeSSS();

    Set<StreamPartition> newStreamIds = newSSS.keySet();
    Set<StreamPartition> cachedStreamIds = cachedSSS.keySet();
    Collection<StreamPartition> addedStreamIds = CollectionUtils.subtract(newStreamIds, cachedStreamIds);
    Collection<StreamPartition> removedStreamIds = CollectionUtils.subtract(cachedStreamIds, newStreamIds);
    Collection<StreamPartition> modifiedStreamIds = CollectionUtils.intersection(newStreamIds, cachedStreamIds);

    Map<StreamPartition, StreamSortSpec> added = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> removed = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> modified = new HashMap<>();
    addedStreamIds.forEach(s -> added.put(s, newSSS.get(s)));
    removedStreamIds.forEach(s -> removed.put(s, cachedSSS.get(s)));
    modifiedStreamIds.forEach(s -> {
        if (!newSSS.get(s).equals(cachedSSS.get(s))) { // this means StreamSortSpec is changed for one specific streamId
            modified.put(s, newSSS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamSortSpec " + added);
        LOG.debug("removed StreamSortSpec " + removed);
        LOG.debug("modified StreamSortSpec " + modified);
    }
    router.onStreamSortSpecChange(added, removed, modified);
    // switch cache
    this.cachedSSS = newSSS;

    // figure out added, removed, modified StreamRouterSpec
    Map<StreamPartition, List<StreamRouterSpec>> newSRS = spec.makeSRS();
    Set<StreamPartition> newStreamPartitions = newSRS.keySet();
    Set<StreamPartition> cachedStreamPartitions = cachedSRS.keySet();

    Collection<StreamPartition> addedStreamPartitions = CollectionUtils.subtract(newStreamPartitions,
            cachedStreamPartitions);
    Collection<StreamPartition> removedStreamPartitions = CollectionUtils.subtract(cachedStreamPartitions,
            newStreamPartitions);
    Collection<StreamPartition> modifiedStreamPartitions = CollectionUtils.intersection(newStreamPartitions,
            cachedStreamPartitions);

    Collection<StreamRouterSpec> addedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> removedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> modifiedRouterSpecs = new ArrayList<>();
    addedStreamPartitions.forEach(s -> addedRouterSpecs.addAll(newSRS.get(s)));
    removedStreamPartitions.forEach(s -> removedRouterSpecs.addAll(cachedSRS.get(s)));
    modifiedStreamPartitions.forEach(s -> {
        if (!CollectionUtils.isEqualCollection(newSRS.get(s), cachedSRS.get(s))) { // this means StreamRouterSpec is changed for one specific StreamPartition
            modifiedRouterSpecs.addAll(newSRS.get(s));
        }
    });

    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamRouterSpec " + addedRouterSpecs);
        LOG.debug("removed StreamRouterSpec " + removedRouterSpecs);
        LOG.debug("modified StreamRouterSpec " + modifiedRouterSpecs);
    }

    routeCollector.onStreamRouterSpecChange(addedRouterSpecs, removedRouterSpecs, modifiedRouterSpecs, sds);
    // switch cache
    this.cachedSRS = newSRS;
    routeState.store(routeCollector, cachedSSS, cachedSRS, partitionNum);
}

From source file:org.apache.eagle.alert.engine.spout.CorrelationSpout.java

@SuppressWarnings("unchecked")
public void onReload(final SpoutSpec newMeta, Map<String, StreamDefinition> sds) throws Exception {
    // calculate topic create/remove/update
    List<String> topics = getTopics(newMeta);
    List<String> cachedTopics = getTopics(cachedSpoutSpec);
    Collection<String> newTopics = CollectionUtils.subtract(topics, cachedTopics);
    Collection<String> removeTopics = CollectionUtils.subtract(cachedTopics, topics);
    Collection<String> updateTopics = CollectionUtils.intersection(topics, cachedTopics);

    LOG.info("Topics were added={}, removed={}, modified={}", newTopics, removeTopics, updateTopics);

    // build lookup table for scheme
    Map<String, String> newSchemaName = new HashMap<String, String>();
    Map<String, Map<String, String>> dataSourceProperties = new HashMap<>();
    for (Kafka2TupleMetadata ds : newMeta.getKafka2TupleMetadataMap().values()) {
        newSchemaName.put(ds.getTopic(), ds.getSchemeCls());
        dataSourceProperties.put(ds.getTopic(), ds.getProperties());
    }

    // copy and swap
    Map<String, KafkaSpoutWrapper> newKafkaSpoutList = new HashMap<>(this.kafkaSpoutList);
    // iterate new topics and then create KafkaSpout
    for (String topic : newTopics) {
        KafkaSpoutWrapper wrapper = newKafkaSpoutList.get(topic);
        if (wrapper != null) {
            LOG.warn(MessageFormat.format(
                    "tried to create topic {0}, but it is already in the active spout list; this may indicate an inconsistency",
                    topic));
            continue;
        }
        KafkaSpoutWrapper newWrapper = createKafkaSpout(
                ConfigFactory.parseMap(dataSourceProperties.get(topic)).withFallback(this.config), conf,
                context, collector, topic, newSchemaName.get(topic), newMeta, sds);
        newKafkaSpoutList.put(topic, newWrapper);
    }
    // iterate remove topics and then close KafkaSpout
    for (String topic : removeTopics) {
        KafkaSpoutWrapper wrapper = newKafkaSpoutList.get(topic);
        if (wrapper == null) {
            LOG.warn(MessageFormat.format(
                    "tried to remove topic {0}, but it was not found in the active spout list; this may indicate an inconsistency",
                    topic));
            continue;
        }
        removeKafkaSpout(wrapper);
        newKafkaSpoutList.remove(topic);
    }

    // iterate update topic and then update metadata
    for (String topic : updateTopics) {
        KafkaSpoutWrapper spoutWrapper = newKafkaSpoutList.get(topic);
        if (spoutWrapper == null) {
            LOG.warn(MessageFormat.format(
                    "tried to update topic {0}, but it was not found in the active spout list; this may indicate an inconsistency",
                    topic));
            continue;
        }
        spoutWrapper.update(newMeta, sds);
    }

    // swap
    this.cachedSpoutSpec = newMeta;
    this.kafkaSpoutList = newKafkaSpoutList;
    this.sds = sds;
}

From source file:org.apache.falcon.catalog.CatalogPartitionHandler.java

private void registerPartitions(Configuration conf, CatalogStorage storage, Path staticPath,
        List<String> staticPartition) throws FalconException {
    try {
        FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(conf);
        if (!fs.exists(staticPath)) {
            //Do nothing if the output path doesn't exist
            return;
        }

        List<String> partitionColumns = getPartitionColumns(conf, storage);
        int dynamicPartCols = partitionColumns.size() - staticPartition.size();
        Path searchPath = staticPath;
        if (dynamicPartCols > 0) {
            searchPath = new Path(staticPath, StringUtils.repeat("*", "/", dynamicPartCols));
        }

        //Figure out the dynamic partitions from the directories on hdfs
        FileStatus[] files = fs.globStatus(searchPath, PATH_FILTER);
        Map<List<String>, String> partitions = new HashMap<List<String>, String>();
        for (FileStatus file : files) {
            List<String> dynamicParts = getDynamicPartitions(file.getPath(), staticPath);
            List<String> partitionValues = new ArrayList<String>(staticPartition);
            partitionValues.addAll(dynamicParts);
            LOG.debug("Final partition - " + partitionValues);
            partitions.put(partitionValues, file.getPath().toString());
        }

        List<List<String>> existPartitions = listPartitions(conf, storage, staticPartition);
        Collection<List<String>> targetPartitions = partitions.keySet();

        Collection<List<String>> partitionsForDrop = CollectionUtils.subtract(existPartitions,
                targetPartitions);
        Collection<List<String>> partitionsForAdd = CollectionUtils.subtract(targetPartitions, existPartitions);
        Collection<List<String>> partitionsForUpdate = CollectionUtils.intersection(existPartitions,
                targetPartitions);

        for (List<String> partition : partitionsForDrop) {
            dropPartitions(conf, storage, partition);
        }

        for (List<String> partition : partitionsForAdd) {
            addPartition(conf, storage, partition, partitions.get(partition));
        }

        for (List<String> partition : partitionsForUpdate) {
            updatePartition(conf, storage, partition, partitions.get(partition));
        }
    } catch (IOException e) {
        throw new FalconException(e);
    }
}

From source file:org.apache.hadoop.hbase.regionserver.indexed.IndexFilterEvaluator.java

@SuppressWarnings("unchecked")
private List<KeyValue> evaluate(Map<Pair, List<StoreFile>> indexMap, FilterList filterList) throws IOException {
    List<KeyValue> result = null;
    if (filterList.getOperator() == FilterList.Operator.MUST_PASS_ALL) {
        for (Filter filter : filterList.getFilters()) {
            List<KeyValue> childResult = evaluate(indexMap, filter);
            if (result == null) {
                result = childResult;
            } else if (childResult != null) {
                result = (ArrayList<KeyValue>) CollectionUtils.intersection(result, childResult);
            }
        }
    } else if (filterList.getOperator() == FilterList.Operator.MUST_PASS_ONE) {
        for (Filter filter : filterList.getFilters()) {
            List<KeyValue> childResult = evaluate(indexMap, filter);
            if (result == null) {
                result = childResult;
            } else if (childResult != null) {
                result = (ArrayList<KeyValue>) CollectionUtils.union(result, childResult);
            }
        }
    }
    return result;
}
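
A caveat on the casts above: CollectionUtils.intersection and CollectionUtils.union are declared to return only Collection; the cast to ArrayList<KeyValue> leans on the commons-collections 3.x implementation detail that both happen to build an ArrayList internally. A more defensive variant (a sketch with a hypothetical helper, not the project's code) copies the result instead:

// Copy into a fresh list rather than casting; the contract only
// promises a Collection, so this survives implementation changes.
@SuppressWarnings("unchecked")
private static List<KeyValue> intersect(List<KeyValue> a, List<KeyValue> b) {
    return new ArrayList<KeyValue>(CollectionUtils.intersection(a, b));
}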

From source file:org.apache.kylin.cube.model.CubeDesc.java

public void validateAggregationGroups() {
    int index = 0;

    for (AggregationGroup agg : getAggregationGroups()) {
        if (agg.getIncludes() == null) {
            logger.error("Aggregation group " + index + " 'includes' field not set");
            throw new IllegalStateException("Aggregation group " + index + " includes field not set");
        }

        if (agg.getSelectRule() == null) {
            logger.error("Aggregation group " + index + " 'select_rule' field not set");
            throw new IllegalStateException("Aggregation group " + index + " select rule field not set");
        }

        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(includeDims, agg.getIncludes());

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(mandatoryDims, agg.getSelectRule().mandatoryDims);

        ArrayList<Set<String>> hierarchyDimsList = Lists.newArrayList();
        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(hierarchyDimsList, hierarchyDims, agg.getSelectRule().hierarchyDims);

        ArrayList<Set<String>> jointDimsList = Lists.newArrayList();
        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(jointDimsList, jointDims, agg.getSelectRule().jointDims);

        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (!includeDims.contains(dim)) {
                    notIncluded.add(dim);
                }
            }
            Collections.sort(notIncluded);
            logger.error("Aggregation group " + index
                    + " Include dimensions not containing all the used dimensions");
            throw new IllegalStateException("Aggregation group " + index
                    + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
        }

        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            logger.warn(
                    "Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: "
                            + ensureOrder(CollectionUtils.intersection(mandatoryDims, hierarchyDims)));
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: "
                    + ensureOrder(CollectionUtils.intersection(mandatoryDims, jointDims)));
        }

        if (CollectionUtils.containsAny(hierarchyDims, jointDims)) {
            logger.error("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions");
            throw new IllegalStateException(
                    "Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: "
                            + ensureOrder(CollectionUtils.intersection(hierarchyDims, jointDims)));
        }

        if (hasSingleOrNone(hierarchyDimsList)) {
            logger.error("Aggregation group " + index + " requires at least 2 dimensions in a hierarchy");
            throw new IllegalStateException(
                    "Aggregation group " + index + " requires at least 2 dimensions in a hierarchy.");
        }
        if (hasSingleOrNone(jointDimsList)) {
            logger.error("Aggregation group " + index + " requires at least 2 dimensions in a joint");
            throw new IllegalStateException(
                    "Aggregation group " + index + " requires at least 2 dimensions in a joint");
        }

        Pair<Boolean, Set<String>> overlap = hasOverlap(hierarchyDimsList, hierarchyDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exists in more than one hierarchy: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exists in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
        }

        overlap = hasOverlap(jointDimsList, jointDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exists in more than one joint: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exists in more than one joint: " + ensureOrder(overlap.getSecond()));
        }

        index++;
    }
}

From source file:org.apache.kylin.cube.model.CubeDesc.java

private Pair<Boolean, Set<String>> hasOverlap(ArrayList<Set<String>> dimsList, Set<String> allDims) {
    Set<String> existing = new HashSet<>();
    Set<String> overlap = new HashSet<>();
    for (Set<String> dims : dimsList) {
        if (CollectionUtils.containsAny(existing, dims)) {
            overlap.addAll(ensureOrder(CollectionUtils.intersection(existing, dims)));
        }
        existing.addAll(dims);
    }
    return new Pair<>(overlap.size() > 0, overlap);
}
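
To see what this helper computes, trace it on made-up input:

// dimsList = [{A, B}, {B, C}]
// pass 1: existing = {}     -> containsAny is false; existing becomes {A, B}
// pass 2: existing = {A, B} -> containsAny({A, B}, {B, C}) is true;
//         intersection(existing, {B, C}) = {B}, so overlap = {B}
// result: Pair(true, {B})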

From source file:org.apache.kylin.cube.model.validation.rule.AggregationGroupRule.java

private void inner(CubeDesc cube, ValidateContext context) {

    if (cube.getAggregationGroups() == null || cube.getAggregationGroups().size() == 0) {
        context.addResult(ResultLevel.ERROR, "Cube should have at least one Aggregation group.");
        return;
    }

    int index = 0;
    for (AggregationGroup agg : cube.getAggregationGroups()) {
        if (agg.getIncludes() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' field not set");
            continue;
        }

        if (agg.getSelectRule() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'select rule' field not set");
            continue;
        }

        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getIncludes() != null) {
            for (String include : agg.getIncludes()) {
                includeDims.add(include);
            }
        }

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().mandatoryDims != null) {
            for (String m : agg.getSelectRule().mandatoryDims) {
                mandatoryDims.add(m);
            }
        }

        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().hierarchyDims != null) {
            for (String[] ss : agg.getSelectRule().hierarchyDims) {
                for (String s : ss) {
                    hierarchyDims.add(s);
                }
            }
        }

        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().jointDims != null) {
            for (String[] ss : agg.getSelectRule().jointDims) {
                for (String s : ss) {
                    jointDims.add(s);
                }
            }
        }

        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (!includeDims.contains(dim)) {
                    notIncluded.add(dim);
                }
            }
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " 'includes' dimensions do not include all the dimensions: " + notIncluded.toString());
            continue;
        }

        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(hierarchyDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
            continue;
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(jointDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
            continue;
        }

        int jointDimNum = 0;
        if (agg.getSelectRule().jointDims != null) {
            for (String[] joints : agg.getSelectRule().jointDims) {

                Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String s : joints) {
                    oneJoint.add(s);
                }

                if (oneJoint.size() < 2) {
                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                            + " requires at least 2 dimensions in a joint: " + oneJoint.toString());
                    continue;
                }
                jointDimNum += oneJoint.size();

                int overlapHierarchies = 0;
                if (agg.getSelectRule().hierarchyDims != null) {
                    for (String[] oneHierarchy : agg.getSelectRule().hierarchyDims) {
                        Set<String> share = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                        share.addAll(CollectionUtils.intersection(oneJoint, Arrays.asList(oneHierarchy)));

                        if (!share.isEmpty()) {
                            overlapHierarchies++;
                        }
                        if (share.size() > 1) {
                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                    + " joint dimensions overlap with more than 1 dimension in the same hierarchy: "
                                    + share.toString());
                            continue;
                        }
                    }

                    if (overlapHierarchies > 1) {
                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                + " joint dimensions overlap with more than 1 hierarchy");
                        continue;
                    }
                }
            }

            if (jointDimNum != jointDims.size()) {

                Set<String> existing = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                Set<String> overlap = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String[] joints : agg.getSelectRule().jointDims) {
                    Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                    for (String s : joints) {
                        oneJoint.add(s);
                    }
                    if (CollectionUtils.containsAny(existing, oneJoint)) {
                        overlap.addAll(CollectionUtils.intersection(existing, oneJoint));
                    }
                    existing.addAll(oneJoint);
                }
                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                        + " a dimension exists in more than one joint: " + overlap.toString());
                continue;
            }
        }
        long combination = 0;
        try {
            combination = agg.calculateCuboidCombination();
        } catch (Exception ex) {
            combination = getMaxCombinations(cube) + 1;
        } finally {
            if (combination > getMaxCombinations(cube)) {
                String msg = "Aggregation group " + index
                        + " has too many combinations, current combination is " + combination
                        + ", max allowed combination is " + getMaxCombinations(cube)
                        + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
                context.addResult(ResultLevel.ERROR, msg);
                continue;
            }
        }

        index++;
    }
}

From source file:org.apache.myfaces.custom.ppr.PPRPhaseListener.java

/**
 * Writes the XML elements for the triggered components to the provided
 * {@link PrintWriter}. Also encode the current state in a separate XML
 * element.
 *
 * @param out                 the output Writer
 * @param triggeredComponents comma-separated list of component IDs
 * @param viewRoot            the current ViewRoot
 * @param context             the current {@link FacesContext}
 */
private void encodeTriggeredComponents(PrintWriter out, String triggeredComponents, UIViewRoot viewRoot,
        FacesContext context) {
    StringTokenizer st = new StringTokenizer(triggeredComponents, ",", false);
    String clientId;
    UIComponent component;
    boolean handleState = true;

    Set toAppendMessagesComponents = new HashSet();
    Set toReplaceMessagesComponents = new HashSet();

    // Iterate over the individual client IDs
    while (st.hasMoreTokens()) {
        clientId = st.nextToken();
        component = viewRoot.findComponent(clientId);
        if (component != null) {
            //get info about state writing/rendering
            //if at least one ppr does not update the state
            //the response will not include state information
            PPRPanelGroup ppr = null;
            int oldIndex = 0;
            if (component instanceof UIComponentPerspective) {
                UIComponentPerspective uiComponentPerspective = (UIComponentPerspective) component;
                ExecuteOnCallback getComponentCallback = new ExecuteOnCallback() {
                    public Object execute(FacesContext context, UIComponent component) {
                        return component;
                    }
                };
                Object retval = uiComponentPerspective.executeOn(context, getComponentCallback);
                if (retval instanceof PPRPanelGroup) {
                    ppr = (PPRPanelGroup) retval;
                } else {
                    throw new IllegalArgumentException("Expect PPRPanelGroup or UiComponentPerspective");
                }
                //setup perspective
                oldIndex = uiComponentPerspective.getUiData().getRowIndex();
                uiComponentPerspective.getUiData().setRowIndex(uiComponentPerspective.getRowIndex());
            } else if (component instanceof PPRPanelGroup) {
                ppr = (PPRPanelGroup) component;
            } else {
                throw new IllegalArgumentException("Expect PPRPanelGroup or UiComponentPerspective");
            }
            if (!ppr.getStateUpdate().booleanValue()) {
                handleState = false;
            }
            //Check which messages components this group wants to append to
            if (ppr.getAppendMessages() != null) {
                List appendMessagesForThisGroup = PPRSupport.getComponentsByCommaSeparatedIdList(context, ppr,
                        ppr.getAppendMessages(), HtmlMessages.class);
                toAppendMessagesComponents.addAll(appendMessagesForThisGroup);
            }

            //Check which messages components this group should refresh
            if (ppr.getReplaceMessages() != null) {
                List replaceMessagesForThisGroup = PPRSupport.getComponentsByCommaSeparatedIdList(context, ppr,
                        ppr.getReplaceMessages(), HtmlMessages.class);
                toReplaceMessagesComponents.addAll(replaceMessagesForThisGroup);
            }

            // Write a component tag which contains a CDATA section with
            // the rendered HTML of the component children
            out.print("<component id=\"" + component.getClientId(context) + "\"><![CDATA[");
            boolean oldValue = HtmlRendererUtils.isAllowedCdataSection(context);
            HtmlRendererUtils.allowCdataSection(context, false);
            try {
                component.encodeChildren(context);
            } catch (IOException e) {
                throw new FacesException(e);
            }
            HtmlRendererUtils.allowCdataSection(context, oldValue);
            out.print("]]></component>");

            //tear down perspective
            if (component instanceof UIComponentPerspective) {
                UIComponentPerspective uiComponentPerspective = (UIComponentPerspective) component;
                uiComponentPerspective.getUiData().setRowIndex(oldIndex);
            }
        } else {
            log.debug("PPRPhaseListener component with id" + clientId + "not found!");
        }
    }

    boolean handleFacesMessages = !toAppendMessagesComponents.isEmpty()
            || !toReplaceMessagesComponents.isEmpty();

    if (handleFacesMessages) { // encode all facesMessages into XML elements
        //for starter just return all messages (bother with client IDs later on)
        Iterator messagesIterator = context.getMessages();

        //only write messages-elements if messages are present
        while (messagesIterator.hasNext()) {
            FacesMessage msg = (FacesMessage) messagesIterator.next();
            String messageText = msg.getSummary() + " " + msg.getDetail();
            out.print("<message><![CDATA[");
            out.print(messageText);
            out.print("]]></message>");
        }

        //Replace has precedence over append
        Collection intersection = CollectionUtils.intersection(toAppendMessagesComponents,
                toReplaceMessagesComponents);
        for (Iterator iterator = intersection.iterator(); iterator.hasNext();) {
            UIComponent uiComponent = (UIComponent) iterator.next();
            log.warn("Component with id " + uiComponent.getClientId(context)
                    + " is marked for replace and append messages -> replace has precedence");
            toAppendMessagesComponents.remove(uiComponent);
        }

        for (Iterator iterator = toAppendMessagesComponents.iterator(); iterator.hasNext();) {
            UIComponent uiComponent = (UIComponent) iterator.next();
            out.print("<append id=\"");
            out.print(uiComponent.getClientId(context));
            out.print("\"/>");
        }

        for (Iterator iterator = toReplaceMessagesComponents.iterator(); iterator.hasNext();) {
            UIComponent uiComponent = (UIComponent) iterator.next();
            out.print("<replace id=\"");
            out.print(uiComponent.getClientId(context));
            out.print("\"/>");
        }
    }

    if (handleState) {
        // Write the serialized state into a separate XML element
        out.print("<state>");
        FacesContext facesContext = FacesContext.getCurrentInstance();
        StateManager stateManager = facesContext.getApplication().getStateManager();
        StateManager.SerializedView serializedView = stateManager.saveSerializedView(facesContext);
        try {
            stateManager.writeState(facesContext, serializedView);
        } catch (IOException e) {
            throw new FacesException(e);
        }
        out.print("</state>");
    }

}

From source file:org.apache.openejb.config.rules.CheckClassLoading.java

public static Collection<DiffItem> intersection(final Classes cl1, final Classes cl2) {
    final List<DiffItem> diff = new ArrayList<DiffItem>();
    for (final Map.Entry<String, Collection<String>> entry1 : cl1.fileByArchive.entrySet()) {
        for (final Map.Entry<String, Collection<String>> entry2 : cl2.fileByArchive.entrySet()) {
            final Collection<String> v1 = entry1.getValue();
            final Collection<String> v2 = entry2.getValue();
            final Collection<String> inter = CollectionUtils.intersection(v1, v2);

            if (inter.size() == 0) {
                continue;
            }

            if (inter.size() == v1.size() && v1.size() == v2.size()) {
                diff.add(new SameItem(inter, entry1.getKey(), entry2.getKey()));
            } else if (inter.size() == v1.size()) {
                diff.add(new IncludedItem(inter, entry1.getKey(), entry2.getKey()));
            } else if (inter.size() == v2.size()) {
                diff.add(new ContainingItem(inter, entry1.getKey(), entry2.getKey()));
            } else {
                diff.add(new DiffItem(inter, entry1.getKey(), entry2.getKey()));
            }
        }
    }

    Collections.sort(diff, DiffItemComparator.getInstance());
    return diff;
}
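
The classification in this last example follows directly from the intersection's size (duplicates aside, the intersection can be at most as large as the smaller input):

// |inter| == |v1| == |v2| -> SameItem       (both archives hold the same files)
// |inter| == |v1|         -> IncludedItem   (archive 1's files are all in archive 2)
// |inter| == |v2|         -> ContainingItem (archive 1 contains all of archive 2's files)
// otherwise               -> DiffItem       (partial overlap)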