Example usage for java.util Set retainAll

List of usage examples for java.util Set retainAll

Introduction

This page collects example usages of java.util.Set.retainAll from open source projects.

Prototype

boolean retainAll(Collection<?> c);

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).
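
As a quick standalone sketch of the contract (class and variable names invented for illustration): retainAll mutates the receiving set so that it keeps only the elements also present in the argument collection, and returns true if the set changed as a result.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> tags = new HashSet<>(Arrays.asList("action", "indie", "rpg"));
        Set<String> whiteList = new HashSet<>(Arrays.asList("indie", "rpg", "strategy"));

        // Keep only the tags that also appear on the white list; "action" is dropped.
        boolean changed = tags.retainAll(whiteList);

        System.out.println(changed); // true, the set was modified
        System.out.println(tags);    // prints [indie, rpg] in some order (HashSet is unordered)
    }
}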

Usage

From source file:com.ignorelist.kassandra.steam.scraper.Tagger.java

private void addTags(SharedConfig sharedConfig, Long gameId, Options taggerOptions, Set<String> externalTags) {
    Set<String> existingTags = sharedConfig.getTags(gameId);

    if (null != taggerOptions.getWhiteList() && !taggerOptions.getWhiteList().isEmpty()) {
        externalTags.retainAll(taggerOptions.getWhiteList());
    }

    existingTags.addAll(externalTags);

    if (null != taggerOptions.getReplacementMap()) {
        for (Map.Entry<String, String> e : taggerOptions.getReplacementMap().entrySet()) {
            if (existingTags.remove(e.getKey())) {
                existingTags.add(e.getValue());
            }
        }
    }

    if (null != taggerOptions.getRemoveTags()) {
        existingTags.removeAll(taggerOptions.getRemoveTags());
    }
    if (null != taggerOptions.getWhiteList() && !taggerOptions.getWhiteList().isEmpty()
            && taggerOptions.isRemoveNotWhiteListed()) {
        existingTags.retainAll(taggerOptions.getWhiteList());
    }

    sharedConfig.setTags(gameId, existingTags);
}

From source file:org.apache.kylin.cube.model.validation.rule.AggregationGroupRule.java

private void inner(CubeDesc cube, ValidateContext context) {

    if (cube.getAggregationGroups() == null || cube.getAggregationGroups().size() == 0) {
        context.addResult(ResultLevel.ERROR, "Cube should have at least one Aggregation group.");
        return;
    }

    int index = 0;
    for (AggregationGroup agg : cube.getAggregationGroups()) {
        if (agg.getIncludes() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' field not set");
            continue;
        }

        if (agg.getSelectRule() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'select rule' field not set");
            continue;
        }

        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getIncludes() != null) {
            for (String include : agg.getIncludes()) {
                includeDims.add(include);
            }
        }

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().mandatoryDims != null) {
            for (String m : agg.getSelectRule().mandatoryDims) {
                mandatoryDims.add(m);
            }
        }

        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().hierarchyDims != null) {
            for (String[] ss : agg.getSelectRule().hierarchyDims) {
                for (String s : ss) {
                    hierarchyDims.add(s);
                }
            }
        }

        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().jointDims != null) {
            for (String[] ss : agg.getSelectRule().jointDims) {
                for (String s : ss) {
                    jointDims.add(s);
                }
            }
        }

        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (includeDims.contains(dim) == false) {
                    notIncluded.add(dim);
                }
            }
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
            continue;
        }

        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(hierarchyDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
            continue;
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(jointDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
            continue;
        }

        int jointDimNum = 0;
        if (agg.getSelectRule().jointDims != null) {
            for (String[] joints : agg.getSelectRule().jointDims) {

                Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String s : joints) {
                    oneJoint.add(s);
                }

                if (oneJoint.size() < 2) {
                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                            + " require at least 2 dimensions in a joint: " + oneJoint.toString());
                    continue;
                }
                jointDimNum += oneJoint.size();

                int overlapHierarchies = 0;
                if (agg.getSelectRule().hierarchyDims != null) {
                    for (String[] oneHierarchy : agg.getSelectRule().hierarchyDims) {
                        Set<String> share = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                        share.addAll(CollectionUtils.intersection(oneJoint, Arrays.asList(oneHierarchy)));

                        if (!share.isEmpty()) {
                            overlapHierarchies++;
                        }
                        if (share.size() > 1) {
                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                    + " joint dimensions has overlap with more than 1 dimensions in same hierarchy: "
                                    + share.toString());
                            continue;
                        }
                    }

                    if (overlapHierarchies > 1) {
                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                + " joint dimensions has overlap with more than 1 hierarchies");
                        continue;
                    }
                }
            }

            if (jointDimNum != jointDims.size()) {

                Set<String> existing = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                Set<String> overlap = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String[] joints : agg.getSelectRule().jointDims) {
                    Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                    for (String s : joints) {
                        oneJoint.add(s);
                    }
                    if (CollectionUtils.containsAny(existing, oneJoint)) {
                        overlap.addAll(CollectionUtils.intersection(existing, oneJoint));
                    }
                    existing.addAll(oneJoint);
                }
                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                        + " a dimension exists in more than one joint: " + overlap.toString());
                continue;
            }
        }
        long combination = 0;
        try {
            combination = agg.calculateCuboidCombination();
        } catch (Exception ex) {
            combination = getMaxCombinations(cube) + 1;
        } finally {
            if (combination > getMaxCombinations(cube)) {
                String msg = "Aggregation group " + index
                        + " has too many combinations, current combination is " + combination
                        + ", max allowed combination is " + getMaxCombinations(cube)
                        + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
                context.addResult(ResultLevel.ERROR, msg);
                continue;
            }
        }

        index++;
    }
}

From source file:uniol.apt.adt.automaton.FiniteAutomatonUtility.java

/**
 * Get a finite automaton accepting the intersection of the languages of two automatons. A word is in the
 * intersection of the languages if it is in all of the individual languages.
 * @param a1 The first automaton of the intersection.
 * @param a2 The second automaton of the intersection.
 * @return An automaton accepting the intersection.
 */
static public DeterministicFiniteAutomaton intersection(DeterministicFiniteAutomaton a1,
        DeterministicFiniteAutomaton a2) {
    Set<Symbol> alphabet = new HashSet<>(a1.getAlphabet());
    alphabet.retainAll(a2.getAlphabet());
    return getAutomaton(new SynchronousParallelComposition(alphabet, a1.getInitialState(), a2.getInitialState(),
            SynchronousParallelComposition.Mode.INTERSECTION));
}

From source file:edu.cornell.mannlib.vitro.webapp.searchindex.extensions.LabelsAcrossContextNodes.java

private boolean isAnyMatch(Set<String> set1, Set<String> set2) {
    Set<String> matches = new HashSet<>(set1);
    matches.retainAll(set2);
    return !matches.isEmpty();
}
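
The helper above copies set1 and intersects it with set2 only to test whether the two sets share an element. As a side note, the same check can be written without the copy using the JDK's Collections.disjoint; this is just an alternative sketch, not the project's code:

import java.util.Collections;
import java.util.Set;

// True if the two sets share at least one element; neither set is modified.
// Collections.disjoint returns true when there is no common element, so negate it.
private boolean isAnyMatch(Set<String> set1, Set<String> set2) {
    return !Collections.disjoint(set1, set2);
}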

From source file:ubic.BAMSandAllen.StructureCatalogAnalyze.java

@Deprecated
public void analyze() throws Exception {
    // need correlation matrix between all regions for -
    // connectivity
    // gene expression
    Direction direction = Direction.INCOMING;

    AllenCatalogMatrices allenMatrices = new AllenCatalogMatrices();
    DoubleMatrix<String, String> connectionMatrix;
    connectionMatrix = makeConnectionMatrix(direction);
    int BAMSRegionCount = connectionMatrix.columns();
    Set<String> BAMSregionNames = new HashSet<String>(connectionMatrix.getColNames());

    DoubleMatrix<String, String> allExpressionMatrix;
    allExpressionMatrix = allenMatrices.getEnergies();

    // setup expression for BAMS regions, average when required
    StructureCatalogLoader allenCatalog = new StructureCatalogLoader();

    DoubleMatrix<String, String> expressionMatrix = new DenseDoubleMatrix<String, String>(
            allExpressionMatrix.rows(), BAMSregionNames.size());
    expressionMatrix.setColumnNames(connectionMatrix.getColNames());
    expressionMatrix.setRowNames(allExpressionMatrix.getRowNames());
    log.info("Expression matrix is " + expressionMatrix.columns() + " by " + expressionMatrix.rows());
    // log.info( expressionMatrix.toString() );

    for (String BAMSregion : BAMSregionNames) {
        Set<String> allenRegions = allenCatalog.getAllenMappedRegions(BAMSregion);
        // keep Allen regions that have expression information
        allenRegions.retainAll(allExpressionMatrix.getColNames());

        // so now these allen regions need to be moved into the BAMS region space
        for (String allenRegion : allenRegions) {
            int allColumnIndex = allExpressionMatrix.getColIndexByName(allenRegion);
            double[] expressionColumn = allExpressionMatrix.getColumn(allColumnIndex);

            for (int row = 0; row < expressionColumn.length; row++) {
                int columnIndex = expressionMatrix.getColIndexByName(BAMSregion);
                double current = expressionMatrix.get(row, columnIndex);
                // average things out
                double value = current + expressionColumn[row] / ((double) allenRegions.size());
                expressionMatrix.set(row, columnIndex, value);
            }
        }
    }
    // print the matrix?
    MatrixDisplay matDisplay;
    // MatrixDisplay matDisplay = new MatrixDisplay( expressionMatrix );
    // matDisplay.setLabelsVisible( true );
    // matDisplay.saveImage( SetupParameters.getDataFolder() + "expressionMatrixCatalog.png" );

    // for (String regionA)
    // allenCatalog.getAllenMappedRegions()

    // Set<String> zeroColumns = findZeroColumns( connectionMatrix );
    // zeroColumns.addAll( findZeroColumns( expressionMatrix ) );

    log.info("Conn Zeroes:" + Util.findZeroColumns(connectionMatrix).size());
    log.info("Exp Zeroes:" + Util.findZeroColumns(expressionMatrix).size());

    // two square matrices - size is based on BAMS mapped terms but Allen could be used (how to merge connection
    // profiles?)
    DoubleMatrix<String, String> connectionCorrelations = Util.correlateColumns(connectionMatrix, false);
    matDisplay = new MatrixDisplay(connectionCorrelations);
    matDisplay.setLabelsVisible(true);
    matDisplay.saveImage(SetupParameters.getDataFolder() + "connectionCorrelation.png");

    DoubleMatrix<String, String> expressionCorrelations = Util.correlateColumns(expressionMatrix, false);
    matDisplay = new MatrixDisplay(expressionCorrelations);
    matDisplay.setLabelsVisible(true);
    matDisplay.saveImage(SetupParameters.getDataFolder() + "expressionCorrelation.png");

    double[] expVecTri = Util.getTriangle(expressionCorrelations);
    double[] conVecTri = Util.getTriangle(connectionCorrelations);

    // for ( double d : expVecTri )
    // System.out.print( " E" + d );
    // System.out.println();
    // System.out.println();
    // for ( double d : conVecTri )
    // System.out.print( " C" + d );
    log.info(conVecTri[conVecTri.length - 1]);
    log.info(expVecTri[conVecTri.length - 1]);
    log.info("Size:" + conVecTri.length);
    log.info("Size:" + expVecTri.length);

    log.info(CorrelationStats.correl(getTriangle(expressionMatrix), getTriangle(connectionMatrix)));
    // convert to vector?
    // correlate vectors?
    List<String> names = new LinkedList<String>(expressionMatrix.getColNames());
    Random random = new Random(1);
    log.info(expressionMatrix.asArray().length);
    log.info(expressionMatrix.asArray()[0].length);
    // System.exit(1);
    double real = CorrelationStats.correl(getTriangle(expressionMatrix), getTriangle(connectionMatrix));
    int lowerScore = 0;
    for (int i = 0; i < 1000; i++) {
        if (i % 100 == 0)
            log.info("Shuffle " + i);
        Collections.shuffle(names, random);
        DoubleMatrix<String, String> shuffled = new DenseDoubleMatrix<String, String>(expressionMatrix.rows(),
                expressionMatrix.columns());
        shuffled.setColumnNames(names);
        // ugly
        for (int row = 0; row < shuffled.rows(); row++) {
            for (int column = 0; column < shuffled.columns(); column++) {
                String name = names.get(column);
                int originalLocation = expressionMatrix.getColIndexByName(name);
                shuffled.set(row, column, expressionMatrix.get(row, originalLocation));
            }
        }

        // log.info( names.get( 0 ) );
        // log.info( expressionMatrix.getColName( 0 ) );

        double shuffledResult = CorrelationStats.correl(getTriangle(shuffled), getTriangle(connectionMatrix));
        if (Math.abs(shuffledResult) >= real) {
            log.info(shuffledResult);
            lowerScore++;
        }
    }
    log.info(lowerScore + " random sets have correlation with absolute value above " + real);

}

From source file:com.devicehive.dao.riak.DeviceDaoRiakImpl.java

@Override
public List<DeviceVO> list(String name, String namePattern, Long networkId, String networkName,
        Long deviceClassId, String deviceClassName, String sortField, Boolean isSortOrderAsc, Integer take,
        Integer skip, HivePrincipal principal) {
    //TODO [rafa] when filtering by device class name we have to instead query DeviceClass bucket for ids, and then use ids.
    // here is what happens, since device class is not embeddable in case of Riak we need to either keep id only and perform the logic above.
    // or we need to update device class embedded data in every device corresponding to the class, which is a nightmare.

    BucketMapReduce.Builder builder = new BucketMapReduce.Builder().withNamespace(DEVICE_NS);
    addMapValues(builder);
    if (name != null) {
        addReduceFilter(builder, "name", FilterOperator.EQUAL, name);
    } else if (namePattern != null) {
        namePattern = namePattern.replace("%", "");
        addReduceFilter(builder, "name", FilterOperator.REGEX, namePattern);
    }
    addReduceFilter(builder, "network.id", FilterOperator.EQUAL, networkId);
    addReduceFilter(builder, "network.name", FilterOperator.EQUAL, networkName);
    addReduceFilter(builder, "deviceClass.id", FilterOperator.EQUAL, deviceClassId);
    addReduceFilter(builder, "deviceClass.name", FilterOperator.EQUAL, deviceClassName);
    if (principal != null) {
        UserVO user = principal.getUser();
        if (user != null && !user.isAdmin()) {
            Set<Long> networks = userNetworkDao.findNetworksForUser(user.getId());
            if (principal.getNetworkIds() != null) {
                networks.retainAll(principal.getNetworkIds());
            }
            addReduceFilter(builder, "network.id", FilterOperator.IN, networks);
        }
        if (principal.getDeviceGuids() != null) {
            Set<String> deviceGuids = principal.getDeviceGuids();
            addReduceFilter(builder, "guid", FilterOperator.IN, deviceGuids);
        }
    }
    addReduceSort(builder, sortField, isSortOrderAsc);
    addReducePaging(builder, true, take, skip);
    try {
        MapReduce.Response response = client.execute(builder.build());
        return response.getResultsFromAllPhases(RiakDevice.class).stream().map(RiakDevice::convertToVo)
                .collect(Collectors.toList());
    } catch (InterruptedException | ExecutionException e) {
        LOGGER.error("Exception accessing Riak Storage.", e);
        throw new HivePersistenceLayerException("Cannot get list of devices.", e);
    }
}

From source file:org.apache.geode.management.internal.cli.commands.DataCommandsUtils.java

public static Set<DistributedMember> getQueryRegionsAssociatedMembers(Set<String> regions,
        final InternalCache cache, boolean returnAll) {
    LogWriter logger = cache.getLogger();
    Set<DistributedMember> members;
    Set<DistributedMember> newMembers = null;
    Iterator<String> iterator = regions.iterator();
    String region = iterator.next();
    members = getRegionAssociatedMembers(region, cache, true);
    if (logger.fineEnabled()) {
        logger.fine("Members for region " + region + " Members " + members);
    }
    List<String> regionAndingList = new ArrayList<>();
    regionAndingList.add(region);
    if (regions.size() == 1) {
        newMembers = members;
    } else {
        if (CollectionUtils.isNotEmpty(members)) {
            while (iterator.hasNext()) {
                region = iterator.next();
                newMembers = getRegionAssociatedMembers(region, cache, true);
                if (newMembers == null) {
                    newMembers = new HashSet<>();
                }
                if (logger.fineEnabled()) {
                    logger.fine("Members for region " + region + " Members " + newMembers);
                }
                regionAndingList.add(region);
                newMembers.retainAll(members);
                members = newMembers;
                if (logger.fineEnabled()) {
                    logger.fine(
                            "Members after anding for regions " + regionAndingList + " List : " + newMembers);
                }
            }
        }
    }
    members = new HashSet<>();
    if (newMembers == null) {
        return members;
    }
    for (DistributedMember newMember : newMembers) {
        members.add(newMember);
        if (!returnAll) {
            return members;
        }
    }
    return members;
}

From source file:org.alfresco.repo.events.node.EventGenerationBehaviours.java

private Map<String, Property> getChanges(Map<QName, Serializable> before, Map<QName, Serializable> after) {
    Map<String, Property> ret = new HashMap<>();

    Set<QName> intersect = new HashSet<QName>(before.keySet());
    intersect.retainAll(after.keySet());

    Map<QName, Serializable> properties = new HashMap<>();
    for (QName propQName : intersect) {
        Serializable valueBefore = before.get(propQName);
        Serializable valueAfter = after.get(propQName);

        Serializable value = null;
        if (valueBefore == null && valueAfter == null) {
            continue;
        } else if (valueBefore == null && valueAfter != null) {
            value = valueAfter;
        } else if (valueBefore != null && valueAfter == null) {
            value = valueAfter;
        } else if (!valueBefore.equals(valueAfter)) {
            value = valueAfter;
        }

        properties.put(propQName, value);
    }

    ret = propertySerializer.serialize(properties);
    return ret;
}

From source file:org.openmrs.module.reporting.query.encounter.evaluator.PatientEncounterQueryEvaluator.java

/**
 * @see EncounterQueryEvaluator#evaluate(EncounterQuery, EvaluationContext)
 * @should return all of the encounter ids for all patients in the defined patient query
 * @should filter results by patient and encounter given an EncounterEvaluationContext
 * @should filter results by patient given an EvaluationContext
 */
public EncounterQueryResult evaluate(EncounterQuery definition, EvaluationContext context)
        throws EvaluationException {

    context = ObjectUtil.nvl(context, new EvaluationContext());
    PatientEncounterQuery query = (PatientEncounterQuery) definition;
    EncounterQueryResult queryResult = new EncounterQueryResult(query, context);

    // Calculate the patients for this query
    Cohort c = Context.getService(CohortDefinitionService.class).evaluate(query.getPatientQuery(), context);

    // Get all of the encounters for all of these patients
    EvaluationContext ec = new EvaluationContext();
    ec.setBaseCohort(c);
    Set<Integer> ret = EncounterDataUtil.getEncounterIdsForContext(ec, false);

    // Limit that to only the passed in encounters if relevant
    if (context instanceof EncounterEvaluationContext) {
        EncounterEvaluationContext eec = (EncounterEvaluationContext) context;
        if (eec.getBaseEncounters() != null) {
            ret.retainAll(eec.getBaseEncounters().getMemberIds());
        }
    }

    queryResult.setMemberIds(ret);
    return queryResult;
}

From source file:org.hawkular.alerter.elasticsearch.ElasticsearchAlerter.java

private synchronized void update() {
    final Set<TriggerKey> existingKeys = queryFutures.keySet();
    final Set<TriggerKey> activeKeys = activeTriggers.keySet();

    Set<TriggerKey> newKeys = new HashSet<>();
    Set<TriggerKey> canceledKeys = new HashSet<>();

    Set<TriggerKey> updatedKeys = new HashSet<>(activeKeys);
    updatedKeys.retainAll(existingKeys); // keep only the keys that are both active and already scheduled

    activeKeys.stream().filter(key -> !existingKeys.contains(key)).forEach(key -> newKeys.add(key));
    existingKeys.stream().filter(key -> !activeKeys.contains(key)).forEach(key -> canceledKeys.add(key));

    log.debugf("newKeys %s", newKeys);
    log.debugf("updatedKeys %s", updatedKeys);
    log.debugf("canceledKeys %s", canceledKeys);

    canceledKeys.stream().forEach(key -> {
        ScheduledFuture canceled = queryFutures.remove(key);
        if (canceled != null) {
            canceled.cancel(false);
        }
    });
    updatedKeys.stream().forEach(key -> {
        ScheduledFuture updated = queryFutures.remove(key);
        if (updated != null) {
            updated.cancel(false);
        }
    });

    if (scheduledExecutor == null) {
        scheduledExecutor = new ScheduledThreadPoolExecutor(THREAD_POOL_SIZE);
    }

    newKeys.addAll(updatedKeys);

    for (TriggerKey key : newKeys) {
        Trigger t = activeTriggers.get(key);
        String interval = t.getContext().get(INTERVAL) == null ? INTERVAL_DEFAULT
                : t.getContext().get(INTERVAL);
        queryFutures.put(key,
                scheduledExecutor.scheduleAtFixedRate(new ElasticsearchQuery(t, defaultProperties, alerts), 0L,
                        getIntervalValue(interval), getIntervalUnit(interval)));

    }
}