Example usage for com.google.common.collect Sets difference

List of usage examples for com.google.common.collect Sets difference

Introduction

On this page you can find example usages of com.google.common.collect.Sets#difference.

Prototype

public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2) 

Source Link

Document

Returns an unmodifiable view of the difference of two sets.

Usage

From source file:ai.grakn.graql.internal.reasoner.query.QueryAnswer.java

/**
 * Returns a copy of this answer restricted to the given variables: every
 * binding whose variable is not in {@code vars} is dropped from the copy.
 * The explanation of this answer is carried over to the filtered copy.
 *
 * @param vars the variables to keep
 * @return a new answer containing only bindings for {@code vars}
 */
@Override
public QueryAnswer filterVars(Set<VarName> vars) {
    QueryAnswer result = new QueryAnswer(this);
    // Sets.difference is a view over this.keySet(); we mutate the copy, not this
    for (VarName var : Sets.difference(this.keySet(), vars)) {
        result.remove(var);
    }
    result.setExplanation(this.getExplanation());
    return result;
}

From source file:se.kth.climate.fast.netcdf.MetaInfo.java

/**
 * Builds a {@link MetaInfo} instance describing the given NetCDF file:
 * bounds dimensions and their sizes, index fields for non-bounds dimensions,
 * per-variable dimension mappings, and per-variable sizes in bytes.
 *
 * @param ncfile the NetCDF file to inspect
 * @return the populated meta information
 * @throws RuntimeException if a bounds dimension cannot be resolved, or if a
 *         STRING variable is encountered (its size cannot be calculated)
 */
public static MetaInfo fromNetCDF(NetcdfFile ncfile) {
    LOG.debug("Global Attrs:");
    LOG.debug(ncfile.getGlobalAttributes().toString());
    LOG.debug("Dimensions:");
    LOG.debug(ncfile.getDimensions().toString());
    LOG.debug("Variables:");
    LOG.debug(ncfile.getVariables().toString());

    MetaInfo mInfo = new MetaInfo(ncfile);
    // Partition dimension names by whether they occur in "bnds" variables
    // (detected by name; NOTE(review): substring match on "bnds" — confirm convention)
    Set<String> normalDims = new HashSet<>();
    Set<String> bndsDims = new HashSet<>();
    for (Variable v : ncfile.getVariables()) {
        if (v.getFullNameEscaped().contains("bnds")) {
            for (Dimension d : v.getDimensions()) {
                bndsDims.add(d.getFullNameEscaped());
            }
        } else {
            for (Dimension d : v.getDimensions()) {
                normalDims.add(d.getFullNameEscaped());
            }
        }
    }
    // Dimensions used exclusively by bounds variables
    Sets.SetView<String> boundDim = Sets.difference(bndsDims, normalDims);

    if (boundDim.isEmpty()) {
        LOG.info(
                "No bounds dimension found. Bounds Variables Dimensions are {} and normal Variable Dimensions are {}",
                bndsDims, normalDims);
    } else {
        // Record the length of every bounds-only dimension
        for (String boundDimensionName : boundDim) {
            Dimension d = ncfile.findDimension(boundDimensionName);
            if (d == null) {
                throw new RuntimeException("Couldn't find dimension for " + boundDimensionName);
            }
            mInfo.dimensionSize.put(boundDimensionName, d.getLength());
        }
        // Map each bounds variable to the bounds-only dimensions it uses
        for (Variable v : ncfile.getVariables()) {
            if (v.getFullNameEscaped().contains("bnds")) {
                for (Dimension d : v.getDimensions()) {
                    if (boundDim.contains(d.getFullNameEscaped())) {
                        mInfo.variable2Dimension.put(v.getFullNameEscaped(), d.getFullNameEscaped());
                    }
                }
            }
        }
    }
    for (Dimension d : ncfile.getDimensions()) {
        mInfo.dimensionCache.add(d.getFullNameEscaped());
        if (!mInfo.canBeBounds(d)) { // also create an index field for this dimension
            String name = d.getFullNameEscaped() + "_index";
            mInfo.indices.put(name, d.getFullNameEscaped());
        }
    }
    // Record per-variable dimensions and sizes; constants use the data type's
    // element size, other variables use elementCount * elementSize bytes
    for (Variable v : ncfile.getVariables()) {
        List<Dimension> dims = v.getDimensions();
        mInfo.variableDimensionCache.putAll(v.getFullNameEscaped(),
                dims.stream().map(d -> d.getFullNameEscaped())::iterator);
        if (NetCDFUtils.isConstant(dims)) {
            mInfo.constants.add(v.getFullNameEscaped());
            mInfo.variableSize.put(v.getFullNameEscaped(), Long.valueOf(v.getDataType().getSize()));
        } else {
            if (v.getDataType() != DataType.STRING) {
                long size = v.getSize() * v.getElementSize();
                mInfo.variableSize.put(v.getFullNameEscaped(), size);
            } else {
                throw new RuntimeException(
                        "String variables aren't supported at the moment as their size cannot be calculated");
            }
        }
    }

    return mInfo;
}

From source file:org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.java

/**
 * Selects the SSTables that should go into the next background compaction.
 * Fully expired SSTables are recomputed at most every
 * {@code options.expiredSSTableCheckFrequency} ms because finding them is an
 * expensive operation; when found, they are always included in the result.
 *
 * @param gcBefore threshold passed to the fully-expired SSTable check
 *        (NOTE(review): presumably a gc-grace cutoff in seconds — confirm)
 * @return the candidate SSTables for compaction, empty if nothing to do
 */
private List<SSTableReader> getNextBackgroundSSTables(final int gcBefore) {
    if (cfs.getSSTables().isEmpty())
        return Collections.emptyList();

    // only consider SSTables tracked by this strategy that are not already compacting
    Set<SSTableReader> uncompacting = Sets.intersection(sstables, cfs.getUncompactingSSTables());

    Set<SSTableReader> expired = Collections.emptySet();
    // we only check for expired sstables every 10 minutes (by default) due to it being an expensive operation
    if (System.currentTimeMillis() - lastExpiredCheck > options.expiredSSTableCheckFrequency) {
        // Find fully expired SSTables. Those will be included no matter what.
        expired = CompactionController.getFullyExpiredSSTables(cfs, uncompacting,
                cfs.getOverlappingSSTables(uncompacting), gcBefore);
        lastExpiredCheck = System.currentTimeMillis();
    }
    Set<SSTableReader> candidates = Sets.newHashSet(filterSuspectSSTables(uncompacting));

    // pick non-expired candidates first, then append any fully expired tables
    List<SSTableReader> compactionCandidates = new ArrayList<>(
            getNextNonExpiredSSTables(Sets.difference(candidates, expired), gcBefore));
    if (!expired.isEmpty()) {
        logger.trace("Including expired sstables: {}", expired);
        compactionCandidates.addAll(expired);
    }
    return compactionCandidates;
}

From source file:org.apache.storm.nimbus.LeaderListenerCallback.java

/**
 * Invoked when this node gains nimbus leadership. Syncs remote assignments and
 * id-info to local storage, then verifies that the local blob store contains
 * the blobs of every active topology and of all their dependencies: if
 * everything is present locally, leadership is accepted; otherwise leadership
 * is given up by closing the latch.
 */
public void leaderCallBack() {
    //set up nimbus-info to zk
    setUpNimbusInfo(acls);
    //sync zk assignments/id-info to local
    LOG.info("Sync remote assignments and id-info to local");
    clusterState.syncRemoteAssignments(null);
    clusterState.syncRemoteIds(null);
    clusterState.setAssignmentsBackendSynchronized();

    // active topologies are the children of the storms subtree in ZooKeeper
    Set<String> activeTopologyIds = new TreeSet<>(
            ClientZookeeper.getChildren(zk, ClusterUtils.STORMS_SUBTREE, false));

    Set<String> activeTopologyBlobKeys = populateTopologyBlobKeys(activeTopologyIds);
    Set<String> activeTopologyCodeKeys = filterTopologyCodeKeys(activeTopologyBlobKeys);
    Set<String> allLocalBlobKeys = Sets.newHashSet(blobStore.listKeys());
    Set<String> allLocalTopologyBlobKeys = filterTopologyBlobKeys(allLocalBlobKeys);

    // this finds all active topologies blob keys from all local topology blob keys
    Sets.SetView<String> diffTopology = Sets.difference(activeTopologyBlobKeys, allLocalTopologyBlobKeys);
    LOG.info("active-topology-blobs [{}] local-topology-blobs [{}] diff-topology-blobs [{}]",
            generateJoinedString(activeTopologyIds), generateJoinedString(allLocalTopologyBlobKeys),
            generateJoinedString(diffTopology));

    if (diffTopology.isEmpty()) {
        Set<String> activeTopologyDependencies = getTopologyDependencyKeys(activeTopologyCodeKeys);

        // this finds all dependency blob keys from active topologies from all local blob keys
        Sets.SetView<String> diffDependencies = Sets.difference(activeTopologyDependencies, allLocalBlobKeys);
        LOG.info("active-topology-dependencies [{}] local-blobs [{}] diff-topology-dependencies [{}]",
                generateJoinedString(activeTopologyDependencies), generateJoinedString(allLocalBlobKeys),
                generateJoinedString(diffDependencies));

        if (diffDependencies.isEmpty()) {
            LOG.info(
                    "Accepting leadership, all active topologies and corresponding dependencies found locally.");
            tc.clear();
        } else {
            LOG.info(
                    "Code for all active topologies is available locally, but some dependencies are not found locally, "
                            + "giving up leadership.");
            closeLatch();
        }
    } else {
        LOG.info("code for all active topologies not available locally, giving up leadership.");
        closeLatch();
    }
}

From source file:org.dllearner.learningproblems.PosNegLP.java

/**
 * Initialises the learning problem: requires at least one positive example,
 * warns when negatives are empty or overlap with positives, sets a default
 * accuracy method if none is configured, and sanity-checks that the examples
 * occur in the knowledge base.
 *
 * @throws ComponentInitException if no positive examples are set, or if none
 *         of the examples are contained in the knowledge base
 */
@Override
public void init() throws ComponentInitException {
    // check if some positive examples have been set
    if (positiveExamples.isEmpty()) {
        throw new ComponentInitException("No positive examples have been set.");
    }

    // check if some negative examples have been set and give warning if not
    if (negativeExamples.isEmpty()) {
        logger.warn("No negative examples have been set, but you decided to use the positive-negative learning"
                + "problem. We recommend to use the positive-only learning problem for the case of no negative examples instead.");
    }

    // check if there is some overlap between positive and negative examples and give warning
    // in that case
    SetView<OWLIndividual> overlap = Sets.intersection(positiveExamples, negativeExamples);
    if (!overlap.isEmpty()) {
        logger.warn("You declared some individuals as both positive and negative examples.");
    }

    allExamples = Sets.union(positiveExamples, negativeExamples);

    if (accuracyMethod == null) {
        accuracyMethod = new AccMethodPredAcc(true);
    }
    if (accuracyMethod instanceof AccMethodApproximate) {
        ((AccMethodApproximate) accuracyMethod).setReasoner(reasoner);
    }

    // sanity check whether examples are contained in KB
    if (reasoner != null && !reasoner.getIndividuals().containsAll(allExamples)
            && !reasoner.getClass().isAssignableFrom(SPARQLReasoner.class)) {
        Set<OWLIndividual> missing = Sets.difference(allExamples, reasoner.getIndividuals());
        // BUG FIX: the previous int/int division always yielded 0.0 (or 1.0 when
        // everything was missing); cast to double for a real fraction.
        double percentage = (double) missing.size() / allExamples.size();
        // round to three decimal places; Math.round returns long, so divide by a double
        // (previously long/int division truncated the result back to an integer)
        percentage = Math.round(percentage * 1000) / 1000d;
        String str = "The examples (" + (percentage * 100)
                + " % of total) below are not contained in the knowledge base (check spelling and prefixes)\n";
        str += missing.toString();
        if (missing.size() == allExamples.size()) {
            throw new ComponentInitException(str);
        }
        if (percentage < 0.10) {
            logger.warn(str);
        } else {
            logger.error(str);
        }
    }
}

From source file:com.streamsets.pipeline.sdk.ProtoRunner.java

/**
 * Injects the given configuration values into the stage's
 * {@link ConfigDef}-annotated public fields via reflection. Validates first
 * that the configuration keys match the stage's declared configuration
 * fields (ignoring configurations that are not active).
 *
 * @param stage the stage instance whose fields are populated
 * @param configuration config name to value; MAP-typed configs may be given
 *        as a List of Maps each containing "key" and "value" entries
 * @throws RuntimeException on missing/extra configurations, malformed
 *         MAP-as-list entries, or any reflection failure (cause preserved)
 */
@SuppressWarnings("unchecked")
protected void configureObject(Object stage, Map<String, Object> configuration) {
    try {
        Set<String> fields = getStageConfigurationFields(stage.getClass());
        Set<String> configs = configuration.keySet();
        if (!fields.equals(configs)) {
            Set<String> missingConfigs = Sets.difference(fields, configs);
            Set<String> extraConfigs = Sets.difference(configs, fields);

            // inactive configurations are allowed to be absent
            missingConfigs = filterNonActiveConfigurationsFromMissing(stage, configuration, missingConfigs);
            if (missingConfigs.size() + extraConfigs.size() > 0) {
                throw new RuntimeException(Utils.format(
                        "Invalid stage configuration for '{}', Missing configurations '{}' and invalid configurations '{}'",
                        stage.getClass().getName(), missingConfigs, extraConfigs));
            }
        }
        for (Field field : stage.getClass().getFields()) {
            if (field.isAnnotationPresent(ConfigDef.class)) {
                ConfigDef configDef = field.getAnnotation(ConfigDef.class);
                if (isConfigurationActive(configDef, configuration)) {
                    if (configDef.type() != ConfigDef.Type.MAP) {
                        field.set(stage, configuration.get(field.getName()));
                    } else {
                        //we need to handle special case of List of Map elements with key/value entries
                        Object value = configuration.get(field.getName());
                        // 'instanceof' is false for null, so no separate null check is needed
                        if (value instanceof List) {
                            Map<String, String> map = new HashMap<>();
                            for (Map element : (List<Map>) value) {
                                if (!element.containsKey("key") || !element.containsKey("value")) {
                                    throw new RuntimeException(Utils.format(
                                            "Invalid stage configuration for '{}' Map as list must have"
                                                    + " a List of Maps all with 'key' and 'value' entries",
                                            field.getName()));
                                }
                                String k = (String) element.get("key");
                                String v = (String) element.get("value");
                                map.put(k, v);
                            }
                            value = map;
                        }
                        field.set(stage, value);
                    }
                }
            }
        }
    } catch (Exception ex) {
        // rethrow RuntimeExceptions as-is; wrap everything else with its cause preserved
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        throw new RuntimeException(ex);
    }
}

From source file:no.ssb.vtl.script.operations.KeepOperation.java

/**
 * Computes the components present in the child's data structure but absent
 * from this operation's data structure, i.e. those that must be dropped.
 *
 * @return an immutable list of the components to remove
 */
private ImmutableList<Component> getComponentsToRemove() {
    Set<Component> before = Sets.newLinkedHashSet(getChild().getDataStructure().values());
    Set<Component> after = Sets.newLinkedHashSet(getDataStructure().values());
    Sets.SetView<Component> dropped = Sets.difference(before, after);
    return ImmutableList.copyOf(dropped);
}

From source file:com.xebialabs.overtherepy.DirectoryDiff.java

/**
 * Compares the immediate contents of two directories, adding newly created,
 * removed and content-changed files to {@code changeSet}. Changes are detected
 * by comparing file hashes. Subdirectories present on both sides are not
 * recursed into here; they are returned as {left, right} pairs for the caller
 * to compare.
 *
 * @param left the original ("old") directory
 * @param right the new directory
 * @param changeSet receives the added, removed and changed files
 * @return pairs of subdirectories that still need to be compared
 * @throws IOException if listing or hashing fails
 */
private List<OverthereFile[]> compareDirectory(OverthereFile left, OverthereFile right,
        DirectoryChangeSet changeSet) throws IOException {
    Set<FileWrapper> leftFiles = listFiles(left);
    Set<FileWrapper> rightFiles = listFiles(right);

    //find new files
    Set<FileWrapper> filesAdded = Sets.difference(rightFiles, leftFiles);
    //find removed files
    Set<FileWrapper> filesRemoved = Sets.difference(leftFiles, rightFiles);

    //find changed files
    Set<FileWrapper> potentialChangedFiles = newHashSet(leftFiles);
    potentialChangedFiles.removeAll(filesRemoved);

    // index right-hand files so we can fetch the counterpart of a left-hand file
    Map<FileWrapper, FileWrapper> rightFilesIndex = newHashMap();
    for (FileWrapper file : rightFiles) {
        rightFilesIndex.put(file, file);
    }

    // a file present on both sides changed iff its content hash differs
    Set<FileWrapper> filesChanged = newHashSet();
    for (FileWrapper potentialChangedFile : Sets.filter(potentialChangedFiles, FileWrapperPredicates.FILE)) {
        HashCode leftHash = hash(potentialChangedFile.getFile(), hashFunction);
        FileWrapper rightFile = rightFilesIndex.get(potentialChangedFile);
        HashCode rightHash = hash(rightFile.getFile(), hashFunction);
        if (!leftHash.equals(rightHash)) {
            filesChanged.add(rightFile);
        }
    }

    // unwraps a FileWrapper back to the underlying OverthereFile
    Function<FileWrapper, OverthereFile> unwrapFunction = new Function<FileWrapper, OverthereFile>() {
        @Override
        public OverthereFile apply(final FileWrapper input) {
            return input.getFile();
        }
    };

    changeSet.getRemoved().addAll(Collections2.transform(filesRemoved, unwrapFunction));
    changeSet.getAdded().addAll(Collections2.transform(filesAdded, unwrapFunction));
    changeSet.getChanged().addAll(Collections2.transform(filesChanged, unwrapFunction));

    // directories on both sides are handed back to the caller for comparison
    Set<FileWrapper> potentialChangedDirectories = Sets.filter(potentialChangedFiles,
            FileWrapperPredicates.DIRECTORY);
    List<OverthereFile[]> directoriesStillToCheck = newArrayList();
    for (FileWrapper potentialChangedDirectory : potentialChangedDirectories) {
        directoriesStillToCheck.add(new OverthereFile[] { potentialChangedDirectory.getFile(),
                rightFilesIndex.get(potentialChangedDirectory).getFile() });
    }
    return directoriesStillToCheck;
}

From source file:uk.ac.ebi.atlas.model.baseline.BaselineProfileComparator.java

/**
 * Computes the fold change of the profile's average expression over the
 * selected query factors relative to its maximum expression over all other
 * query factors. When that maximum is zero the method falls back to the raw
 * average (no other factors) or divides by {@code cutoffDivisor}.
 *
 * @param baselineProfile the profile to evaluate
 * @return the fold-change value
 */
public double getExpressionLevelFoldChange(BaselineProfile baselineProfile) {

    double selectedAverage = baselineProfile.getAverageExpressionLevelOn(selectedQueryFactors);

    Set<Factor> otherFactors = Sets.difference(allQueryFactors, selectedQueryFactors);

    double otherMax = baselineProfile.getMaxExpressionLevelOn(otherFactors);

    if (otherMax != 0) {
        return selectedAverage / otherMax;
    }
    if (otherFactors.isEmpty()) {
        return selectedAverage;
    }
    return selectedAverage / cutoffDivisor;
}

From source file:co.mitro.core.server.BeforeAfterState.java

/**
 * Diffs the previously captured per-user secret sets (in
 * {@code userIdToSecretIds}) against the current state, producing one
 * {@link UserDiff} per identity whose secrets changed. Users with no added or
 * removed secrets are omitted from the result.
 *
 * @return map from identity to its removed/new secrets; empty if nothing changed
 * @throws SQLException if reading state or looking up an identity fails
 * @throws MitroServletException propagated from reading the current state
 */
public Map<DBIdentity, UserDiff> diffState() throws SQLException, MitroServletException {
    // get the new state
    Multimap<Integer, Integer> newState = ArrayListMultimap.create();
    addUserDataToSet(userIdToSecretIds.keySet(), newState);

    Map<DBIdentity, UserDiff> rval = Maps.newHashMap();
    for (Integer uid : userIdToSecretIds.keySet()) {
        Set<Integer> preSecrets = Sets.newHashSet(userIdToSecretIds.get(uid));
        Set<Integer> postSecrets = Sets.newHashSet(newState.get(uid));
        UserDiff ud = new UserDiff();
        // Sets.difference returns views, but both operands are local copies, so this is safe
        ud.removedSecrets = Sets.difference(preSecrets, postSecrets);
        ud.newSecrets = Sets.difference(postSecrets, preSecrets);
        if (ud.removedSecrets.isEmpty() && ud.newSecrets.isEmpty()) {
            continue;
        }

        // TODO: optimize this to one query instead of n queries.
        DBIdentity id = manager.identityDao.queryForId(uid);
        ud.userName = id.getName();
        rval.put(id, ud);
    }
    return rval;
}