Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usage of java.util.Set.removeAll.

Prototype

boolean removeAll(Collection<?> c);

Source Link

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).

Usage

From source file:com.edmunds.etm.management.api.ManagementVips.java

/**
 * Removes the specified vips and returns the resulting ManagementVips object.
 * <p/>/* w ww  .ja va 2s  . co m*/
 * This method does not modify the receiver, but instead creates a copy with the specified vips removed.
 *
 * @param vips the vips to remove
 * @return a new ManagementVips object
 */
public ManagementVips removeAll(Collection<ManagementVip> vips) {
    Set<ManagementVip> vipSet = Sets.newHashSet(getVips());
    vipSet.removeAll(vips);
    return new ManagementVips(this.vipType, vipSet);
}

From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java

/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param queryGraph contains where-clause stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param dependencyGraph - dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuildInnerJoin(int streamNum, Stack<Integer> streamCallStack,
        QueryGraph queryGraph, Set<Integer> completedStreams, LinkedHashMap<Integer, int[]> substreamsPerStream,
        DependencyGraph dependencyGraph) throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied: every historical-stream dependency
    // of this stream must already be on the call stack (i.e. available via cursor)
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // remove streams with a dependency on other streams not yet processed;
    // snapshot into an array first because navigableStreams is mutated inside the loop
    Integer[] navigableStreamArr = navigableStreams.toArray(new Integer[navigableStreams.size()]);
    for (int navigableStream : navigableStreamArr) {
        if (dependencyGraph.hasUnsatisfiedDependency(navigableStream, completedStreams)) {
            navigableStreams.remove(navigableStream);
        }
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams.
    // Mark every navigable stream completed BEFORE recursing so sibling subtrees do not
    // claim the same stream.
    int[] substreams = new int[navigableStreams.size()];
    substreamsPerStream.put(streamNum, substreams);
    int count = 0;
    for (int stream : navigableStreams) {
        substreams[count++] = stream;
        completedStreams.add(stream);
    }

    // recurse into each child; the call stack tracks which streams are cursor-reachable
    for (int stream : navigableStreams) {
        streamCallStack.push(stream);
        recursiveBuildInnerJoin(stream, streamCallStack, queryGraph, completedStreams, substreamsPerStream,
                dependencyGraph);
        streamCallStack.pop();
    }
}

From source file:com.puppycrawl.tools.checkstyle.internal.XdocsPagesTest.java

/**
 * Adjusts the captured property set of a module so it matches the properties that the
 * xdoc documentation is expected to cover: inherited framework properties and known
 * undocumented properties are removed, while token-related properties are added when
 * the check actually allows configuring tokens.
 *
 * @param sectionName name of the xdoc section the module belongs to
 * @param instance module instance used to query token sets
 * @param clss module class used to determine the framework base class
 * @param properties mutable set of captured property names, modified in place
 */
private static void fixCapturedProperties(String sectionName, Object instance, Class<?> clss,
        Set<String> properties) {
    // remove global properties that don't need documentation
    if (hasParentModule(sectionName)) {
        if (AbstractJavadocCheck.class.isAssignableFrom(clss)) {
            properties.removeAll(JAVADOC_CHECK_PROPERTIES);
        } else if (AbstractCheck.class.isAssignableFrom(clss)) {
            properties.removeAll(CHECK_PROPERTIES);
        }
    }
    if (AbstractFileSetCheck.class.isAssignableFrom(clss)) {
        properties.removeAll(FILESET_PROPERTIES);

        // override
        properties.add("fileExtensions");
    }

    // remove undocumented properties; removeIf mutates in place safely, so the
    // defensive HashSet copy the original stream-and-forEach version needed to avoid
    // ConcurrentModificationException is no longer necessary
    properties.removeIf(p -> UNDOCUMENTED_PROPERTIES.contains(clss.getSimpleName() + "." + p));

    if (AbstractCheck.class.isAssignableFrom(clss)) {
        final AbstractCheck check = (AbstractCheck) instance;

        // "tokens" is only documented when the token configuration actually matters,
        // i.e. the acceptable/default/required token sets are not all identical
        final int[] acceptableTokens = check.getAcceptableTokens();
        Arrays.sort(acceptableTokens);
        final int[] defaultTokens = check.getDefaultTokens();
        Arrays.sort(defaultTokens);
        final int[] requiredTokens = check.getRequiredTokens();
        Arrays.sort(requiredTokens);

        if (!Arrays.equals(acceptableTokens, defaultTokens)
                || !Arrays.equals(acceptableTokens, requiredTokens)) {
            properties.add("tokens");
        }
    }

    if (AbstractJavadocCheck.class.isAssignableFrom(clss)) {
        final AbstractJavadocCheck check = (AbstractJavadocCheck) instance;

        // same rule as above, applied to the javadoc token sets
        final int[] acceptableJavadocTokens = check.getAcceptableJavadocTokens();
        Arrays.sort(acceptableJavadocTokens);
        final int[] defaultJavadocTokens = check.getDefaultJavadocTokens();
        Arrays.sort(defaultJavadocTokens);
        final int[] requiredJavadocTokens = check.getRequiredJavadocTokens();
        Arrays.sort(requiredJavadocTokens);

        if (!Arrays.equals(acceptableJavadocTokens, defaultJavadocTokens)
                || !Arrays.equals(acceptableJavadocTokens, requiredJavadocTokens)) {
            properties.add("javadocTokens");
        }
    }
}

From source file:com.github.fge.jsonschema.keyword.validator.common.AdditionalPropertiesValidator.java

@Override
public void validate(final Processor<FullData, FullData> processor, final ProcessingReport report,
        final MessageBundle bundle, final FullData data) throws ProcessingException {
    // Nothing to do when the schema allows additional properties.
    if (additionalOK)
        return;

    final JsonNode instance = data.getInstance().getNode();

    // Start from every member name of the instance, then strip the declared properties.
    final Set<String> extraFields = Sets.newHashSet(instance.fieldNames());
    extraFields.removeAll(properties);

    // A remaining field is still allowed if it matches any patternProperties regex.
    final Set<String> matchedByPattern = Sets.newHashSet();
    for (final String field : extraFields)
        for (final String regex : patternProperties)
            if (RhinoHelper.regMatch(regex, field))
                matchedByPattern.add(field);

    extraFields.removeAll(matchedByPattern);

    if (extraFields.isEmpty())
        return;

    /*
     * Display extra properties in order in the report
     */
    final ArrayNode node = JacksonUtils.nodeFactory().arrayNode();
    for (final String field : Ordering.natural().sortedCopy(extraFields))
        node.add(field);
    report.error(
            newMsg(data, bundle, "err.common.additionalProperties.notAllowed").putArgument("unwanted", node));
}

From source file:org.eel.kitchen.jsonschema.keyword.AdditionalPropertiesKeywordValidator.java

@Override
public void validate(final ValidationContext context, final ValidationReport report, final JsonNode instance) {
    // Begin with all member names, then discard those declared under "properties".
    final Set<String> extraFields = JacksonUtils.fieldNames(instance);
    extraFields.removeAll(properties);

    // Fields matching at least one "patternProperties" regex are also permitted.
    final Set<String> matchedByPattern = Sets.newHashSet();
    for (final String field : extraFields)
        for (final String regex : patternProperties)
            if (RhinoHelper.regMatch(regex, field))
                matchedByPattern.add(field);

    extraFields.removeAll(matchedByPattern);

    if (extraFields.isEmpty())
        return;

    /*
     * Display extra properties in order in the report
     */
    final Message.Builder msg = newMsg().addInfo("unwanted", Ordering.natural().sortedCopy(extraFields))
            .setMessage("additional properties not permitted");
    report.addMessage(msg.build());
}

From source file:org.malaguna.cmdit.service.commands.usrmgt.RoleEdition.java

/**
 * Builds a dual-list model for role editing: the source list holds the roles the user
 * does not yet have, the target list holds the roles already assigned.
 *
 * @return this command, with the populated {@link DualListModel} set as its result
 * @throws Exception propagated from the DAO lookup
 */
@Override
public ResultCommand<DualListModel<String>> runCommand() throws Exception {
    usuario = getUserDao().findById(usuario.getPid());

    Set<String> userRoles = usuario.getRoles();

    // BUGFIX: the original code called removeAll() directly on the set returned by
    // roleHelper.getAllRoles(), mutating it. If the helper returns a shared or cached
    // set, every invocation would shrink the global role list. Copy before mutating.
    List<String> availableRoles = new ArrayList<String>(roleHelper.getAllRoles());
    availableRoles.removeAll(userRoles);

    DualListModel<String> result = new DualListModel<String>();
    result.setSource(availableRoles);
    result.setTarget(new ArrayList<String>(userRoles));

    this.setResult(result);
    return this;
}

From source file:springfox.documentation.spring.web.scanners.ApiModelReader.java

@SuppressWarnings("unchecked")
private void mergeModelMap(Map<String, Model> target, Model source) {
    final String key = source.getId();

    if (!target.containsKey(key)) {
        //if we encounter completely unknown model, just add it
        LOG.debug("Adding a new model with key {}", key);
        target.put(key, source);
        return;
    }

    //we can encounter a known model with an unknown property
    //if (de)serialization is not symmetrical (@JsonIgnore on setter, @JsonProperty on getter).
    //In these cases, don't overwrite the entire model entry for that type, just add the unknown property.
    final Model existing = target.get(key);

    final Map<String, ModelProperty> existingProps = existing.getProperties();
    final Map<String, ModelProperty> incomingProps = source.getProperties();

    // property keys present in the source but absent from the target
    final Set<String> missingKeys = newHashSet(incomingProps.keySet());
    missingKeys.removeAll(existingProps.keySet());

    final Map<String, ModelProperty> merged = Maps.newHashMap(existingProps);
    for (final String propKey : missingKeys) {
        LOG.debug("Adding a missing property {} to model {}", propKey, key);
        merged.put(propKey, incomingProps.get(propKey));
    }

    final Model mergedModel = new ModelBuilder()
            .id(existing.getId())
            .name(existing.getName())
            .type(existing.getType())
            .qualifiedType(existing.getQualifiedType())
            .properties(merged)
            .description(existing.getDescription())
            .baseModel(existing.getBaseModel())
            .discriminator(existing.getDiscriminator())
            .subTypes(existing.getSubTypes())
            .build();

    target.put(key, mergedModel);
}

From source file:com.streamsets.pipeline.stage.processor.xmlparser.XmlParserConfig.java

/**
 * Validates this XML parser configuration, appending a config issue for every problem.
 *
 * @param context stage context used to create config issues
 * @param issues accumulator that detected configuration issues are appended to
 * @return {@code true} if the configuration is valid, {@code false} otherwise
 */
public boolean init(Stage.Context context, List<Stage.ConfigIssue> issues) {
    boolean valid = true;

    try {
        Charset.forName(charset);
    } catch (IllegalArgumentException ex) {
        // BUGFIX: Charset.forName() throws IllegalCharsetNameException (not only
        // UnsupportedCharsetException) for syntactically invalid charset names; the
        // original catch let those escape as an unhandled runtime exception. Both
        // exception types extend IllegalArgumentException, so this covers both.
        issues.add(context.createConfigIssue("XML", "charset", DataFormatErrors.DATA_FORMAT_05, charset));
        valid = false;
    }
    if (StringUtils.isNotBlank(xmlRecordElement)) {
        String invalidXPathError = XPathValidatorUtil.getXPathValidationError(xmlRecordElement);
        if (StringUtils.isNotBlank(invalidXPathError)) {
            issues.add(context.createConfigIssue(Groups.XML.name(), "xmlRecordElement",
                    DataFormatErrors.DATA_FORMAT_03, xmlRecordElement, invalidXPathError));
            valid = false;
        } else {
            // every namespace prefix used in the XPath must be declared in xPathNamespaceContext
            final Set<String> nsPrefixes = XPathValidatorUtil.getNamespacePrefixes(xmlRecordElement);
            nsPrefixes.removeAll(xPathNamespaceContext.keySet());
            if (!nsPrefixes.isEmpty()) {
                issues.add(context.createConfigIssue(Groups.XML.name(), "xPathNamespaceContext",
                        DataFormatErrors.DATA_FORMAT_304, StringUtils.join(nsPrefixes, ", ")));
                valid = false;
            }
        }
    }
    return valid;
}

From source file:com.googlecode.ddom.weaver.ext.ModelExtensionGenerator.java

void validate() {
    // Every required interface must have a registered implementation; a size mismatch
    // means at least one is missing.
    if (implementationMap.size() != requiredImplementations.size()) {
        final Set<ClassInfo> missing = new HashSet<ClassInfo>(requiredImplementations);
        missing.removeAll(implementationMap.keySet());
        throw new ReactorException("The implementations for the following interfaces have not been found: "
                + missing);
    }
    if (log.isDebugEnabled()) {
        log.debug("Implementation map: " + implementationMap);
    }
}

From source file:org.wallerlab.yoink.adaptive.smooth.SCMPWeightFactors.java

/**
 * Computes a sigma weight for every QM/MM partition of the buffer region and stores it
 * in {@code sigmaIndexMap}, keyed by the QM subset.
 * <p>
 * Partitions are produced by {@code Subsets.split}; presumably each partition takes
 * {@code number_qmInBuffer} buffer indices as the QM set — TODO confirm against Subsets.
 *
 * @param bufferMolecules molecules in the buffer region
 * @param bufferIndices indices of the buffer molecules
 * @param s_qm_out smoothing parameters passed through to calculateSigma (semantics defined there)
 * @param number_qmInBuffer size of the QM subset within the buffer
 * @param centerCoord coordinate of the region center
 */
private void calculateWeightForEachConfiguration(List<Molecule> bufferMolecules, List<Integer> bufferIndices,
        double s_qm_out, double t_qm_out, double s_qm_in, double t_qm_in, double s_mm_out, double t_mm_out,
        double s_mm_in, double t_mm_in, int number_qmInBuffer, Coord centerCoord) {
    Subsets.split(Ints.toArray(bufferIndices), number_qmInBuffer).parallelStream().forEach(qmSet -> {
        // the MM set is the complement of the QM set within the buffer
        Set<Integer> mmSet = new HashSet<Integer>(bufferIndices);
        mmSet.removeAll(qmSet);

        double sigma = calculateSigma(centerCoord, bufferMolecules, bufferIndices, s_qm_out, t_qm_out, s_qm_in,
                t_qm_in, s_mm_out, t_mm_out, s_mm_in, t_mm_in, qmSet, mmSet);
        // NOTE(review): this put() runs on parallel-stream worker threads; confirm that
        // sigmaIndexMap is a concurrent map (e.g. ConcurrentHashMap), otherwise this is
        // a data race.
        sigmaIndexMap.put(qmSet, sigma);

    });
}