Example usage for java.util Collection removeAll

List of usage examples for java.util Collection removeAll

Introduction

On this page you can find example usages of java.util Collection removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes all of this collection's elements that are also contained in the specified collection (optional operation).
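
For a quick feel of the semantics, here is a minimal self-contained sketch (not taken from any of the source files below). removeAll mutates the receiving collection, matches elements via their equals method, and returns true only when the receiver actually changed.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class RemoveAllDemo {
    public static void main(String[] args) {
        // Use a mutable collection; removeAll on a fixed-size or unmodifiable one
        // (e.g. Arrays.asList, List.of) typically throws UnsupportedOperationException.
        Collection<String> letters = new ArrayList<>(Arrays.asList("a", "b", "c", "b"));

        // Every occurrence of each matching element is removed.
        boolean changed = letters.removeAll(Arrays.asList("b", "x"));

        System.out.println(changed); // true: the collection was modified
        System.out.println(letters); // [a, c]

        // A call that matches nothing leaves the collection alone and returns false.
        System.out.println(letters.removeAll(Arrays.asList("z"))); // false
    }
}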

Usage

From source file:com.alibaba.jstorm.hdfs.transaction.RocksDbHdfsState.java

private void removeObsoleteRemoteCheckpoints(long successBatchId) {
    long startTime = System.currentTimeMillis();
    if (lastSuccessBatchId != -1 && lastSuccessBatchId != (successBatchId - 1)) {
        LOG.warn("Some ack msgs from TM were lost!. Last success batch Id: {}, Current success batch Id: {}",
                lastSuccessBatchId, successBatchId);
    }
    lastSuccessBatchId = successBatchId;
    long obsoleteBatchId = successBatchId - 1;
    try {
        Collection<String> lastSuccessSStFiles = hdfsCache
                .readLines(getRemoteCheckpointSstListFile(successBatchId));
        if (hdfsCache.exist(getRemoteCheckpointPath(obsoleteBatchId))) {
            // remove obsolete sst files
            Collection<String> obsoleteSstFiles = hdfsCache
                    .readLines(getRemoteCheckpointSstListFile(obsoleteBatchId));
            obsoleteSstFiles.removeAll(lastSuccessSStFiles);
            for (String sstFile : obsoleteSstFiles) {
                hdfsCache.remove(hdfsDbDir + "/" + sstFile, false);
            }

            // remove checkpoint dir
            hdfsCache.remove(getRemoteCheckpointPath(obsoleteBatchId), true);
        }

        // Sometimes a failed remove leaves checkpoint files behind in the remote FS.
        // So periodically check whether a full clean is required and, if so, remove all expired checkpoint files.
        long currentTime = System.currentTimeMillis();
        if (currentTime - lastCleanTime > cleanPeriod) {
            FileStatus successCpFileStatus = hdfsCache
                    .getFileStatus(getRemoteCheckpointSstListFile(successBatchId));
            // remove obsolete sst files
            FileStatus[] fileStatuses = hdfsCache.listFileStatus(hdfsDbDir);
            for (FileStatus fileStatus : fileStatuses) {
                String fileName = fileStatus.getPath().getName();
                if (fileStatus.getModificationTime() < successCpFileStatus.getModificationTime()
                        && !lastSuccessSStFiles.contains(fileName)) {
                    hdfsCache.remove(hdfsDbDir + "/" + fileName, true);
                }
            }

            // remove obsolete checkpoint dir
            fileStatuses = hdfsCache.listFileStatus(hdfsCheckpointDir);
            for (FileStatus fileStatus : fileStatuses) {
                String checkpointId = fileStatus.getPath().getName();
                if (fileStatus.getModificationTime() < successCpFileStatus.getModificationTime()
                        && Integer.valueOf(checkpointId) != successBatchId) {
                    hdfsCache.remove(hdfsCheckpointDir + "/" + checkpointId, true);
                }
            }

            lastCleanTime = currentTime;
        }
    } catch (IOException e) {
        LOG.error("Failed to remove obsolete checkpoint data for batch-" + obsoleteBatchId, e);
    }

    if (JStormMetrics.enabled)
        this.hdfsDeleteLatency.update(System.currentTimeMillis() - startTime);
}
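
A general note on removeAll performance, separate from the JStorm code above: the default implementation calls contains on the argument once per element of the receiver, so passing a List as the argument makes the call O(n * m). When the argument may be large, a common tweak (sketched here with illustrative names) is to wrap it in a HashSet first.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class RemoveAllPerf {
    public static void main(String[] args) {
        List<String> obsolete = new ArrayList<>(Arrays.asList("a.sst", "b.sst", "c.sst"));
        List<String> stillLive = Arrays.asList("b.sst");

        // Wrapping the argument in a HashSet makes each contains() check O(1),
        // so the whole removeAll becomes O(n) rather than O(n * m).
        obsolete.removeAll(new HashSet<>(stillLive));

        System.out.println(obsolete); // [a.sst, c.sst]
    }
}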

From source file:knowledgeMiner.DisjointnessDisambiguator.java

/**
 * Finds the maximally conjoint set of assertions that are consistent with
 * one another. This process also mixes in the existing assertions, treating
 * them as assertions to be added.
 * 
 * @param conceptModule
 *            The concept to be consistent with.
 * @param ontology
 *            The ontology access.
 * @return The maximally conjoint set of assertions.
 */
@SuppressWarnings("unchecked")
public void findMaximalConjoint(ConceptModule conceptModule, OntologySocket ontology) {
    // Null grid check
    if (coreAssertionGrid_.isEmpty()) {
        caseNumber_ = -1;
        return;
    }

    Collection<DefiniteAssertion> existingAssertions = getExistingAssertions(conceptModule, ontology);
    currentAssertionGrid_ = integrateGroundTruths(conceptModule, existingAssertions, ASSERTION_REMOVAL,
            ontology);
    consistentAssertions_ = currentAssertionGrid_.findMaximalConjoint(ontology);
    caseNumber_ = 0;

    // Note the removed assertions
    logger_.trace("Added " + consistentAssertions_.size());
    if (ASSERTION_REMOVAL) {
        existingAssertions.removeAll(consistentAssertions_);
        removedAssertions_ = existingAssertions;
        logger_.trace("Removed " + removedAssertions_.size());
    } else
        removedAssertions_ = CollectionUtils.EMPTY_COLLECTION;
}

From source file:com.ebay.cloud.cms.metadata.model.MetaValidator.java

public void validateForUpdate(MetaClass meta, Map<String, MetaClass> metas, MetaClassGraph tempGraph) {
    validate(meta, metas, tempGraph);

    String name = meta.getName();
    String parent = meta.getParent();
    IMetadataService metadataService = meta.getMetadataService();
    MetaOption options = meta.getOptions();
    Map<String, MetaField> fieldNameIndex = meta.getFieldNameIndex();
    String repository = meta.getRepository();

    Collection<String> fieldNames = new HashSet<String>(fieldNameIndex.keySet());

    MetaClass existingMetadata = metadataService.getMetaClass(name);
    if (existingMetadata == null || !existingMetadata.getName().equals(name)) {
        throw new MetaClassNotExistsException(repository, name);
    }

    Boolean isEmbed = meta.getEmbed();
    if (isEmbed == null) {
        isEmbed = existingMetadata.getEmbed();
    }
    Boolean isInner = meta.getInner();
    if (isInner == null) {
        isInner = existingMetadata.getInner();
    }
    if (isEmbed != null && isInner != null) {
        CheckConditions.checkCondition(!(isEmbed && isInner),
                new IllegalMetaClassException("meta class " + name + " cannot be embed and inner"));
    }

    fieldNames.addAll(existingMetadata.getFieldNames());
    meta.setupAncestors(metadataService, metas);
    // the parent of the updated metaclass must be either null or the same as its original metaclass's parent
    //      CheckConditions.checkCondition(parent == null || !parent.equals(m1.getName()), new IllegalMetaClassException("parent could be the metaclass: " + name));

    // an appended field must not exist in its ancestors' metaclasses;
    // if the original metaclass already has the field, the definitions must be exactly the same (not just the names)
    Iterator<Entry<String, MetaField>> iter = fieldNameIndex.entrySet().iterator();
    StringBuilder reDefineFields = new StringBuilder();
    Collection<String> newFieldNames = new HashSet<String>();
    while (iter.hasNext()) {
        MetaField newField = iter.next().getValue();
        String fieldName = newField.getName();
        newFieldNames.add(fieldName);
        MetaField existingField = existingMetadata.getFieldByName(fieldName);

        // a non-internal field with the same name exists in the original metaclass or its ancestor's metaclass
        if (existingField != null && !existingField.isInternal()) {
            MetaField parentField = null;
            if (!StringUtils.isNullOrEmpty(parent)) {
                parentField = metadataService.getMetaClass(parent).getFieldByName(fieldName);
            }
            if (parentField != null) {
                if (!newField.isOverridable(parentField)) {
                    throw new MetaFieldExistsException(MessageFormat.format(
                            "field {0} already exists in an ancestor class. "
                                    + "Override it by re-defining Mandatory, DefaultValue or EnumValue.",
                            fieldName));
                }
            } else if (newField.equals(existingField)) {
                // the field exists in the original metaclass with an identical definition,
                // so remove it for the later update operation
                iter.remove();
            } else {
                reDefineFields.append(fieldName).append(",");
            }
        }
    }
    if (reDefineFields.length() > 0) {
        reDefineFields.setLength(reDefineFields.length() - 1);
        throw new MetaFieldExistsException("fields " + reDefineFields.toString()
                + " with different definition already exists in origin class");
    }

    //Fix CMS-4021
    if (org.apache.commons.lang.StringUtils.isEmpty(existingMetadata.getParent())
            && !StringUtils.isNullOrEmpty(parent)) {
        Collection<String> difference = new HashSet<String>();
        difference.addAll(fieldNames);
        difference.removeAll(newFieldNames);
        for (String fieldName : difference) {
            MetaField existingField = existingMetadata.getFieldByName(fieldName);

            // a non-internal field with the same name exists in the original metaclass or its ancestor's metaclass
            if (existingField != null && !existingField.isInternal()) {
                MetaField parentField = metadataService.getMetaClass(parent).getFieldByName(fieldName);
                if (parentField != null) {
                    // the field exists in an ancestor's metaclass
                    throw new MetaFieldExistsException(
                            "field " + fieldName + " already exists in an ancestor class");
                }
            }
        }
    }

    // an appended field must not exist in any descendant's fields
    List<MetaClass> descendants = existingMetadata.getDescendants();
    for (MetaClass d : descendants) {
        for (MetaField f : meta.getClassFields()) {
            if (!f.isInternal() && d.getFieldByName(f.getName()) != null) {
                throw new MetaFieldExistsException(
                        "field " + f.getName() + " already exists in descendant's class " + d.getName());
            }
        }
    }

    // indexes must not already exist
    Iterator<IndexInfo> indexIter = options.getIndexes().iterator();
    while (indexIter.hasNext()) {
        IndexInfo indexInfo = indexIter.next();
        if (indexInfo.isInternal()) {
            continue;
        }

        if (existingMetadata.getIndexByName(indexInfo.getIndexName()) != null) {
            indexIter.remove();
            continue;
        }

        for (String idxField : indexInfo.getKeyList()) {
            if (!fieldNames.contains(idxField)) {
                throw new MetaDataException(MetaErrCodeEnum.META_FIELD_NOT_EXISTS, MessageFormat.format(
                        "index {0} has field {1} that doesn''t exist", indexInfo.getIndexName(), idxField));
            }
        }

        if (existingMetadata.getIndexByName(indexInfo.getIndexName()) != null) {
            throw new IndexExistsException(MessageFormat.format(
                    "index name {0} already exsits. Can not update index options with metaclass update",
                    indexInfo.getIndexName()));
        }
    }
}

From source file:org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.EnforceVariablesVisitor.java

private ILogicalOperator visitsInputs(ILogicalOperator op, Collection<LogicalVariable> varsToRecover)
        throws AlgebricksException {
    if (op.getInputs().size() == 0 || varsToRecover.isEmpty()) {
        return op;
    }
    Set<LogicalVariable> producedVars = new HashSet<>();
    VariableUtilities.getProducedVariables(op, producedVars);
    varsToRecover.removeAll(producedVars);
    if (!varsToRecover.isEmpty()) {
        if (op.getInputs().size() == 1) {
            // Deals with single input operators.
            ILogicalOperator newOp = op.getInputs().get(0).getValue().accept(this, varsToRecover);
            op.getInputs().get(0).setValue(newOp);
        } else {
            // Deals with multi-input operators.
            for (Mutable<ILogicalOperator> childRef : op.getInputs()) {
                ILogicalOperator child = childRef.getValue();
                Set<LogicalVariable> varsToRecoverInChild = new HashSet<>();
                VariableUtilities.getProducedVariablesInDescendantsAndSelf(child, varsToRecoverInChild);
                // Obtains the variables that this particular child should propagate.
                varsToRecoverInChild.retainAll(varsToRecover);
                ILogicalOperator newChild = child.accept(this, varsToRecoverInChild);
                childRef.setValue(newChild);
            }
        }
    }
    return op;
}

From source file:net.sourceforge.fenixedu.presentationTier.Action.resourceAllocationManager.ShiftDistributionFirstYearDA.java

private Collection<Degree> removeDegreesWithNoVacancy(final Collection<Degree> result) {
    for (final String degreeCode : NO_VACANCY_DEGREE_CODES) {
        result.removeAll(Degree.readAllByDegreeCode(degreeCode));
    }
    return result;
}
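
Note that this helper mutates and returns the collection it was given. If a caller might hand in a fixed-size or unmodifiable collection, removeAll would throw UnsupportedOperationException; a defensive copy sidesteps that. A hypothetical variant (illustrative names, not the Fenix code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class DefensiveRemoveAll {
    // Returns a new collection instead of mutating the argument,
    // so fixed-size inputs such as Arrays.asList are safe to pass in.
    static Collection<String> without(Collection<String> source, Collection<String> toRemove) {
        Collection<String> copy = new ArrayList<>(source);
        copy.removeAll(toRemove);
        return copy;
    }

    public static void main(String[] args) {
        Collection<String> degrees = Arrays.asList("D1", "D2", "D3"); // fixed-size list
        System.out.println(without(degrees, Arrays.asList("D2"))); // [D1, D3]
    }
}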

From source file:com.garethahealy.camel.file.loadbalancer.example1.routes.ReadThreeFilesWithThreeReadersTest.java

@Test
public void readThreeFilesWithThreeReaders() throws InterruptedException, MalformedURLException {
    Map<String, String> answer = getRouteToEndpointPriority();

    //Used for debugging purposes, in case we need to know which endpoint has what priority
    LOG.info("EndpointSetup: " + answer.toString());

    MockEndpoint first = getMockEndpoint("mock:endFirst");
    first.setExpectedMessageCount(1);
    first.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    first.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint second = getMockEndpoint("mock:endSecond");
    second.setExpectedMessageCount(1);
    second.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    second.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    MockEndpoint third = getMockEndpoint("mock:endThird");
    third.setExpectedMessageCount(1);
    third.setResultWaitTime(TimeUnit.SECONDS.toMillis(15));
    third.setAssertPeriod(TimeUnit.SECONDS.toMillis(1));

    //Wait for the files to be processed
    sleep(10);

    File firstDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel0"));
    File secondDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel1"));
    File thirdDirectory = FileUtils.toFile(new URL("file:" + rootDirectory + "/.camel2"));

    Assert.assertTrue(".camel0 doesnt exist", firstDirectory.exists());
    Assert.assertTrue(".camel1 doesnt exist", secondDirectory.exists());
    Assert.assertTrue(".camel2 doesnt exist", thirdDirectory.exists());

    Collection<File> firstFiles = FileUtils.listFiles(firstDirectory, FileFilterUtils.fileFileFilter(), null);
    Collection<File> secondFiles = FileUtils.listFiles(secondDirectory, FileFilterUtils.fileFileFilter(), null);
    Collection<File> thirdFiles = FileUtils.listFiles(thirdDirectory, FileFilterUtils.fileFileFilter(), null);

    Assert.assertNotNull(firstFiles);
    Assert.assertNotNull(secondFiles);
    Assert.assertNotNull(thirdFiles);

    //Check the files are unique, and we haven't copied the same file twice
    firstFiles.removeAll(secondFiles);
    firstFiles.removeAll(thirdFiles);

    secondFiles.removeAll(firstFiles);
    secondFiles.removeAll(thirdFiles);

    thirdFiles.removeAll(firstFiles);
    thirdFiles.removeAll(secondFiles);

    //Each directory should have copied only one file
    Assert.assertEquals(1, firstFiles.size());
    Assert.assertEquals(1, secondFiles.size());
    Assert.assertEquals(1, thirdFiles.size());

    //Assert the endpoints last: there seems to be a strange bug where they fail even though the files have been processed,
    //which suggests the MockEndpoints are reporting a false positive
    first.assertIsSatisfied();
    second.assertIsSatisfied();
    third.assertIsSatisfied();
}
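
Since the pairwise removeAll calls above mutate each collection as they go, later calls operate on already-shrunken collections. A non-destructive way to assert the same "no file copied twice" property is java.util.Collections.disjoint, sketched here standalone with illustrative file names:

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

public class DisjointCheck {
    public static void main(String[] args) {
        Collection<String> first = Arrays.asList("file0.log");
        Collection<String> second = Arrays.asList("file1.log");
        Collection<String> third = Arrays.asList("file2.log");

        // Collections.disjoint returns true when two collections share no elements,
        // without modifying either of them.
        System.out.println(Collections.disjoint(first, second)); // true
        System.out.println(Collections.disjoint(first, third));  // true
        System.out.println(Collections.disjoint(second, third)); // true
    }
}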

From source file:com.pinterest.arcee.autoscaling.AwsAutoScaleGroupManager.java

@Override
public void addInstancesToAutoScalingGroup(Collection<String> instances, String groupName) throws Exception {
    // It is already ensured elsewhere that we never describe more than 50 instance records at a time
    instances.removeAll(instancesInAutoScalingGroup(instances));
    if (instances.isEmpty()) {
        return;
    }

    AttachInstancesRequest attachInstancesRequest = new AttachInstancesRequest();
    attachInstancesRequest.withAutoScalingGroupName(groupName).setInstanceIds(instances);
    aasClient.attachInstances(attachInstancesRequest);
}

From source file:ubic.gemma.analysis.report.ExpressionExperimentReportServiceImpl.java

private Collection<Long> getTroubled(Collection<ExpressionExperiment> ees) {
    Collection<Long> ids = EntityUtils.getIds(ees);
    Collection<Long> untroubled = expressionExperimentService.getUntroubled(ids);
    ids.removeAll(untroubled);
    return ids;
}
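
The method above is the classic set-difference idiom: start from the full id collection and removeAll the complement, leaving ids = all \ untroubled. The same pattern in a standalone sketch (names are illustrative, not from the Gemma code):

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;

public class SetDifference {
    public static void main(String[] args) {
        Collection<Long> allIds = new HashSet<>(Arrays.asList(1L, 2L, 3L, 4L));
        Collection<Long> untroubled = Arrays.asList(2L, 4L);

        // troubled = allIds \ untroubled
        allIds.removeAll(untroubled);

        System.out.println(allIds); // [1, 3] (iteration order may vary)
    }
}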

From source file:com.github.jrh3k5.plugin.maven.l10n.mojo.report.TranslationKeyVerifier.java

@Override
protected void executeReport(Locale locale) throws MavenReportException {
    ClassLoader classLoader;
    try {
        classLoader = ClassLoaderUtils.getClassLoader(getProject());
    } catch (IOException e) {
        throw new MavenReportException("Failed to load project classloader.", e);
    }

    AuthoritativeMessagesProperties authoritativeProperties;
    Collection<TranslatedMessagesProperties> translatedProperties;
    try {
        authoritativeProperties = new AuthoritativeMessagesProperties.Parser().parse(messagesFile);
    } catch (IOException e) {
        throw new MavenReportException(
                String.format("Failed to parse authoritative messages file: %s", messagesFile), e);
    }

    try {
        final List<File> translationFiles = FileUtils.getFiles(getProject().getBasedir(),
                translatedMessagesPattern, null);
        // Don't consider the authoritative resource, if found, to be a "translation"
        translationFiles.remove(messagesFile);
        translatedProperties = new TranslatedMessagesProperties.Parser().parse(authoritativeProperties,
                translationFiles);
    } catch (IOException e) {
        throw new MavenReportException(String.format(
                "Failed to parse translated messages files for pattern: %s", translatedMessagesPattern), e);
    }

    ClassinessAnalysisResults analysisResults;
    try {
        analysisResults = TranslationKeyAnalysisUtils.getInstance(getLog()).analyzeClassiness(classLoader,
                authoritativeProperties);
    } catch (IOException e) {
        throw new MavenReportException(String.format("Failed to verify %s", messagesFile), e);
    }

    Collection<String> translationClassKeys;
    try {
        translationClassKeys = TranslationClassUtils.getTranslationKeys(keyClasses, classLoader);
    } catch (ClassNotFoundException e) {
        throw new MavenReportException("Failed to translate key classes: " + keyClasses, e);
    }
    translationClassKeys.removeAll(authoritativeProperties.getTranslationKeys());

    new ReportRenderer(this, locale, getSink(), authoritativeProperties, analysisResults, translatedProperties,
            translationClassKeys).render();
}

From source file:gov.bnl.channelfinder.ChannelsResource.java

/**
 * Check whether all the tags and properties already exist
 * @return the validated channel
 * @throws IOException 
 * @throws JsonMappingException 
 * @throws JsonParseException 
 */
private XmlChannel validateChannel(XmlChannel channel, Client client)
        throws JsonParseException, JsonMappingException, IOException {

    if (channel.getName() == null || channel.getName().isEmpty()) {
        throw new IllegalArgumentException("Invalid channel name ");
    }

    if (channel.getOwner() == null || channel.getOwner().isEmpty()) {
        throw new IllegalArgumentException(
                "Invalid channel owner (null or empty string) for '" + channel.getName() + "'");
    }

    for (XmlProperty xmlProperty : channel.getProperties()) {
        if (xmlProperty.getValue() == null || xmlProperty.getValue().isEmpty()) {
            throw new IllegalArgumentException("Invalid property value (missing or null or empty string) for '"
                    + xmlProperty.getName() + "'");
        }
    }
    final Map<String, XmlTag> tags = new HashMap<String, XmlTag>();
    final Map<String, XmlProperty> properties = new HashMap<String, XmlProperty>();

    ObjectMapper mapper = new ObjectMapper();
    mapper.addMixIn(XmlProperty.class, OnlyXmlProperty.class);
    mapper.addMixIn(XmlTag.class, OnlyXmlTag.class);

    SearchResponse response = client.prepareSearch("properties").setTypes("property")
            .setQuery(new MatchAllQueryBuilder()).setSize(1000).execute().actionGet();
    for (SearchHit hit : response.getHits()) {
        XmlProperty prop = mapper.readValue(hit.getSourceAsString(), XmlProperty.class);
        properties.put(prop.getName(), prop);
    }
    response = client.prepareSearch("tags").setTypes("tag").setQuery(new MatchAllQueryBuilder()).setSize(1000)
            .execute().actionGet();
    for (SearchHit hit : response.getHits()) {
        XmlTag tag = mapper.readValue(hit.getSourceAsString(), XmlTag.class);
        tags.put(tag.getName(), tag);
    }
    if (tags.keySet().containsAll(ChannelUtil.getTagNames(channel))
            && properties.keySet().containsAll(ChannelUtil.getPropertyNames(channel))) {
        channel.getTags().parallelStream().forEach((tag) -> {
            tag.setOwner(tags.get(tag.getName()).getOwner());
        });
        channel.getProperties().parallelStream().forEach((prop) -> {
            prop.setOwner(properties.get(prop.getName()).getOwner());
        });
        return channel;
    } else {
        StringBuffer errorMsg = new StringBuffer();
        Collection<String> missingTags = ChannelUtil.getTagNames(channel);
        missingTags.removeAll(tags.keySet());
        for (String tag : missingTags) {
            errorMsg.append(tag + "|");
        }
        Collection<String> missingProps = ChannelUtil.getPropertyNames(channel);
        missingProps.removeAll(properties.keySet());
        for (String prop : missingProps) {
            errorMsg.append(prop + "|");
        }
        throw new IllegalArgumentException(
                "The following Tags and/or Properties on the channel don't exist -- " + errorMsg.toString());
    }
}
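
The error-message assembly above uses the same removeAll difference pattern twice, to find the tags and properties that were requested but not known. On Java 8+, String.join produces the same joined output without the trailing separator; a small illustrative sketch (not the ChannelFinder code):

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;

public class MissingKeys {
    public static void main(String[] args) {
        Collection<String> requested = new HashSet<>(Arrays.asList("tagA", "tagB", "tagC"));
        Collection<String> known = Arrays.asList("tagB");

        // missing = requested \ known
        requested.removeAll(known);

        // String.join avoids the trailing "|" that manual appending leaves behind.
        System.out.println("Missing tags: " + String.join("|", requested));
    }
}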