Example usage for java.util HashSet contains

List of usage examples for java.util HashSet contains

Introduction

On this page you can find example usage for java.util HashSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
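
A minimal, self-contained sketch of a call to contains (the class name and set contents below are illustrative only, not taken from the examples that follow):

import java.util.HashSet;

public class HashSetContainsDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<String>();
        names.add("alpha");
        names.add("beta");

        // contains returns true only for elements already present in the set
        System.out.println(names.contains("alpha")); // true
        System.out.println(names.contains("gamma")); // false
    }
}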

Usage

From source file:com.otway.picasasync.syncutil.SyncManager.java

private List<AlbumSync> getRemoteDownloadList(List<AlbumEntry> remoteAlbums, final File rootFolder,
        LocalDateTime oldestDate) throws ServiceException, IOException {
    HashSet<String> uniqueNames = new HashSet<String>();
    List<AlbumSync> result = new ArrayList<AlbumSync>();

    // If this is false, we only care about instant upload albums.
    boolean nonInstantUploadAlbums = settings.getDownloadChanged() || settings.getUploadChanged()
            || settings.getDownloadNew() || settings.getUploadNew();

    for (AlbumEntry album : remoteAlbums) {

        String title = album.getTitle().getPlainText();
        boolean isInstantUploadType = PicasawebClient.isInstantUpload(album);

        if (oldestDate.isAfter(getTimeFromMS(album.getUpdated().getValue()))) {
            log.debug("Album update date (" + album.getUpdated() + ") too old. Skipping " + title);
            continue;
        }

        if (!settings.getAutoBackupDownload() && isInstantUploadType) {
            log.info("Skipping Auto-Backup album: " + title);
            continue;
        }

        if (settings.getExcludeDropBox() && title.equals("Drop Box")) {
            log.info("Skipping DropBox album.");
            continue;
        }

        String suffix = "";

        if (uniqueNames.contains(title)) {
            log.info(" Duplicate online album: " + title + " (" + album.getName() + ") - skipping...");
            continue;
        }

        uniqueNames.add(title);

        // Might need to convert some auto-backup style folder names, which have slashes
        File albumFolder = PicasawebClient.getFolderNameForAlbum(rootFolder, album);

        if (!isInstantUploadType && !suffix.isEmpty()) {

            // If it's not AutoBackup, add the suffix to differentiate duplicate titles
            albumFolder = new File(albumFolder.getParent(), albumFolder.getName() + suffix);
        }

        if (!isInstantUploadType && !nonInstantUploadAlbums)
            continue;

        result.add(new AlbumSync(album, albumFolder, this, settings));
    }

    return result;
}

From source file:edu.cornell.mannlib.vitro.webapp.edit.n3editing.configuration.preprocessors.WorkHasContributionPreprocessor.java

private Model getRetractionsToRemove(Model retractionsModel, Model additionsModel) {
    Model retractionsToRemove = ModelFactory.createDefaultModel();

    if (!retractionsModel.isEmpty()) {
        HashSet<String> additionsAgentURIs = new HashSet<String>();
        HashSet<String> preserveRetractionsAgentURIs = new HashSet<String>();
        String queryStr = getSparqlQuery();
        Query query = null;
        QueryExecution qe = null;

        additionsModel.getLock().enterCriticalSection(Lock.READ);
        try {
            query = QueryFactory.create(queryStr);
            qe = QueryExecutionFactory.create(query, additionsModel);
            ResultSet res = qe.execSelect();

            while (res.hasNext()) {
                QuerySolution qs = res.nextSolution();
                additionsAgentURIs.add(qs.getResource("agent").getURI());
            }
        } catch (Exception ex) {
            log.error("Exception occurred in querying additions model for agent ", ex);
        }

        retractionsModel.getLock().enterCriticalSection(Lock.READ);
        try {
            query = QueryFactory.create(queryStr);
            qe = QueryExecutionFactory.create(query, retractionsModel);
            ResultSet res = qe.execSelect();

            while (res.hasNext()) {
                QuerySolution qs = res.nextSolution();
                String agentURI = qs.getResource("agent").getURI();
                //if this uri is not in the additions, then the agent is being removed from the property and we want to ensure
                //that type, rdfs:label, and foaf:name are not added to the retractions
                if (!additionsAgentURIs.contains(agentURI)) {
                    preserveRetractionsAgentURIs.add(agentURI);
                }
            }
        } catch (Exception ex) {
            log.error("Exception occurred in querying additions model for agent ", ex);
        }

        //Now, for each agent URI to be preserved, collect its type, foaf:name and rdfs:label statements so they are removed from the retractions
        for (String uri : preserveRetractionsAgentURIs) {
            Resource agentURI = ResourceFactory.createResource(uri);
            Property foafNameProperty = ResourceFactory.createProperty(foaf + "name");
            retractionsToRemove.add(retractionsModel.listStatements(agentURI, RDF.type, (RDFNode) null));
            retractionsToRemove
                    .add(retractionsModel.listStatements(agentURI, foafNameProperty, (RDFNode) null));
            retractionsToRemove.add(retractionsModel.listStatements(agentURI, RDFS.label, (RDFNode) null));

        }
    }

    return retractionsToRemove;
}

From source file:edu.cornell.mannlib.vitro.webapp.edit.n3editing.configuration.preprocessors.AddAssociatedConceptsPreprocessor.java

private void addLiteralsAndUrisOnForm(int numberTerms) {
    List<String> urisOnForm = new ArrayList<String>();
    List<String> literalsOnForm = new ArrayList<String>();

    int index;
    HashSet<String> conceptSemanticTypeURIs = new HashSet<String>();
    // First one already included so add new ones here
    for (index = 1; index <= numberTerms; index++) {
        int suffix = index;
        String conceptNode = conceptNodeBase + suffix;
        String label = labelBase + suffix;
        String source = sourceBase + suffix;
        String conceptSemanticTypeLabel = conceptSemanticTypeLabelBase + suffix;
        //String conceptSemanticTypeURI = conceptSemanticTypeURIBase + suffix;
        String conceptSemanticTypeURI = this.getConceptSemanticTypeURIFieldName(conceptSemanticTypeLabel,
                suffix);
        String conceptBroaderURI = conceptBroaderURIBase + suffix;
        String conceptNarrowerURI = conceptNarrowerURIBase + suffix;
        urisOnForm.add(conceptNode);
        urisOnForm.add(source);
        if (!conceptSemanticTypeURIs.contains(conceptSemanticTypeURI)) {
            conceptSemanticTypeURIs.add(conceptSemanticTypeURI);
            urisOnForm.add(conceptSemanticTypeURI);
        }
        urisOnForm.add(conceptBroaderURI);
        urisOnForm.add(conceptNarrowerURI);
        literalsOnForm.add(label);
        literalsOnForm.add(conceptSemanticTypeLabel);
    }
    editConfiguration.setUrisOnform(urisOnForm);
    editConfiguration.setLiteralsOnForm(literalsOnForm);
}

From source file:org.walkmod.conf.providers.yml.RemoveTransformationYMLAction.java

@Override
public void doAction(JsonNode node) throws Exception {
    HashSet<String> transList = new HashSet<String>(transformations);
    JsonNode transfListNode = null;
    if (chain == null || "".equals(chain)) {
        if (node.has("transformations")) {
            transfListNode = node.get("transformations");

        }
    } else {
        if (node.has("chains")) {
            JsonNode chainsListNode = node.get("chains");
            if (chainsListNode.isArray()) {
                Iterator<JsonNode> it = chainsListNode.iterator();
                boolean found = false;
                while (it.hasNext() && !found) {
                    JsonNode current = it.next();
                    if (current.has("name")) {
                        String name = current.get("name").asText();
                        found = name.equals(chain);

                        if (current.has("transformations")) {
                            transfListNode = current.get("transformations");
                        }
                    }
                }
            }
        }
    }

    if (transfListNode != null) {
        if (transfListNode.isArray()) {
            ArrayNode transArray = (ArrayNode) transfListNode;
            Iterator<JsonNode> it = transArray.iterator();
            List<Integer> removeIndex = new LinkedList<Integer>();
            int i = 0;
            while (it.hasNext()) {
                JsonNode transfNode = it.next();
                if (transfNode.has("type")) {
                    String type = transfNode.get("type").asText();
                    if (transList.contains(type)) {
                        removeIndex.add(i);
                    }
                }
                i++;
            }
            for (Integer pos : removeIndex) {
                transArray.remove(pos);
            }
        }
        provider.write(node);
    }

}

From source file:com.thoughtworks.cruise.utils.configfile.CruiseConfigDom.java

public void removePipelinesExcept(String... pipelines) {
    HashSet<String> keepPipelines = new HashSet<String>(Arrays.asList(pipelines));
    for (String pipeline : allPipelines()) {
        if (!keepPipelines.contains(pipeline)) {
            removePipeline(pipeline);
        }
    }
}

From source file:net.semanticmetadata.lire.imageanalysis.bovw.LocalFeatureHistogramBuilderFromCodeBook.java

private HashSet<Integer> selectVocabularyDocs() throws IOException {
    // need to make sure that this is not running forever ...
    int loopCount = 0;
    float maxDocs = reader.maxDoc();
    int capacity = (int) Math.min(numDocsForVocabulary, maxDocs);
    if (capacity < 0)
        capacity = (int) (maxDocs / 2);
    HashSet<Integer> result = new HashSet<Integer>(capacity);
    int tmpDocNumber, tmpIndex;
    LinkedList<Integer> docCandidates = new LinkedList<Integer>();
    // three cases:
    // either it's more or the same number as documents
    if (numDocsForVocabulary >= maxDocs) {
        for (int i = 0; i < maxDocs; i++) {
            result.add(i);
        }
        return result;
    } else if (numDocsForVocabulary >= maxDocs - 100) { // or it's slightly less:
        for (int i = 0; i < maxDocs; i++) {
            result.add(i);
        }
        while (result.size() > numDocsForVocabulary) {
            result.remove((int) Math.floor(Math.random() * result.size()));
        }
        return result;
    } else {
        for (int i = 0; i < maxDocs; i++) {
            docCandidates.add(i);
        }
        for (int r = 0; r < capacity; r++) {
            boolean worksFine = false;
            do {
                tmpIndex = (int) Math.floor(Math.random() * (double) docCandidates.size());
                tmpDocNumber = docCandidates.get(tmpIndex);
                docCandidates.remove(tmpIndex);
                // check if the selected doc number is valid: not null, not deleted and not already chosen.
                worksFine = (reader.document(tmpDocNumber) != null) && !result.contains(tmpDocNumber);
            } while (!worksFine);
            result.add(tmpDocNumber);
            // need to make sure that this is not running forever ...
            if (loopCount++ > capacity * 100)
                throw new UnsupportedOperationException(
                        "Could not get the documents, maybe there are not enough documents in the index?");
        }
        return result;
    }
}

From source file:edu.ku.brc.specify.conversion.ConvertTaxonHelper.java

/**
 * Creates the ID mappings for the taxonomy-related tables
 * (Habitat, TaxonCitation, TaxonomicUnitType, TaxonomyType and TaxonName).
 */
public void createTaxonIdMappings() {
    IdMapperMgr idMapperMgr = IdMapperMgr.getInstance();

    // These are the names as they occur in the old datamodel
    String[] tableNames = { "Habitat", "TaxonCitation", "TaxonomicUnitType", // Added Only 
    };

    int i = 0;
    IdTableMapper idMapper = null;
    for (String tableName : tableNames) {
        idMapper = idMapperMgr.addTableMapper(tableName, tableName + "ID");
        log.debug("mapIds() for table" + tableName);

        if (i < tableNames.length - 1) {
            idMapper.mapAllIds();
        }
        i++;
    }

    //---------------------------------
    // TaxonomyType
    //---------------------------------

    HashSet<Integer> txTypHashSet = new HashSet<Integer>();
    StringBuilder inSB = new StringBuilder();

    //HashMap<Integer, StringBuilder> txTypToKgdmHash = new HashMap<Integer, StringBuilder>();
    for (CollectionInfo ci : CollectionInfo.getFilteredCollectionInfoList()) {
        log.debug("For Collection[" + ci.getCatSeriesName() + "]  TaxonomyTypeId: " + ci.getTaxonomyTypeId()
                + "  " + (txTypHashSet.contains(ci.getTaxonomyTypeId()) ? "Done" : "not Done."));
        if (!txTypHashSet.contains(ci.getTaxonomyTypeId())) {
            log.debug("Mapping TaxonomyTypeId [" + ci.getTaxonomyTypeId() + "]  For Collection["
                    + ci.getCatSeriesName() + "]");
            if (inSB.length() > 0)
                inSB.append(',');
            inSB.append(ci.getTaxonomyTypeId());
            txTypHashSet.add(ci.getTaxonomyTypeId());
        }
    }

    taxonomyTypeIdInClause = " in (" + inSB.toString() + ")";

    // KU Vert Paleo
    //taxonomyTypeIdInClause = " in (0,1,2,3,4,7)";

    IdTableMapper taxonomyTypeMapper = idMapperMgr.addTableMapper("TaxonomyType", "TaxonomyTypeID", true);
    //taxonomyTypeMapper.mapAllIds();

    //---------------------------------
    // TaxonName
    //---------------------------------

    taxonFromClause = String.format(
            " FROM taxonname tx INNER JOIN taxonomicunittype tu ON tx.TaxonomicUnitTypeID = tu.TaxonomicUnitTypeID "
                    + "WHERE tx.RankID IS NOT NULL AND tx.TaxonomyTypeId %s ORDER BY tx.RankID",
            taxonomyTypeIdInClause);
    String sql = "SELECT COUNT(*)" + taxonFromClause;
    log.debug(sql);
    int count = BasicSQLUtils.getCountAsInt(oldDBConn, sql);

    sql = "SELECT tx.TaxonNameID" + taxonFromClause;
    log.debug(count + " - " + sql);

    // This mapping is used by Discipline
    idMapper = idMapperMgr.addTableMapper("TaxonName", "TaxonNameID", sql, true);
    idMapper.mapAllIdsWithSQL();
}

From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule.java

private static Pair<Boolean, Boolean> pushThroughOp(HashSet<LogicalVariable> toPush,
        Mutable<ILogicalOperator> opRef2, ILogicalOperator initialOp, IOptimizationContext context)
        throws AlgebricksException {
    List<LogicalVariable> initProjectList = new ArrayList<LogicalVariable>(toPush);
    AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
    do {
        if (op2.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE
                || op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE
                || op2.getOperatorTag() == LogicalOperatorTag.PROJECT
                || op2.getOperatorTag() == LogicalOperatorTag.REPLICATE
                || op2.getOperatorTag() == LogicalOperatorTag.UNIONALL) {
            return new Pair<Boolean, Boolean>(false, false);
        }
        if (!op2.isMap()) {
            break;
        }
        LinkedList<LogicalVariable> usedVars = new LinkedList<LogicalVariable>();
        VariableUtilities.getUsedVariables(op2, usedVars);
        toPush.addAll(usedVars);
        LinkedList<LogicalVariable> producedVars = new LinkedList<LogicalVariable>();
        VariableUtilities.getProducedVariables(op2, producedVars);
        toPush.removeAll(producedVars);
        // we assume pipelineable ops. have only one input
        opRef2 = op2.getInputs().get(0);
        op2 = (AbstractLogicalOperator) opRef2.getValue();
    } while (true);

    LinkedList<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
    VariableUtilities.getProducedVariables(op2, produced2);
    LinkedList<LogicalVariable> used2 = new LinkedList<LogicalVariable>();
    VariableUtilities.getUsedVariables(op2, used2);

    boolean canCommuteProjection = initProjectList.containsAll(toPush) && initProjectList.containsAll(produced2)
            && initProjectList.containsAll(used2);
    // if true, we can get rid of the initial projection

    // get rid of useless decor vars.
    if (!canCommuteProjection && op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
        boolean gbyChanged = false;
        GroupByOperator gby = (GroupByOperator) op2;
        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> newDecorList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
            LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
            if (!toPush.contains(decorVar)) {
                used2.remove(decorVar);
                gbyChanged = true;
            } else {
                newDecorList.add(p);
            }
        }
        gby.getDecorList().clear();
        gby.getDecorList().addAll(newDecorList);
        if (gbyChanged) {
            context.computeAndSetTypeEnvironmentForOperator(gby);
        }
    }
    used2.clear();
    VariableUtilities.getUsedVariables(op2, used2);

    toPush.addAll(used2); // remember that toPush is a Set
    toPush.removeAll(produced2);

    if (toPush.isEmpty()) {
        return new Pair<Boolean, Boolean>(false, false);
    }

    boolean smthWasPushed = false;
    for (Mutable<ILogicalOperator> c : op2.getInputs()) {
        if (pushNeededProjections(toPush, c, context, initialOp)) {
            smthWasPushed = true;
        }
    }
    if (op2.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans n = (AbstractOperatorWithNestedPlans) op2;
        for (ILogicalPlan p : n.getNestedPlans()) {
            for (Mutable<ILogicalOperator> r : p.getRoots()) {
                if (pushNeededProjections(toPush, r, context, initialOp)) {
                    smthWasPushed = true;
                }
            }
        }
    }
    return new Pair<Boolean, Boolean>(smthWasPushed, canCommuteProjection);
}

From source file:eu.europa.ec.fisheries.uvms.rules.service.business.AbstractFact.java

/**
 * Returns false if any codeType does not have a matching value in the list of values to be matched.
 *
 * @param codeTypes     the code types whose values are checked
 * @param valuesToMatch the values that each code type must match
 * @return true if every code type has a value contained in valuesToMatch, false otherwise
 */
public boolean codeTypeValueContainsMatch(List<CodeType> codeTypes, String... valuesToMatch) {
    if (CollectionUtils.isEmpty(codeTypes) || valuesToMatch == null) {
        return false;
    }
    HashSet<String> valuesToBeFound = new HashSet<>(Arrays.asList(valuesToMatch));

    for (CodeType codeType : codeTypes) {
        if (codeType == null || codeType.getValue() == null || !valuesToBeFound.contains(codeType.getValue())) {
            return false;
        }
    }

    return true;
}

From source file:net.sf.markov4jmeter.testplangenerator.transformation.SimpleProtocolLayerEFSMTransformer.java

/**
 * {@inheritDoc}
 *
 * <p>This method is specialized for a <b>straight ("simple") run</b>
 * through the states, assuming that no state has more than one outgoing
 * transition; if this restriction does not hold for a state, the run
 * continues with the first outgoing transition, and a warning will be
 * given.
 */
@Override
protected ListedHashTree transformProtocolState(final m4jdsl.ProtocolState state,
        final HashSet<m4jdsl.ProtocolState> visitedStates, final TestPlanElementFactory testPlanElementFactory)
        throws TransformationException {

    // Test Plan fragment to be returned;
    final ListedHashTree samplers = new ListedHashTree();

    final Request request = state.getRequest();

    // ignore initial markov state
    if (!state.getRequest().getEId().contains("(INITIAL)")) {
        // create a named Sampler with properties and parameters;
        ListedHashTree sampler = this.transformRequest(request, testPlanElementFactory);
        samplers.add(sampler);
    }

    // outgoing transitions of the M4J-DSL state indicate further Samplers;
    final List<ProtocolTransition> outgoingTransitions = state.getOutgoingTransitions();

    // mark current state as "visited";
    visitedStates.add(state);

    final int n = outgoingTransitions.size();

    if (n >= 1) {

        // successors must be unique for sequential Samplers (requests);
        // if this restriction does not hold, a warning will be given;
        if (n > 1) {

            final String message = String.format(
                    SimpleProtocolLayerEFSMTransformer.WARNING_AMBIGUOUS_TRANSITIONS_IN_PROTOCOL_STATE,
                    request.getEId());

            SimpleProtocolLayerEFSMTransformer.LOG.warn(message);
        }

        final ProtocolTransition transition = outgoingTransitions.get(0);

        // continue with the target state in the M4J-DSL model;
        final ProtocolLayerEFSMState targetState = transition.getTargetState();

        if (targetState instanceof ProtocolState && !visitedStates.contains(targetState)) {

            // target state has not been visited yet -> transform it;
            final ListedHashTree targetProtocolStates = this.transformProtocolState((ProtocolState) targetState,
                    visitedStates, testPlanElementFactory);

            samplers.add(targetProtocolStates);
        }
    }

    return samplers;
}