Example usage for java.util HashSet contains

Introduction

This page collects usage examples for the contains(Object) method of java.util.HashSet, drawn from the source files listed below.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
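
For orientation, here is a minimal, self-contained sketch of the method in isolation; the class name and sample values are illustrative, not taken from the examples below. Membership is decided by the element's equals() and hashCode(), so lookups run in constant time on average.

import java.util.HashSet;

public class HashSetContainsDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<String>();
        colors.add("red");
        colors.add("green");

        System.out.println(colors.contains("red"));   // true: the element is present
        System.out.println(colors.contains("blue"));  // false: never added

        // contains uses equals()/hashCode(), not reference identity
        System.out.println(colors.contains(new String("red"))); // true
    }
}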

Usage

From source file:de.da_sense.moses.client.FormFragment.java
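
Here, contains is used to restore UI state: each possible answer's id is tested against the set of answers already given, and the matching checkboxes are re-checked.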

/**
 * Displays a multiple choice question to the user.
 * @param question the question to be displayed
 * @param linearLayoutInsideAScrollView the view to add the question to
 * @param ordinal the ordinal number of the question i.e. 1, 2, 3, 4 or 5
 */
private void makeMultipleChoice(final Question question, LinearLayout linearLayoutInsideAScrollView,
        int ordinal) {
    LinearLayout questionContainer = generateQuestionContainer(linearLayoutInsideAScrollView);
    String questionText = question.getTitle();
    List<PossibleAnswer> possibleAnswers = question.getPossibleAnswers();
    Collections.sort(possibleAnswers);

    TextView questionView = new TextView(getActivity());
    questionView.setText(ordinal + ". " + questionText);
    if (question.isMandatory())
        questionView.setTextAppearance(getActivity(), R.style.QuestionTextStyleMandatory);
    else
        questionView.setTextAppearance(getActivity(), R.style.QuestionTextStyle);
    questionContainer.addView(questionView);
    mQuestionTitleMappings.put(question, questionView);

    Log.i(LOG_TAG, "questionView = " + questionView.getText());

    final HashSet<String> madeAnswers = new HashSet<String>();
    madeAnswers.addAll(Arrays.asList(question.getAnswer().split(",")));
    madeAnswers.remove(""); // paranoia

    final CheckBox[] checkBoxs = new CheckBox[possibleAnswers.size()];
    for (int i = 0; i < checkBoxs.length; i++) {
        final PossibleAnswer possibleAnswer = possibleAnswers.get(i);
        final String possibleAnswerId = String.valueOf(possibleAnswer.getId());
        checkBoxs[i] = new CheckBox(getActivity());
        if (i % 2 == 0)
            checkBoxs[i].setBackgroundColor(getActivity().getResources().getColor(R.color.light_gray));
        checkBoxs[i].setText(possibleAnswer.getTitle());
        checkBoxs[i].setTextAppearance(getActivity(), R.style.PossibleAnswerTextStyle);
        if (madeAnswers.contains(possibleAnswerId))
            checkBoxs[i].setChecked(true);

        // click handling
        checkBoxs[i].setOnCheckedChangeListener(new OnCheckedChangeListener() {

            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked)
                    madeAnswers.add(possibleAnswerId);
                else
                    madeAnswers.remove(possibleAnswerId);
                String newAnswer = "";
                for (String madeAnswer1 : madeAnswers)
                    newAnswer = newAnswer + "," + madeAnswer1;
                if (!newAnswer.isEmpty())
                    newAnswer = newAnswer.substring(1); // remove the leading ","
                question.setAnswer(newAnswer);
            }
        });

        checkBoxs[i].setVisibility(View.VISIBLE);
        if (mBelongsTo == WelcomeActivityPagerAdapter.TAB_HISTORY)
            checkBoxs[i].setEnabled(false);
        questionContainer.addView(checkBoxs[i]);
    }
}

From source file:byps.http.HActiveMessages.java
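
Here, contains checks whether an active incoming stream is still referenced by an outgoing stream or a pending message; unreferenced or expired streams are closed and removed.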

/**
 * Cleanup expired messages and close expired streams.
 * @param all false: cleanup expired, true: cleanup all
 */
public void cleanup(final boolean all) {
    if (log.isDebugEnabled())
        log.debug("cleanup(");

    // HashSet for all active incoming streams
    HashSet<Long> activeIncomingStreamIds = new HashSet<Long>(incomingStreams.keys());

    // HashSet for referenced incoming streams.
    // Initialize with outgoing streams because incoming streams might be used in 
    // return values or sent to other clients.
    HashSet<Long> referencedIncomingStreamIds = new HashSet<Long>(outgoingStreams.keys());

    // Cleanup messages.
    ArrayList<HActiveMessage> arr = new ArrayList<HActiveMessage>(activeMessages.values());
    for (HActiveMessage msg : arr) {

        if (all || msg.checkReferencedStreamIds(activeIncomingStreamIds, referencedIncomingStreamIds)) {
            if (log.isDebugEnabled())
                log.debug("remove message=" + msg);
            if (all)
                msg.cancelMessage();
            activeMessages.remove(msg.messageId);
        } else if (log.isDebugEnabled() && !msg.isLongPoll()) {
            log.debug("active message=" + msg);
        }
    }

    // Cleanup expired or not referenced incoming streams
    for (Long streamId : activeIncomingStreamIds) {
        BContentStream stream = incomingStreams.get(streamId);
        if (stream == null)
            continue;

        if (all || stream.isExpired() || !referencedIncomingStreamIds.contains(streamId)) {
            try {
                if (log.isDebugEnabled())
                    log.debug("close/remove incoming stream=" + stream);
                stream.close(); // removes from incomingStreams or outgoingStreams
                incomingStreams.remove(streamId);
            } catch (Throwable e) {
                log.debug("Failed to close stream=" + stream, e);
            }
        } else {
            if (log.isDebugEnabled())
                log.debug("active incoming stream=" + stream);
        }
    }

    // Cleanup expired outgoing streams.
    // And cleanup expired upload streams.
    ArrayList<BContentStream> ostreams = new ArrayList<BContentStream>(outgoingStreams.values());
    for (BContentStream stream : ostreams) {
        if (all || stream.isExpired()) {
            try {
                if (log.isDebugEnabled())
                    log.debug("close/remove outgoing stream=" + stream);
                stream.close(); // removes from incomingStreams or outgoingStreams
                outgoingStreams.remove(stream.getTargetId().getStreamId());
            } catch (Throwable e) {
                log.debug("Failed to close stream=" + stream, e);
            }
        } else {
            if (log.isDebugEnabled())
                log.debug("active outgoing stream=" + stream);
        }
    }

    if (log.isDebugEnabled())
        log.debug(")cleanup");
}

From source file:com.bitsofproof.supernode.core.CachedBlockStore.java
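
Here, a HashSet of output indexes is kept per source transaction hash, and contains detects when the same output is spent twice within one transaction, which is rejected as a duplicate input.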

private void validateTransaction(final TransactionContext tcontext, final Tx t)
        throws TransactionValidationException {
    if (t.getInputs() == null || t.getInputs().isEmpty()) {
        throw new TransactionValidationException("a transaction must have inputs", t);
    }
    if (t.getOutputs() == null || t.getOutputs().isEmpty()) {
        throw new TransactionValidationException("a transaction must have outputs", t);
    }

    if (isCoinBase(t)) {
        if (tcontext.block == null) {
            throw new TransactionValidationException("coinbase only allowed in a block", t);
        }
        if (t.getInputs().get(0).getScript().length < 2 || t.getInputs().get(0).getScript().length > 100) {
            throw new TransactionValidationException("coinbase script length out of bounds", t);
        }
    }

    long sumOut = 0;
    for (TxOut o : t.getOutputs()) {
        if (o.getValue() < 0) {
            throw new TransactionValidationException("negative output is not allowed", t);
        }
        if (o.getValue() > Tx.MAX_MONEY) {
            throw new TransactionValidationException("output too high", t);
        }
        synchronized (tcontext) {
            tcontext.nsigs += ScriptFormat.sigOpCount(o.getScript(), false);
            tcontext.blkSumOutput = tcontext.blkSumOutput.add(BigInteger.valueOf(o.getValue()));
        }
        sumOut += o.getValue();
    }

    if (!isCoinBase(t)) {
        long sumIn = 0;
        int inNumber = 0;
        List<Callable<TransactionValidationException>> callables = new ArrayList<Callable<TransactionValidationException>>();
        Map<String, HashSet<Long>> inputUse = new HashMap<String, HashSet<Long>>();
        for (TxIn i : t.getInputs()) {
            HashSet<Long> seen = inputUse.get(i.getSourceHash());
            if (seen == null) {
                inputUse.put(i.getSourceHash(), seen = new HashSet<Long>());
            }
            if (seen.contains(i.getIx())) {
                throw new TransactionValidationException("duplicate input", t);
            }
            seen.add(i.getIx());

            TxOut source = tcontext.resolvedInputs.get(i.getSourceHash(), i.getIx());
            sumIn += source.getValue();
            try {
                synchronized (tcontext) {
                    if (ScriptFormat.isPayToScriptHash(source.getScript())) {
                        ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer(i.getScript());
                        byte[] last = null;
                        while (tokenizer.hashMoreElements()) {
                            last = tokenizer.nextToken().data;
                        }
                        tcontext.nsigs += ScriptFormat.sigOpCount(last, true);
                    } else {
                        tcontext.nsigs += ScriptFormat.sigOpCount(i.getScript(), false);
                    }
                    tcontext.blkSumInput = tcontext.blkSumInput.add(BigInteger.valueOf(source.getValue()));
                }
            } catch (ValidationException e) {
                throw new TransactionValidationException(e, t);
            }

            final ScriptEvaluation evaluation = new ScriptEvaluation(t, inNumber++, source);
            callables.add(new Callable<TransactionValidationException>() {
                @Override
                public TransactionValidationException call() throws Exception {
                    try {
                        if (!evaluation.evaluate(chain.isProduction())) {
                            return new TransactionValidationException(
                                    "The transaction script does not evaluate to true in input", t,
                                    evaluation.getInr());
                        }
                    } catch (Exception e) {
                        return new TransactionValidationException(e, t, evaluation.getInr());
                    }
                    return null;
                }
            });
        }
        if (sumOut > sumIn) {
            throw new TransactionValidationException("Transaction value out more than in", t);
        }
        List<Future<TransactionValidationException>> results;
        try {
            results = inputProcessor.invokeAll(callables);
        } catch (InterruptedException e1) {
            throw new TransactionValidationException(e1, t);
        }
        for (Future<TransactionValidationException> r : results) {
            TransactionValidationException ex;
            try {
                ex = r.get();
            } catch (InterruptedException e) {
                throw new TransactionValidationException(e, t);
            } catch (ExecutionException e) {
                throw new TransactionValidationException(e, t);
            }
            if (ex != null) {
                throw ex;
            }
        }
    }
}

From source file:com.almarsoft.GroundhogReader.MessageListActivity.java
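
Here, contains tests whether a message id is already in the set of read messages, so the unread counter is only adjusted when marking actually changes a message's state.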

private void markThreadAsReadOrUnread(HeaderItemClass header, boolean setread) {

    // Proxy stuff
    String thread_subject = header.getArticle().simplifiedSubject();
    String msgId;
    Article article;
    ArrayList<HeaderItemClass> proxyHeaderItems = mHeaderItemsList;
    HashSet<String> proxyReadSet = mReadSet;
    int headerItemsSize = proxyHeaderItems.size();
    int proxyNumUnread = mNumUnread;
    // End proxy stuff

    for (int i = 0; i < headerItemsSize; i++) {
        article = proxyHeaderItems.get(i).getArticle();

        if (thread_subject.equalsIgnoreCase(article.simplifiedSubject())) {
            msgId = article.getArticleId();
            if (setread) {
                DBUtils.markAsRead(msgId, getApplicationContext());
                if (!proxyReadSet.contains(msgId))
                    proxyNumUnread--;
            } else {
                DBUtils.markAsUnread(msgId, getApplicationContext());
                if (proxyReadSet.contains(msgId))
                    proxyNumUnread++;
            }
        }
    }
    mNumUnread = proxyNumUnread;

    mTitleBar.setText(mGroup + ":" + mNumUnread);

    mReadSet = DBUtils.getReadMessagesSet(mGroup, getApplicationContext());
    DBUtils.updateUnreadInGroupsTable(mNumUnread, mGroupID, getApplicationContext());
    mMsgList.invalidateViews();
}

From source file:de.gfz_potsdam.datasync.Datasync.java
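
Here, contains checks whether a previously synced path is still present in the set of local entities; paths that have disappeared locally are scheduled for deletion on the remote side.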

public void syncDeletedToRemote(String basedir, HashSet<String> entities, Container parent) throws Exception {

    // delete container members in the infrastructure where there is no local file but an entry in our database

    if (parent == null)
        return;

    HashMap<String, String> syncedfiles = App.db.listEntries(basedir, File.separator);
    HashMap<String, String> srvdelete = new HashMap<String, String>();

    HashMap<String, Integer> pathToIdCount = new HashMap<String, Integer>();
    for (String path : syncedfiles.keySet()) {
        String id = syncedfiles.get(path);
        Integer count = pathToIdCount.get(id);
        if (count == null)
            count = new Integer(1);
        else
            count = new Integer(count.intValue() + 1);
        pathToIdCount.put(id, count);
    }

    for (String path : syncedfiles.keySet()) {
        String id = syncedfiles.get(path);
        Integer count = pathToIdCount.get(id);
        if (!entities.contains(path)) {
            if (count.intValue() <= 1) {
                srvdelete.put(path, id);
                log.log(Level.INFO, "Remote delete: {0} {1}", new Object[] { path, id });
            }
        }
    }
    //items with more components

    if (!srvdelete.isEmpty()) {

        srv.containerRemoveMembers(parent, srvdelete.values());
        for (String path : srvdelete.keySet()) {
            App.db.deleteMapping(path, srvdelete.get(path), File.separator);

        }
        File parentdir = new File(directory + File.separator + basedir);
        App.db.storeMapping(basedir, parent.getObjid(), parentdir.lastModified(),
                parent.getLastModificationDate(), SyncDB.DIRECTORY);

    }

}

From source file:service.EventService.java
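
Here, a HashSet of user ids acts as a seen-set: contains filters the participated users so that only those not already in the list of active users are appended.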

public List<CabinetUser> getCUListForCampaignSpecification(Long campaignId, Long pkId) {
    List<CabinetUser> mclist = getActiveMakingCallsUsers(pkId);
    HashSet<CabinetUser> pset = getParticipatedUsers(campaignId, pkId);
    HashSet<Long> idset = new HashSet<Long>();
    for (CabinetUser cu : mclist) {
        idset.add(cu.getId());
    }
    for (CabinetUser cu : pset) {
        if (!idset.contains(cu.getId())) {
            mclist.add(cu);
        }
    }
    return mclist;
}

From source file:edu.cornell.mannlib.vitro.webapp.edit.n3editing.configuration.preprocessors.AgentHasContributionPreprocessor.java
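
Here, contains compares the work URIs found in the retractions model against those found in the additions model; for URIs missing from the additions, the type, rdfs:label, and foaf:name statements are pulled out of the retractions.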

private Model getRetractionsToRemove(Model retractionsModel, Model additionsModel) {
    Model retractionsToRemove = ModelFactory.createDefaultModel();

    if (!retractionsModel.isEmpty()) {
        HashSet<String> additionsWorkURIs = new HashSet<String>();
        HashSet<String> preserveRetractionsWorkURIs = new HashSet<String>();
        String queryStr = getSparqlQuery();
        Query query = null;
        QueryExecution qe = null;

        additionsModel.getLock().enterCriticalSection(Lock.READ);
        try {
            query = QueryFactory.create(queryStr);
            qe = QueryExecutionFactory.create(query, additionsModel);
            ResultSet res = qe.execSelect();

            while (res.hasNext()) {
                QuerySolution qs = res.nextSolution();
                additionsWorkURIs.add(qs.getResource("work").getURI());
            }
        } catch (Exception ex) {
            log.error("Exception occurred in querying additions model for agent ", ex);
        }

        retractionsModel.getLock().enterCriticalSection(Lock.READ);
        try {
            query = QueryFactory.create(queryStr);
            qe = QueryExecutionFactory.create(query, retractionsModel);
            ResultSet res = qe.execSelect();

            while (res.hasNext()) {
                QuerySolution qs = res.nextSolution();
                String workURI = qs.getResource("work").getURI();
                //if this URI is not in the additions, then the agent is being removed from the property, and we
                //want to ensure that the type, rdfs:label, and foaf:name statements are not added to the retractions
                if (!additionsWorkURIs.contains(workURI)) {
                    preserveRetractionsWorkURIs.add(workURI);
                }
            }
        } catch (Exception ex) {
            log.error("Exception occurred in querying additions model for agent ", ex);
        }

        //Now, for each work URI to be preserved, collect its type, name, and label statements for removal from the retractions
        for (String uri : preserveRetractionsWorkURIs) {
            Resource workURI = ResourceFactory.createResource(uri);
            Property foafNameProperty = ResourceFactory.createProperty(foaf + "name");
            retractionsToRemove.add(retractionsModel.listStatements(workURI, RDF.type, (RDFNode) null));
            retractionsToRemove.add(retractionsModel.listStatements(workURI, foafNameProperty, (RDFNode) null));
            retractionsToRemove.add(retractionsModel.listStatements(workURI, RDFS.label, (RDFNode) null));

        }
    }

    return retractionsToRemove;
}

From source file:com.sec.ose.osi.sdk.protexsdk.discovery.report.DefaultEntityListCreator.java
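
Here, contains is used twice: a HashSet of concatenated key strings detects duplicate report entities, and an ArrayList lookup decides which fields participate in the duplication key.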

protected ReportEntityList buildEntityList(BufferedReader XmlReportReader, ArrayList<String> entityKeyList,
        ArrayList<String> duplicationCheckingField) {
    ReportEntityList reportEntityList = new ReportEntityList();
    ReportEntity reportEntity = new ReportEntity();
    String tmpLine = null;
    StringBuffer value = new StringBuffer();
    int index = 0;

    if (duplicationCheckingField == null) {
        duplicationCheckingField = new ArrayList<String>();
    }
    boolean entityDuplicationCheck = duplicationCheckingField.size() > 0;
    HashSet<String> entityDuplicationCheckKeySet = new HashSet<String>();
    String duplicationCheckString = "";

    int totalCnt = 0;
    int insertedCnt = 0;

    try {
        while ((tmpLine = XmlReportReader.readLine()) != null) {

            totalCnt++;
            if (totalCnt % 10000 == 0) {
                log.debug("buildEntityList cnt: " + totalCnt + ", insertedCnt: " + insertedCnt);
            }
            if (totalCnt > Property.getInstance().getMaxNumOfReportEntity()) {

                log.error("Report Entity is larger than MAX_NUM_OF_REPORT_ENTITY: "
                        + Property.getInstance().getMaxNumOfReportEntity());
                JOptionPane.showMessageDialog(null,
                        "[OUT OF MEMORY] Project loading has been failed.\n"
                                + "Please, Reanalyze this project with smallar files.\n"
                                + "to reduce the size of project.\n",
                        "Program Exit - Project size is too big", JOptionPane.ERROR_MESSAGE);
                System.exit(0);
            }

            if (tmpLine.startsWith(ROW_END_TAG)) {

                if (entityDuplicationCheck) {
                    if (!entityDuplicationCheckKeySet.contains(duplicationCheckString)) {
                        reportEntityList.addEntity(reportEntity);
                        insertedCnt++;
                    }

                    entityDuplicationCheckKeySet.add(duplicationCheckString);
                    duplicationCheckString = "";
                } else {
                    reportEntityList.addEntity(reportEntity);
                    insertedCnt++;
                }

                reportEntity = new ReportEntity();
                index = 0;
                if (XmlReportReader.readLine().equals(TABLE_END_TAG)) {
                    break; // read <Row ss:Index="#">
                }
            } else {
                int startIndex = tmpLine.indexOf(DATA_START_TAG);
                if (startIndex > 0) {
                    tmpLine = tmpLine.substring(startIndex + DATA_START_TAG_LEN);
                }
                int endIndex = tmpLine.indexOf(DATA_END_TAG_WITH_NS);
                if (endIndex >= 0) {
                    value.append(tmpLine.substring(0, endIndex));
                    if (entityDuplicationCheck) {
                        String currentKey = entityKeyList.get(index);
                        if (duplicationCheckingField.contains(currentKey))
                            duplicationCheckString += value.toString() + "-";
                    }
                    reportEntity.setValue(entityKeyList.get(index), value.toString());
                    index++;
                    value = new StringBuffer();

                } else {
                    value.append(tmpLine);
                }

            }

        }
    } catch (IOException e) {
        log.warn(e);
        String[] buttonOK = { "OK" };
        JOptionPane.showOptionDialog(null, "Out Of Memory Error", "Java heap space", JOptionPane.OK_OPTION,
                JOptionPane.ERROR_MESSAGE, null, buttonOK, "OK");
    }
    log.debug("total buildEntityList cnt: " + totalCnt + ", inserted cnt: " + insertedCnt);
    return reportEntityList;
}

From source file:com.microsoft.tfs.client.common.ui.wit.form.WorkItemLinksControl.java
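
Here, the work item ids returned by a WIQL query are loaded into a HashSet, and contains decides which related links pass the work-item-type filter.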

@Override
protected Object[] getItemsFromWorkItem(final WorkItem workItem) {
    // If no options were supplied there is no filtering to perform.
    if (linksControlOptions == null) {
        final ArrayList<Link> links = new ArrayList<Link>();
        for (final Link link : workItem.getLinks()) {
            if (!link.isPendingDelete()) {
                links.add(link);
            }
        }
        return links.toArray();
    }

    // Filter the links based on the specified options.
    final WorkItemLinkTypeCollection linkTypes = workItem.getClient().getLinkTypes();
    final WIFormLinksControlWILinkFilters wiLinkFilters = linksControlOptions.getWorkItemLinkFilters();
    final WIFormLinksControlWITypeFilters wiTypeFilters = linksControlOptions.getWorkItemTypeFilters();
    final WIFormLinksControlExternalLinkFilters externalLinkFilters = linksControlOptions
            .getExternalLinkFilters();

    // Filter the full list by link type. Track the IDs of work item links.
    final ArrayList<Link> filteredByLinkType = new ArrayList<Link>();
    final ArrayList<Link> workItemLinks = new ArrayList<Link>();

    for (final Link link : workItem.getLinks()) {
        if (link.isPendingDelete()) {
            continue;
        }

        if (link instanceof ExternalLink || link instanceof Hyperlink) {
            if (externalLinkFilters == null || externalLinkFilters.includes(link.getLinkType().getName())) {
                filteredByLinkType.add(link);
            }
        } else if (link instanceof RelatedLink) {
            final int linkTypeId = ((RelatedLink) link).getWorkItemLinkTypeID();
            final String linkReferenceName = linkTypes.getReferenceName(linkTypeId);
            final boolean isForward = linkTypes.isForwardLink(linkTypeId);
            final boolean isReverse = linkTypes.isReverseLink(linkTypeId);

            if (wiLinkFilters == null || wiLinkFilters.includes(linkReferenceName, isForward, isReverse)) {
                filteredByLinkType.add(link);
                workItemLinks.add(link);
            }
        }
    }

    // If there are no work-item type filters, we are done.
    if (wiTypeFilters == null || workItemLinks.size() == 0) {
        return filteredByLinkType.toArray();
    }

    // If the work-item type filter says INCLUDEALL we are done.
    final WIFormLinksControlWITypeFilterEnum filterType = wiTypeFilters.getFilter();
    if (filterType == WIFormLinksControlWITypeFilterEnum.INCLUDEALL) {
        return filteredByLinkType.toArray();
    }

    // Create a SQL query to further filter the list by work item type.
    final int[] candidateWorkItemIds = new int[workItemLinks.size()];
    for (int i = 0; i < workItemLinks.size(); i++) {
        candidateWorkItemIds[i] = ((RelatedLink) workItemLinks.get(i)).getTargetWorkItemID();
    }

    final String projectName = workItem.getType().getProject().getName();
    final String wiql = wiTypeFilters.createFilterWIQLQuery(candidateWorkItemIds, projectName);

    // TODO: try/catch here?
    final WorkItemCollection workItems = workItem.getClient().query(wiql);

    // The workItem IDs returned from the query are the work item links to
    // keep.
    final HashSet<Integer> mapWorkItemIds = new HashSet<Integer>();
    final int[] workItemIds = workItems.getIDs();
    for (int i = 0; i < workItemIds.length; i++) {
        mapWorkItemIds.add(new Integer(workItemIds[i]));
    }

    final ArrayList<Link> filteredByWorkItemType = new ArrayList<Link>();
    for (int i = 0; i < filteredByLinkType.size(); i++) {
        final Link link = filteredByLinkType.get(i);

        if (link instanceof RelatedLink) {
            final Integer boxedId = new Integer(((RelatedLink) link).getTargetWorkItemID());
            if (mapWorkItemIds.contains(boxedId)) {
                filteredByWorkItemType.add(link);
            }
        } else {
            filteredByWorkItemType.add(link);
        }
    }

    return filteredByWorkItemType.toArray();
}

From source file:net.bluehornreader.service.FeedCrawlerService.java
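
Here, contains checks whether a feed is currently being crawled; such feeds are kept out of the availableFeeds queue so they do not end up in it twice.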

/**
 * If there's any change, it deletes all previous feeds. It would be nicer to keep what already exists, but it's not clear that's worth it.
 *
 * @throws Exception
 */
private void updateFeedList() throws Exception {

    int feedIdsSeq = crawlerDb.getFeedIdsSeq(crawler.crawlerId);
    if (crawler.feedIdsSeq == feedIdsSeq) {
        return;
    }

    LOG.info("Feed list changed");
    HashMap<String, FeedInfo> newFeedMap = new HashMap<>();

    Crawler newCrawler = crawlerDb.getCrawler(crawler.crawlerId);

    synchronized (this) {

        // Some feeds might be being crawled at this time; we don't want to end up with 2 entries for them in availableFeeds, so we don't add them
        HashSet<String> crawlingFeedIds = new HashSet<>(feedMap.keySet());
        {
            HashSet<String> availableFeedIds = new HashSet<>();
            for (FeedInfo feedInfo : availableFeeds) {
                availableFeedIds.add(feedInfo.feed.feedId);
            }
            crawlingFeedIds.removeAll(availableFeedIds);
        }

        availableFeeds = new PriorityQueue<>(newFeedMap.size() + 1, feedInfoComparator);
        for (String feedId : newCrawler.feedIds) {
            Feed feed = feedDb.get(feedId);
            if (feed == null) {
                LOG.warn(String.format(
                        "FeedCrawlerService %s was asked to crawl feed %s but couldn't find such a feed", IP,
                        feedId));
            } else {
                FeedInfo feedInfo = feedMap.get(feedId);
                if (feedInfo == null) {
                    feedInfo = new FeedInfo(feed, getSeq());
                    LOG.info("New feed to crawl: " + feedInfo);
                }
                newFeedMap.put(feedId, feedInfo);
                if (crawlingFeedIds.contains(feedId)) {
                    LOG.info(String.format(
                            "Feed %s is being currently crawled, so it's not going to be added to the list with available feeds",
                            feedInfo));
                } else {
                    availableFeeds.add(feedInfo);
                }
            }
        }

        feedMap = newFeedMap;
        crawler.feedIdsSeq = feedIdsSeq;
        LOG.info("Feeds to crawl: " + feedMap);
    }
}