Example usage for java.util HashSet remove

List of usage examples for java.util HashSet remove

Introduction

This page collects usage examples for java.util.HashSet.remove.

Prototype

public boolean remove(Object o) 

Document

Removes the specified element from this set if it is present.
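
remove() returns true only when the element was present (and has now been removed) and false otherwise, so the return value doubles as a membership test. A minimal, self-contained sketch (hypothetical class name) illustrating that behavior:

import java.util.HashSet;

public class HashSetRemoveDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<String>();
        colors.add("red");
        colors.add("green");

        // The element is present, so it is removed and true is returned.
        System.out.println(colors.remove("red"));   // true
        // A repeated call, or a call with an absent element, returns false.
        System.out.println(colors.remove("red"));   // false
        System.out.println(colors.remove("blue"));  // false

        System.out.println(colors);                 // [green]
    }
}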

Usage

From source file:org.kuali.ext.mm.context.DataDictionaryConfigurationTest.java

@Test
public void testAllDataDicitionaryDocumentTypesExistInWorkflowDocumentTypeTable() throws Exception {
    HashSet<String> workflowDocumentTypeNames = new HashSet<String>();
    DataSource mySource = (DataSource) SpringContext.getBean("dataSource");
    Connection dbCon = null;
    try {

        dbCon = mySource.getConnection();
        Statement dbAsk = dbCon.createStatement();
        ResultSet dbAnswer = dbAsk.executeQuery("select DOC_TYP_NM from KREW_DOC_TYP_T where CUR_IND = 1");
        while (dbAnswer.next()) {
            String docName = dbAnswer.getString(1);
            if (StringUtils.isNotBlank(docName)) {
                workflowDocumentTypeNames.add(docName);
            }
        }

    } catch (Exception e) {
        throw (e);
    }
    // Using HashSet since duplicate objects would otherwise be returned
    HashSet<DocumentEntry> documentEntries = new HashSet(dataDictionary.getDocumentEntries().values());
    List<String> ddEntriesWithMissingTypes = new ArrayList<String>();
    for (DocumentEntry documentEntry : documentEntries) {
        String name = documentEntry.getDocumentTypeName();
        String testName = new String(" ");
        // if (documentEntry instanceof StoresMaintenanceDocumentEntry){
        // testName=((StoresMaintenanceDocumentEntry)documentEntry).getBusinessObjectClass().getName();
        // }else{
        // testName=documentEntry.getDocumentClass().getName();
        // }
        if (!workflowDocumentTypeNames.contains(name) && !"RiceUserMaintenanceDocument".equals(name)
                && !testName.contains("rice")) {
            ddEntriesWithMissingTypes.add(name);
        } else {
            workflowDocumentTypeNames.remove(name);
        }
    }

    if (workflowDocumentTypeNames.size() > 0) {
        try {
            // If documents are parent docs, then they aren't superfluous.
            String queryString = "select distinct doc_typ_nm from krew_doc_typ_t"
                    + " where doc_typ_id in (select parnt_id from krew_doc_typ_t" + " where actv_ind = 1"
                    + " and cur_ind = 1)";
            Statement dbAsk = dbCon.createStatement();
            ResultSet dbAnswer = dbAsk.executeQuery(queryString);
            while (dbAnswer.next()) {
                String docName = dbAnswer.getString(1);
                if (StringUtils.isNotBlank(docName)) {
                    workflowDocumentTypeNames.remove(docName);
                }
            }
        } catch (Exception e) {
            throw (e);
        }

        System.err.print("superfluousTypesDefinedInWorkflowDatabase: " + workflowDocumentTypeNames);
    }
    assertEquals("documentTypesNotDefinedInWorkflowDatabase: " + ddEntriesWithMissingTypes, 0,
            ddEntriesWithMissingTypes.size());
}

From source file:com.webcohesion.enunciate.modules.docs.DocsModule.java

private TreeSet<Artifact> findDocumentationArtifacts() {
    HashSet<String> explicitArtifacts = new HashSet<String>();
    TreeSet<Artifact> artifacts = new TreeSet<Artifact>();
    for (ExplicitDownloadConfig download : getExplicitDownloads()) {
        if (download.getArtifact() != null) {
            explicitArtifacts.add(download.getArtifact());
        } else if (download.getFile() != null) {
            File downloadFile = resolveFile(download.getFile());

            debug("File %s to be added as an extra download.", downloadFile.getAbsolutePath());
            SpecifiedArtifact artifact = new SpecifiedArtifact(getName(), downloadFile.getName(), downloadFile);

            if (download.getName() != null) {
                artifact.setName(download.getName());
            }

            if (download.getDescription() != null) {
                artifact.setDescription(download.getDescription());
            }

            artifact.setShowLink(!"false".equals(download.getShowLink()));

            artifacts.add(artifact);
        }
    }

    for (Artifact artifact : this.enunciate.getArtifacts()) {
        if (artifact.isPublic() || explicitArtifacts.contains(artifact.getId())) {
            artifacts.add(artifact);
            debug("Artifact %s to be added as an extra download.", artifact.getId());
            explicitArtifacts.remove(artifact.getId());
        }
    }

    if (explicitArtifacts.size() > 0) {
        for (String artifactId : explicitArtifacts) {
            warn("WARNING: Unknown artifact '%s'.  Will not be available for download.", artifactId);
        }
    }
    return artifacts;
}

From source file:org.apache.hadoop.hdfs.DataStreamer.java

private boolean[] getPinnings(DatanodeInfo[] nodes, boolean shouldLog) {
    if (favoredNodes == null) {
        return null;
    } else {
        boolean[] pinnings = new boolean[nodes.length];
        HashSet<String> favoredSet = new HashSet<String>(Arrays.asList(favoredNodes));
        for (int i = 0; i < nodes.length; i++) {
            pinnings[i] = favoredSet.remove(nodes[i].getXferAddrWithHostname());
            if (LOG.isDebugEnabled()) {
                LOG.debug(nodes[i].getXferAddrWithHostname() + " was chosen by name node (favored="
                        + pinnings[i] + ").");
            }
        }
        if (shouldLog && !favoredSet.isEmpty()) {
            // One or more favored nodes were not allocated.
            LOG.warn("These favored nodes were specified but not chosen: " + favoredSet
                    + " Specified favored nodes: " + Arrays.toString(favoredNodes));

        }
        return pinnings;
    }
}
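
In the DataStreamer example above, the boolean returned by remove() both records whether a datanode was one of the favored nodes and consumes the match, so whatever is left in the set afterwards was requested but never chosen. A standalone sketch of that idiom, with hypothetical host addresses:

import java.util.Arrays;
import java.util.HashSet;

public class FavoredSetSketch {
    public static void main(String[] args) {
        String[] favored = { "host-a:50010", "host-b:50010", "host-c:50010" };
        String[] chosen = { "host-b:50010", "host-d:50010" };

        HashSet<String> favoredSet = new HashSet<String>(Arrays.asList(favored));
        boolean[] pinnings = new boolean[chosen.length];
        for (int i = 0; i < chosen.length; i++) {
            // true only if the chosen address was in the favored set; the match is consumed.
            pinnings[i] = favoredSet.remove(chosen[i]);
        }

        System.out.println("pinnings = " + Arrays.toString(pinnings)); // [true, false]
        System.out.println("favored but not chosen = " + favoredSet);  // host-a and host-c
    }
}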

From source file:org.alfresco.repo.security.person.PersonServiceImpl.java

private NodeRef handleDuplicates(List<NodeRef> refs, String searchUserName) {
    if (processDuplicates) {
        NodeRef best = findBest(refs);
        HashSet<NodeRef> toHandle = new HashSet<NodeRef>();
        toHandle.addAll(refs);
        toHandle.remove(best);
        addDuplicateNodeRefsToHandle(toHandle);
        return best;
    } else {
        String userNameSensitivity = " (user name is case-"
                + (userNameMatcher.getUserNamesAreCaseSensitive() ? "sensitive" : "insensitive") + ")";
        String domainNameSensitivity = "";
        if (!userNameMatcher.getDomainSeparator().equals("")) {
            domainNameSensitivity = " (domain name is case-"
                    + (userNameMatcher.getDomainNamesAreCaseSensitive() ? "sensitive" : "insensitive") + ")";
        }

        throw new AlfrescoRuntimeException(
                "Found more than one user for " + searchUserName + userNameSensitivity + domainNameSensitivity);
    }
}

From source file:co.rewen.statex.StateXModule.java

/**
 * Given an array of keys, this returns a map of (key, value) pairs for the keys found, and
 * (key, null) for the keys that haven't been found.
 */
@ReactMethod
public void multiGet(final ReadableArray keys, final Callback callback) {
    if (keys == null) {
        callback.invoke(AsyncStorageErrorUtil.getInvalidKeyError(null), null);
        return;
    }

    new GuardedAsyncTask<Void, Void>(getReactApplicationContext()) {
        @Override
        protected void doInBackgroundGuarded(Void... params) {
            if (!ensureDatabase()) {
                callback.invoke(AsyncStorageErrorUtil.getDBError(null), null);
                return;
            }

            String[] columns = { KEY_COLUMN, VALUE_COLUMN };
            HashSet<String> keysRemaining = SetBuilder.newHashSet();
            WritableArray data = Arguments.createArray();
            for (int keyStart = 0; keyStart < keys.size(); keyStart += MAX_SQL_KEYS) {
                int keyCount = Math.min(keys.size() - keyStart, MAX_SQL_KEYS);
                Cursor cursor = mStateXDatabaseSupplier.get().query(TABLE_STATE, columns,
                        AsyncLocalStorageUtil.buildKeySelection(keyCount),
                        AsyncLocalStorageUtil.buildKeySelectionArgs(keys, keyStart, keyCount), null, null,
                        null);
                keysRemaining.clear();
                try {
                    if (cursor.getCount() != keys.size()) {
                        // some keys have not been found - insert them with null into the final array
                        for (int keyIndex = keyStart; keyIndex < keyStart + keyCount; keyIndex++) {
                            keysRemaining.add(keys.getString(keyIndex));
                        }
                    }

                    if (cursor.moveToFirst()) {
                        do {
                            WritableArray row = Arguments.createArray();
                            row.pushString(cursor.getString(0));
                            row.pushString(cursor.getString(1));
                            data.pushArray(row);
                            keysRemaining.remove(cursor.getString(0));
                        } while (cursor.moveToNext());
                    }
                } catch (Exception e) {
                    FLog.w(ReactConstants.TAG, e.getMessage(), e);
                    callback.invoke(AsyncStorageErrorUtil.getError(null, e.getMessage()), null);
                    return;
                } finally {
                    cursor.close();
                }

                for (String key : keysRemaining) {
                    WritableArray row = Arguments.createArray();
                    row.pushString(key);
                    row.pushNull();
                    data.pushArray(row);
                }
                keysRemaining.clear();
            }

            callback.invoke(null, data);
        }
    }.execute();
}

From source file:org.apache.roller.weblogger.pojos.WeblogEntry.java

public void updateTags(List<String> updatedTags) throws WebloggerException {

    if (updatedTags == null) {
        return;
    }

    HashSet newTags = new HashSet(updatedTags.size());
    Locale localeObject = getWebsite() != null ? getWebsite().getLocaleInstance() : Locale.getDefault();

    for (Iterator<String> it = updatedTags.iterator(); it.hasNext();) {
        String name = it.next();
        newTags.add(Utilities.normalizeTag(name, localeObject));
    }

    HashSet removeTags = new HashSet();

    // remove old ones no longer passed.
    for (Iterator it = getTags().iterator(); it.hasNext();) {
        WeblogEntryTag tag = (WeblogEntryTag) it.next();
        if (!newTags.contains(tag.getName())) {
            removeTags.add(tag.getName());
        } else {
            newTags.remove(tag.getName());
        }
    }

    WeblogEntryManager weblogManager = WebloggerFactory.getWeblogger().getWeblogEntryManager();
    for (Iterator it = removeTags.iterator(); it.hasNext();) {
        weblogManager.removeWeblogEntryTag((String) it.next(), this);
    }

    for (Iterator it = newTags.iterator(); it.hasNext();) {
        addTag((String) it.next());
    }
}
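
The updateTags example above uses remove() to reconcile two tag sets: tags already attached to the entry are dropped from the incoming set, so what remains is exactly the set of tags to add, while tags no longer passed in are collected for deletion. A standalone sketch of that reconciliation, with hypothetical tag values:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class TagReconcileSketch {
    public static void main(String[] args) {
        HashSet<String> newTags = new HashSet<String>(Arrays.asList("java", "hashset", "tutorial"));
        List<String> existingTags = Arrays.asList("java", "collections");

        HashSet<String> removeTags = new HashSet<String>();
        for (String existing : existingTags) {
            if (!newTags.contains(existing)) {
                // The existing tag was not passed in again, so it should be removed.
                removeTags.add(existing);
            } else {
                // Already attached; it is not new, so drop it from the add set.
                newTags.remove(existing);
            }
        }

        System.out.println("tags to remove = " + removeTags); // [collections]
        System.out.println("tags to add    = " + newTags);    // hashset and tutorial, in no particular order
    }
}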

From source file:com.clustercontrol.jobmanagement.factory.ModifyJob.java

public Long replaceJobunit(List<JobInfo> oldList, List<JobInfo> newList, String userId)
        throws JobInvalid, JobMasterNotFound, EntityExistsException, HinemosUnknown, JobMasterDuplicate,
        InvalidSetting, InvalidRole {
    // Jobunit ID shared by every entry in the new list.
    String jobunitId = newList.get(0).getJobunitId();

    // Record the start time for the timing log messages below.
    long start = HinemosTime.currentTimeMillis();

    HashSet<JobInfo> delJobs = new HashSet<JobInfo>(oldList);
    HashSet<JobInfo> newJobs = new HashSet<JobInfo>(newList);
    delJobs.removeAll(newJobs);

    long timeJobToDelete = HinemosTime.currentTimeMillis();
    m_log.info("Find jobs to delete " + (timeJobToDelete - start) + "ms");

    HashSet<JobInfo> addJobs = newJobs;
    addJobs.removeAll(new HashSet<JobInfo>(oldList));

    long timeJobToAdd = HinemosTime.currentTimeMillis();
    m_log.info("Find jobs to add " + (timeJobToAdd - timeJobToDelete) + "ms");
    m_log.info("oldList=" + oldList.size() + ", newList=" + newList.size() + ", delJobs=" + delJobs.size()
            + ", addJobs=" + addJobs.size());

    JpaTransactionManager jtm = new JpaTransactionManager();
    for (JobInfo delJob : delJobs) {
        JobMstEntity entity = QueryUtil.getJobMstPK(delJob.getJobunitId(), delJob.getId());
        deleteJob(entity);
    }

    jtm.flush();

    long timestamp = HinemosTime.currentTimeMillis();

    // Register the jobunit (top-level) entry first so that its children can reference it.
    for (JobInfo addJob : addJobs) {
        int type = addJob.getType();
        if (type == JobConstant.TYPE_JOBUNIT) {
            String jobId = addJob.getId();
            String parentJobId = addJob.getParentId();
            if (jobunitId.equals(jobId)) {
                parentJobId = CreateJobSession.TOP_JOB_ID;
            }
            createJobMasterData(addJob, jobunitId, parentJobId, userId, timestamp);
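            // Removing while iterating is safe here only because the loop breaks immediately afterwards.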
            addJobs.remove(addJob);
            break;
        }
    }
    for (JobInfo addJob : addJobs) {
        String jobId = addJob.getId();
        String parentJobId = addJob.getParentId();
        if (jobunitId.equals(jobId)) {
            parentJobId = CreateJobSession.TOP_JOB_ID;
        }
        createJobMasterData(addJob, jobunitId, parentJobId, userId, timestamp);
    }

    // Refresh the update timestamp on the jobunit itself.
    String jobId = newList.get(0).getId();
    JobMstEntity entity = QueryUtil.getJobMstPK(jobunitId, jobId);
    entity.setUpdateDate(timestamp);
    m_log.info("Left tasks in replaceJobunit " + (HinemosTime.currentTimeMillis() - timeJobToAdd) + "ms");

    // Return the timestamp applied to this update.
    return timestamp;
}

From source file:org.sakaiproject.component.gradebook.GradebookFrameworkServiceImpl.java

private void mergeGradeMappings(Collection gradingScaleDefinitions, Session session) throws HibernateException {
    Map newMappingDefinitionsMap = new HashMap();
    HashSet uidsToSet = new HashSet();
    for (Iterator iter = gradingScaleDefinitions.iterator(); iter.hasNext();) {
        GradingScaleDefinition bean = (GradingScaleDefinition) iter.next();
        newMappingDefinitionsMap.put(bean.getUid(), bean);
        uidsToSet.add(bean.getUid());
    }

    // Until we move to Hibernate 3 syntax, we need to update one record at a time.
    Query q;
    List gmtList;

    // Toggle any scales that are no longer specified.
    q = session.createQuery(
            "from GradingScale as gradingScale where gradingScale.uid not in (:uidList) and gradingScale.unavailable=false");
    q.setParameterList("uidList", uidsToSet);
    gmtList = q.list();
    for (Iterator iter = gmtList.iterator(); iter.hasNext();) {
        GradingScale gradingScale = (GradingScale) iter.next();
        gradingScale.setUnavailable(true);
        session.update(gradingScale);
        if (log.isInfoEnabled())
            log.info("Set Grading Scale " + gradingScale.getUid() + " unavailable");
    }

    // Modify any specified scales that already exist.
    q = session.createQuery("from GradingScale as gradingScale where gradingScale.uid in (:uidList)");
    q.setParameterList("uidList", uidsToSet);
    gmtList = q.list();
    for (Iterator iter = gmtList.iterator(); iter.hasNext();) {
        GradingScale gradingScale = (GradingScale) iter.next();
        copyDefinitionToScale((GradingScaleDefinition) newMappingDefinitionsMap.get(gradingScale.getUid()),
                gradingScale);
        uidsToSet.remove(gradingScale.getUid());
        session.update(gradingScale);
        if (log.isInfoEnabled())
            log.info("Updated Grading Scale " + gradingScale.getUid());
    }

    // Add any new scales.
    for (Iterator iter = uidsToSet.iterator(); iter.hasNext();) {
        String uid = (String) iter.next();
        GradingScale gradingScale = new GradingScale();
        gradingScale.setUid(uid);
        GradingScaleDefinition bean = (GradingScaleDefinition) newMappingDefinitionsMap.get(uid);
        copyDefinitionToScale(bean, gradingScale);
        session.save(gradingScale);
        if (log.isInfoEnabled())
            log.info("Added Grading Scale " + gradingScale.getUid());
    }
    session.flush();
}

From source file:org.archive.modules.CrawlURI.java

/** Make the given key non-'heritable', meaning its value will 
 * not be added to descendant CrawlURIs. Only meaningful if
 * key was previously made heritable.
 * 
 * @param key to make non-heritable
 */
public void makeNonHeritable(String key) {
    @SuppressWarnings("unchecked")
    HashSet<String> heritableKeys = (HashSet<String>) data.get(A_HERITABLE_KEYS);
    if (heritableKeys == null) {
        return;
    }
    heritableKeys.remove(key);
    if (heritableKeys.size() == 1) {
        // only remaining heritable key is itself; disable completely
        data.remove(A_HERITABLE_KEYS);
    }
}
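
In the CrawlURI example above, the heritable keys live in a HashSet stored inside the URI's data map, and the whole set is discarded once only the marker entry naming the set itself remains (which is what the size() == 1 check detects). A minimal standalone sketch of that bookkeeping, with hypothetical class and key names:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

public class HeritableKeysSketch {
    private static final String A_HERITABLE_KEYS = "heritable-keys";
    private final Map<String, Object> data = new HashMap<String, Object>();

    public void makeHeritable(String key) {
        @SuppressWarnings("unchecked")
        HashSet<String> keys = (HashSet<String>) data.get(A_HERITABLE_KEYS);
        if (keys == null) {
            keys = new HashSet<String>();
            // The set carries its own storage key as a marker entry.
            keys.add(A_HERITABLE_KEYS);
            data.put(A_HERITABLE_KEYS, keys);
        }
        keys.add(key);
    }

    public void makeNonHeritable(String key) {
        @SuppressWarnings("unchecked")
        HashSet<String> keys = (HashSet<String>) data.get(A_HERITABLE_KEYS);
        if (keys == null) {
            return;
        }
        keys.remove(key);
        if (keys.size() == 1) {
            // Only the marker entry is left, so heritability can be disabled entirely.
            data.remove(A_HERITABLE_KEYS);
        }
    }
}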

From source file:org.apache.hadoop.hdfs.DataStreamer.java

private boolean[] getPinnings(DatanodeInfo[] nodes, boolean shouldLog) {
    if (favoredNodes == null) {
        return null;
    } else {
        boolean[] pinnings = new boolean[nodes.length];
        HashSet<String> favoredSet = new HashSet<String>(Arrays.asList(favoredNodes));
        for (int i = 0; i < nodes.length; i++) {
            pinnings[i] = favoredSet.remove(nodes[i].getXferAddrWithHostname());
            if (DFSClient.LOG.isDebugEnabled()) {
                DFSClient.LOG.debug(nodes[i].getXferAddrWithHostname() + " was chosen by name node (favored="
                        + pinnings[i] + ").");
            }
        }
        if (shouldLog && !favoredSet.isEmpty()) {
            // One or more favored nodes were not allocated.
            DFSClient.LOG.warn("These favored nodes were specified but not chosen: " + favoredSet
                    + " Specified favored nodes: " + Arrays.toString(favoredNodes));

        }
        return pinnings;
    }
}