Example usage for java.util HashSet removeAll

List of usage examples for java.util HashSet removeAll

Introduction

On this page you can find usage examples for java.util HashSet removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
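
For a quick illustration of this contract, here is a minimal, self-contained sketch (not taken from any of the projects below): removeAll performs an in-place set difference and returns true only if the set was actually modified.

import java.util.Arrays;
import java.util.HashSet;

public class RemoveAllDemo {
    public static void main(String[] args) {
        // Start with four elements.
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b", "c", "d"));

        // Remove every element that also appears in the argument collection;
        // argument elements absent from the set ("x") are silently ignored.
        boolean changed = set.removeAll(Arrays.asList("b", "d", "x"));

        System.out.println(changed); // true, because the set was modified
        System.out.println(set);     // "a" and "c" remain (iteration order is unspecified)
    }
}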

Usage

From source file:com.ikanow.aleph2.search_service.elasticsearch.services.ElasticsearchIndexService.java

@Override
public Tuple2<String, List<BasicMessageBean>> validateSchema(final SearchIndexSchemaBean schema,
        final DataBucketBean bucket) {
    final LinkedList<BasicMessageBean> errors = new LinkedList<BasicMessageBean>(); // (Warning mutable code)
    try {
        Map<String, DataSchemaBean.ColumnarSchemaBean> tokenization_overrides = Optionals
                .of(() -> schema.tokenization_override()).orElse(Collections.emptyMap());
        final HashSet<String> unsupported_tokenization_overrides = new HashSet<String>(
                tokenization_overrides.keySet());
        unsupported_tokenization_overrides
                .removeAll(Arrays.asList(ElasticsearchIndexUtils.DEFAULT_TOKENIZATION_TYPE,
                        ElasticsearchIndexUtils.NO_TOKENIZATION_TYPE));
        if (!unsupported_tokenization_overrides.isEmpty()) {
            errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                    SearchIndexErrorUtils.NOT_YET_SUPPORTED,
                    "tokenization_overrides: " + unsupported_tokenization_overrides.toString()));
        }
        Map<String, DataSchemaBean.ColumnarSchemaBean> type_overrides = Optionals
                .of(() -> schema.type_override()).orElse(Collections.emptyMap());
        type_overrides.keySet().stream().filter(type -> !_supported_types.contains(type))
                .forEach(type -> errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.NOT_YET_SUPPORTED, "type: " + type)));

        // If the user is trying to override the index name then they have to be admin:
        final Optional<String> manual_index_name = Optionals
                .<String>of(() -> ((String) bucket.data_schema().search_index_schema()
                        .technology_override_schema().get(SearchIndexSchemaDefaultBean.index_name_override_)));

        if (manual_index_name.isPresent()) { // (then must be admin)
            if (!_service_context.getSecurityService().hasUserRole(bucket.owner_id(),
                    ISecurityService.ROLE_ADMIN)) {
                errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.NON_ADMIN_BUCKET_NAME_OVERRIDE));
            }
        }

        final String index_name = ElasticsearchIndexUtils.getBaseIndexName(bucket, Optional.empty());
        boolean error = false; // (Warning mutable code)
        final boolean is_verbose = is_verbose(schema);
        final ElasticsearchIndexServiceConfigBean schema_config = ElasticsearchIndexConfigUtils
                .buildConfigBeanFromSchema(bucket, _config, _mapper);

        // 1) Check the schema:

        try {
            final Optional<String> type = Optional.ofNullable(schema_config.search_technology_override())
                    .map(t -> t.type_name_or_prefix());
            final String index_type = CollidePolicy.new_type == Optional
                    .ofNullable(schema_config.search_technology_override()).map(t -> t.collide_policy())
                    .orElse(CollidePolicy.new_type) ? "_default_"
                            : type.orElse(ElasticsearchIndexServiceConfigBean.DEFAULT_FIXED_TYPE_NAME);

            final XContentBuilder mapping = ElasticsearchIndexUtils.createIndexMapping(bucket, Optional.empty(),
                    true, schema_config, _mapper, index_type);
            if (is_verbose) {
                errors.add(ErrorUtils.buildSuccessMessage(bucket.full_name(), "validateSchema",
                        mapping.bytes().toUtf8()));
            }
        } catch (Throwable e) {
            errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                    ErrorUtils.getLongForm("{0}", e)));
            error = true;
        }

        // 2) Sanity check the max size

        final Optional<Long> index_max_size = Optional
                .ofNullable(schema_config.search_technology_override())
                .map(t -> t.target_index_size_mb());
        if (index_max_size.isPresent()) {
            final long max = index_max_size.get();
            if ((max > 0) && (max < 25)) {
                errors.add(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                        SearchIndexErrorUtils.INVALID_MAX_INDEX_SIZE, max));
                error = true;
            } else if (is_verbose) {
                errors.add(ErrorUtils.buildSuccessMessage(bucket.full_name(), "validateSchema",
                        "Max index size = {0} MB", max));
            }
        }
        return Tuples._2T(error ? "" : index_name, errors);
    } catch (Exception e) { // Very early error has occurred, just report that:
        return Tuples._2T("", Arrays.asList(ErrorUtils.buildErrorMessage(bucket.full_name(), "validateSchema",
                ErrorUtils.getLongForm("{0}", e))));
    }
}
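
In this example, removeAll() subtracts the two supported tokenization types from the set of override keys, so whatever remains in unsupported_tokenization_overrides is, by construction, unsupported and gets reported as an error.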

From source file:dao.SearchDaoDb.java

/**
 * addTags - adds the tags entered by users in search text
 * @param searchText - the search text entered by the user
 * @throws BaseDaoException If we have a problem interpreting the data or the data is missing
 * or incorrect
 */
public void addTags(String searchText) {

    if (RegexStrUtil.isNull(searchText)) {
        throw new BaseDaoException("params are null");
    }

    List tagList = RegexStrUtil.getWords(searchText);
    if (tagList == null) {
        throw new BaseDaoException("tags are null");
    }

    Connection conn = null;
    List searchResult = null;
    try {
        conn = ds.getConnection();
        searchResult = searchInTagsQuery.run(conn, searchText);
    } catch (Exception e) {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (Exception e1) {
            throw new BaseDaoException("error in conn.close(), searchInTagsQuery() ", e1);
        }
        throw new BaseDaoException("error in result, searchInTagsQuery()", e);
    }

    try {
        if (conn != null) {
            conn.close();
        }
    } catch (Exception e) {
        throw new BaseDaoException("error in conn.close(), searchInTagsQuery() ", e);
    }

    /**
     * none found, add all tags in the DB
     */
    if (searchResult == null) {
        for (int i = 0; i < tagList.size(); i++) {
            try {
                //Object params[] = {(Object)(String)tagList.get(i)};
                addTagQuery.run((String) tagList.get(i));
            } catch (Exception e) {
                throw new BaseDaoException("error in" + addTagQuery.getSql(), e);
            }
        }
    }

    if (searchResult != null) {
        /**
         * get the tags that are found in the DB and increment the hits
         */
        for (int i = 0; i < searchResult.size(); i++) {
            try {
                incrementTagHitsQuery
                        .run((String) ((Yourkeywords) searchResult.get(i)).getValue(DbConstants.ENTRYID));
            } catch (Exception e) {
                throw new BaseDaoException("error in incrementTagHitsQuery tag= "
                        + ((Yourkeywords) searchResult.get(i)).getValue(DbConstants.ENTRYID), e);
            }
        }

        /* get the tags that don't exist in the database  */
        HashSet foundList = new HashSet();
        for (int i = 0; i < searchResult.size(); i++) {
            if (searchResult.get(i) != null) {
                foundList.add(((Yourkeywords) searchResult.get(i)).getValue(DbConstants.TAG));
            }
        }

        /**
         * get the tags and add the tags that are not found in the searchResult
         */
        HashSet tagSet = new HashSet(tagList);
        // removeAll() keeps only the tags that were not found in the DB
        tagSet.removeAll(foundList);
        Iterator it1 = tagSet.iterator();
        while (it1.hasNext()) {
            try {
                addTagQuery.run((String) it1.next());
            } catch (Exception e) {
                throw new BaseDaoException("error in " + addTagQuery.getSql(), e);
            }
        }
    }
}

From source file:ch.unil.genescore.pathway.GeneSetLibrary.java

public void computeApproxPathwayCorrelation() {

    DenseMatrix corMat = new DenseMatrix(geneSets_.size(), geneSets_.size());
    for (int i = 0; i < geneSets_.size(); i++) {
        GeneSet leftSet = geneSets_.get(i);
        double leftSize = leftSet.genes_.size();
        for (int j = 0; j < geneSets_.size(); j++) {
            GeneSet rightSet = geneSets_.get(j);
            double rightSize = rightSet.genes_.size();
            HashSet<Gene> unpackedMetaGenes = new HashSet<Gene>();
            HashSet<Gene> allRightGenes = new HashSet<Gene>();
            if (null != rightSet.getMetaGenes())
                for (MetaGene mg : rightSet.getMetaGenes()) {
                    unpackedMetaGenes.addAll(mg.getGenes());
                }

            allRightGenes.addAll(unpackedMetaGenes);
            allRightGenes.addAll(rightSet.genes_);
            // guard against a null meta-gene list, as above
            if (null != rightSet.getMetaGenes())
                allRightGenes.removeAll(rightSet.getMetaGenes());

            HashSet<Gene> copiedLeftGenes = new HashSet<Gene>(leftSet.genes_);
            copiedLeftGenes.retainAll(allRightGenes);
            double count = copiedLeftGenes.size();
            if (null != leftSet.getMetaGenes())
                for (MetaGene mg : leftSet.getMetaGenes()) {
                    TreeSet<Gene> mgSetCopy = new TreeSet<Gene>(mg.getGenes());
                    mgSetCopy.retainAll(allRightGenes);
                    if (!mgSetCopy.isEmpty()) {
                        count++;
                    }
                }
            double corr = count / Math.sqrt(leftSize * rightSize);
            corMat.set(i, j, corr);
            //corMat.set(j, i, corr);
        }
    }
    pathwayCorMat_ = corMat;
}

From source file:org.strasa.middleware.manager.CreateFieldBookManagerImpl.java

/**
 * Validate site.
 * 
 * @param shObservation
 *            the sh observation
 * @param shSiteInfo
 *            the sh site info
 * @throws Exception
 *             the exception
 */
public void validateSite(Sheet shObservation, Sheet shSiteInfo) throws Exception {

    Integer colSite = getHeaderColumnNumber("Site", shObservation);
    HashSet<String> uniqueSite = new HashSet<String>();
    uniqueSite.addAll(readRowsByColumn(shObservation, 1, colSite).get(0));
    System.out.println(readRowsByColumn(shObservation, 1, colSite).get(0).get(0));
    ArrayList<String> lstUnknownSite = readRowsByColumn(shSiteInfo, 1, 0).get(0);
    if (lstUnknownSite.size() > uniqueSite.size()) {

        lstUnknownSite.removeAll(uniqueSite);
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(lstUnknownSite.toArray(new String[lstUnknownSite.size()]), ",")
                + "} in Observation sheet.");
    }
    if (uniqueSite.size() > lstUnknownSite.size()) {
        uniqueSite.removeAll(lstUnknownSite);
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(uniqueSite.toArray(new String[uniqueSite.size()]), ",")
                + "} in Site Information sheet.");

    }
    lstUnknownSite.removeAll(uniqueSite);
    if (!lstUnknownSite.isEmpty()) {
        throw new CreateFieldBookException("Invalid list of sites detected. Could not find {"
                + StringUtils.join(lstUnknownSite.toArray(new String[lstUnknownSite.size()]), ",")
                + "} in Site Information sheet.");

    }

}

From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java

private void checkSearchedMembers(Set<String> pids, Map<String, Item> memberSearchMap) throws RestException {
    if (!pids.equals(memberSearchMap.keySet())) {
        HashSet<String> notMembers = new HashSet<String>(pids);
        notMembers.removeAll(memberSearchMap.keySet());
        HashSet<String> missingPids = new HashSet<String>(memberSearchMap.keySet());
        missingPids.removeAll(pids);
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID,
                "Not member PIDs: " + notMembers.toString() + "\nMissing PIDs: " + missingPids.toString());
    }
}
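
Here the two removeAll() calls compute both directions of the set difference: pids minus the found keys yields the requested PIDs that are not members, and the found keys minus pids yields the member PIDs that were not among those requested.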

From source file:org.apache.ddlutils.io.DatabaseDataIO.java

/**
 * Sorts the given tables according to their foreign key order.
 *
 * @param tables The tables
 * @return The sorted tables
 */
private List sortTables(Table[] tables) {
    ArrayList result = new ArrayList();
    HashSet processed = new HashSet();
    ListOrderedMap pending = new ListOrderedMap();

    for (int idx = 0; idx < tables.length; idx++) {
        Table table = tables[idx];

        if (table.getForeignKeyCount() == 0) {
            result.add(table);
            processed.add(table);
        } else {
            HashSet waitedFor = new HashSet();

            for (int fkIdx = 0; fkIdx < table.getForeignKeyCount(); fkIdx++) {
                Table waitedForTable = table.getForeignKey(fkIdx).getForeignTable();

                if (!table.equals(waitedForTable)) {
                    waitedFor.add(waitedForTable);
                }
            }
            pending.put(table, waitedFor);
        }
    }

    HashSet newProcessed = new HashSet();

    while (!processed.isEmpty() && !pending.isEmpty()) {
        newProcessed.clear();
        for (Iterator it = pending.entrySet().iterator(); it.hasNext();) {
            Map.Entry entry = (Map.Entry) it.next();
            Table table = (Table) entry.getKey();
            HashSet waitedFor = (HashSet) entry.getValue();

            waitedFor.removeAll(processed);
            if (waitedFor.isEmpty()) {
                it.remove();
                result.add(table);
                newProcessed.add(table);
            }
        }
        processed.clear();

        HashSet tmp = processed;

        processed = newProcessed;
        newProcessed = tmp;
    }
    // the remaining are within circular dependencies
    for (Iterator it = pending.keySet().iterator(); it.hasNext();) {
        result.add(it.next());
    }
    return result;
}
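
The removeAll(processed) call drives this dependency ordering: each pass deletes the tables emitted so far from every pending table's wait set, and any table whose wait set becomes empty can be emitted in turn. Tables left pending when no further progress is possible are part of a circular dependency and are appended at the end.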

From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java

/**
 * Removes given children from a parent.
 * <p><b>Requires handler commit!</b>
 * @param parent parent PID
 * @param toRemovePidSet PIDs of children to remove
 */
private void deleteMembers(DigitalObjectHandler parent, Set<String> toRemovePidSet)
        throws DigitalObjectException {
    RelationEditor editor = parent.relations();
    List<String> members = editor.getMembers();
    // check that PIDs being removed are members of parent object
    HashSet<String> toRemovePidSetCopy = new HashSet<String>(toRemovePidSet);
    toRemovePidSetCopy.removeAll(members);
    if (!toRemovePidSetCopy.isEmpty()) {
        String msg = String.format("Parent: %s does not contain members: %s", parent.getFedoraObject().getPid(),
                toRemovePidSetCopy.toString());
        throw RestException.plainText(Status.BAD_REQUEST, msg);
    }
    // remove
    if (members.removeAll(toRemovePidSet)) {
        editor.setMembers(members);
        editor.write(editor.getLastModified(), session.asFedoraLog());
    }
}

From source file:be.fedict.dcat.datagovbe.Drupal.java

/**
 * Add DCAT distributions
 * 
 * @param builder 
 * @param uris
 * @param lang
 * @throws RepositoryException
 */
private void addDists(JsonObjectBuilder builder, List<String> uris, String lang) throws RepositoryException {
    HashSet<String> accesses = new HashSet<>();
    HashSet<String> downloads = new HashSet<>();
    HashSet<String> rights = new HashSet<>();
    HashSet<String> types = new HashSet<>();

    for (String uri : uris) {
        Map<IRI, ListMultimap<String, String>> dist = store.queryProperties(store.getURI(uri));
        if (hasLang(dist, lang)) {
            // Data.gov.be displays this information as fields on dataset
            // not on distribution.
            accesses.add(getLink(dist, DCAT.ACCESS_URL));
            downloads.add(getLink(dist, DCAT.DOWNLOAD_URL));
            rights.add(getLink(dist, DCTERMS.RIGHTS));
            types.add(getOne(dist, DATAGOVBE.MEDIA_TYPE, ""));

            builder.add(Drupal.FLD_LICENSE, arrayTermsJson(dist, DATAGOVBE.LICENSE));
        }
    }

    // remove duplicate links
    downloads.removeAll(accesses);
    rights.removeAll(accesses);

    builder.add(Drupal.FLD_DETAILS, urlArrayJson(accesses)).add(Drupal.FLD_LINKS, urlArrayJson(downloads))
            .add(Drupal.FLD_CONDITIONS, urlArrayJson(rights)).add(Drupal.FLD_FORMAT, arrayTermsJson(types));
}

From source file:com.pinterest.arcee.aws.EC2HostInfoDAOImpl.java

@Override
public List<String> getRunningInstances(List<String> runningIds) throws Exception {
    HashSet<String> ids = new HashSet<>(runningIds);
    ArrayList<String> resultIds = new ArrayList<>();
    while (!ids.isEmpty()) {
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.setInstanceIds(ids);
        Filter filter = new Filter("instance-state-code", Arrays.asList(RUNNING_CODE));
        request.setFilters(Arrays.asList(filter));
        try {
            do {
                DescribeInstancesResult results = ec2Client.describeInstances(request);
                List<Reservation> reservations = results.getReservations();
                for (Reservation reservation : reservations) {
                    for (Instance instance : reservation.getInstances()) {
                        resultIds.add(instance.getInstanceId());
                    }
                }
                if (StringUtils.isEmpty(results.getNextToken())) {
                    break;
                }

                request = new DescribeInstancesRequest();
                request.setNextToken(results.getNextToken());
            } while (true);
            LOG.debug("Cannot find the following ids in AWS: {}", ids);
            return resultIds;
        } catch (AmazonServiceException ex) {
            // drop the invalid instance ids reported by AWS from the set and retry with the rest
            ids.removeAll(handleInvalidInstanceId(ex));
        } catch (AmazonClientException ex) {
            LOG.error(String.format("Got AmazonClientException, exiting with terminatedHost %s",
                    resultIds.toString()), ex);
            throw new Exception(String.format("Got AmazonClientException, exiting with terminatedHost %s",
                    resultIds.toString()), ex);
        }
    }
    return resultIds;
}

From source file:org.paxle.tools.charts.impl.gui.ChartServlet.java

private void serviceChanged(ServiceReference reference, int eventType) {
    if (reference == null)
        return;
    if (eventType == ServiceEvent.MODIFIED)
        return;

    // ignoring unknown services
    String pid = (String) reference.getProperty(Constants.SERVICE_PID);
    if (!variableTree.containsKey(pid))
        return;

    // getting currently monitored variables
    final HashSet<String> currentVariableNames = new HashSet<String>();
    if (this.currentMonitorJob != null) {
        String[] temp = this.currentMonitorJob.getStatusVariableNames();
        if (temp != null) {
            currentVariableNames.addAll(Arrays.asList(temp));
        }

        try {
            // stopping old monitoring-job
            this.currentMonitorJob.stop();
        } catch (NullPointerException e) {
            // XXX this is a bug in the MA implementation and should be ignored for now
            this.logger.debug(e);
        }
        this.currentMonitorJob = null;
    }

    // getting variables of changed service
    final HashSet<String> diffVariableNames = new HashSet<String>();
    this.addVariables4Monitor(reference, diffVariableNames, eventType == ServiceEvent.REGISTERED,
            eventType == ServiceEvent.REGISTERED);

    if (eventType == ServiceEvent.REGISTERED) {
        // adding new variable
        currentVariableNames.addAll(diffVariableNames);
    } else if (eventType == ServiceEvent.UNREGISTERING) {
        currentVariableNames.removeAll(diffVariableNames);
    }

    // restarting monitoring job
    this.startScheduledJob(currentVariableNames);
}