Example usage for java.util.Set.retainAll

A list of usage examples for java.util.Set.retainAll, drawn from open-source projects.

Introduction

On this page you can find example usages of java.util.Set.retainAll.

Prototype

boolean retainAll(Collection<?> c);

Documentation

Retains only the elements in this set that are contained in the specified collection (optional operation).
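
Before the project examples, here is a minimal, runnable sketch (class and variable names are illustrative) showing the intersection semantics and the boolean return value:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));

        // retainAll keeps only the elements also contained in the argument
        // (a set intersection) and returns true if the set was modified.
        boolean changed = colors.retainAll(Arrays.asList("red", "orange", "yellow"));

        System.out.println(changed); // true
        System.out.println(colors);  // [red]
    }
}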

Usage

From source file:therian.operator.copy.PropertyCopier.java
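
Here, retainAll computes the set of copyable property names: in lenient mode the set is seeded with the target's writable properties and then intersected with the source's properties, so only properties present on both sides are matched.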

private Iterable<Copy<?, ?>> match(final TherianContext context,
        Copy<? extends SOURCE, ? extends TARGET> copy) {
    if (matching == null) {
        return Collections.emptySet();
    }
    final Set<String> properties = new HashSet<>(Arrays.asList(matching.value()));

    // if no properties were explicitly specified for matching, take whatever we can get
    final boolean lenient = properties.isEmpty();
    if (lenient) {
        properties.addAll(
                BeanProperties.getPropertyNames(ReturnProperties.WRITABLE, context, copy.getTargetPosition()));
        properties.retainAll(
                BeanProperties.getPropertyNames(ReturnProperties.ALL, context, copy.getSourcePosition()));
    }
    properties.removeAll(Arrays.asList(matching.exclude()));

    final List<Copy<?, ?>> result = new ArrayList<>();
    for (String property : properties) {
        if (StringUtils.isBlank(property)) {
            continue;
        }
        final RelativePositionFactory.ReadWrite<Object, ?> factory = Property.optional(property);
        final Position.Readable<?> target = dereference(factory, copy.getTargetPosition());
        final Position.Readable<?> source = dereference(factory, copy.getSourcePosition());

        if (lenient) {
            final Copy<?, ?> propertyCopy = Copy.Safely.to(target, source);
            if (context.supports(propertyCopy)) {
                result.add(propertyCopy);
            }
        } else {
            result.add(Copy.to(target, source));
        }
    }
    return result;
}

From source file:com.sun.identity.saml2.plugins.SAML2IDPProxyFRImpl.java
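
This IdP proxy intersects each IdP's supported authentication contexts with the contexts requested in the AuthnRequest; only IdPs whose intersection is non-empty are added to the result.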

private String selectIDPBasedOnAuthContext(List idpList, String realm, AuthnRequest authnRequest) {

    String classMethod = "selectIDPBasedOnAuthContext";
    EntityDescriptorElement idpDesc = null;
    Set authnRequestContextSet = null;
    String idps = "";

    try {

        List listOfAuthnContexts = authnRequest.getRequestedAuthnContext().getAuthnContextClassRef();
        debugMessage(classMethod, "listofAuthnContexts: " + listOfAuthnContexts);

        try {
            authnRequestContextSet = new HashSet(listOfAuthnContexts);
        } catch (Exception ex1) {
            authnRequestContextSet = new HashSet();
        }

        if ((idpList != null) && (!idpList.isEmpty())) {
            Iterator idpI = idpList.iterator();
            while (idpI.hasNext()) {
                String idp = (String) idpI.next();
                debugMessage(classMethod, "IDP is: " + idp);
                List supportedAuthnContextsbyIDP = getSupportedAuthnContextsByIDP(realm, idp);
                if (supportedAuthnContextsbyIDP != null) {
                    debugMessage(classMethod, "Standard Authn Contexts found for idp: " + idp);
                    Set idpContextSet = trimmedListToSet(supportedAuthnContextsbyIDP);
                    debugMessage(classMethod, "idpContextSet = " + idpContextSet);
                    idpContextSet.retainAll(authnRequestContextSet);
                    if (idpContextSet != null && !idpContextSet.isEmpty()) {
                        idps = idp + " " + idps;
                        debugMessage(classMethod,
                                "Standard Authn Contexts found for idp " + idp + ": " + idpContextSet);
                    }

                } else {
                    debugMessage(classMethod,
                            "The IdP" + idp + " has no standard authentication" + " contexts configured");
                }
            }
        }
    } catch (Exception me) {
        SAML2Utils.debug
                .error(classMethod + "Error when trying to get the idp's by standard Authn Context: " + me);
    }

    debugMessage(classMethod, " IDPList returns: " + idps);
    return idps.trim();

}

From source file:airlift.servlet.rest.RestfulSecurityContext.java
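
Here, retainAll reduces a user's role set to the roles permitted for the resource; access is allowed when the intersection is non-empty.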

/**
 * Checks whether the given user holds at least one of the allowed roles.
 *
 * @param _roleSet the set of allowed roles
 * @param _user the user to check
 * @param _restContext the current REST context
 * @return true, if access is allowed
 */
private boolean checkAllowed(java.util.Set<String> _roleSet, AirliftUser _user, RestContext _restContext) {
    java.util.Set<String> roleSet = (_roleSet != null) ? new java.util.HashSet<String>(_roleSet)
            : new java.util.HashSet<String>();

    boolean allowed = false;

    if (roleSet.contains("noone") == false) {
        if (roleSet.contains("all") == true) {
            allowed = true;
        } else {
            java.util.Set<String> userRoleSet = fetchUserRoleSet(_user, _restContext);

            if (userRoleSet.isEmpty() == false) {
                boolean changeSet = userRoleSet.retainAll(roleSet);
                allowed = (userRoleSet.isEmpty() == false);
            }
        }
    }

    return allowed;
}

From source file:org.wso2.carbon.dataservices.core.description.query.QueryFactory.java
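
retainAll is used to detect repeated SQL dialect declarations: the dialect set of each new query element is intersected with those already collected, and a non-empty intersection raises a DataServiceFault.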

private static List<SQLDialect> getDialectList(OMElement queryEl) throws DataServiceFault {
    Iterator<OMElement> itr = getSQLQueryElements(queryEl);
    boolean isRepeated = false;
    List<SQLDialect> dialectList = new ArrayList<SQLDialect>();
    while (itr.hasNext()) {
        OMElement sqlQuery = itr.next();
        String sqlDialectValue = sqlQuery.getAttributeValue(new QName(DBSFields.DIALECT));
        Set<String> dialectSet = new HashSet<String>();
        Set<String> intersect = null;
        SQLDialect sqlDialect = new SQLDialect();
        if (sqlDialectValue != null) {
            String dbTypes[] = sqlDialectValue.split(",");
            for (String dbType : dbTypes) {
                dialectSet.add(dbType);
            }
            for (SQLDialect dialect : dialectList) {
                intersect = new TreeSet<String>(dialect.getSqlDialects());
                intersect.retainAll(dialectSet);
                if (!intersect.isEmpty()) {
                    isRepeated = true;
                }
            }
            if (!isRepeated) {
                sqlDialect.setSqlDialects(dialectSet);
                sqlDialect.setSqlQuery(sqlQuery.getText());
                dialectList.add(sqlDialect);
            } else {
                Iterator<String> it = intersect.iterator();
                StringBuilder builder = new StringBuilder();
                while (it.hasNext()) {
                    builder.append(it.next());
                    if (it.hasNext()) {
                        builder.append(" ");
                    }
                }
                throw new DataServiceFault("SQL Dialect(s) repeated: " + builder.toString());
            }
        }
    }
    return dialectList;
}

From source file:org.apache.ambari.server.controller.internal.ConfigGroupResourceProvider.java
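
In the tag-and-hostnames branch near the end, retainAll tests whether a config group's hosts overlap with the hosts named in the request.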

private synchronized Set<ConfigGroupResponse> getConfigGroups(Set<ConfigGroupRequest> requests)
        throws AmbariException {
    Set<ConfigGroupResponse> responses = new HashSet<ConfigGroupResponse>();
    if (requests != null) {
        for (ConfigGroupRequest request : requests) {
            LOG.debug("Received a Config group request with" + ", clusterName = " + request.getClusterName()
                    + ", groupId = " + request.getId() + ", groupName = " + request.getGroupName() + ", tag = "
                    + request.getTag());

            if (request.getClusterName() == null) {
                LOG.warn("Cluster name is a required field.");
                continue;
            }

            Cluster cluster = getManagementController().getClusters().getCluster(request.getClusterName());
            Map<Long, ConfigGroup> configGroupMap = cluster.getConfigGroups();

            // By group id
            if (request.getId() != null) {
                ConfigGroup configGroup = configGroupMap.get(request.getId());
                if (configGroup != null) {
                    responses.add(configGroup.convertToResponse());
                } else {
                    throw new ConfigGroupNotFoundException(cluster.getClusterName(),
                            request.getId().toString());
                }
                continue;
            }
            // By group name
            if (request.getGroupName() != null) {
                for (ConfigGroup configGroup : configGroupMap.values()) {
                    if (configGroup.getName().equals(request.getGroupName())) {
                        responses.add(configGroup.convertToResponse());
                    }
                }
                continue;
            }
            // By tag only
            if (request.getTag() != null && request.getHosts().isEmpty()) {
                for (ConfigGroup configGroup : configGroupMap.values()) {
                    if (configGroup.getTag().equals(request.getTag())) {
                        responses.add(configGroup.convertToResponse());
                    }
                }
                continue;
            }
            // By hostnames only
            if (!request.getHosts().isEmpty() && request.getTag() == null) {
                for (String hostname : request.getHosts()) {
                    Map<Long, ConfigGroup> groupMap = cluster.getConfigGroupsByHostname(hostname);

                    if (!groupMap.isEmpty()) {
                        for (ConfigGroup configGroup : groupMap.values()) {
                            responses.add(configGroup.convertToResponse());
                        }
                    }
                }
                continue;
            }
            // By tag and hostnames
            if (request.getTag() != null && !request.getHosts().isEmpty()) {
                for (ConfigGroup configGroup : configGroupMap.values()) {
                    // Has tag
                    if (configGroup.getTag().equals(request.getTag())) {
                        // Has a match with hosts
                        Set<String> groupHosts = new HashSet<String>(configGroup.getHosts().keySet());
                        groupHosts.retainAll(request.getHosts());
                        if (!groupHosts.isEmpty()) {
                            responses.add(configGroup.convertToResponse());
                        }
                    }
                }
                continue;
            }
            // Select all
            for (ConfigGroup configGroup : configGroupMap.values()) {
                responses.add(configGroup.convertToResponse());
            }
        }
    }
    return responses;
}

From source file:org.lilyproject.indexer.engine.IndexUpdater.java
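
retainAll narrows the version tags to reindex to those that both require indexing (per the IndexCase) and exist on the modified version.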

private void handleRecordCreateUpdate(IdRecord record, RecordEvent event, Map<String, Long> vtags,
        Map<Long, Set<String>> vtagsByVersion, Map<Scope, Set<FieldType>> updatedFieldsByScope)
        throws Exception {

    // Determine the IndexCase:
    //  The indexing of all versions is determined by the record type of the non-versioned scope.
    //  This ensures that the indexing behavior of all versions is the same, and can be changed (the
    //  record type of the versioned scope is immutable).
    IndexCase indexCase = indexer.getConf().getIndexCase(record.getRecordTypeName(),
            record.getId().getVariantProperties());

    if (indexCase == null) {
        // The record should not be indexed
        // But data from this record might be denormalized into other index entries
        // After this we go to update denormalized data
    } else {
        Set<String> vtagsToIndex = new HashSet<String>();

        if (event.getType().equals(CREATE)) {
            // New record: just index everything
            indexer.setIndexAllVTags(vtagsToIndex, vtags, indexCase, record);
            // After this we go to the indexing

        } else if (event.getRecordTypeChanged()) {
            // When the record type changes, the rules to index (= the IndexCase) change

            // Delete everything: we do not know the previous record type, so we do not know what
            // version tags were indexed, so we simply delete everything
            indexer.delete(record.getId());

            if (log.isDebugEnabled()) {
                log.debug(String.format("Record %1$s: deleted existing entries from index (if present) "
                        + "because of record type change", record.getId()));
            }

            // Reindex all needed vtags
            indexer.setIndexAllVTags(vtagsToIndex, vtags, indexCase, record);

            // After this we go to the indexing
        } else { // a normal update

            if (event.getVersionCreated() == 1 && event.getType().equals(UPDATE)
                    && indexCase.getIndexVersionless()) {
                // If the first version was created, but the record was not new, then there
                // might already be an @@versionless index entry
                indexer.delete(record.getId(), VersionTag.VERSIONLESS_TAG);

                if (log.isDebugEnabled()) {
                    log.debug(String.format("Record %1$s: deleted versionless entry from index (if present) "
                            + "because of creation first version", record.getId()));
                }
            }

            //
            // Handle changes to non-versioned fields
            //
            if (updatedFieldsByScope.get(Scope.NON_VERSIONED).size() > 0) {
                if (atLeastOneUsedInIndex(updatedFieldsByScope.get(Scope.NON_VERSIONED))) {
                    indexer.setIndexAllVTags(vtagsToIndex, vtags, indexCase, record);
                    // After this we go to the treatment of changed vtag fields
                    if (log.isDebugEnabled()) {
                        log.debug(String.format(
                                "Record %1$s: non-versioned fields changed, will reindex all vtags.",
                                record.getId()));
                    }
                }
            }

            //
            // Handle changes to versioned(-mutable) fields
            //
            // If there were non-versioned fields changed, then we already reindex all versions
            // so this can be skipped.
            //
            // In the case of newly created versions that should be indexed: this will often be
            // accompanied by corresponding changes to vtag fields, which are handled next; in that
            // case it would also work if this code were not here.
            //
            if (vtagsToIndex.isEmpty()
                    && (event.getVersionCreated() != -1 || event.getVersionUpdated() != -1)) {
                if (atLeastOneUsedInIndex(updatedFieldsByScope.get(Scope.VERSIONED))
                        || atLeastOneUsedInIndex(updatedFieldsByScope.get(Scope.VERSIONED_MUTABLE))) {

                    long version = event.getVersionCreated() != -1 ? event.getVersionCreated()
                            : event.getVersionUpdated();
                    if (vtagsByVersion.containsKey(version)) {
                        Set<String> tmp = new HashSet<String>();
                        tmp.addAll(indexCase.getVersionTags());
                        tmp.retainAll(vtagsByVersion.get(version));
                        vtagsToIndex.addAll(tmp);

                        if (log.isDebugEnabled()) {
                            log.debug(String.format("Record %1$s: versioned(-mutable) fields changed, will "
                                    + "index for all tags of modified version %2$s that require indexing: %3$s",
                                    record.getId(), version, indexer.vtagSetToNameString(vtagsToIndex)));
                        }
                    }
                }
            }

            //
            // Handle changes to vtag fields themselves
            //
            Set<String> changedVTagFields = VersionTag.filterVTagFields(event.getUpdatedFields(), typeManager);
            // Remove the vtags which are going to be reindexed anyway
            changedVTagFields.removeAll(vtagsToIndex);
            for (String vtag : changedVTagFields) {
                if (vtags.containsKey(vtag) && indexCase.getVersionTags().contains(vtag)) {
                    if (log.isDebugEnabled()) {
                        log.debug(String.format("Record %1$s: will index for created or updated vtag %2$s",
                                record.getId(), indexer.safeLoadTagName(vtag)));
                    }
                    vtagsToIndex.add(vtag);
                } else {
                    // The vtag does not exist anymore on the document, or does not need to be indexed: delete from index
                    indexer.delete(record.getId(), vtag);
                    if (log.isDebugEnabled()) {
                        log.debug(String.format("Record %1$s: deleted from index for deleted vtag %2$s",
                                record.getId(), indexer.safeLoadTagName(vtag)));
                    }
                }
            }
        }

        //
        // Index
        //
        indexer.index(record, vtagsToIndex, vtags);
    }
}

From source file:com.surevine.alfresco.repo.action.AssignRandomValidSecurityGroupsAction.java
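
retainAll accumulates the intersection of the security markings visible to every past modifier of the item, so that any marking assigned is valid for all of them.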

@Override
protected void executeImpl(final Action action, final NodeRef nodeRef) {
    try {

        //First things first, set a modifier for this item.  We try to round-robin all the users
        //This is intended to be run against an item without any security groups assigned.  If run against
        //an item that already has security groups assigned, it will not consider that the selected user may
        //not be able to access the item, and will throw an AccessDenied exception accordingly

        Iterator<NodeRef> peopleNodes = _personService.getAllPeople().iterator();
        int skipCount = 0;
        while (peopleNodes.hasNext()) {
            NodeRef personNode = peopleNodes.next();
            if (!(skipCount++ < _userIndexOffset)) {
                String userName = _nodeService.getProperty(personNode, ContentModel.PROP_USERNAME).toString();
                if (LOG.isInfoEnabled()) {
                    LOG.info("Setting modifier of " + nodeRef + " to " + userName);
                }
                AuthenticationUtil.runAs(new ModifyItemWork(nodeRef), userName);
                if (!peopleNodes.hasNext()) {
                    _userIndexOffset = 0;
                } else {
                    _userIndexOffset++;
                }
                break;
            }
        }

        // First, get the list of everyone to have modified this item - we
        // need to make sure all
        // these users could have seen the security marking we will
        // generate, to ensure
        // consistency (actually, we could be more specific than this if we
        // needed to as what we
        // actually need to ensure is that a user who can see Version X can
        // see all versions <X)

        Object o = _nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIER);
        if (o == null) {
            o = _nodeService.getProperty(nodeRef, ContentModel.PROP_CREATOR);
        }
        final String owner = o.toString();

        Collection<String> modifiers = new HashSet<String>(1);
        try {
            Iterator<Version> allVersions = _versionService.getVersionHistory(nodeRef).getAllVersions()
                    .iterator();
            while (allVersions.hasNext()) {
                Version v = allVersions.next();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Adding " + v.getFrozenModifier() + " to the list of modifiers for " + nodeRef);
                }
                modifiers.add(v.getFrozenModifier());
            }
        } catch (NullPointerException e) {
            // This means that the item isn't versionable, in which case use
            // the current modifier
            modifiers.add(owner);
        }
        Iterator<String> modifierUserNames;

        // For each security Group, work out the groups to assign
        for (int i = 0; i < _constraintNames.length; i++) {
            modifierUserNames = modifiers.iterator();

            Set<String> potentialGroups = null;

            while (modifierUserNames.hasNext()) {
                String userName = modifierUserNames.next();
                if (potentialGroups == null) {
                    potentialGroups = new HashSet<String>(AuthenticationUtil
                            .runAs(new GetGivenUserSecurityMarkingsWork(_constraintNames[i]), userName));
                } else {
                    potentialGroups.retainAll(AuthenticationUtil
                            .runAs(new GetGivenUserSecurityMarkingsWork(_constraintNames[i]), userName));
                }
            }

            Iterator<String> potentialGroupsIt = potentialGroups.iterator();
            ArrayList<String> groupsToAdd = new ArrayList<String>(2);
            while (potentialGroupsIt.hasNext()) {
                String potentialGroup = potentialGroupsIt.next();
                if (LOG.isDebugEnabled()) {
                    LOG.debug(potentialGroup + " is a potential group for " + nodeRef);
                }
                if (_randomiser.nextFloat() < _chanceToApplyGivenSecurityGroup) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("Adding " + potentialGroup + " to " + nodeRef);
                    }
                    groupsToAdd.add(potentialGroup);
                }
            }
            if (groupsToAdd.contains("ATOMAL2") && !groupsToAdd.contains("ATOMAL1")) {
                groupsToAdd.add("ATOMAL1");
            }
            QName propertyQName = getQNameFromConstraintName(_constraintNames[i]);

            if (groupsToAdd.size() > 0) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Making modification as " + owner);
                }
                //Parts of the renditioned aspects, which require us to have privs to previews etc, require this to be run as the last modifier of the document
                AuthenticationUtil.runAs(new ModifySecurityMarkingWork(nodeRef, propertyQName, groupsToAdd),
                        owner);
            }

        }

        //OK, now we've set the security groups - we are now going to munge the modifier of the item
        /*
         * 
         * This bit seems to:
         *    A) Break whichever site you run it against
         *    B) Not consider whether the selected user could actually access the node they are trying to update
         * 
        */

    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}

From source file:uk.gov.gchq.gaffer.spark.operation.dataframe.FiltersToOperationConverter.java
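
Because the filters are ANDed together, retainAll folds the group sets specified by the individual filters into their intersection.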

/**
 * Iterates through all the filters looking for ones that specify a group or groups. The intersection of all of
 * these sets of groups is formed as all the filters are 'AND'ed together before data is provided to a Dataframe.
 * Only a group in the set of groups returned by this method can be returned from this query.
 * <p>
 * This method needs to distinguish between the following cases:
 * - None of the filters specify a group (in which case null is returned);
 * - One or more of the filters specify a group (in which case the intersection of the sets of groups specified
 * by the different filters is returned);
 * - Incompatible groups are specified (this is a special case of the above bullet where an empty set is returned).
 *
 * @return A set of strings containing the required groups.
 */
private Set<String> checkForGroups() {
    final List<Set<String>> listOfGroups = new ArrayList<>();
    for (final Filter filter : filters) {
        final Set<String> groups = checkForGroups(filter);
        if (groups != null && !groups.isEmpty()) {
            listOfGroups.add(groups);
        }
    }
    if (listOfGroups.isEmpty()) {
        LOGGER.info("None of the filters specify a group");
        return null;
    }
    final Set<String> finalGroups = new HashSet<>();
    boolean first = true;
    for (final Set<String> groups : listOfGroups) {
        if (first) {
            finalGroups.addAll(groups);
            first = false;
        } else {
            finalGroups.retainAll(groups);
        }
    }
    LOGGER.info("The following groups are specified by the filters: {}", StringUtils.join(finalGroups, ','));
    return finalGroups;
}

From source file:ubic.BAMSandAllen.TopTenInfo.java
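
retainAll is used here to count how many of a gene's image series also appear in the current list of top rows.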

public void writeExpressionInfo() throws Exception {

    //        AllenBrainAtlasService abaService = new AllenBrainAtlasService();
    ImageSeriesInfoLoader imageInfo = new ImageSeriesInfoLoader();

    getMotorRegions();

    List<String> rows;
    if (loader != null) {
        rows = loader.getRows();
    } else {
        // may be too many
        rows = expression.getRowNames();
    }

    DoubleArrayList averageExp = new DoubleArrayList();
    DoubleArrayList sdExp = new DoubleArrayList();
    for (String geneRow : expression.getRowNames()) {
        double[] expValues = expression.getRowByName(geneRow);

        DoubleArrayList expValuesDAL = new DoubleArrayList(expValues);
        double mean = DescriptiveWithMissing.mean(expValuesDAL);
        averageExp.add(mean);

        double sampleStandardDeviation = Math.sqrt(DescriptiveWithMissing.sampleVariance(expValuesDAL, mean));
        sdExp.add(sampleStandardDeviation);
    }
    DoubleArrayList averageRanks = Rank.rankTransform(averageExp);
    DoubleArrayList sdRanks = Rank.rankTransform(sdExp);

    log.info("degree correlation to X:" + forCoords.getDegreeCorrelation("x"));
    log.info("degree correlation to Y:" + forCoords.getDegreeCorrelation("y"));
    log.info("degree correlation to Z:" + forCoords.getDegreeCorrelation("z"));

    // //do motor regions have less connections
    DoubleArrayList motor1 = new DoubleArrayList();
    DoubleArrayList notMotor1 = new DoubleArrayList();

    ABAMSDataMatrix matrixA = pair.getMatrixA();
    DoubleMatrix<String, String> degNamed = Util.columnSums(matrixA);

    for (String region : degNamed.getColNames()) {
        double expvalue = degNamed.getByKeys("Sums", region);
        if (Double.isNaN(expvalue))
            continue;
        if (motorRegions.contains(region))
            motor1.add(expvalue);
        else
            notMotor1.add(expvalue);
    }
    log.info("Wilcox Motor P " + Wilcoxon.exactWilcoxonP(motor1.elements(), notMotor1.elements()));
    log.info("Wilcox Motor P Rev " + Wilcoxon.exactWilcoxonP(notMotor1.elements(), motor1.elements()));
    // //delete

    int rowsInMatrix = expression.rows();

    ParamKeeper stats = new ParamKeeper();
    int columns = pair.getMatrixA().columns();
    int count = 0;

    for (String geneRow : rows) {
        count++;
        if (count % 100 == 0)
            log.info("Count:" + count);

        Map<String, String> geneStats = new HashMap<String, String>();
        geneStats.put("Name", geneRow);
        String geneName = ImageSeriesInfoLoader.getGeneNameFromRowName(geneRow);
        String imageSeriesID = ImageSeriesInfoLoader.getImageIDFromRowLabel(geneRow);
        // String imageSeriesID = geneRow.substring( geneRow.indexOf( "[" ) + 1, geneRow.indexOf( "]" ) );

        geneStats.put("Gene", "'" + geneName);

        geneStats.put("imageSeriesID", "'" + imageSeriesID);

        geneStats.put("NCBI ID", "" + imageInfo.getNCBIIDFromRowName(geneRow));

        Set<String> allImageSets = imageInfo.getRowsFromGene(geneName);

        geneStats.put("ImageSeriesCount", "" + allImageSets.size());

        // do any of its image series sets have a coronal image set?
        geneStats.put("HasCoronalImage", "" + imageInfo.hasCoronalImageFromRowName(geneName));

        // find out how many other image series are in the top list
        allImageSets.retainAll(rows);
        geneStats.put("ImageSeriesInList", "" + allImageSets.size());

        // AbaGene gene = abaService.getGene( geneName );

        // geneStats.put( "plane", imageInfo.getPlaneFromRowName( geneRow ) );

        geneStats.put("Gene Name", imageInfo.getNameFromImageID(imageSeriesID));

        // DoubleArrayList rx = Rank.rankTransform( x );

        int index = rows.indexOf(geneRow);
        geneStats.put("Index", index + "");

        int fullMatrixIndex = expression.getRowNames().indexOf(geneRow);
        geneStats.put("meanRank", "" + (rowsInMatrix - averageRanks.get(fullMatrixIndex)));
        geneStats.put("sdRank", "" + (rowsInMatrix - sdRanks.get(fullMatrixIndex)));

        double[] expValues = expression.getRowByName(geneRow);
        int nans = Util.countNaNs(expValues);
        geneStats.put("NaNs", "" + nans);
        int columnsMinusNaN = columns - nans;

        double pearsonDegreeCor = pair.getDegreeCorrelation(geneRow);
        geneStats.put("Degree correlation", pearsonDegreeCor + "");
        geneStats.put("Degree correlation pval",
                "" + CorrelationStats.pvalue(pearsonDegreeCor, columnsMinusNaN) * rows.size());

        double spearmanDegreeCor = pair.getRankDegreeCorrelation(geneRow);
        geneStats.put("Rank degree correlation", spearmanDegreeCor + "");
        geneStats.put("Rank degree correlation pval",
                "" + CorrelationStats.spearmanPvalue(spearmanDegreeCor, columnsMinusNaN) * rows.size());

        boolean removeNaNs = true;
        geneStats.put("expSum", "" + Util.sum(expValues, removeNaNs));

        geneStats.put("plane", imageInfo.getPlaneFromRowName(geneRow));

        double coordCorrelation = forCoords.getXRankCorrelation(geneRow);
        geneStats.put("x correlation", "" + coordCorrelation);
        geneStats.put("x correlation pvalue",
                "" + CorrelationStats.spearmanPvalue(coordCorrelation, columnsMinusNaN) * rows.size());

        coordCorrelation = forCoords.getYRankCorrelation(geneRow);
        geneStats.put("y correlation", "" + coordCorrelation);
        geneStats.put("y correlation pvalue",
                "" + CorrelationStats.spearmanPvalue(coordCorrelation, columnsMinusNaN) * rows.size());
        coordCorrelation = forCoords.getZRankCorrelation(geneRow);
        geneStats.put("z correlation", "" + coordCorrelation);
        geneStats.put("z correlation pvalue",
                "" + CorrelationStats.spearmanPvalue(coordCorrelation, columnsMinusNaN) * rows.size());

        DoubleArrayList expValuesDAL = new DoubleArrayList(expValues);
        double mean = DescriptiveWithMissing.mean(expValuesDAL);
        geneStats.put("mean", "" + mean);

        double sampleStandardDeviation = Math.sqrt(DescriptiveWithMissing.sampleVariance(expValuesDAL, mean));
        geneStats.put("sampleStandardDeviation", "" + sampleStandardDeviation);

        // // do test for genes enriched in motor regions
        DoubleArrayList motor = new DoubleArrayList();
        DoubleArrayList notMotor = new DoubleArrayList();
        for (String region : expression.getColNames()) {
            double expvalue = expression.getByKeys(geneRow, region);
            if (Double.isNaN(expvalue))
                continue;
            if (motorRegions.contains(region))
                motor.add(expvalue);
            else
                notMotor.add(expvalue);
        }
        geneStats.put("Wilcox Motor P",
                "" + Wilcoxon.exactWilcoxonP(motor.elements(), notMotor.elements()) * rows.size());
        geneStats.put("Wilcox Motor P Rev",
                "" + Wilcoxon.exactWilcoxonP(notMotor.elements(), motor.elements()) * rows.size());

        // url
        String url = "http://mouse.brain-map.org/brain/" + geneName + "/" + imageSeriesID
                + ".html?ispopup=true";
        url = "HYPERLINK(\"" + url + "\",\"Series summary\")";
        geneStats.put("URL", url);

        String urlThumbs = "http://mouse.brain-map.org/brain/" + geneName + "/" + imageSeriesID
                + "/thumbnails.html?ispopup=true";
        urlThumbs = "HYPERLINK(\"" + urlThumbs + "\",\"Thumbnails\")";
        geneStats.put("Thumbs URL", urlThumbs);

        // regions above average?

        stats.addParamInstance(geneStats);

    }
    stats.writeExcel(baseFileName + ".geneInfo.xls");
}

From source file:com.datumbox.framework.common.dataobjects.Dataframe.java
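
retainAll trims the requested column set to the columns actually present in the Dataframe's meta-data before they are dropped.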

/**
 * Completely removes a set of columns from the dataset. The meta-data of
 * the Dataframe is updated. The method internally uses threads.
 *
 * @param columnSet the columns to remove
 */
public void dropXColumns(Set<Object> columnSet) {
    columnSet.retainAll(xDataTypes.keySet()); //keep only those columns that are already known to the Meta data of the Dataframe

    if (columnSet.isEmpty()) {
        return;
    }

    //remove all the columns from the Meta data
    xDataTypes.keySet().removeAll(columnSet);

    streamExecutor.forEach(StreamMethods.stream(entries(), true), e -> {
        Integer rId = e.getKey();
        Record r = e.getValue();

        AssociativeArray xData = r.getX().copy();
        boolean modified = xData.keySet().removeAll(columnSet);

        if (modified) {
            Record newR = new Record(xData, r.getY(), r.getYPredicted(), r.getYPredictedProbabilities());

            //safe to call in this context. we already updated the meta when we modified the xDataTypes
            _unsafe_set(rId, newR);
        }
    });

}