List of usage examples for java.util Collection retainAll
boolean retainAll(Collection<?> c);
From source file:com.facebook.model.GraphObjectFactoryTests.java
@SmallTest
@MediumTest
@LargeTest
public void testCollectionRetainAllThrows() throws JSONException {
    try {
        // Lists produced by GraphObject.Factory are read-only views, so any
        // mutating operation such as retainAll must be rejected.
        Collection<Integer> list = GraphObject.Factory.createList(Integer.class);
        list.retainAll(Arrays.asList());
        fail("Expected exception");
    } catch (UnsupportedOperationException expected) {
        // expected: the collection is unmodifiable
    }
}
From source file:org.ejbca.core.model.era.RaMasterApiSessionBean.java
/**
 * Builds a query fragment restricting results to the end entity profiles the given
 * authentication token may both view and access under {@code endentityAccessRule}.
 *
 * @param authenticationToken the caller's authentication token
 * @param includeparanteses unused in this implementation — kept for interface
 *        compatibility; parentheses are always added when a clause is produced
 * @param endentityAccessRule the access rule the caller must hold for a profile to be included
 * @return a clause of the form {@code "( endEntityProfileId = A OR endEntityProfileId = B )"},
 *         or {@code null} if the caller is authorized to no profile at all
 */
private String getEndEntityProfileAuthorizationString(AuthenticationToken authenticationToken,
        boolean includeparanteses, String endentityAccessRule) {
    final Collection<Integer> profileIds = new ArrayList<>(
            endEntityProfileSession.getEndEntityProfileIdToNameMap().keySet());
    final Collection<Integer> result = getAuthorizedEndEntityProfileIds(authenticationToken,
            AccessRulesConstants.VIEW_END_ENTITY, profileIds);
    // Keep only profiles that are also authorized under the requested access rule.
    result.retainAll(endEntityProfileSession.getAuthorizedEndEntityProfileIds(authenticationToken,
            endentityAccessRule));
    if (result.isEmpty()) {
        return null; // callers treat null as "no authorized profiles"
    }
    // Build the OR-joined clause with a StringBuilder instead of repeated
    // String concatenation (the original was O(n^2) in the number of profiles).
    final StringBuilder sb = new StringBuilder();
    for (final Integer id : result) {
        if (sb.length() > 0) {
            sb.append(" OR");
        }
        sb.append(" endEntityProfileId = ").append(id);
    }
    return "( " + sb + " )";
}
From source file:org.infoscoop.account.ldap.LDAPAccountManager.java
public List searchUser(Map searchConditionMap) throws Exception { Map confitionForBase = getConditionForSearchBase(searchConditionMap); Collection users = new TreeSet(new Comparator() { public int compare(Object o1, Object o2) { try { LDAPAccount user1 = (LDAPAccount) o1; LDAPAccount user2 = (LDAPAccount) o2; return user1.getUid().compareTo(user2.getUid()); } catch (Exception e) { log.error("", e); return 0; }//from ww w.j a va 2s . c om } }); DirContext context = null; try { context = this.initContext(); Map groupFilterMap = (Map) confitionForBase.get(GROUP_SEARCH_BASE_KEY); Collection groupMembers = null; if (groupFilterMap != null) { groupMembers = searchGroupMember(context, groupFilterMap); } Map userFilterMap = (Map) confitionForBase.get(USER_SEARCH_BASE_KEY); if (userFilterMap != null) { users = searchFromUsers(context, userFilterMap); if (groupMembers != null) { users.retainAll(groupMembers); } } else if (groupMembers != null) { users.addAll(groupMembers); } List result = new ArrayList(); for (Iterator it = users.iterator(); it.hasNext();) { LDAPAccount user = (LDAPAccount) it.next(); if (user.getGroupName() == null) setGroup(context, user); result.add(user); } return result; } finally { context.close(); } }
From source file:pltag.parser.semantics.DepTreeState.java
/** * Retain in the dependency arc only the roles that are in the intersection of the * roles already in the arc and in the <code>rolesIn</code> Collection * @param arc/* w w w.j a v a2s .c om*/ * @param rolesIn */ private void filterRoles(DependencyArc arc, Collection<Role> rolesIn) { // for(Iterator<Role> it = arc.getRoles().iterator(); it.hasNext();) // { // if(!rolesIn.contains(it.next())) // { // it.remove(); // } // } Collection<Role> roles = arc.getRoles(); roles.retainAll(rolesIn); // if there are no roles in the intersection of both sets, // add all the roles from the verification tree (not so accurate) if (roles.isEmpty()) roles.addAll(rolesIn); }
From source file:edu.stanford.muse.index.Lexicon.java
/** returns docs with ALL given sentiments. * special cases: sentiments can be an array of length 1 and be "None", in which case all documents with no sentiments are returned. * special cases: sentiments can be an array of length 1 and be "all", in which case all documents with any sentiments are returned. * @param captions */// ww w . jav a 2s . c o m public Collection<Document> getDocsWithSentiments(String sentiments[], Indexer indexer, Collection<Document> docs, int cluster, boolean originalContentOnly, String... captions) { Collection<Document> result = null; // note: multiple sentiments are possible, they are ANDED if (sentiments == null || sentiments.length == 0) return result; Set<Document> docs_set = Util.castOrCloneAsSet(docs); if (sentiments.length == 1 && "all".equalsIgnoreCase(sentiments[0])) return getDocsWithAnyEmotions(indexer, docs_set, originalContentOnly); // note: we'll pass in null for docs, and intersect with the given set of docs later // otherwise we'd just be doing it again and again for each category and lexer Map<String, Collection<Document>> map = getEmotions(indexer, null, false, originalContentOnly, captions); for (int i = 0; i < sentiments.length; i++) { Collection<Document> temp1 = ("None".equalsIgnoreCase(sentiments[i])) ? getDocsWithNoEmotions(indexer, docs_set, originalContentOnly) : map.get(sentiments[i]); if (temp1 == null) { // no matches, just return result = new LinkedHashSet<Document>(); return result; } if (result == null) result = temp1; else result.retainAll(temp1); } //result.retainAll(docs); return Util.setIntersection(result, docs_set); }
From source file:nl.strohalm.cyclos.dao.accounts.transactions.TransferDAOImpl.java
/**
 * Translates the given {@code query} into HQL conditions appended to {@code hql},
 * registering the corresponding values in {@code namedParameters}.
 *
 * @param query the transfer search criteria
 * @param hql the HQL buffer the conditions are appended to
 * @param namedParameters receives the named parameter values referenced by the appended HQL
 * @return false when it can be determined up-front that no results can match
 *         (the related member has no accounts); true otherwise
 */
@SuppressWarnings("unchecked")
private boolean buildSearchQuery(final TransferQuery query, final StringBuilder hql,
        final Map<String, Object> namedParameters) {
    // hql.append(" and not exists (select pas.id from PendingAccountStatus pas where pas.transfer = t)");
    HibernateHelper.addParameterToQuery(hql, namedParameters, "t.type.requiresAuthorization",
            query.getRequiresAuthorization());
    HibernateHelper.addParameterToQuery(hql, namedParameters, "t.status", query.getStatus());
    HibernateHelper.addLikeParameterToQuery(hql, namedParameters, "t.description", query.getDescription());
    HibernateHelper.addLikeParameterToQuery(hql, namedParameters, "t.transactionNumber",
            query.getTransactionNumber());
    HibernateHelper.addParameterToQuery(hql, namedParameters, "t.loanPayment", query.getLoanPayment());
    HibernateHelper.addParameterToQuery(hql, namedParameters, "t.parent", query.getParent());
    HibernateHelper.addParameterToQuery(hql, namedParameters, "t.type", query.getTransferType());
    HibernateHelper.addPeriodParameterToQuery(hql, namedParameters, "ifnull(t.processDate, t.date)",
            query.getPeriod());
    if (query.isRootOnly()) {
        hql.append(" and t.parent is null");
    }
    if (query.getLoanTransfer() != null) {
        if (query.getLoanTransfer()) {
            hql.append(" and l is not null");
        } else {
            hql.append(" and l is null");
        }
    }
    // By conciliation status
    if (query.getConciliated() != null) {
        hql.append(" and t.externalTransfer is " + (query.getConciliated() ? "not" : "") + " null");
    }
    // By owner
    if (query.getOwner() != null) {
        // Load the account(s): all accounts of the owner, or the single account of the given type
        Collection<Account> accounts;
        if (query.getType() == null) {
            AccountQuery aq = new AccountQuery();
            aq.setOwner(query.getOwner());
            accounts = (Collection<Account>) accountDao.search(aq);
        } else {
            final Account account = accountDao.load(query.getOwner(), query.getType());
            accounts = Collections.singleton(account);
        }
        namedParameters.put("accounts", accounts);
        if (query.getMember() != null) {
            // Load the related member accounts
            final AccountQuery otherAccountsQuery = new AccountQuery();
            otherAccountsQuery.setOwner(query.getMember());
            final List<? extends Account> otherAccounts = accountDao.search(otherAccountsQuery);
            if (otherAccounts.isEmpty()) {
                // No accounts - ensure nothing will be returned
                return false;
            } else {
                // Match transfers in either direction between the owner's and the member's accounts
                hql.append(
                        " and ((t.from in (:accounts) and t.to in (:relatedAccounts)) or (t.to in (:accounts) and t.from in (:relatedAccounts)))");
                namedParameters.put("relatedAccounts", otherAccounts);
            }
        } else {
            hql.append(" and (t.from in (:accounts) or t.to in (:accounts))");
        }
        // Use the groups / group filters
        Collection<MemberGroup> groups = new HashSet<MemberGroup>();
        if (CollectionUtils.isNotEmpty(query.getGroupFilters())) {
            // Collect the groups belonging to each persistent group filter
            for (GroupFilter groupFilter : query.getGroupFilters()) {
                if (groupFilter != null && groupFilter.isPersistent()) {
                    groupFilter = getFetchDao().fetch(groupFilter, GroupFilter.Relationships.GROUPS);
                    groups.addAll(groupFilter.getGroups());
                }
            }
        }
        if (CollectionUtils.isNotEmpty(query.getGroups())) {
            // Specific groups were requested
            if (!groups.isEmpty()) {
                // Group filters present: intersect their groups with the specified groups
                groups.retainAll(query.getGroups());
            } else {
                // No group filters: use the specified groups alone
                groups = query.getGroups();
            }
        }
        if (!groups.isEmpty()) {
            // Restrict to transfers where the counterpart member account belongs to one of the groups
            hql.append(
                    " and ((t.to in (:accounts) and exists (select ma.id from MemberAccount ma where ma = t.from and ma.member.group in (:groups)))");
            hql.append(
                    " or (t.from in (:accounts) and exists (select ma.id from MemberAccount ma where ma = t.to and ma.member.group in (:groups))))");
            namedParameters.put("groups", groups);
        }
    }
    // From account owner
    if (query.getFromAccountOwner() != null) {
        final AccountQuery accountQuery = new AccountQuery();
        accountQuery.setOwner(query.getFromAccountOwner());
        final List<? extends Account> fromAccounts = accountDao.search(accountQuery);
        hql.append(" and t.from in (:fromAccounts) ");
        namedParameters.put("fromAccounts", fromAccounts);
    }
    // To account owner
    if (query.getToAccountOwner() != null) {
        final AccountQuery accountQuery = new AccountQuery();
        accountQuery.setOwner(query.getToAccountOwner());
        final List<? extends Account> toAccounts = accountDao.search(accountQuery);
        hql.append(" and t.to in (:toAccounts) ");
        namedParameters.put("toAccounts", toAccounts);
    }
    // PaymentFilter
    final Collection<PaymentFilter> paymentFilters = query.getPaymentFilters();
    if (CollectionUtils.isNotEmpty(paymentFilters)) {
        // Get all TTs from all those payment filters
        final String ttHql = "from TransferType tt where exists (" + " select 1" + " from PaymentFilter pf"
                + " where pf in (:pfs)" + " and tt in elements(pf.transferTypes))";
        final List<TransferType> transferTypes = list(ttHql, Collections.singletonMap("pfs", paymentFilters));
        HibernateHelper.addInParameterToQuery(hql, namedParameters, "t.type", transferTypes);
    }
    if (query.getExcludeTransferType() != null) {
        hql.append(" and t.type != :excludeTransferType ");
        namedParameters.put("excludeTransferType", query.getExcludeTransferType());
    }
    // Operated by
    if (query.getBy() != null) {
        hql.append(" and (t.by = :by or t.receiver = :by)");
        namedParameters.put("by", query.getBy());
    }
    // Custom fields
    hibernateCustomFieldHandler.appendConditions(hql, namedParameters, query.getCustomValues());
    // Set the order
    if (!query.isUnordered()) {
        final List<String> orders = new ArrayList<String>();
        // Order by date ...
        String order = "ifnull(t.processDate, t.date)";
        if (query.isReverseOrder()) {
            order += " desc";
        }
        orders.add(order);
        // ... then by id, to ensure that payments in the same second are ordered correctly
        order = "t.id";
        if (query.isReverseOrder()) {
            order += " desc";
        }
        orders.add(order);
        HibernateHelper.appendOrder(hql, orders);
    }
    return true;
}
From source file:org.lockss.test.LockssTestCase.java
/** Assert that a collection cannot be modified, <i>ie</i>, that all of * the following methods, plus the collection's iterator().remove() * method, throw UnsupportedOperationException: add(), addAll(), clear(), * remove(), removeAll(), retainAll() */ public static void assertUnmodifiable(Collection coll) { List list = ListUtil.list("bar"); try {// w w w.ja v a2s .c o m coll.add("foo"); fail("add() didn't throw"); } catch (UnsupportedOperationException e) { } try { coll.addAll(list); fail("addAll() didn't throw"); } catch (UnsupportedOperationException e) { } try { coll.clear(); fail("clear() didn't throw"); } catch (UnsupportedOperationException e) { } try { coll.remove("foo"); fail("remove() didn't throw"); } catch (UnsupportedOperationException e) { } try { coll.removeAll(list); fail("removeAll() didn't throw"); } catch (UnsupportedOperationException e) { } try { coll.retainAll(list); fail("retainAll() didn't throw"); } catch (UnsupportedOperationException e) { } Iterator iter = coll.iterator(); iter.next(); try { iter.remove(); fail("iterator().remove() didn't throw"); } catch (UnsupportedOperationException e) { } }
From source file:com.xpn.xwiki.plugin.spacemanager.impl.SpaceManagerImpl.java
/**
 * {@inheritDoc}
 *
 * Computes the intersection between the groups the member belongs to and the
 * roles defined for the given space.
 *
 * @see SpaceManager#getRoles(String, String, XWikiContext)
 */
public Collection<String> getRoles(String spaceName, String memberName, XWikiContext context)
        throws SpaceManagerException {
    try {
        final Collection<String> groupsOfMember = context.getWiki().getGroupService(context)
                .getAllGroupsNamesForMember(memberName, 0, 0, context);
        // Keep only the member's groups that are also roles of this space.
        groupsOfMember.retainAll(getRoles(spaceName, context));
        return groupsOfMember;
    } catch (XWikiException e) {
        throw new SpaceManagerException(e);
    }
}
From source file:org.ejbca.core.model.era.RaMasterApiSessionBean.java
/**
 * Searches for end entities matching the given request, constrained to the CAs,
 * certificate profiles (CPs) and end entity profiles (EEPs) the caller is authorized
 * to — each intersected with the corresponding subset requested in the search
 * request, if any. Returns an empty response immediately when any of the three
 * authorization intersections is empty. Results are capped by the configured
 * maximum query count, and {@code mightHaveMoreResults} is set when the cap was
 * reached or the query timed out.
 */
@SuppressWarnings("unchecked")
@Override
public RaEndEntitySearchResponse searchForEndEntities(AuthenticationToken authenticationToken,
        RaEndEntitySearchRequest request) {
    final RaEndEntitySearchResponse response = new RaEndEntitySearchResponse();
    final List<Integer> authorizedLocalCaIds = new ArrayList<>(
            caSession.getAuthorizedCaIds(authenticationToken));
    // Only search a subset of the requested CAs if requested
    if (!request.getCaIds().isEmpty()) {
        authorizedLocalCaIds.retainAll(request.getCaIds());
    }
    if (authorizedLocalCaIds.isEmpty()) {
        // Empty response since there were no authorized CAs
        if (log.isDebugEnabled()) {
            log.debug("Client '" + authenticationToken
                    + "' was not authorized to any of the requested CAs and the search request will be dropped.");
        }
        return response;
    }
    // Check Certificate Profile authorization
    final List<Integer> authorizedCpIds = new ArrayList<>(
            certificateProfileSession.getAuthorizedCertificateProfileIds(authenticationToken, 0));
    final boolean accessAnyCpAvailable = authorizedCpIds
            .containsAll(certificateProfileSession.getCertificateProfileIdToNameMap().keySet());
    if (!request.getCpIds().isEmpty()) {
        authorizedCpIds.retainAll(request.getCpIds());
    }
    if (authorizedCpIds.isEmpty()) {
        // Empty response since there were no authorized Certificate Profiles
        if (log.isDebugEnabled()) {
            log.debug("Client '" + authenticationToken
                    + "' was not authorized to any of the requested CPs and the search request will be dropped.");
        }
        return response;
    }
    // Check End Entity Profile authorization
    final Collection<Integer> authorizedEepIds = new ArrayList<>(endEntityProfileSession
            .getAuthorizedEndEntityProfileIds(authenticationToken, AccessRulesConstants.VIEW_END_ENTITY));
    final boolean accessAnyEepAvailable = authorizedEepIds
            .containsAll(endEntityProfileSession.getEndEntityProfileIdToNameMap().keySet());
    if (!request.getEepIds().isEmpty()) {
        authorizedEepIds.retainAll(request.getEepIds());
    }
    if (authorizedEepIds.isEmpty()) {
        // Empty response since there were no authorized End Entity Profiles
        if (log.isDebugEnabled()) {
            log.debug("Client '" + authenticationToken
                    + "' was not authorized to any of the requested EEPs and the search request will be dropped.");
        }
        return response;
    }
    final String subjectDnSearchString = request.getSubjectDnSearchString();
    final String subjectAnSearchString = request.getSubjectAnSearchString();
    final String usernameSearchString = request.getUsernameSearchString();
    final StringBuilder sb = new StringBuilder("SELECT a.username FROM UserData a WHERE (a.caId IN (:caId))");
    // OR together whichever of the DN / altName / username search strings were supplied
    if (!subjectDnSearchString.isEmpty() || !subjectAnSearchString.isEmpty()
            || !usernameSearchString.isEmpty()) {
        sb.append(" AND (");
        boolean firstAppended = false;
        if (!subjectDnSearchString.isEmpty()) {
            sb.append("a.subjectDN LIKE :subjectDN");
            firstAppended = true;
        }
        if (!subjectAnSearchString.isEmpty()) {
            if (firstAppended) {
                sb.append(" OR ");
            } else {
                firstAppended = true;
            }
            sb.append("a.subjectAltName LIKE :subjectAltName");
        }
        if (!usernameSearchString.isEmpty()) {
            if (firstAppended) {
                sb.append(" OR ");
            } else {
                firstAppended = true;
            }
            sb.append("a.username LIKE :username");
        }
        sb.append(")");
    }
    if (request.isModifiedAfterUsed()) {
        sb.append(" AND (a.timeModified > :modifiedAfter)");
    }
    if (request.isModifiedBeforeUsed()) {
        sb.append(" AND (a.timeModified < :modifiedBefore)");
    }
    if (!request.getStatuses().isEmpty()) {
        sb.append(" AND (a.status IN (:status))");
    }
    // Don't constrain results to certain end entity profiles if root access is available and "any" CP is requested
    if (!accessAnyCpAvailable || !request.getCpIds().isEmpty()) {
        sb.append(" AND (a.certificateProfileId IN (:certificateProfileId))");
    }
    // Don't constrain results to certain end entity profiles if root access is available and "any" EEP is requested
    if (!accessAnyEepAvailable || !request.getEepIds().isEmpty()) {
        sb.append(" AND (a.endEntityProfileId IN (:endEntityProfileId))");
    }
    final Query query = entityManager.createQuery(sb.toString());
    query.setParameter("caId", authorizedLocalCaIds);
    if (!accessAnyCpAvailable || !request.getCpIds().isEmpty()) {
        query.setParameter("certificateProfileId", authorizedCpIds);
    }
    if (!accessAnyEepAvailable || !request.getEepIds().isEmpty()) {
        query.setParameter("endEntityProfileId", authorizedEepIds);
    }
    if (log.isDebugEnabled()) {
        log.debug(" CA IDs: " + Arrays.toString(authorizedLocalCaIds.toArray()));
        if (!accessAnyCpAvailable || !request.getCpIds().isEmpty()) {
            log.debug(" certificateProfileId: " + Arrays.toString(authorizedCpIds.toArray()));
        } else {
            log.debug(" certificateProfileId: Any (even deleted) profile(s) due to root access.");
        }
        if (!accessAnyEepAvailable || !request.getEepIds().isEmpty()) {
            log.debug(" endEntityProfileId: " + Arrays.toString(authorizedEepIds.toArray()));
        } else {
            log.debug(" endEntityProfileId: Any (even deleted) profile(s) due to root access.");
        }
    }
    // Exact match or substring match depending on the request flags
    if (!subjectDnSearchString.isEmpty()) {
        if (request.isSubjectDnSearchExact()) {
            query.setParameter("subjectDN", subjectDnSearchString);
        } else {
            query.setParameter("subjectDN", "%" + subjectDnSearchString + "%");
        }
    }
    if (!subjectAnSearchString.isEmpty()) {
        if (request.isSubjectAnSearchExact()) {
            query.setParameter("subjectAltName", subjectAnSearchString);
        } else {
            query.setParameter("subjectAltName", "%" + subjectAnSearchString + "%");
        }
    }
    if (!usernameSearchString.isEmpty()) {
        if (request.isUsernameSearchExact()) {
            query.setParameter("username", usernameSearchString);
        } else {
            query.setParameter("username", "%" + usernameSearchString + "%");
        }
    }
    if (request.isModifiedAfterUsed()) {
        query.setParameter("modifiedAfter", request.getModifiedAfter());
    }
    if (request.isModifiedBeforeUsed()) {
        query.setParameter("modifiedBefore", request.getModifiedBefore());
    }
    if (!request.getStatuses().isEmpty()) {
        query.setParameter("status", request.getStatuses());
    }
    final int maxResults = Math.min(getGlobalCesecoreConfiguration().getMaximumQueryCount(),
            request.getMaxResults());
    query.setMaxResults(maxResults);
    /* Try to use the non-portable hint (depends on DB and JDBC driver) to specify how long in milliseconds the query may run. Possible behaviors:
     * - The hint is ignored
     * - A QueryTimeoutException is thrown
     * - A PersistenceException is thrown (and the transaction which don't have here is marked for roll-back)
     */
    final long queryTimeout = getGlobalCesecoreConfiguration().getMaximumQueryTimeout();
    if (queryTimeout > 0L) {
        query.setHint("javax.persistence.query.timeout", String.valueOf(queryTimeout));
    }
    final List<String> usernames;
    try {
        usernames = query.getResultList();
        for (final String username : usernames) {
            response.getEndEntities().add(endEntityAccessSession.findUser(username));
        }
        // Exactly maxResults rows means the result set may have been truncated
        response.setMightHaveMoreResults(usernames.size() == maxResults);
        if (log.isDebugEnabled()) {
            log.debug("Certificate search query: " + sb.toString() + " LIMIT " + maxResults + " \u2192 "
                    + usernames.size() + " results. queryTimeout=" + queryTimeout + "ms");
        }
    } catch (QueryTimeoutException e) {
        log.info("Requested search query by " + authenticationToken + " took too long. Query was "
                + e.getQuery().toString() + ". " + e.getMessage());
        response.setMightHaveMoreResults(true);
    } catch (PersistenceException e) {
        log.info("Requested search query by " + authenticationToken + " failed, possibly due to timeout. "
                + e.getMessage());
        response.setMightHaveMoreResults(true);
    }
    return response;
}
From source file:ubic.gemma.analysis.expression.coexpression.GeneCoexpressionServiceImpl.java
/**
 * Get coexpression results using a pure gene2gene query (without visiting the probe2probe tables). This is
 * generally faster, probably even if we're only interested in data from a subset of the experiments.
 *
 * @param baseSet the expression experiment set the canned analysis was run on
 * @param eeIds Experiments to limit the results to (must not be null, and should already be security-filtered)
 * @param queryGenes genes to fetch coexpression links for (must not be empty)
 * @param stringency minimum number of supporting data sets for a link to be kept
 * @param maxResults cap on results passed through to the raw coexpression fetch
 * @param queryGenesOnly return links among the query genes only.
 * @return the populated meta value object, including per-gene summaries
 */
private CoexpressionMetaValueObject getFilteredCannedAnalysisResults(ExpressionExperimentSet baseSet,
        Collection<Long> eeIds, Collection<Gene> queryGenes, int stringency, int maxResults,
        boolean queryGenesOnly) {
    if (queryGenes.isEmpty()) {
        throw new IllegalArgumentException("No genes in query");
    }
    List<ExpressionExperimentValueObject> eevos = getSortedEEvos(eeIds);
    if (eevos.isEmpty()) {
        throw new IllegalArgumentException("There are no usable experiments in the selected set");
    }
    /*
     * We get this prior to filtering so it matches the vectors stored with the analysis.
     */
    expressionExperimentSetService.thaw(baseSet);
    List<Long> positionToIDMap = Gene2GenePopulationServiceImpl
            .getPositionToIdMap(EntityUtils.getIds(baseSet.getExperiments()));
    /*
     * This set of links must be filtered to include those in the data sets being analyzed.
     */
    Map<Long, Collection<Gene2GeneCoexpression>> gg2gs = getRawCoexpression(queryGenes, stringency, maxResults,
            queryGenesOnly);
    List<Long> filteredEeIds = (List<Long>) EntityUtils.getIds(eevos);
    CoexpressionMetaValueObject result = initValueObject(queryGenes, eevos, true);
    List<CoexpressionValueObjectExt> ecvos = new ArrayList<CoexpressionValueObjectExt>();
    Collection<Gene2GeneCoexpression> seen = new HashSet<Gene2GeneCoexpression>();
    // queryGenes = geneService.thawLite( gg2gs.keySet() );
    // populate the value objects.
    StopWatch timer = new StopWatch();
    Collection<Gene> allUsedGenes = new HashSet<Gene>();
    for (Gene queryGene : queryGenes) {
        timer.start();
        if (!queryGene.getTaxon().equals(baseSet.getTaxon())) {
            throw new IllegalArgumentException(
                    "Mismatch between taxon for expression experiment set selected and gene queries");
        }
        allUsedGenes.add(queryGene);
        /*
         * For summary statistics
         */
        CountingMap<Long> supportCount = new CountingMap<Long>();
        Collection<Long> allSupportingDatasets = new HashSet<Long>();
        Collection<Long> allDatasetsWithSpecificProbes = new HashSet<Long>();
        Collection<Long> allTestedDataSets = new HashSet<Long>();
        int linksMetPositiveStringency = 0;
        int linksMetNegativeStringency = 0;
        Collection<Gene2GeneCoexpression> g2gs = gg2gs.get(queryGene.getId());
        assert g2gs != null;
        List<Long> relevantEEIdList = getRelevantEEidsForBitVector(positionToIDMap, g2gs);
        // Restrict to the experiments actually selected for this query
        relevantEEIdList.retainAll(filteredEeIds);
        GeneValueObject queryGeneValueObject = new GeneValueObject(queryGene);
        HashMap<Gene, Collection<Gene2GeneCoexpression>> foundGenes = new HashMap<Gene, Collection<Gene2GeneCoexpression>>();
        // for queryGene get the interactions
        Map<Long, Gene2GeneProteinAssociation> proteinInteractionMap = this
                .getGene2GeneProteinAssociationForQueryGene(queryGene);
        Map<Long, TfGeneAssociation> regulatedBy = this.getTfGeneAssociationsforTargetGene(queryGene);
        Map<Long, TfGeneAssociation> regulates = this.getTfGeneAssociationsforTf(queryGene);
        if (timer.getTime() > 100) {
            log.info("Postprocess " + queryGene.getOfficialSymbol() + " Phase I: " + timer.getTime() + "ms");
        }
        timer.stop();
        timer.reset();
        timer.start();
        for (Gene2GeneCoexpression g2g : g2gs) {
            StopWatch timer2 = new StopWatch();
            timer2.start();
            // The "found" gene is whichever end of the link is not the query gene
            Gene foundGene = g2g.getFirstGene().equals(queryGene) ? g2g.getSecondGene() : g2g.getFirstGene();
            allUsedGenes.add(foundGene);
            // FIXME Symptom fix for duplicate found genes
            // Keep track of the found genes that we can correctly identify
            // duplicates.
            // All keep the g2g object for debugging purposes.
            if (foundGenes.containsKey(foundGene)) {
                foundGenes.get(foundGene).add(g2g);
                log.warn("Duplicate gene found in coexpression results, skipping: " + foundGene
                        + " From analysis: " + g2g.getSourceAnalysis().getId());
                continue; // Found a duplicate gene, don't add to results
                // just our debugging list
            }
            foundGenes.put(foundGene, new ArrayList<Gene2GeneCoexpression>());
            foundGenes.get(foundGene).add(g2g);
            CoexpressionValueObjectExt cvo = new CoexpressionValueObjectExt();
            /*
             * This Thaw is a big time sink and _should not_ be necessary.
             */
            // foundGene = geneService.thawLite( foundGene ); // db hit
            cvo.setQueryGene(queryGeneValueObject);
            cvo.setFoundGene(new GeneValueObject(foundGene));
            if (timer2.getTime() > 10)
                log.info("Coexp. Gene processing phase I:" + timer2.getTime() + "ms");
            timer2.stop();
            timer2.reset();
            timer2.start();
            populateInteractions(proteinInteractionMap, regulatedBy, regulates, foundGene, cvo);
            Collection<Long> testingDatasets = Gene2GenePopulationServiceImpl.getTestedExperimentIds(g2g,
                    positionToIDMap);
            testingDatasets.retainAll(filteredEeIds);
            /*
             * necesssary in case any were filtered out (for example, if this is a virtual analysis; or there were
             * 'troubled' ees. Note that 'supporting' includes 'non-specific' if they were recorded by the analyzer.
             */
            Collection<Long> supportingDatasets = Gene2GenePopulationServiceImpl.getSupportingExperimentIds(g2g,
                    positionToIDMap);
            // necessary in case any were filtered out.
            supportingDatasets.retainAll(filteredEeIds);
            cvo.setSupportingExperiments(supportingDatasets);
            Collection<Long> specificDatasets = Gene2GenePopulationServiceImpl.getSpecificExperimentIds(g2g,
                    positionToIDMap);
            /*
             * Specific probe EEids contains 1 even if the data set wasn't supporting.
             */
            specificDatasets.retainAll(supportingDatasets);
            int numTestingDatasets = testingDatasets.size();
            int numSupportingDatasets = supportingDatasets.size();
            /*
             * SANITY CHECKS
             */
            assert specificDatasets.size() <= numSupportingDatasets;
            assert numTestingDatasets >= numSupportingDatasets;
            assert numTestingDatasets <= eevos.size();
            cvo.setDatasetVector(
                    getDatasetVector(supportingDatasets, testingDatasets, specificDatasets, relevantEEIdList));
            /*
             * This check is necessary in case any data sets were filtered out. (i.e., we're not interested in the
             * full set of data sets that were used in the original analysis.
             */
            if (numSupportingDatasets < stringency) {
                continue;
            }
            allTestedDataSets.addAll(testingDatasets);
            int supportFromSpecificProbes = specificDatasets.size();
            // Record support as positive or negative depending on the sign of the effect
            if (g2g.getEffect() < 0) {
                cvo.setPosSupp(0);
                cvo.setNegSupp(numSupportingDatasets);
                if (numSupportingDatasets != supportFromSpecificProbes)
                    cvo.setNonSpecNegSupp(numSupportingDatasets - supportFromSpecificProbes);
                ++linksMetNegativeStringency;
            } else {
                cvo.setPosSupp(numSupportingDatasets);
                if (numSupportingDatasets != supportFromSpecificProbes)
                    cvo.setNonSpecPosSupp(numSupportingDatasets - supportFromSpecificProbes);
                cvo.setNegSupp(0);
                ++linksMetPositiveStringency;
            }
            cvo.setSupportKey(Math.max(cvo.getPosSupp(), cvo.getNegSupp()));
            cvo.setNumTestedIn(numTestingDatasets);
            for (Long id : supportingDatasets) {
                supportCount.increment(id);
            }
            cvo.setSortKey();
            /*
             * This check prevents links from being shown twice when we do "among query genes". We don't skip
             * entirely so we get the counts for the summary table populated correctly.
             */
            if (!seen.contains(g2g)) {
                ecvos.add(cvo);
            }
            seen.add(g2g);
            allSupportingDatasets.addAll(supportingDatasets);
            allDatasetsWithSpecificProbes.addAll(specificDatasets);
        }
        Collection<Long> geneIds = new ArrayList<Long>();
        for (Gene g : allUsedGenes) {
            geneIds.add(g.getId());
        }
        populateNodeDegree(ecvos, geneIds, allTestedDataSets);
        if (timer.getTime() > 1000) {
            log.info("Postprocess " + g2gs.size() + " results for " + queryGene.getOfficialSymbol()
                    + "Phase II: " + timer.getTime() + "ms");
        }
        timer.stop();
        timer.reset();
        timer.start();
        // This is only necessary for debugging purposes. Helps us keep
        // track of duplicate genes found above.
        if (log.isDebugEnabled()) {
            for (Gene foundGene : foundGenes.keySet()) {
                if (foundGenes.get(foundGene).size() > 1) {
                    log.debug("** DUPLICATE: " + foundGene.getOfficialSymbol()
                            + " found multiple times. Gene2Genes objects are: ");
                    for (Gene2GeneCoexpression g1g : foundGenes.get(foundGene)) {
                        log.debug(" ============ Gene2Gene Id: " + g1g.getId() + " 1st gene: "
                                + g1g.getFirstGene().getOfficialSymbol() + " 2nd gene: "
                                + g1g.getSecondGene().getOfficialSymbol() + " Source Analysis: "
                                + g1g.getSourceAnalysis().getId() + " # of dataSets: " + g1g.getNumDataSets());
                    }
                }
            }
        }
        CoexpressionSummaryValueObject summary = makeSummary(eevos, allTestedDataSets,
                allDatasetsWithSpecificProbes, linksMetPositiveStringency, linksMetNegativeStringency);
        result.getSummary().put(queryGene.getOfficialSymbol(), summary);
        generateDatasetSummary(eevos, result, supportCount, allSupportingDatasets, queryGene);
        /*
         * FIXME I'm lazy and rushed, so I'm using an existing field for this info; probably better to add another
         * field to the value object...
         */
        for (ExpressionExperimentValueObject eevo : eevos) {
            eevo.setExternalUri(AnchorTagUtil.getExpressionExperimentUrl(eevo.getId()));
        }
        Collections.sort(ecvos);
        getGoOverlap(ecvos, queryGene);
        timer.stop();
        if (timer.getTime() > 1000) {
            log.info("Postprocess " + g2gs.size() + " results for " + queryGene.getOfficialSymbol()
                    + " PhaseIII: " + timer.getTime() + "ms");
        }
        timer.reset();
    } // Over results.
    result.getKnownGeneResults().addAll(ecvos);
    return result;
}