List of usage examples for java.util Collection addAll
boolean addAll(Collection<? extends E> c);
From source file:com.atlassian.jira.util.system.ExtendedSystemInfoUtilsImpl.java
/**
 * Returns every service known to the service manager, ordered by service name.
 *
 * @return a name-sorted set of all registered {@link JiraServiceContainer}s
 */
public Collection<JiraServiceContainer> getServices() {
    final Collection<JiraServiceContainer> sortedServices = new TreeSet<JiraServiceContainer>(
            JiraService.NAME_COMPARATOR);
    for (final JiraServiceContainer service : serviceManager.getServices()) {
        sortedServices.add(service);
    }
    return sortedServices;
}
From source file:org.openmrs.module.providermanagement.api.impl.ProviderSuggestionServiceImpl.java
/**
 * Core helper shared by the supervisor- and supervisee-suggestion lookups.
 * Collects suggestion rules for all of the provider's roles, evaluates them
 * (union of results across rules), then restricts the result to valid roles
 * and removes existing supervision relationships.
 *
 * @param provider the person to produce suggestions for; must be an existing provider
 * @param type     whether to suggest supervisees or supervisors for this provider
 * @return the suggested persons, or null when the provider has no roles, no
 *         suggestions apply, or no valid supervisory roles exist
 * @throws PersonIsNotProviderException  if the person is not a provider
 * @throws SuggestionEvaluationException if a suggestion rule fails to evaluate
 */
private List<Person> suggestSupervisionForProviderHelper(Person provider, SupervisionSuggestionType type)
        throws PersonIsNotProviderException, SuggestionEvaluationException {
    // null provider is a programming error, reported via the generic API exception
    if (provider == null) {
        throw new APIException("Provider cannot be null");
    }
    // fail if the person is not a provider
    if (!Context.getService(ProviderManagementService.class).isProvider(provider)) {
        throw new PersonIsNotProviderException(provider + " is not a provider");
    }
    // first, get all the roles for this provider
    List<ProviderRole> roles = Context.getService(ProviderManagementService.class).getProviderRoles(provider);
    // if the provider has no roles, there is nothing to suggest
    if (roles == null || roles.size() == 0) {
        return null;
    }
    // now get all the roles that this provider can supervise or be supervised by (depending on type)
    List<ProviderRole> validRoles;
    if (type.equals(SupervisionSuggestionType.SUPERVISEE_SUGGESTION)) {
        validRoles = Context.getService(ProviderManagementService.class)
                .getProviderRolesThatProviderCanSupervise(provider);
    } else {
        validRoles = Context.getService(ProviderManagementService.class)
                .getProviderRolesThatCanSuperviseThisProvider(provider);
    }
    // get any suggestions based on the provider roles (a Set, so duplicate rules collapse)
    Set<SupervisionSuggestion> suggestions = new HashSet<SupervisionSuggestion>();
    for (ProviderRole role : roles) {
        List<SupervisionSuggestion> s = getSupervisionSuggestionsByProviderRoleAndSuggestionType(role, type);
        if (s != null && s.size() > 0) {
            suggestions.addAll(s);
        }
    }
    // if there are no suggestions, or no valid roles, just return null
    if (suggestions.size() == 0 || validRoles == null || validRoles.size() == 0) {
        return null;
    }
    // otherwise, get all the providers that match the suggestion rules
    Collection<Person> suggestedProviders = new HashSet<Person>();
    for (SupervisionSuggestion suggestion : suggestions) {
        try {
            SuggestionEvaluator evaluator = suggestion.instantiateEvaluator();
            Set<Person> p = evaluator.evaluate(suggestion, provider);
            if (p != null) {
                // note that we are doing union, not intersection, here if there are multiple rules
                suggestedProviders.addAll(p);
            }
        } catch (Exception e) {
            // wrap any evaluator failure, preserving the cause
            throw new SuggestionEvaluationException("Unable to evaluate suggestion " + suggestion, e);
        }
    }
    // only keep providers that are valid for this provider to supervise or be supervised by
    suggestedProviders.retainAll(
            Context.getService(ProviderManagementService.class).getProvidersAsPersonsByRoles(validRoles));
    // finally, remove any providers that this provider is already supervising or being supervised by
    if (type.equals(SupervisionSuggestionType.SUPERVISEE_SUGGESTION)) {
        suggestedProviders.removeAll(Context.getService(ProviderManagementService.class)
                .getSuperviseesForSupervisor(provider, new Date()));
    } else {
        suggestedProviders.removeAll(Context.getService(ProviderManagementService.class)
                .getSupervisorsForProvider(provider, new Date()));
    }
    // return the result set
    return new ArrayList<Person>(suggestedProviders);
}
From source file:chibi.gemmaanalysis.CoexpressionAnalysisService.java
/**
 * Create and populate the coexpression matrices (correlation matrix, sample size matrix,
 * expression level matrix) for the given experiments and gene lists.
 *
 * @param ees               experiments to analyse; each one becomes a slice of the 3-D matrices
 * @param queryGenes        genes indexing the rows of each slice
 * @param targetGenes       genes indexing the columns of each slice
 * @param filterConfig      filtering applied when loading each expression data matrix
 * @param correlationMethod correlation to compute; defaults to Pearson when null
 * @return the populated matrices; cells are left unset for experiments that could not be processed
 */
public CoexpressionMatrices calculateCoexpressionMatrices(Collection<BioAssaySet> ees,
        Collection<Gene> queryGenes, Collection<Gene> targetGenes, FilterConfig filterConfig,
        CorrelationMethod correlationMethod) {
    // Pearson is the default correlation method
    if (correlationMethod == null)
        correlationMethod = CorrelationMethod.PEARSON;
    CoexpressionMatrices matrices = new CoexpressionMatrices(ees, queryGenes, targetGenes);
    DenseDouble3dMatrix<Gene, Gene, BioAssaySet> correlationMatrix = matrices.getCorrelationMatrix();
    DenseDouble3dMatrix<Gene, Gene, BioAssaySet> sampleSizeMatrix = matrices.getSampleSizeMatrix();
    int count = 1;
    int numEes = ees.size();
    // calculate correlations
    log.info("Calculating correlation and sample size matrices");
    StopWatch watch = new StopWatch();
    watch.start();
    for (BioAssaySet bas : ees) {
        ExpressionExperiment ee = (ExpressionExperiment) bas;
        log.info("Processing " + ee.getShortName() + " (" + count++ + " of " + numEes + ")");
        int slice = correlationMatrix.getSliceIndexByName(ee);
        // get all the composite sequences from every array design used by this experiment
        Collection<ArrayDesign> ads = eeService.getArrayDesignsUsed(ee);
        Collection<CompositeSequence> css = new HashSet<CompositeSequence>();
        for (ArrayDesign ad : ads) {
            css.addAll(adService.getCompositeSequences(ad));
        }
        Map<Gene, Collection<CompositeSequence>> gene2css = getGene2CsMap(css);
        ExpressionDataDoubleMatrix dataMatrix = getExpressionDataMatrix(ee, filterConfig);
        if (dataMatrix == null) {
            // skip this experiment rather than aborting the whole run
            log.error("ERROR: cannot process " + ee.getShortName());
            continue;
        }
        for (Gene qGene : queryGenes) {
            int row = correlationMatrix.getRowIndexByName(qGene);
            for (Gene tGene : targetGenes) {
                int col = correlationMatrix.getColIndexByName(tGene);
                Collection<CompositeSequence> queryCss = gene2css.get(qGene);
                Collection<CompositeSequence> targetCss = gene2css.get(tGene);
                // only cells where both genes have probes on this platform can be filled in
                if (queryCss != null && targetCss != null) {
                    CorrelationSampleSize corr = calculateCorrelation(queryCss, targetCss, dataMatrix,
                            correlationMethod);
                    if (corr != null) {
                        correlationMatrix.set(slice, row, col, corr.correlation);
                        sampleSizeMatrix.set(slice, row, col, corr.sampleSize);
                    }
                }
            }
        }
    }
    watch.stop();
    log.info("Calculated correlations of all " + numEes + " in " + watch);
    return matrices;
}
From source file:com.atlassian.jira.startup.JiraSystemInfo.java
/** * Only call AFTER JIRA is fully up!/*from ww w . j a v a 2s . c o m*/ * <p/> * NOTE : Only PRIVILEGED CODE should make this call. This code goes to the Trusted Apps Manager directly. It does * this because at JIRA start time, there is no user. So be careful when calling this method and don't propagate * more permissions that are required. */ public void obtainTrustedApps() { final TrustedApplicationManager trustedAppManager = ComponentAccessor .getComponentOfType(TrustedApplicationManager.class); final Collection<TrustedApplicationInfo> trustedApplications = new TreeSet<TrustedApplicationInfo>( TrustedApplicationService.NAME_COMPARATOR); // // PRIVILEGED CALL HERE trustedApplications.addAll(trustedAppManager.getAll()); logMsg.outputHeader("Trusted Applications"); logMsg.outputProperty("Instance Count", String.valueOf(trustedApplications.size())); logMsg.add(""); for (final Object element : trustedApplications) { final TrustedApplicationInfo applicationInfo = (TrustedApplicationInfo) element; logMsg.outputProperty("Application Name", applicationInfo.getName()); logMsg.outputProperty("Matching URLs", applicationInfo.getUrlMatch(), 2); logMsg.outputProperty("Matching IP", applicationInfo.getIpMatch(), 2); } }
From source file:edu.stanford.muse.index.Lexicon.java
/** Core sentiment detection method. doNota = none of the above * @param captions (null/none = all) */ public Map<String, Collection<Document>> getEmotions(Indexer indexer, Collection<Document> docs, boolean doNota, boolean originalContentOnly, String... captions) { Collection<Lexicon1Lang> lexicons = getRelevantLexicon1Langs(docs); Map<String, Collection<Document>> result = new LinkedHashMap<>(); Set<Document> docs_set = Util.castOrCloneAsSet(docs); // aggregate results for each lang into result for (Lexicon1Lang lex : lexicons) { Map<String, Collection<Document>> resultsForThisLang = (doNota ? lex.getEmotionsWithNOTA(indexer, docs_set, originalContentOnly) : lex.getEmotions(indexer, docs_set, originalContentOnly, captions)); if (resultsForThisLang == null) continue; for (String caption : resultsForThisLang.keySet()) { Collection<Document> resultDocsThisLang = resultsForThisLang.get(caption); Collection<Document> resultDocs = result.get(caption); // if caption doesn't exist already, create a new entry, or else add to the existing set of docs that match this caption if (resultDocs == null) result.put(caption, resultDocsThisLang); else// w w w . j a v a 2 s.c om resultDocs.addAll(resultDocsThisLang); } } // TODO: the result can be cached at server to avoid redundant computation (by concurrent users, which are few for now) return result; }
From source file:graph.module.OntologyEdgeModule.java
/**
 * Records (node, key) pairs for any nodes that are not part of the DAG.
 * For an OntologyFunction, recurses into its sub-nodes with positional sub-keys;
 * any other node that is not a DAGNode is itself recorded as non-DAG.
 */
@Override
protected void addIfNonDAG(Node node, Object key, Collection<Pair<Node, Object>> nonDAGNodes) {
    if (node instanceof OntologyFunction) {
        Node[] subnodes = ((OntologyFunction) node).getNodes();
        // Interleaved (node, key) pairs: even slots hold sub-nodes, odd slots their keys.
        Object[] subargs = new Object[subnodes.length * 2];
        for (int j = 0; j < subnodes.length; j++) {
            subargs[j * 2] = subnodes[j];
            // Sub-key is "<parentKey><FUNC_SPLIT><1-based argument position>".
            // NOTE(review): when key is null this concatenates the literal string "null" —
            // presumably tolerated because the null-key branch below discards the sub-keys; confirm.
            subargs[j * 2 + 1] = key + FUNC_SPLIT + (j + 1);
        }
        Collection<Pair<Node, Object>> subNonDAGs = findNonDAGs(subargs);
        // With no key context, collapse any non-DAG findings to the function node itself;
        // otherwise propagate the individual sub-node results.
        if (key == null && !subNonDAGs.isEmpty())
            nonDAGNodes.add(new Pair<Node, Object>(node, key));
        else
            nonDAGNodes.addAll(subNonDAGs);
    } else if (!(node instanceof DAGNode))
        nonDAGNodes.add(new Pair<Node, Object>(node, key));
}
From source file:com.evolveum.midpoint.common.refinery.CompositeRefinedObjectClassDefinitionImpl.java
@NotNull @Override//ww w . j ava 2s. c om public Collection<? extends RefinedAttributeDefinition<?>> getAttributeDefinitions() { if (auxiliaryObjectClassDefinitions.isEmpty()) { return structuralObjectClassDefinition.getAttributeDefinitions(); } Collection<? extends RefinedAttributeDefinition<?>> defs = new ArrayList<>(); defs.addAll((Collection) structuralObjectClassDefinition.getAttributeDefinitions()); for (RefinedObjectClassDefinition auxiliaryObjectClassDefinition : auxiliaryObjectClassDefinitions) { for (RefinedAttributeDefinition<?> auxRAttrDef : auxiliaryObjectClassDefinition .getAttributeDefinitions()) { boolean add = true; for (RefinedAttributeDefinition def : defs) { if (def.getName().equals(auxRAttrDef.getName())) { add = false; break; } } if (add) { ((Collection) defs).add(auxRAttrDef); } } } return defs; }
From source file:com.nextep.designer.synch.services.impl.ReverseSynchronizationService.java
@Override
public void addToView(IWorkspace view, IVersionContainer container, IVersionable<?> imported,
        IReverseSynchronizationContext context) {
    // Default to the current workspace, and to the view itself as container, when not supplied
    if (view == null) {
        view = VCSPlugin.getViewService().getCurrentWorkspace();
    }
    if (container == null) {
        container = view;
    }
    Assert.notNull(imported, SynchMessages.getString("synch.reverse.nullElementError")); //$NON-NLS-1$
    Assert.notNull(context, SynchMessages.getString("synch.reverse.noContextError")); //$NON-NLS-1$
    final Map<IReference, IReference> extRefMap = context.getSourceReferenceMapping();
    // Replacing db => view dependency when it maps
    SynchronizationHelper.replaceDependency(imported, extRefMap);
    // Replacing any dependency and removes any existing object
    // NOTE(review): this call is identical to the one above — possibly a copy/paste
    // duplicate; harmless if replaceDependency is idempotent, but worth confirming.
    SynchronizationHelper.replaceDependency(imported, extRefMap);
    Collection<IReference> importedRefs = new ArrayList<IReference>();
    // Adding current references as imported references
    importedRefs.add(imported.getReference());
    importedRefs.addAll(imported.getReferenceMap().keySet());
    // Checking all dependencies have already been imported
    if (context.shouldCheckForExternals()) {
        // Current view contents
        final Map<IReference, IReferenceable> viewContents = view.getReferenceMap();
        final Collection<IReference> dependencies = imported.getReferenceDependencies();
        for (IReference r : dependencies) {
            // a dependency must be either part of this import or already in the view
            if (!importedRefs.contains(r) && !viewContents.containsKey(r)) {
                // build the best available display name for the unresolved reference
                String objName = SynchMessages.getString("synch.reverse.unknownName"); //$NON-NLS-1$
                if (r.getInstance() != null && r.getInstance() instanceof INamedObject) {
                    objName = ((INamedObject) r.getInstance()).getName();
                } else if (r.getArbitraryName() != null) {
                    objName = r.getArbitraryName();
                }
                throw new UnresolvedCheckFailedException(MessageFormat.format(
                        SynchMessages.getString("unresolvedWouldAppear"), imported.getType().getName() //$NON-NLS-1$
                                .toLowerCase(),
                        imported.getName(), r.getType().getName().toLowerCase(), objName));
            }
        }
    }
    // Importing
    if (!context.getImportPolicy().importVersionable(imported, container,
            getVersioningService().getCurrentActivity())) {
        throw new ImportFailedException(imported);
    }
}
From source file:com.pinterest.teletraan.worker.ClusterReplacer.java
/**
 * Step 1. INIT state will launch hosts outside of the auto scaling group.
 * The number of hosts to be launched should be max_parallel_rp.
 * If launching failed, retry INIT state until timeout meets.
 */
private void processInitState(ClusterUpgradeEventBean eventBean) throws Exception {
    String clusterName = eventBean.getCluster_name();
    EnvironBean environBean = environDAO.getById(eventBean.getEnv_id());
    // guard against an unset/invalid max_parallel_rp: always launch at least one host
    int totToLaunch = environBean.getMax_parallel_rp() <= 0 ? 1 : environBean.getMax_parallel_rp();
    // subtract hosts already launched by a previous (retried) pass through INIT
    if (!StringUtils.isEmpty(eventBean.getHost_ids())) {
        Collection<String> oldHostIds = Arrays.asList(eventBean.getHost_ids().split(","));
        totToLaunch -= oldHostIds.size();
    }
    LOG.info(String.format("Start to launch hosts (number to launch: %d)", totToLaunch));
    boolean succeeded = true;
    // launch in batches of at most MAX_HOST_LAUNCH_SIZE until the target count is met
    while (totToLaunch > 0) {
        int numToLaunch = Math.min(totToLaunch, MAX_HOST_LAUNCH_SIZE);
        Collection<HostBean> newHosts = clusterManager.launchHosts(clusterName, numToLaunch, false);
        if (newHosts.isEmpty()) {
            // launch failure: stop this pass; INIT will be retried later (until timeout)
            LOG.error(String.format("Failed to launch %s hosts in INIT state", numToLaunch));
            succeeded = false;
            break;
        }
        LOG.info(String.format("Successfully launched %d hosts: %s", newHosts.size(), newHosts.toString()));
        // record the new hosts and merge their ids with any previously launched ones
        Collection<String> updateHostIds = new ArrayList<>();
        for (HostBean host : newHosts) {
            updateHostIds.add(host.getHost_id());
            hostDAO.insert(host);
        }
        if (!StringUtils.isEmpty(eventBean.getHost_ids())) {
            Collection<String> oldHostIds = Arrays.asList(eventBean.getHost_ids().split(","));
            updateHostIds.addAll(oldHostIds);
        }
        updateHostsInClusterEvent(eventBean.getId(), updateHostIds);
        totToLaunch -= newHosts.size();
    }
    if (succeeded) {
        // all requested hosts launched; advance the upgrade event to LAUNCHING
        LOG.info("Successfully completed INIT state, move to LAUNCHING state");
        ClusterUpgradeEventBean updateBean = new ClusterUpgradeEventBean();
        updateBean.setState(ClusterUpgradeEventState.LAUNCHING);
        updateBean.setStatus(ClusterUpgradeEventStatus.SUCCEEDED);
        transitionState(eventBean.getId(), updateBean);
    }
}