A list of usage examples for the java.util.Collection method addAll:
boolean addAll(Collection<? extends E> c);
From source file:com.dragome.compiler.units.ClassUnit.java
private void getDeclaredMembersInInterfaces(ClassUnit classUnit, Collection<MemberUnit> interfacesMembers) { if (classUnit != null) { if (isInterface) interfacesMembers.addAll(filterMethods(classUnit.getDeclaredMembers())); for (ClassUnit interfaceUnit : classUnit.getInterfaces()) { interfacesMembers.addAll(filterMethods(interfaceUnit.getDeclaredMembers())); interfaceUnit.getDeclaredMembersInInterfaces(interfaceUnit, interfacesMembers); }//from ww w . j a v a2 s . c o m getDeclaredMembersInInterfaces(classUnit.getSuperUnit(), interfacesMembers); } }
From source file:com.cloudera.oryx.ml.serving.als.model.ALSServingModel.java
public Collection<String> getAllItemIDs() { Collection<String> itemsList = new ArrayList<>(); for (int partition = 0; partition < Y.length; partition++) { Lock lock = yLocks[partition].readLock(); lock.lock();//from w w w . ja va 2 s. c o m try { itemsList.addAll(Y[partition].keySet()); } finally { lock.unlock(); } } return itemsList; }
From source file:edu.cornell.mannlib.vitro.webapp.controller.edit.listing.PropertyWebappsListingController.java
/**
 * Renders the object-property listing page. Builds a flat list of cell strings
 * ("results", NUM_COLS columns per row) plus page attributes and buttons, then
 * forwards to the horizontal-listing JSP. With a "propsForClass" parameter the
 * listing is restricted to properties applicable to "vclassUri" (and its
 * superclasses/equivalents); an "ontologyUri" parameter further filters by
 * namespace; "iffRoot" restricts to root properties.
 */
public void doGet(HttpServletRequest request, HttpServletResponse response) {
    // Permission gate: the check writes its own response when not authorized.
    if (!isAuthorizedToDisplayPage(request, response, SimplePermission.EDIT_ONTOLOGY.ACTIONS)) {
        return;
    }
    VitroRequest vrequest = new VitroRequest(request);
    String noResultsMsgStr = "No object properties found";
    String ontologyUri = request.getParameter("ontologyUri");
    ObjectPropertyDao dao = vrequest.getFullWebappDaoFactory().getObjectPropertyDao();
    PropertyInstanceDao piDao = vrequest.getFullWebappDaoFactory().getPropertyInstanceDao();
    VClassDao vcDao = vrequest.getFullWebappDaoFactory().getVClassDao();
    PropertyGroupDao pgDao = vrequest.getFullWebappDaoFactory().getPropertyGroupDao();
    String vclassURI = request.getParameter("vclassUri");
    List props = new ArrayList();
    if (request.getParameter("propsForClass") != null) {
        noResultsMsgStr = "There are no properties that apply to this class.";
        // incomplete list of classes to check, but better than before
        List<String> superclassURIs = vcDao.getAllSuperClassURIs(vclassURI);
        superclassURIs.add(vclassURI);
        superclassURIs.addAll(vcDao.getEquivalentClassURIs(vclassURI));
        // Key by property URI so the same property reached via several classes
        // collapses to a single entry.
        Map<String, PropertyInstance> propInstMap = new HashMap<String, PropertyInstance>();
        for (String classURI : superclassURIs) {
            Collection<PropertyInstance> propInsts = piDao.getAllPropInstByVClass(classURI);
            for (PropertyInstance propInst : propInsts) {
                propInstMap.put(propInst.getPropertyURI(), propInst);
            }
        }
        List<PropertyInstance> propInsts = new ArrayList<PropertyInstance>();
        propInsts.addAll(propInstMap.values());
        Collections.sort(propInsts);
        Iterator propInstIt = propInsts.iterator();
        // propURIs guards against duplicates (the map should already be unique,
        // so this is belt-and-braces).
        HashSet propURIs = new HashSet();
        while (propInstIt.hasNext()) {
            PropertyInstance pi = (PropertyInstance) propInstIt.next();
            if (!(propURIs.contains(pi.getPropertyURI()))) {
                propURIs.add(pi.getPropertyURI());
                ObjectProperty prop = (ObjectProperty) dao.getObjectPropertyByURI(pi.getPropertyURI());
                if (prop != null) {
                    props.add(prop);
                }
            }
        }
    } else {
        props = (request.getParameter("iffRoot") != null) ? dao.getRootObjectProperties()
                : dao.getAllObjectProperties();
    }
    OntologyDao oDao = vrequest.getFullWebappDaoFactory().getOntologyDao();
    // Namespace -> ontology display name cache; populated as a side effect of the
    // loop below. NOTE(review): ontologyHash is never read afterwards — TODO confirm
    // whether downstream JSPs depend on it or it is vestigial.
    HashMap<String, String> ontologyHash = new HashMap<String, String>();
    Iterator propIt = props.iterator();
    List<ObjectProperty> scratch = new ArrayList();
    while (propIt.hasNext()) {
        ObjectProperty p = (ObjectProperty) propIt.next();
        if (p.getNamespace() != null) {
            if (!ontologyHash.containsKey(p.getNamespace())) {
                Ontology o = (Ontology) oDao.getOntologyByURI(p.getNamespace());
                if (o == null) {
                    if (!VitroVocabulary.vitroURI.equals(p.getNamespace())) {
                        log.debug("doGet(): no ontology object found for the namespace " + p.getNamespace());
                    }
                } else {
                    ontologyHash.put(p.getNamespace(), o.getName() == null ? p.getNamespace() : o.getName());
                }
            }
            // Collect properties matching the requested ontology filter.
            if (ontologyUri != null && p.getNamespace().equals(ontologyUri)) {
                scratch.add(p);
            }
        }
    }
    if (ontologyUri != null) {
        props = scratch;
    }
    if (props != null) {
        Collections.sort(props, new ObjectPropertyHierarchyListingController.ObjectPropertyAlphaComparator());
    }
    // The results list is a flat sequence of cells; the first NUM_COLS entries
    // are the header row.
    ArrayList results = new ArrayList();
    results.add("XX"); // column 1
    results.add("property public name"); // column 2
    results.add("prefix + local name"); // column 3
    results.add("domain"); // column 4
    results.add("range"); // column 5
    results.add("group"); // column 6
    results.add("display tier"); // column 7
    results.add("display level"); // column 8
    results.add("update level"); // column 9
    if (props != null) {
        if (props.size() == 0) {
            // Single placeholder row carrying the "no results" message.
            results.add("XX");
            results.add("<strong>" + noResultsMsgStr + "</strong>");
            results.add("");
            results.add("");
            results.add("");
            results.add("");
            results.add("");
            results.add("");
            results.add("");
        } else {
            Iterator propsIt = props.iterator();
            while (propsIt.hasNext()) {
                ObjectProperty prop = (ObjectProperty) propsIt.next();
                results.add("XX"); // column 1 (row marker)
                String propNameStr = ObjectPropertyHierarchyListingController.getDisplayLabel(prop);
                try {
                    // column 2: name linked to the property-edit page
                    results.add("<a href=\"./propertyEdit?uri=" + URLEncoder.encode(prop.getURI(), "UTF-8")
                            + "\">" + propNameStr + "</a>");
                } catch (Exception e) {
                    // column 2: fall back to the bare name if URL-encoding fails
                    results.add(propNameStr);
                }
                results.add(prop.getLocalNameWithPrefix()); // column 3
                VClass vc = (prop.getDomainVClassURI() != null) ? vcDao.getVClassByURI(prop.getDomainVClassURI())
                        : null;
                String domainStr = (vc != null) ? vc.getLocalNameWithPrefix() : "";
                results.add(domainStr); // column 4
                vc = (prop.getRangeVClassURI() != null) ? vcDao.getVClassByURI(prop.getRangeVClassURI()) : null;
                String rangeStr = (vc != null) ? vc.getLocalNameWithPrefix() : "";
                results.add(rangeStr); // column 5
                if (prop.getGroupURI() != null) {
                    PropertyGroup pGroup = pgDao.getGroupByURI(prop.getGroupURI());
                    results.add(pGroup == null ? "unknown group" : pGroup.getName()); // column 6
                } else {
                    results.add("unspecified");
                }
                if (prop.getDomainDisplayTierInteger() != null) {
                    results.add(Integer.toString(prop.getDomainDisplayTierInteger(), BASE_10)); // column 7
                } else {
                    results.add(""); // column 7
                }
                results.add(prop.getHiddenFromDisplayBelowRoleLevel() == null ? "(unspecified)"
                        : prop.getHiddenFromDisplayBelowRoleLevel().getShorthand()); // column 8
                results.add(prop.getProhibitedFromUpdateBelowRoleLevel() == null ? "(unspecified)"
                        : prop.getProhibitedFromUpdateBelowRoleLevel().getShorthand()); // column 9
            }
        }
        request.setAttribute("results", results);
    }
    request.setAttribute("columncount", new Integer(NUM_COLS));
    request.setAttribute("suppressquery", "true");
    request.setAttribute("title", "Object Properties");
    request.setAttribute("bodyJsp", Controllers.HORIZONTAL_JSP);
    // new way of adding more than one button
    List<ButtonForm> buttons = new ArrayList<ButtonForm>();
    HashMap<String, String> newPropParams = new HashMap<String, String>();
    newPropParams.put("controller", "Property");
    ButtonForm newPropButton = new ButtonForm(Controllers.RETRY_URL, "buttonForm", "Add new object property",
            newPropParams);
    buttons.add(newPropButton);
    HashMap<String, String> rootPropParams = new HashMap<String, String>();
    rootPropParams.put("iffRoot", "true");
    String temp;
    // Propagate the ontology filter into the "root properties" button link.
    if ((temp = vrequest.getParameter("ontologyUri")) != null) {
        rootPropParams.put("ontologyUri", temp);
    }
    ButtonForm rootPropButton = new ButtonForm("showObjectPropertyHierarchy", "buttonForm", "root properties",
            rootPropParams);
    buttons.add(rootPropButton);
    request.setAttribute("topButtons", buttons);
    /* original way of adding 1 button
    request.setAttribute("horizontalJspAddButtonUrl", Controllers.RETRY_URL);
    request.setAttribute("horizontalJspAddButtonText", "Add new object property");
    request.setAttribute("horizontalJspAddButtonControllerParam", "Property");
    */
    RequestDispatcher rd = request.getRequestDispatcher(Controllers.BASIC_JSP);
    try {
        rd.forward(request, response);
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
From source file:chibi.gemmaanalysis.BatchDiffExCli.java
/**
 * Runs the batch-effect evaluation pipeline for one experiment: screens it for
 * suitability, fits a differential-expression model without the batch factor
 * (baseline), with the batch factor, after ComBat correction with batch, and
 * after correction without batch; appends per-factor tallies to the shared
 * summary file and writes a per-probe q-value detail file plus the raw and
 * corrected data matrices. Failures and screen-outs are recorded in
 * errorObjects; successes in successObjects.
 */
@Override
protected void processExperiment(ExpressionExperiment ee) {
    // File-system-safe prefix: id plus sanitized short name.
    String fileprefix = ee.getId() + "." + ee.getShortName().replaceAll("[\\W\\s]+", "_");
    try (Writer detailFile = initOutputFile("batch.proc.detail." + fileprefix + ".txt");) {
        Collection<ExperimentalFactor> experimentalFactors = ee.getExperimentalDesign()
                .getExperimentalFactors();
        ExperimentalFactor batchFactor = expressionExperimentBatchCorrectionService.getBatchFactor(ee);
        // --- Suitability screens; each failure records a reason and bails out. ---
        if (null == batchFactor) {
            this.errorObjects.add("No batch factor: " + ee.getShortName());
            return;
        }
        if (experimentalFactors.size() < 2) {
            // need at least two factors, one of which has to be the batch.
            this.errorObjects.add("Too few factors: " + ee.getShortName());
            return;
        }
        if (ee.getBioAssays().size() < 8) {
            this.errorObjects.add("Too small (" + ee.getBioAssays().size() + " samples): " + ee.getShortName());
            return;
        }
        if (experimentalFactors.size() > 10) {
            /*
             * This could be modified to select just a few factors, at random ... but that's probably
             * not worth the complexity.
             */
            this.errorObjects.add("Too many factors (" + experimentalFactors.size()
                    + " factors, including 'batch'): " + ee.getShortName());
            return;
        }
        /* TODO use this, or skip it... we have this information elsewhere already */
        // expressionExperimentBatchCorrectionService.checkBatchEffectSeverity( ee );
        boolean correctable = expressionExperimentBatchCorrectionService.checkCorrectability(ee);
        if (!correctable) {
            /*
             * Note that later on we can still end up with a model that is not of full rank, so combat will fail.
             * This can sometimes be ameliorated by dropping covariates.
             */
            this.errorObjects
                    .add("Batch effect is not correctable; possibly contains batches with only one sample: "
                            + ee.getShortName());
            return;
        }
        log.info("Processing: " + ee);
        /*
         * Extract data
         */
        Collection<ProcessedExpressionDataVector> vectos = processedExpressionDataVectorService
                .getProcessedDataVectors(ee);
        ExpressionDataDoubleMatrix mat = new ExpressionDataDoubleMatrix(vectos);
        /*
         * TODO for some data sets we should re-normalize?
         */
        StringBuilder summaryBuf = new StringBuilder();
        /*
         * Pass 1 ("Before"): analysis without batch; this is our baseline. Interactions
         * are ignored to keep things simple.
         */
        Collection<ExperimentalFactor> factors2 = new HashSet<ExperimentalFactor>();
        for (ExperimentalFactor ef : experimentalFactors) {
            if (ef.equals(batchFactor))
                continue;
            factors2.add(ef);
        }
        int j = 0; // global progress counter across all four passes
        DifferentialExpressionAnalysisConfig configWithoutBatch = new DifferentialExpressionAnalysisConfig();
        configWithoutBatch.setQvalueThreshold(null);
        configWithoutBatch.setFactorsToInclude(factors2);
        DifferentialExpressionAnalysis beforeResults = lma.run(ee, mat, configWithoutBatch).iterator().next();
        Map<CompositeSequence, Map<ExperimentalFactor, Double>> beforeResultDetails = new HashMap<CompositeSequence, Map<ExperimentalFactor, Double>>();
        for (ExpressionAnalysisResultSet brs : beforeResults.getResultSets()) {
            assert brs.getExperimentalFactors().size() == 1;
            ExperimentalFactor ef = brs.getExperimentalFactors().iterator().next();
            Collection<DifferentialExpressionAnalysisResult> results = brs.getResults();
            int c = 0; // count of results passing the tally criterion for this factor
            for (DifferentialExpressionAnalysisResult r : results) {
                c = tally(beforeResultDetails, ef, r, c);
                if (++j % LOGGING_FREQ == 0) {
                    log.info(j + " processed");
                }
            }
            summaryBuf.append("Before\t" + ee.getId() + "\t" + ee.getShortName() + "\t" + ef.getId() + "\t"
                    + ef.getName() + "\t" + results.size() + "\t" + c + "\n");
        }
        /*
         * Pass 2 ("Batch"): then do it with batch.
         */
        Collection<ExperimentalFactor> factors = experimentalFactors;
        assert factors.contains(batchFactor);
        DifferentialExpressionAnalysisConfig configIncludingBatch = new DifferentialExpressionAnalysisConfig();
        configIncludingBatch.setQvalueThreshold(summaryQvalThreshold);
        configIncludingBatch.setFactorsToInclude(factors);
        DifferentialExpressionAnalysis withBatchEffectResults = lma.run(ee, mat, configIncludingBatch)
                .iterator().next();
        /*
         * Determine how many genes are diff ex wrt batch. The other factors are tracked; this shows how we would do
         * if we tried to simply directly include batch in the model
         */
        Map<CompositeSequence, Map<ExperimentalFactor, Double>> batchEffectDetails = new HashMap<>();
        for (ExpressionAnalysisResultSet brs : withBatchEffectResults.getResultSets()) {
            assert brs.getExperimentalFactors().size() == 1;
            ExperimentalFactor ef = brs.getExperimentalFactors().iterator().next();
            Collection<DifferentialExpressionAnalysisResult> results = brs.getResults();
            int c = 0;
            for (DifferentialExpressionAnalysisResult r : results) {
                c = tally(batchEffectDetails, ef, r, c);
                if (++j % LOGGING_FREQ == 0) {
                    log.info(j + " processed");
                }
            }
            summaryBuf.append("Batch\t" + ee.getId() + "\t" + ee.getShortName() + "\t" + ef.getId() + "\t"
                    + ef.getName() + "\t" + results.size() + "\t" + c + "\n");
        }
        /*
         * Correct for batch effects; covariates which are "unimportant" will be dropped.
         */
        log.info("ComBat-ing");
        boolean parametric = true;
        double importanceThreshold = 0.01;
        ExpressionDataDoubleMatrix comBat = expressionExperimentBatchCorrectionService.comBat(mat, parametric,
                importanceThreshold);
        assert comBat != null;
        /*
         * Pass 3 ("BatchAftCorr"): check if we have removed the batch effect: there should
         * be no diff ex wrt batch. This is just a sanity check, really. The other factors
         * are tracked just for completeness. Note that Combat log transforms the data if
         * necessary, but transforms it back.
         */
        DifferentialExpressionAnalysis revisedResultWithBatch = lma.run(ee, comBat, configIncludingBatch)
                .iterator().next();
        Map<CompositeSequence, Map<ExperimentalFactor, Double>> batchEffectAfterCorrDetails = new HashMap<CompositeSequence, Map<ExperimentalFactor, Double>>();
        for (ExpressionAnalysisResultSet brs : revisedResultWithBatch.getResultSets()) {
            assert brs.getExperimentalFactors().size() == 1;
            ExperimentalFactor ef = brs.getExperimentalFactors().iterator().next();
            Collection<DifferentialExpressionAnalysisResult> results = brs.getResults();
            int c = 0;
            for (DifferentialExpressionAnalysisResult r : results) {
                c = tally(batchEffectAfterCorrDetails, ef, r, c);
                if (++j % LOGGING_FREQ == 0) {
                    log.info(j + " processed");
                }
            }
            summaryBuf.append("BatchAftCorr\t" + ee.getId() + "\t" + ee.getShortName() + "\t" + ef.getId() + "\t"
                    + ef.getName() + "\t" + results.size() + "\t" + c + "\n");
        }
        /*
         * Pass 4 ("After"): now without batch as a factor, which is what we really want.
         */
        boolean hasNonNulls = false;
        DifferentialExpressionAnalysis revisedResult = lma.run(ee, comBat, configWithoutBatch).iterator()
                .next();
        Map<CompositeSequence, Map<ExperimentalFactor, Double>> revisedResultDetails = new HashMap<CompositeSequence, Map<ExperimentalFactor, Double>>();
        for (ExpressionAnalysisResultSet brs : revisedResult.getResultSets()) {
            assert brs.getExperimentalFactors().size() == 1;
            ExperimentalFactor ef = brs.getExperimentalFactors().iterator().next();
            Collection<DifferentialExpressionAnalysisResult> results = brs.getResults();
            int c = 0;
            for (DifferentialExpressionAnalysisResult r : results) {
                // Track whether any usable corrected p-value came out of the fit.
                if (r.getCorrectedPvalue() != null && !Double.isNaN(r.getCorrectedPvalue())) {
                    hasNonNulls = true;
                }
                c = tally(revisedResultDetails, ef, r, c);
                if (++j % LOGGING_FREQ == 0) {
                    log.info(j + " processed");
                }
            }
            summaryBuf.append("After\t" + ee.getId() + "\t" + ee.getShortName() + "\t" + ef.getId() + "\t"
                    + ef.getName() + "\t" + results.size() + "\t" + c + "\n");
        }
        if (!hasNonNulls) {
            // this means something went wrong ... somewhere. Possibly the model cannot be fit.
            errorObjects.add("No valid pvalues after correction: " + ee.getShortName());
            return;
        }
        /*
         * Print out details
         */
        detailFile.write(
                "EEID\tEENAME\tEFID\tEFNAME\tPROBEID\tPROBENAME\tGENESYMBS\tGENEIDS\tBEFOREQVAL\tBATCHQVAL\tBATAFTERQVAL\tAFTERQVAL\n");
        getGeneAnnotations(ee);
        for (CompositeSequence c : beforeResultDetails.keySet()) {
            // Get the gene information
            String geneSymbs = "";
            String geneIds = "";
            if (genes.containsKey(c)) {
                Collection<Gene> g = new HashSet<Gene>();
                g.addAll(genes.get(c));
                CollectionUtils.transform(g, geneSymbolTransformer);
                geneSymbs = StringUtils.join(g, "|");
                geneIds = StringUtils.join(EntityUtils.getIds(genes.get(c)), "|");
            }
            for (ExperimentalFactor ef : factors) {
                detailFile.write(ee.getId() + "\t" + ee.getShortName() + "\t" + ef.getId() + "\t" + ef.getName()
                        + "\t" + c.getId() + "\t" + c.getName() + "\t" + geneSymbs + "\t" + geneIds + "\t");
                Double bpval = beforeResultDetails.get(c).get(ef); // will be null for 'batch'
                Double batpval = batchEffectDetails.get(c).get(ef); // when batch was included.
                Double batapval = batchEffectAfterCorrDetails.get(c).get(ef); // when batch was included.
                Double aftpval = revisedResultDetails.get(c).get(ef); // will be null for 'batch'
                detailFile.write(String.format("%.4g\t%.4g\t%.4g\t%.4g\n", bpval, batpval, batapval, aftpval));
            }
        }
        detailFile.close();
        summaryFile.write(summaryBuf.toString());
        summaryFile.flush();
        String rawDataFileName = fileprefix + ".originaldata.txt";
        saveData(mat, rawDataFileName);
        String correctedDataFileName = fileprefix + ".correcteddata.txt";
        saveData(comBat, correctedDataFileName);
        successObjects.add(ee);
    } catch (Exception e) {
        log.error(e, e);
        errorObjects.add(ee + e.getMessage());
    }
}
From source file:mitm.application.djigzo.james.mailets.Notify.java
@Override protected Collection<MailAddress> getRecipients(Mail mail) throws MessagingException, MissingRecipientsException { Collection<MailAddress> result = new LinkedHashSet<MailAddress>(); String[] stringRecipients = recipients.split("\\s*,\\s*"); for (String recipient : stringRecipients) { result.addAll(parseAddress(recipient, mail)); }//from w w w .java2s.c o m if (result.size() == 0) { throw new MissingRecipientsException("There are no recipients."); } return result; }
From source file:com.cyclopsgroup.waterview.CollectionLargeList.java
/**
 * Returns an iterator over a window of the backing collection, optionally
 * sorted by the given criteria.
 *
 * @see com.cyclopsgroup.waterview.IteratorLargeList#iterate(int, int, com.cyclopsgroup.waterview.LargeList.Sorting[])
 */
public Iterator iterate(int startPosition, int maxAmount, Sorting[] sortings) throws Exception {
    Collection view = collection;
    if (sortings != null && sortings.length > 0) {
        ComparatorChain comparators = new ComparatorChain();
        for (Sorting sorting : sortings) {
            // addComparator(c, false) is documented to equal addComparator(c),
            // so the descending flag maps directly onto the reverse argument.
            comparators.addComparator(new BeanPropertyComparator(sorting.getName()), sorting.isDescending());
        }
        // Hash-code tie-breaker so the TreeSet never collapses distinct elements
        // that compare equal on every sorting property.
        comparators.addComparator(HashCodeComparator.INSTANCE);
        view = new TreeSet(comparators);
        view.addAll(collection);
    }
    Iterator it = view.iterator();
    // Advance past the requested offset (no-op when startPosition <= 0).
    for (int skipped = 0; skipped < startPosition; skipped++) {
        it.next();
    }
    return maxAmount == UNLIMITED_MAX_AMOUNT ? it : new FixedSizeIterator(it, maxAmount);
}
From source file:org.ambraproject.article.service.IngesterImpl.java
/**
 * Update an existing article by copying properties from the new one over. Note that we can't call saveOrUpdate,
 * since the new article is not a persistent instance, but has all the properties that we want.
 * <p/>
 * See <a href="http://stackoverflow.com/questions/4779239/update-persistent-object-with-transient-object-using-hibernate">this
 * post on stack overflow</a> for more information
 * <p/>
 * For collections, we clear the old property and add all the new entries, relying on 'delete-orphan' to delete the
 * old objects. The downside of this approach is that it results in a delete statement for each entry in the old
 * collection, and an insert statement for each entry in the new collection. There a couple of things we could do to
 * optimize this: <ol> <li>Write a sql statement to delete the old entries in one go</li> <li>copy over collection
 * properties recursively instead of clearing the old collection. e.g. for {@link Article#assets}, instead of
 * clearing out the old list, we would find the matching asset by DOI and Extension, and update its properties</li>
 * </ol>
 * <p/>
 * Option number 2 is messy and a lot of code (I've done it before)
 *
 * @param article the new article, parsed from the xml
 * @param existingArticle the article pulled up from the database
 * @throws IngestException if there's a problem copying properties or updating
 */
@SuppressWarnings("unchecked")
private void updateArticle(final Article article, final Article existingArticle) throws IngestException {
    log.debug("ReIngesting (force ingest) article: {}", existingArticle.getDoi());
    //Hibernate deletes orphans after inserting the new rows, which violates a unique constraint on (doi, extension) for assets
    //this temporary change gets around the problem, before the old assets are orphaned and deleted
    hibernateTemplate.execute(new HibernateCallback() {
        @Override
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
            // Prefix the soon-to-be-orphaned assets' keys so the new rows don't
            // collide with them before orphan deletion runs.
            session.createSQLQuery("update articleAsset " + "set doi = concat('old-',doi), "
                    + "extension = concat('old-',extension) " + "where articleID = :articleID")
                    .setParameter("articleID", existingArticle.getID()).executeUpdate();
            return null;
        }
    });
    final BeanWrapper source = new BeanWrapperImpl(article);
    final BeanWrapper destination = new BeanWrapperImpl(existingArticle);
    try {
        //copy properties
        for (final PropertyDescriptor property : destination.getPropertyDescriptors()) {
            final String name = property.getName();
            // Skip identity/bookkeeping properties and the synthetic "class" property.
            if (!name.equals("ID") && !name.equals("created") && !name.equals("lastModified")
                    && !name.equals("class")) {
                //Collections shouldn't be dereferenced but have elements added
                //See http://www.onkarjoshi.com/blog/188/hibernateexception-a-collection-with-cascade-all-delete-orphan-was-no-longer-referenced-by-the-owning-entity-instance/
                if (Collection.class.isAssignableFrom(property.getPropertyType())) {
                    Collection orig = (Collection) destination.getPropertyValue(name);
                    orig.clear();
                    Collection sourcePropertyValue = (Collection) source.getPropertyValue(name);
                    if (sourcePropertyValue != null) {
                        orig.addAll((Collection) source.getPropertyValue(name));
                    }
                } else {
                    //just set the new value
                    destination.setPropertyValue(name, source.getPropertyValue(name));
                }
            }
        }
        //Circular relationship in related articles
        for (ArticleRelationship articleRelationship : existingArticle.getRelatedArticles()) {
            articleRelationship.setParentArticle(existingArticle);
        }
    } catch (Exception e) {
        throw new IngestException("Error copying properties for article " + article.getDoi(), e);
    }
    hibernateTemplate.update(existingArticle);
}
From source file:com.reprezen.swagedit.assist.SwaggerContentAssistProcessor.java
/**
 * Computes content-assist proposals for the Swagger editor. Resolves the model
 * path under the cursor, then serves either JSON-reference ($ref) proposals or
 * regular schema proposals, appending inherited template proposals in the
 * latter case.
 */
@Override
public ICompletionProposal[] computeCompletionProposals(ITextViewer viewer, int documentOffset) {
    // Non-Swagger documents only get the inherited (template) proposals.
    if (!(viewer.getDocument() instanceof SwaggerDocument)) {
        return super.computeCompletionProposals(viewer, documentOffset);
    }
    // NOTE(review): isRefCompletion and currentScope persist across invocations;
    // re-triggering assist during a $ref completion advances the scope — this
    // looks like a deliberate scope-cycling UI, but confirm against callers.
    if (isRefCompletion) {
        currentScope = currentScope.next();
    }
    final SwaggerDocument document = (SwaggerDocument) viewer.getDocument();
    final ITextSelection selection = (ITextSelection) viewer.getSelectionProvider().getSelection();
    int line = 0, lineOffset = 0, column = 0;
    try {
        line = document.getLineOfOffset(documentOffset);
        lineOffset = document.getLineOffset(line);
        column = selection.getOffset() - lineOffset;
    } catch (BadLocationException e) {
        // Intentionally ignored: fall back to line/column 0 on a bad offset.
    }
    final String prefix = extractPrefix(viewer, documentOffset);
    // we have to remove the length of the prefix to obtain the correct
    // column to resolve the path
    if (!prefix.isEmpty()) {
        column -= prefix.length();
    }
    Model model = document.getModel(documentOffset - prefix.length());
    currentPath = model.getPath(line, column);
    // A path ending in the JSON-reference property key switches to $ref mode.
    isRefCompletion = currentPath != null && currentPath.toString().endsWith(JsonReference.PROPERTY);
    Collection<Proposal> p;
    if (isRefCompletion) {
        updateStatus();
        p = referenceProposalProvider.getProposals(currentPath, document.asJson(), currentScope);
    } else {
        clearStatus();
        p = proposalProvider.getProposals(currentPath, model, prefix);
    }
    final Collection<ICompletionProposal> proposals = getCompletionProposals(p, prefix, documentOffset);
    // compute template proposals
    if (!isRefCompletion) {
        final ICompletionProposal[] templateProposals = super.computeCompletionProposals(viewer,
                documentOffset);
        if (templateProposals != null && templateProposals.length > 0) {
            proposals.addAll(Lists.newArrayList(templateProposals));
        }
    }
    return proposals.toArray(new ICompletionProposal[proposals.size()]);
}
From source file:com.dragome.compiler.units.ClassUnit.java
/**
 * Walks the superclass chain, collecting this hierarchy's implemented method
 * members into {@code implementedMembers}. A unit that is an interface
 * contributes nothing, so the walk stops immediately in that case.
 *
 * @param classUnit          current link of the chain; {@code null} ends the walk
 * @param implementedMembers accumulator receiving the filtered method members
 */
private void getImplementedMembersInHierarchy(ClassUnit classUnit, Collection<MemberUnit> implementedMembers) {
    if (classUnit == null || isInterface) {
        return;
    }
    implementedMembers.addAll(filterMethods(classUnit.getDeclaredMembers()));
    getImplementedMembersInHierarchy(classUnit.getSuperUnit(), implementedMembers);
}
From source file:com.atlassian.jira.startup.JiraSystemInfo.java
/** * Only call AFTER JIRA is fully up!/*ww w . j av a 2 s. com*/ */ public void obtainServices() { final ServiceManager serviceManager = ComponentAccessor.getComponentOfType(ServiceManager.class); final Collection<JiraServiceContainer> services = new TreeSet<JiraServiceContainer>( JiraService.NAME_COMPARATOR); services.addAll(serviceManager.getServices()); logMsg.outputHeader("Services"); logMsg.outputProperty("Instance Count", String.valueOf(services.size())); logMsg.add(""); for (final JiraServiceContainer service : services) { printServiceInfo(service); } }