Usage examples for java.util.TreeSet.addAll(Collection)
public boolean addAll(Collection<? extends E> c)
From source file:uk.ac.bbsrc.tgac.miso.spring.ajax.PoolControllerHelperService.java
@Deprecated public JSONObject select454EmPCRDilutionsByBarcodeFile(HttpSession session, JSONObject json) { try {/*w w w .j ava 2 s . co m*/ JSONObject barcodes = (JSONObject) session.getAttribute("barcodes"); log.debug(barcodes.toString()); if (barcodes.has("barcodes")) { JSONArray a = barcodes.getJSONArray("barcodes"); // make sure there are no duplicates and order the strings // by putitng the codes in a treeset TreeSet<String> hcodes = new TreeSet<String>(); hcodes.addAll(Arrays.asList((String[]) a.toArray(new String[0]))); StringBuilder sb = new StringBuilder(); sb.append("<div style='width: 100%;'>"); sb.append("<form action='/miso/pool/ls454/import' method='post'>"); sb.append(processEmPcrDilutions(hcodes)); sb.append("</form>"); sb.append("</div>"); session.removeAttribute("barcodes"); return JSONUtils.SimpleJSONResponse(sb.toString()); } } catch (Exception e) { log.debug("Failed to generate barcode selection: ", e); return JSONUtils.SimpleJSONError("Failed to generate barcode selection"); } return JSONUtils.SimpleJSONError("Cannot select barcodes"); }
From source file:uk.ac.bbsrc.tgac.miso.spring.ajax.PoolControllerHelperService.java
@Deprecated public JSONObject selectSolidEmPCRDilutionsByBarcodeFile(HttpSession session, JSONObject json) { try {//from ww w . jav a 2 s. com JSONObject barcodes = (JSONObject) session.getAttribute("barcodes"); log.debug(barcodes.toString()); if (barcodes.has("barcodes")) { JSONArray a = barcodes.getJSONArray("barcodes"); // make sure there are no duplicates and order the strings // by putitng the codes in a treeset TreeSet<String> hcodes = new TreeSet<String>(); hcodes.addAll(Arrays.asList((String[]) a.toArray(new String[0]))); StringBuilder sb = new StringBuilder(); sb.append("<div style='width: 100%;'>"); sb.append("<form action='/miso/pool/solid/import' method='post'>"); sb.append(processEmPcrDilutions(hcodes)); sb.append("</form>"); sb.append("</div>"); session.removeAttribute("barcodes"); return JSONUtils.SimpleJSONResponse(sb.toString()); } } catch (Exception e) { log.debug("Failed to generate barcode selection: ", e); return JSONUtils.SimpleJSONError("Failed to generate barcode selection"); } return JSONUtils.SimpleJSONError("Cannot select barcodes"); }
From source file:org.ncic.bioinfo.sparkseq.algorithms.walker.haplotypecaller.HaplotypeCaller.java
/**
 * Runs the HaplotypeCaller pipeline over one active region: local assembly,
 * trimming, read-likelihood computation and genotyping, returning the called
 * variant records. In GVCF mode, results are routed through a GVCFWriter and
 * its aggregated output is returned instead.
 *
 * @param activeRegionMapData the region plus its reference bases, padded
 *                            reference and metadata tracker
 * @return the list of VariantContext records produced for this region
 */
public List<VariantContext> map(ActiveRegionMapData activeRegionMapData) {
    List<VariantContext> resultVCFRecords = new ArrayList<>();
    VariantContextWriter vcfWriter = null;
    // In GVCF mode all records are funneled through a GVCFWriter for banding.
    if (SCAC.emitReferenceConfidence == ReferenceConfidenceMode.GVCF) {
        vcfWriter = new GVCFWriter(GVCFGQBands, SCAC.genotypeArgs.samplePloidy);
    }
    List<VariantContext> result = null;
    ActiveRegion originalActiveRegion = activeRegionMapData.activeRegion;
    RefMetaDataTracker metaDataTracker = activeRegionMapData.tracker;
    byte[] fullReferenceWithPadding = activeRegionMapData.fullReferenceWithPadding;
    byte[] refBases = activeRegionMapData.refBases;
    if (!originalActiveRegion.isActive()) {
        // Not active so nothing to do — emit reference model only.
        result = referenceModelForNoVariation(originalActiveRegion, true, refBases);
        addAllIntoWriter(result, vcfWriter, resultVCFRecords);
        return resultVCFRecords;
    } else if (originalActiveRegion.size() == 0) {
        // Empty region: likewise only the reference model is emitted.
        result = referenceModelForNoVariation(originalActiveRegion, true, refBases);
        addAllIntoWriter(result, vcfWriter, resultVCFRecords);
        return resultVCFRecords;
    }
    // Run the local assembler, getting back a collection of information on how we should proceed.
    final List<VariantContext> givenAlleles = new ArrayList<>();
    final AssemblyResultSet untrimmedAssemblyResult = assembleReads(originalActiveRegion, givenAlleles,
            fullReferenceWithPadding, refBases);
    final TreeSet<VariantContext> allVariationEvents = untrimmedAssemblyResult.getVariationEvents();
    // TODO - line below might be unnecessary: it might be that assemblyResult will always
    // TODO - have those alleles anyway, so check and remove if that is the case:
    allVariationEvents.addAll(givenAlleles);
    final ActiveRegionTrimmer.Result trimmingResult = trimmer.trim(originalActiveRegion, allVariationEvents);
    if (!trimmingResult.isVariationPresent() && !disableOptimizations) {
        // No variation found after trimming — short-circuit with the reference model.
        result = referenceModelForNoVariation(originalActiveRegion, false, refBases);
        addAllIntoWriter(result, vcfWriter, resultVCFRecords);
        return resultVCFRecords;
    }
    final AssemblyResultSet assemblyResult = trimmingResult.needsTrimming()
            ? untrimmedAssemblyResult.trimTo(trimmingResult.getCallableRegion())
            : untrimmedAssemblyResult;
    final ActiveRegion regionForGenotyping = assemblyResult.getRegionForGenotyping();
    // Filter out reads from genotyping which fail mapping-quality-based criteria.
    // TODO - why don't we do this before any assembly is done? Why not just once at the
    // TODO - beginning of this method on the originalActiveRegion? If you move this up
    // TODO - you might have to consider changing referenceModelForNoVariation,
    // TODO - which also filters reads.
    final Collection<GATKSAMRecord> filteredReads = filterNonPassingReads(regionForGenotyping);
    final Map<String, List<GATKSAMRecord>> perSampleFilteredReadList = splitReadsBySample(filteredReads);
    // Abort early if something is out of the acceptable range.
    // TODO is this ever true at this point??? perhaps GGA. Need to check.
    if (!assemblyResult.isVariationPresent() && !disableOptimizations) {
        result = referenceModelForNoVariation(originalActiveRegion, false, refBases);
        addAllIntoWriter(result, vcfWriter, resultVCFRecords);
        return resultVCFRecords;
    }
    // TODO is this ever true at this point??? perhaps GGA. Need to check.
    if (regionForGenotyping.size() == 0 && !disableOptimizations) {
        // No reads remain after filtering so nothing else to do!
        result = referenceModelForNoVariation(originalActiveRegion, false, refBases);
        addAllIntoWriter(result, vcfWriter, resultVCFRecords);
        return resultVCFRecords;
    }
    // Evaluate each sample's reads against all haplotypes.
    final List<Haplotype> haplotypes = assemblyResult.getHaplotypeList();
    final Map<String, List<GATKSAMRecord>> reads = splitReadsBySample(regionForGenotyping.getReads());
    // Calculate the likelihoods: CPU intensive part.
    final ReadLikelihoods<Haplotype> readLikelihoods = likelihoodCalculationEngine
            .computeReadLikelihoods(assemblyResult, samplesList, reads);
    // Realign reads to their best haplotype.
    final Map<GATKSAMRecord, GATKSAMRecord> readRealignments = realignReadsToTheirBestHaplotype(readLikelihoods,
            assemblyResult.getPaddedReferenceLoc());
    readLikelihoods.changeReads(readRealignments);
    // Note: we used to subset down at this point to only the "best" haplotypes in all samples
    // for genotyping, but there was a bad interaction between that selection and the
    // marginalization that happens over each event when computing GLs. In particular, for
    // samples that are heterozygous non-reference (B/C) the marginalization for B treats the
    // haplotype containing C as reference (and vice versa). Now this is fine if all possible
    // haplotypes are included in the genotyping, but we lose information if we select down
    // to a few haplotypes. [EB]
    final HaplotypeCallerGenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine
            .assignGenotypeLikelihoods(haplotypes, readLikelihoods, perSampleFilteredReadList,
                    assemblyResult.getFullReferenceWithPadding(), assemblyResult.getPaddedReferenceLoc(),
                    regionForGenotyping.getLocation(), genomeLocParser, metaDataTracker,
                    (consensusMode ? Collections.<VariantContext>emptyList() : givenAlleles),
                    emitReferenceConfidence());
    if (emitReferenceConfidence()) {
        if (!containsCalls(calledHaplotypes)) {
            // None of the potential haplotypes were called — emit reference model.
            result = referenceModelForNoVariation(originalActiveRegion, false, refBases);
        } else {
            result = new ArrayList<>();
            // Output left-flanking non-variant section:
            if (trimmingResult.hasLeftFlankingRegion()) {
                ActiveRegion leftRegion = trimmingResult.nonVariantLeftFlankRegion();
                byte[] trimedRef = getTrimedRefBases(originalActiveRegion, leftRegion, refBases);
                result.addAll(referenceModelForNoVariation(leftRegion, false, trimedRef));
            }
            // Output variant-containing region:
            result.addAll(referenceConfidenceModel.calculateRefConfidence(assemblyResult.getReferenceHaplotype(),
                    calledHaplotypes.getCalledHaplotypes(), assemblyResult.getPaddedReferenceLoc(),
                    regionForGenotyping, readLikelihoods, genotypingEngine.getPloidyModel(),
                    genotypingEngine.getGenotypingModel(), calledHaplotypes.getCalls()));
            // Output right-flanking non-variant section:
            if (trimmingResult.hasRightFlankingRegion()) {
                ActiveRegion rightRegion = trimmingResult.nonVariantRightFlankRegion();
                byte[] trimedRef = getTrimedRefBases(originalActiveRegion, rightRegion, refBases);
                result.addAll(referenceModelForNoVariation(rightRegion, false, trimedRef));
            }
        }
    } else {
        result = calledHaplotypes.getCalls();
    }
    addAllIntoWriter(result, vcfWriter, resultVCFRecords);
    if (SCAC.emitReferenceConfidence == ReferenceConfidenceMode.GVCF) {
        // In GVCF mode, return the writer's banded records rather than the raw list.
        ((GVCFWriter) vcfWriter).close(false);
        return ((GVCFWriter) vcfWriter).getResultGVCFWriter();
    } else {
        return resultVCFRecords;
    }
}
From source file:org.gvsig.framework.web.service.impl.OGCInfoServiceImpl.java
/**
 * Queries a WMTS server's capabilities and builds a WMTSInfo describing the
 * service, its tile matrix sets, supported CRSs/formats and its layers.
 * When listCrs is non-empty, only tile matrix sets (and hence layers) whose
 * supported CRS matches one of the given CRSs — exactly or via a generated
 * regular expression — are marked as selected.
 *
 * @param urlServerWMTS  URL of the WMTS server to query
 * @param listCrs        CRS identifiers of the current map; may be empty to accept all
 * @param useCrsSelected NOTE(review): this flag is never read in this method — confirm intent
 * @return a populated WMTSInfo
 * @throws ServerGeoException if any error occurs while contacting or parsing the server
 */
public WMTSInfo getCapabilitiesFromWMTS(String urlServerWMTS, TreeSet<String> listCrs, boolean useCrsSelected)
        throws ServerGeoException {
    TreeSet<String> formatsSupported = new TreeSet<String>();
    TreeSet<String> crsSupported = new TreeSet<String>();
    boolean isFormatsSupported = false;
    WMTSInfo wmtsInfo = new WMTSInfo();
    // Put url on the WMTSInfo object.
    wmtsInfo.setServiceUrl(urlServerWMTS);
    // Map to collect the retrieved layers for the WMTSInfo object.
    Map<String, org.gvsig.framework.web.ogc.WMTSLayer> layersMap = new HashMap<String, org.gvsig.framework.web.ogc.WMTSLayer>();
    // Get the WMTS manager.
    WMTSOGCManager wmtsMan = WMTSOGCLocator.getManager();
    try {
        WMTSClient wmtsClient = wmtsMan.createWMTSClient(urlServerWMTS);
        wmtsClient.connect(true, null);
        WMTSServiceIdentification wmtsServIden = wmtsClient.getServiceIdentification();
        // Set server info.
        wmtsInfo.setServiceAbstract(wmtsServIden.getAbstract());
        wmtsInfo.setServiceTitle(wmtsServIden.getTitle());
        wmtsInfo.setVersion(wmtsServIden.getServiceTypeVersion());
        wmtsInfo.setServiceType(wmtsServIden.getServiceType());
        // Set id of the WMTS request: hash of service title + current calendar instant.
        int hashCode = (wmtsServIden.getTitle() + Calendar.getInstance()).hashCode();
        wmtsInfo.setId(hashCode);
        // Build one regex per requested CRS so partially-qualified CRS URNs also match.
        List<String> patternList = new ArrayList<String>();
        if (!listCrs.isEmpty()) {
            for (String crs : listCrs) {
                String[] crsSplit = crs.split(":");
                String pattern = "(.*)(:?)".concat(crsSplit[0]).concat("((:)(.*)(:)").concat(crsSplit[1])
                        .concat("|(:)").concat(crsSplit[1]).concat(")");
                patternList.add(pattern);
            }
        }
        // Map of: tile matrix identifier -> supported CRS.
        Map<String, String> tileMatrixCrsSupported = new HashMap<String, String>();
        TreeSet<String> tileMatrixSelectedId = new TreeSet<String>();
        List<WMTSTileMatrixSet> tileMatrixSet = wmtsClient.getTileMatrixSet();
        for (int i = 0; i < tileMatrixSet.size(); i++) {
            WMTSTileMatrixSet tileMatrix = tileMatrixSet.get(i);
            String identifier = tileMatrix.getIdentifier();
            String supportedCRS = tileMatrix.getSupportedCRS();
            crsSupported.add(supportedCRS);
            // Record the tile matrix with its supported CRS.
            tileMatrixCrsSupported.put(identifier, supportedCRS);
            if (!listCrs.isEmpty()) {
                if (listCrs.contains(supportedCRS)) {
                    tileMatrixSelectedId.add(identifier);
                } else {
                    // Fall back to matching the supported CRS against the generated
                    // regex patterns for the requested CRS list.
                    for (String expReg : patternList) {
                        if (supportedCRS.matches(expReg)) {
                            tileMatrixSelectedId.add(identifier);
                        }
                    }
                }
            }
        }
        // Add the tile matrix map and the selected tile matrices to the WMTSInfo object.
        wmtsInfo.setTileMatrixCrsSupported(tileMatrixCrsSupported);
        wmtsInfo.setTileMatrixSelectedId(tileMatrixSelectedId);
        // Only set layers if a tile matrix supports the map's CRS, or no CRS was given.
        WMTSThemes layerListAsThemes = wmtsClient.getLayerListAsThemes();
        // Tree holding one node per accepted layer.
        List<TreeNode> tree = new ArrayList<TreeNode>();
        for (int i = 0; i < layerListAsThemes.getChildCount(); i++) {
            WMTSTheme wmtsTheme = layerListAsThemes.getChildren(i);
            WMTSLayer layer = wmtsTheme.getLayer();
            TreeSet<String> wmtsLinkSelected = new TreeSet<String>();
            TreeSet<String> wmtsLinkSupported = new TreeSet<String>();
            // Check which of the layer's tile matrix links are supported/selected.
            List<WMTSTileMatrixSetLink> tileMatrixSetLink = layer.getTileMatrixSetLink();
            for (int j = 0; j < tileMatrixSetLink.size(); j++) {
                WMTSTileMatrixSetLink wmtsLink = tileMatrixSetLink.get(j);
                wmtsLinkSupported.add(wmtsLink.getTileMatrixSetId());
                if (!tileMatrixSelectedId.isEmpty()
                        && tileMatrixSelectedId.contains(wmtsLink.getTileMatrixSetId())) {
                    wmtsLinkSelected.add(wmtsLink.getTileMatrixSetId());
                }
            }
            // Check the layer's image formats.
            TreeSet<String> setFormats = new TreeSet<String>();
            setFormats.addAll(layer.getFormat());
            String format = getFirstFormatSupported(setFormats);
            formatsSupported.addAll(setFormats);
            if ((!wmtsLinkSelected.isEmpty() || listCrs.isEmpty()) && format != null) {
                isFormatsSupported = true;
                TreeNode node = new TreeNode(layer.getIdentifier());
                node.setTitle(layer.getTitle());
                node.setFolder(false);
                tree.add(node);
                // Add the layer to the layer map.
                org.gvsig.framework.web.ogc.WMTSLayer wmtsLayer = new org.gvsig.framework.web.ogc.WMTSLayer();
                TreeSet<String> crsSet = new TreeSet<String>();
                crsSet.addAll(layer.getSrsList());
                wmtsLayer.setCrs(crsSet);
                wmtsLayer.setName(layer.getIdentifier());
                wmtsLayer.setTitle(layer.getTitle());
                wmtsLayer.setFormatSelected(format);
                wmtsLayer.setFormatsSupported(setFormats);
                if (listCrs.isEmpty()) {
                    // No CRS restriction: expose every linked tile matrix.
                    wmtsLayer.setTileMatrixSelected(wmtsLinkSupported);
                } else {
                    wmtsLayer.setTileMatrixSelected(wmtsLinkSelected);
                }
                layersMap.put(layer.getIdentifier(), wmtsLayer);
            }
        }
        wmtsInfo.setFormatsSupported(formatsSupported);
        wmtsInfo.setLayersTree(tree);
        wmtsInfo.setLayers(layersMap);
        wmtsInfo.setIsFormatsSupported(isFormatsSupported);
        wmtsInfo.setCrsSupported(crsSupported);
    } catch (Exception exc) {
        // NOTE(review): message says "getCapabilitiesFromWMS" but this is the WMTS path.
        logger.error("Exception on getCapabilitiesFromWMS", exc);
        throw new ServerGeoException();
    }
    return wmtsInfo;
}
From source file:gr.abiss.calipso.domain.Metadata.java
/**
 * Re-initialises this Metadata instance from its XML representation:
 * date formats, field groups (sorted by priority), fields (linked to their
 * groups), roles, workflow states (with durations and asset-type mappings)
 * and the field display order. A null input is a no-op.
 *
 * @param xmlString the metadata XML document; may be null
 */
@SuppressWarnings("unchecked")
public void setXmlString(String xmlString) {
    if (xmlString == null) {
        return;
    }
    Document document = XmlUtils.parse(xmlString);
    // Date formats: name -> format expression.
    for (Element e : (List<Element>) document.selectNodes(DATEFORMATS_XPATH)) {
        String dfKey = e.attribute(NAME).getValue();
        String dfExpression = e.attribute(EXPRESSION).getValue();
        this.dateFormats.put(dfKey, dfExpression);
    }
    // Field groups: rebuild both the list and the by-id index.
    fieldGroups.clear();
    for (Element e : (List<Element>) document.selectNodes(FIELD_GROUP_XPATH)) {
        FieldGroup fieldGroup = new FieldGroup(e);
        fieldGroups.add(fieldGroup);
        fieldGroupsById.put(fieldGroup.getId(), fieldGroup);
    }
    if (fieldGroups.isEmpty()) {
        addDefaultFieldGroup();
    }
    // Sort groups by priority (FieldGroup's natural order) via a TreeSet round-trip.
    TreeSet<FieldGroup> fieldGroupSet = new TreeSet<FieldGroup>();
    fieldGroupSet.addAll(fieldGroups);
    fieldGroups.clear();
    fieldGroups.addAll(fieldGroupSet);
    if (logger.isDebugEnabled())
        logger.debug("Loaded fieldGroups:" + fieldGroups);
    // Fields: index by name and label, and attach each field to its group.
    for (Element e : (List<Element>) document.selectNodes(FIELD_XPATH)) {
        Field field = new Field(e);
        fields.put(field.getName(), field);
        fieldsByLabel.put(field.getLabel(), field);
        if (field.getGroupId() != null) {
            FieldGroup fieldGroup = fieldGroupsById.get(field.getGroupId());
            if (fieldGroup == null) {
                logger.warn("Field belongs to undefined field-group element with id: " + field.getGroupId()
                        + ", adding to default group");
                // NOTE(review): the "default" group is looked up here but the field is
                // never actually added to it (addField is only called in the else
                // branch) — the warning promises behavior that does not happen. Confirm
                // whether fieldGroup.addField(field) should follow this assignment.
                fieldGroup = fieldGroupsById.get("default");
            } else {
                fieldGroup.addField(field);
            }
        } else {
            // No group id: attach the field to the first (default) group.
            FieldGroup defaultFieldGroup = fieldGroups.get(0);
            field.setGroup(defaultFieldGroup);
            defaultFieldGroup.addField(field);
        }
    }
    // Roles, indexed by name.
    for (Element e : (List<Element>) document.selectNodes(ROLE_XPATH)) {
        Role role = new Role(e);
        roles.put(role.getName(), role);
    }
    // Workflow states plus their optional attributes (plugin, max duration,
    // asset-type mappings), all keyed by the numeric status.
    for (Element e : (List<Element>) document.selectNodes(STATE_XPATH)) {
        String key = e.attributeValue(STATUS);
        String value = e.attributeValue(LABEL);
        states.put(Integer.parseInt(key), value);
        statesByName.put(value, Integer.parseInt(key));
        statesPlugins.put(Integer.parseInt(key), e.attributeValue(PLUGIN));
        String sDurations = e.attributeValue(MAX_DURATION);
        if (StringUtils.isNotBlank(sDurations)) {
            maxDurations.put(Integer.parseInt(key), NumberUtils.createLong(sDurations));
        }
        String asTypeId = e.attributeValue(ASSET_TYPE_ID);
        if (StringUtils.isNotBlank(asTypeId)) {
            assetTypeIdMap.put(Integer.parseInt(key), NumberUtils.createLong(asTypeId));
        }
        String existingAssetTypeId = e.attributeValue(EXISTING_ASSET_TYPE_ID);
        if (StringUtils.isNotBlank(existingAssetTypeId)) {
            existingAssetTypeIdsMap.put(Integer.parseInt(key), NumberUtils.createLong(existingAssetTypeId));
        }
        String existingAssetTypeMultiple = e.attributeValue(EXISTING_ASSET_TYPE_MULTIPLE);
        if (StringUtils.isNotBlank(existingAssetTypeMultiple)) {
            existingAssetTypeMultipleMap.put(Integer.parseInt(key),
                    BooleanUtils.toBoolean(existingAssetTypeMultiple));
        }
    }
    // Field display order.
    fieldOrder.clear();
    for (Element e : (List<Element>) document.selectNodes(FIELD_ORDER_XPATH)) {
        String fieldName = e.attributeValue(NAME);
        fieldOrder.add(Field.convertToName(fieldName));
    }
}
From source file:org.jahia.services.render.filter.cache.AggregateCacheFilter.java
/** * Store some properties that may have been set during fragment execution * * @param resource/*w w w .j a v a 2 s.c om*/ * @param cacheEntry * @param renderContext * @throws RepositoryException */ private void addPropertiesToCacheEntry(Resource resource, CacheEntry<String> cacheEntry, RenderContext renderContext) throws RepositoryException { if (resource.getNode().isNodeType("jnt:area") || resource.getNode().isNodeType("jnt:mainResourceDisplay")) { cacheEntry.setProperty("areaResource", resource.getNode().getIdentifier()); } TreeSet<String> allPaths = new TreeSet<String>(); allPaths.addAll(renderContext.getRenderedPaths()); //Add current resource too as is as been removed by the TemplatesScriptFilter already // if (renderContext.getRequest().getAttribute("lastResourceRenderedByScript") != null && renderContext.getRequest().getAttribute("lastResourceRenderedByScript").equals(resource)) { // allPaths.add(resource.getNode().getPath()); // } Map<String, Object> m = (Map<String, Object>) renderContext.getRequest().getAttribute("moduleMap"); if (m != null && m.containsKey("requestAttributesToCache")) { HashMap<String, Serializable> attributes = new HashMap<>(); Collection<String> requestAttributesToCache = (Collection<String>) m.get("requestAttributesToCache"); for (String attributesToCache : requestAttributesToCache) { if (renderContext.getRequest().getAttribute(attributesToCache) instanceof Serializable) { attributes.put(attributesToCache, (Serializable) renderContext.getRequest().getAttribute(attributesToCache)); } } cacheEntry.setProperty("requestAttributes", attributes); } cacheEntry.setProperty("allPaths", allPaths); }
From source file:de.csw.expertfinder.mediawiki.api.MediaWikiAPI.java
/** * Returns a set of article namen for a set of article IDs. If none of the * given ids match an exisiting article, an empty Set is returned. * /*from w w w .j a va2 s . c om*/ * @param id * an article id. * @return a Set of article names for the given article IDs or an empty Set * if no such article exists. * @throws MediaWikiAPIException */ public Set<String> getArticleNamesForIds(Set<Integer> ids) throws MediaWikiAPIException { TreeSet<String> result = new TreeSet<String>(); // The limit of the mediawiki API for info requests is 50. Iterator<Integer> iter = ids.iterator(); while (iter.hasNext()) { HashSet<Integer> partOfIds = new HashSet<Integer>(); for (int i = 0; i < 50 && iter.hasNext(); i++) { partOfIds.add(iter.next()); } result.addAll(getArticleNamesFor50Ids(partOfIds)); } return result; }
From source file:org.livespark.formmodeler.renderer.backend.service.impl.Model2FormTransformerServiceImpl.java
/**
 * Collects FieldSetting descriptors for every declared field of the given
 * class (and, recursively, its superclasses) that is annotated with FieldDef.
 * For parameterized fields (e.g. List&lt;Foo&gt;) the first type argument is
 * used as the "real" element type.
 *
 * @param clazz the class whose fields are inspected
 * @return a sorted set of settings for all annotated fields in the hierarchy
 */
protected Set<FieldSetting> getClassFieldSettings(Class clazz) {
    TreeSet<FieldSetting> settings = new TreeSet<FieldSetting>();
    for (Field field : clazz.getDeclaredFields()) {
        for (Annotation annotation : field.getAnnotations()) {
            if (annotation instanceof FieldDef) {
                FieldDef fieldDef = (FieldDef) annotation;
                Class fieldType = getFieldType(field, fieldDef);
                Class realType = fieldType;
                // For generic fields, unwrap the first type argument as the element type.
                if (field.getGenericType() instanceof ParameterizedType) {
                    ParameterizedType parameterizedType = (ParameterizedType) field.getGenericType();
                    Type paramArg = parameterizedType.getActualTypeArguments()[0];
                    realType = (Class) paramArg;
                }
                // NOTE(review): fieldType.isAssignableFrom(List.class) is true only when
                // fieldType is List or a supertype of List (Collection, Iterable, Object);
                // if the intent is "is this field a List?" for concrete list types too,
                // the usual direction is List.class.isAssignableFrom(fieldType) — confirm.
                FieldSetting setting = new FieldSetting(field.getName(),
                        new DefaultFieldTypeInfo(realType.getName(), fieldType.isAssignableFrom(List.class),
                                fieldType.isEnum()),
                        realType, fieldDef, field.getAnnotations());
                settings.add(setting);
            }
        }
    }
    // Recurse into the superclass chain so inherited annotated fields are included.
    if (clazz.getSuperclass() != null) {
        settings.addAll(getClassFieldSettings(clazz.getSuperclass()));
    }
    return settings;
}
From source file:net.sourceforge.fenixedu.domain.CompetenceCourse.java
/**
 * Returns this course's CompetenceCourseInformation records ordered by
 * execution period, using the comparator declared on
 * CompetenceCourseInformation.
 *
 * @return a sorted set of the course's information records
 */
private TreeSet<CompetenceCourseInformation> getOrderedCompetenceCourseInformations() {
    TreeSet<CompetenceCourseInformation> orderedInformations = new TreeSet<CompetenceCourseInformation>(
            CompetenceCourseInformation.COMPARATORY_BY_EXECUTION_PERIOD);
    orderedInformations.addAll(getCompetenceCourseInformationsSet());
    return orderedInformations;
}