List of usage examples for java.util.TreeSet.add
public boolean add(E e)
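Before the project examples, here is a minimal, self-contained sketch of add(E e) itself (not taken from any of the projects below; the class name DemoTreeSetAdd is just a placeholder): add returns true when the element was not already present according to the set's ordering and false otherwise, and iteration visits the elements in sorted order.

import java.util.TreeSet;

// Minimal illustration of TreeSet.add: duplicates (per the ordering) are rejected
// and iteration follows the natural (sorted) order.
public class DemoTreeSetAdd {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<String>();
        System.out.println(names.add("banana")); // true  - new element
        System.out.println(names.add("apple"));  // true  - new element
        System.out.println(names.add("banana")); // false - already present
        System.out.println(names);               // [apple, banana] - sorted order
    }
}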
From source file:ca.uvic.cs.tagsea.statistics.svn.jobs.SVNCommentScanningJob.java
protected IStatus run(IProgressMonitor monitor) {
    IProject[] projects = ResourcesPlugin.getWorkspace().getRoot().getProjects();
    ISVNClientAdapter client;
    List svnResources = new LinkedList();
    for (int i = 0; i < projects.length; i++) {
        IProject project = projects[i];
        ISVNLocalResource r = SVNWorkspaceRoot.getSVNResourceFor(project);
        try {
            if (r != null && r.isManaged()) {
                svnResources.add(r);
            }
        } catch (SVNException e) {
            // do nothing, continue to the next
        }
    }
    monitor.beginTask("Scanning subversion projects...", svnResources.size() * 1000000);
    IPath state = SVNStatistics.getDefault().getStateLocation();
    File tempdir = state.append("temp").toFile();
    if (!tempdir.isDirectory()) {
        if (tempdir.exists()) {
            tempdir.delete();
        }
        tempdir.mkdir();
    }
    deleteTemps(tempdir);
    for (Iterator it = svnResources.iterator(); it.hasNext();) {
        ISVNLocalResource svnProject = (ISVNLocalResource) it.next();
        // used to make sure that we don't repeat old comments.
        HashMap fileCommentsMap = new HashMap();
        fileEntryMap = new HashMap();
        monitor.subTask("Getting project information for " + svnProject.getName());
        // create a temp file for each project. They will be uploaded to the server.
        String projectName = svnProject.getName(); // names are guaranteed unique
        try {
            ISVNRemoteResource remote = null;
            for (int tries = 0; remote == null && tries < 10; tries++) {
                try {
                    remote = svnProject.getLatestRemoteResource();
                } catch (Exception e) {
                }
                if (remote == null) {
                    SVNStatistics.getDefault().getLog()
                            .log(new Status(IStatus.WARNING, SVNStatistics.PLUGIN_ID, IStatus.WARNING,
                                    "could not get remote resource for " + svnProject.getName()
                                            + "... trying again.", null));
                    try {
                        // @tag tagsea.bug.subclipse : it seems that subclipse has a synchronization
                        // problem. Wait a little while and try again.
                        Thread.sleep(1000);
                    } catch (InterruptedException e1) {
                        return new Status(IStatus.ERROR, SVNProviderPlugin.ID, IStatus.ERROR,
                                "Could not communicate with remote resource.", null);
                    }
                }
            }
            if (remote == null) {
                SVNStatistics.getDefault().getLog().log(new Status(IStatus.ERROR, SVNStatistics.PLUGIN_ID, 0,
                        "Could not get a remote resource", null));
                monitor.worked(1000000);
                continue;
            }
            ISVNRepositoryLocation repository = remote.getRepository();
            client = repository.getSVNClient();
            // @tag tagsea.statistics.enhance : It seems best to use this password callback because that way,
            // the passwords can be saved with the workspace. But, it might be confusing to see for the first time.
            client.addPasswordCallback(SVNProviderPlugin.getPlugin().getSvnPromptUserPassword());
            SVNRevision.Number revision = remote.getLastChangedRevision();
            long revNum = revision.getNumber();
            int revisionWork = 1000000;
            ILogEntry[] entries;
            try {
                entries = remote.getLogEntries(new NullProgressMonitor());
            } catch (TeamException e1) {
                monitor.worked(revisionWork);
                e1.printStackTrace();
                continue;
            }
            if (revNum > 0) {
                revisionWork = 1000000 / (int) revNum;
            }
            for (int ei = 0; ei < entries.length; ei++) {
                ILogEntry entry = entries[ei];
                revision = entry.getRevision();
                File tempFile = state.append(projectName + "." + getDateString() + "." + revision.getNumber()
                        + ".comments.txt").toFile();
                if (tempFile.exists()) {
                    tempFile.delete();
                }
                try {
                    tempFile.createNewFile();
                } catch (IOException e) {
                    // skip to the next one.
                    continue;
                }
                PrintStream out;
                try {
                    out = new PrintStream(tempFile);
                } catch (IOException e) {
                    continue;
                }
                out.println(remote.getUrl() + " Revision:" + revision.getNumber());
                monitor.subTask("Finding java resources: " + svnProject.getName() + "...");
                SubProgressMonitor revMonitor = new SubProgressMonitor(monitor, revisionWork);
                if (monitor.isCanceled()) {
                    return Status.CANCEL_STATUS;
                }
                monitor.subTask("temporarily checking out " + svnProject.getName() + "...");
                SubProgressMonitor subPm = new SubProgressMonitor(revMonitor, 10);
                try {
                    OperationManager.getInstance().beginOperation(client,
                            new OperationProgressNotifyListener(subPm));
                    client.checkout(remote.getUrl(), new File(tempdir, svnProject.getName()), revision, true);
                } catch (SVNClientException e) {
                    // I wish that there were a better way to do this, but it seems that we
                    // have to just keep decrementing it.
                    revMonitor.done();
                    revNum--;
                    revision = new SVNRevision.Number(revNum);
                    continue;
                } finally {
                    OperationManager.getInstance().endOperation();
                    subPm.done();
                }
                if (monitor.isCanceled()) {
                    return Status.CANCEL_STATUS;
                }
                List files = findJavaFiles(tempdir);
                int work = 0;
                if (files.size() > 0) {
                    work = (revisionWork - 20) / files.size();
                    for (Iterator fit = files.iterator(); fit.hasNext();) {
                        File file = (File) fit.next();
                        monitor.subTask("Scanning java file....");
                        TreeSet commentSet = (TreeSet) fileCommentsMap.get(file.getAbsolutePath());
                        if (commentSet == null) {
                            commentSet = new TreeSet();
                            fileCommentsMap.put(file.getAbsolutePath(), commentSet);
                        }
                        FileReader reader = new FileReader(file);
                        StringBuilder builder = new StringBuilder();
                        char[] buffer = new char[1024];
                        int read = 0;
                        while ((read = reader.read(buffer)) >= 0) {
                            builder.append(buffer, 0, read);
                        }
                        reader.close();
                        ISVNAnnotations ann = null;
                        try {
                            // get blame information.
                            List fileLogs = getLogEntries(file, client, repository);
                            // don't do extra work if this file doesn't have a log for this revision.
                            if (!checkRevision(fileLogs, revision)) {
                                monitor.worked(work);
                                //System.out.println("Skipped " + file.getAbsolutePath() + " revision " + revision.getNumber());
                                continue;
                            }
                            ann = client.annotate(file, revision, revision);
                        } catch (SVNClientException e) {
                        } catch (TeamException e) {
                        }
                        if (monitor.isCanceled()) {
                            return Status.CANCEL_STATUS;
                        }
                        SubProgressMonitor scanMonitor = new SubProgressMonitor(revMonitor, work);
                        Stats s = SimpleJavaCodeScanner.scan(builder.toString(), scanMonitor);
                        if (monitor.isCanceled()) {
                            return Status.CANCEL_STATUS;
                        }
                        monitor.worked(work);
                        out.println("New/Changed Tags:");
                        for (int ci = 0; ci < s.TAGS.length; ci++) {
                            Comment c = s.TAGS[ci];
                            if (!commentSet.contains(c)) {
                                commentSet.add(c);
                                String author = getAuthor(c, ann);
                                out.println(c.toString() + "\tauthor=" + author);
                            }
                        }
                        out.println("New/Changed Tasks:");
                        for (int ci = 0; ci < s.TASKS.length; ci++) {
                            Comment c = s.TASKS[ci];
                            if (!commentSet.contains(c)) {
                                commentSet.add(c);
                                String author = getAuthor(c, ann);
                                out.println(c.toString() + "\tauthor=" + author);
                            }
                        }
                        out.println("New/Changed Other:");
                        for (int ci = 0; ci < s.NONTAGS.length; ci++) {
                            Comment c = s.NONTAGS[ci];
                            if (!commentSet.contains(c)) {
                                commentSet.add(c);
                                String author = getAuthor(c, ann);
                                out.println(c.toString() + "\tauthor=" + author);
                            }
                        }
                        if (monitor.isCanceled()) {
                            return Status.CANCEL_STATUS;
                        }
                    }
                }
                if (work == 0) {
                    revMonitor.worked(revisionWork - 10);
                }
                monitor.subTask("Sending and Deleting temporary files...");
                out.close();
                sendFile(tempFile);
                deleteTemps(tempdir);
                if (monitor.isCanceled()) {
                    return Status.CANCEL_STATUS;
                }
                revMonitor.done();
                monitor.worked(revisionWork - 20);
            }
        } catch (SVNException e) {
            return new Status(IStatus.ERROR, SVNStatistics.PLUGIN_ID, 0, e.getMessage(), e);
        } catch (IOException e) {
            return new Status(IStatus.ERROR, SVNStatistics.PLUGIN_ID, 0, e.getMessage(), e);
        }
    }
    return Status.OK_STATUS;
}
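The commentSet above is a raw TreeSet, so add relies on the natural ordering of the stored Comment objects: elements of a TreeSet created without a Comparator must implement Comparable, otherwise add throws ClassCastException, and two elements that compare as equal count as duplicates. A small sketch of that constraint, using a hypothetical TaggedComment class (a stand-in, not the Comment type from the project):

import java.util.TreeSet;

// A TreeSet created without a Comparator orders elements by their natural ordering,
// so the element type must implement Comparable.
class TaggedComment implements Comparable<TaggedComment> {   // hypothetical stand-in for Comment
    final int line;
    final String text;
    TaggedComment(int line, String text) { this.line = line; this.text = text; }
    public int compareTo(TaggedComment other) {
        return Integer.compare(this.line, other.line);       // order comments by line number
    }
}

public class NaturalOrderAddDemo {
    public static void main(String[] args) {
        TreeSet<TaggedComment> comments = new TreeSet<TaggedComment>();
        comments.add(new TaggedComment(10, "@tag example"));
        comments.add(new TaggedComment(3, "TODO check"));
        comments.add(new TaggedComment(10, "@tag other"));   // rejected: compareTo returns 0
        System.out.println(comments.size());                 // 2
    }
}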
From source file:elh.eus.absa.Features.java
/**
 * Function reads an attribute list from a file (ngram/word list files) and adds the elements to the
 * attribute list.
 *
 * @param fname : path to the file containing the feature information
 * @param attName : prefix for the feature name in the feature vector
 * @return TreeSet<String> containing the elements added as features, in order to later fill the vectors.
 *
 * @throws IOException if the given file gives reading problems.
 */
private TreeSet<String> loadAttributeListFromFile(File fname, String attName) throws IOException {
    TreeSet<String> valueSet = new TreeSet<String>();
    if (FileUtilsElh.checkFile(fname)) {
        BufferedReader breader = new BufferedReader(new FileReader(fname));
        String line;
        while ((line = breader.readLine()) != null) {
            // lines starting with '#' are considered comments and ignored. Blank lines are ignored as well
            if (line.startsWith("#") || line.matches("^\\s*$")) {
                continue;
            }
            // for good measure, test that the lemma is not already included as a feature
            else if (!getAttIndexes().containsKey(line)) {
                addNumericFeature(line);
                valueSet.add(line);
            }
        }
        breader.close();
    }
    return valueSet;
}
From source file:elh.eus.absa.Features.java
/**
 * Function reads an attribute map from a file (mainly word cluster files) and adds the values
 * to the attribute map.
 *
 * @param fname : path to the file containing the feature information
 * @param attName : prefix for the feature name in the feature vector
 * @return HashMap<String,Integer> contains the elements and their respective attribute values,
 *         in order to later fill the vectors.
 *
 * @throws IOException if the given file gives reading problems.
 */
private HashMap<String, Integer> loadAttributeMapFromFile(String fname, String attName) throws IOException {
    HashMap<String, Integer> result = new HashMap<String, Integer>();
    TreeSet<Integer> valueSet = new TreeSet<Integer>();
    if (FileUtilsElh.checkFile(fname)) {
        BufferedReader breader = new BufferedReader(new FileReader(fname));
        String line;
        while ((line = breader.readLine()) != null) {
            if (line.startsWith("#") || line.matches("^\\s*$")) {
                continue;
            }
            String[] fields = line.split(" ");
            Integer attValue;
            try {
                attValue = Integer.valueOf(fields[1]);
            } catch (NumberFormatException nfe) {
                attValue = Integer.parseInt(fields[1], 2);
            }
            result.put(fields[0], attValue);
            valueSet.add(attValue);
        }
        breader.close();
        // add features to feature map
        addNumericFeatureSet(attName, valueSet);
    }
    return result;
}
From source file:com.hichinaschool.flashcards.libanki.Sched.java
public Object[] deckCounts() {
    TreeSet<Object[]> decks = deckDueTree();
    int[] counts = new int[] { 0, 0, 0 };
    for (Object[] deck : decks) {
        if (((String[]) deck[0]).length == 1) {
            counts[0] += (Integer) deck[2];
            counts[1] += (Integer) deck[3];
            counts[2] += (Integer) deck[4];
        }
    }
    TreeSet<Object[]> decksNet = new TreeSet<Object[]>(new DeckNameCompare());
    for (Object[] d : decks) {
        try {
            boolean show = true;
            for (JSONObject o : mCol.getDecks().parents((Long) d[1])) {
                if (o.getBoolean("collapsed")) {
                    show = false;
                    break;
                }
            }
            if (show) {
                JSONObject deck = mCol.getDecks().get((Long) d[1]);
                if (deck.getBoolean("collapsed")) {
                    String[] name = (String[]) d[0];
                    name[name.length - 1] = name[name.length - 1] + " (+)";
                    d[0] = name;
                }
                decksNet.add(new Object[] { d[0], d[1], d[2], d[3], d[4], deck.getInt("dyn") != 0 });
            }
        } catch (JSONException e) {
            throw new RuntimeException(e);
        }
    }
    return new Object[] { decksNet, eta(counts), mCol.cardCount() };
}
From source file:net.java.sip.communicator.impl.history.HistoryReaderImpl.java
/**
 * Returns the supplied number of recent messages before the given date.
 *
 * @param date messages before date
 * @param count messages count
 * @return QueryResultSet the found records
 * @throws RuntimeException
 */
public QueryResultSet<HistoryRecord> findLastRecordsBefore(Date date, int count) throws RuntimeException {
    // the files are supposed to be ordered from oldest to newest
    Vector<String> filelist = filterFilesByDate(this.historyImpl.getFileList(), null, date);
    TreeSet<HistoryRecord> result = new TreeSet<HistoryRecord>(new HistoryRecordComparator());
    int leftCount = count;
    int currentFile = filelist.size() - 1;
    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    while (leftCount > 0 && currentFile >= 0) {
        Document doc = this.historyImpl.getDocumentForFile(filelist.get(currentFile));
        if (doc == null) {
            currentFile--;
            continue;
        }
        NodeList nodes = doc.getElementsByTagName("record");
        Node node;
        for (int i = nodes.getLength() - 1; i >= 0 && leftCount > 0; i--) {
            node = nodes.item(i);
            NodeList propertyNodes = node.getChildNodes();
            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp").getNodeValue();
            try {
                timestamp = sdf.parse(ts);
            } catch (ParseException e) {
                timestamp = new Date(Long.parseLong(ts));
            }
            if (!isInPeriod(timestamp, null, date))
                continue;
            ArrayList<String> nameVals = new ArrayList<String>();
            boolean isRecordOK = true;
            int len = propertyNodes.getLength();
            for (int j = 0; j < len; j++) {
                Node propertyNode = propertyNodes.item(j);
                if (propertyNode.getNodeType() == Node.ELEMENT_NODE) {
                    // Get nested TEXT node's value
                    Node nodeValue = propertyNode.getFirstChild();
                    if (nodeValue != null) {
                        nameVals.add(propertyNode.getNodeName());
                        nameVals.add(nodeValue.getNodeValue());
                    } else
                        isRecordOK = false;
                }
            }
            // if we found a broken record - just skip it
            if (!isRecordOK)
                continue;
            String[] propertyNames = new String[nameVals.size() / 2];
            String[] propertyValues = new String[propertyNames.length];
            for (int j = 0; j < propertyNames.length; j++) {
                propertyNames[j] = nameVals.get(j * 2);
                propertyValues[j] = nameVals.get(j * 2 + 1);
            }
            HistoryRecord record = new HistoryRecord(propertyNames, propertyValues, timestamp);
            result.add(record);
            leftCount--;
        }
        currentFile--;
    }
    return new OrderedQueryResultSet<HistoryRecord>(result);
}
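In examples like the one above, the TreeSet is built with a Comparator, so add uses that comparator rather than equals() to decide both ordering and uniqueness: if the comparator ever returns 0 for two distinct records (for instance, two records with identical timestamps), the second add is silently ignored. A minimal sketch of that behavior, using a hypothetical record class and comparator (not the actual HistoryRecordComparator):

import java.util.Comparator;
import java.util.Date;
import java.util.TreeSet;

// Hypothetical stand-ins to illustrate comparator-based uniqueness in TreeSet.add.
class Record {
    final Date timestamp;
    final String text;
    Record(Date timestamp, String text) { this.timestamp = timestamp; this.text = text; }
}

public class ComparatorAddDemo {
    public static void main(String[] args) {
        // Order records by timestamp only.
        TreeSet<Record> records = new TreeSet<Record>(new Comparator<Record>() {
            public int compare(Record a, Record b) {
                return a.timestamp.compareTo(b.timestamp);
            }
        });
        Date now = new Date();
        System.out.println(records.add(new Record(now, "first")));  // true
        System.out.println(records.add(new Record(now, "second"))); // false - same timestamp, treated as duplicate
        System.out.println(records.size());                         // 1
    }
}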
From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java
private void addNamespaceIfMissing(TreeSet ts, String ns, String pref) {
    boolean bF = false;
    Iterator iNs = ts.iterator();
    while (iNs.hasNext()) {
        String s = (String) iNs.next();
        if (s != null && s.indexOf(ns) != -1) {
            bF = true;
            break;
        }
    }
    if (!bF) {
        StringBuffer sb = new StringBuffer("xmlns");
        if (pref != null) {
            sb.append(":");
            sb.append(pref);
        }
        sb.append("=\"");
        sb.append(ns);
        sb.append("\"");
        ts.add(sb.toString());
    }
}
From source file:net.java.sip.communicator.impl.history.HistoryReaderImpl.java
/**
 * Returns the supplied number of recent messages after the given date.
 *
 * @param date messages after date
 * @param count messages count
 * @return QueryResultSet the found records
 * @throws RuntimeException
 */
public QueryResultSet<HistoryRecord> findFirstRecordsAfter(Date date, int count) throws RuntimeException {
    TreeSet<HistoryRecord> result = new TreeSet<HistoryRecord>(new HistoryRecordComparator());
    Vector<String> filelist = filterFilesByDate(this.historyImpl.getFileList(), date, null);
    int leftCount = count;
    int currentFile = 0;
    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    while (leftCount > 0 && currentFile < filelist.size()) {
        Document doc = this.historyImpl.getDocumentForFile(filelist.get(currentFile));
        if (doc == null) {
            currentFile++;
            continue;
        }
        NodeList nodes = doc.getElementsByTagName("record");
        Node node;
        for (int i = 0; i < nodes.getLength() && leftCount > 0; i++) {
            node = nodes.item(i);
            NodeList propertyNodes = node.getChildNodes();
            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp").getNodeValue();
            try {
                timestamp = sdf.parse(ts);
            } catch (ParseException e) {
                timestamp = new Date(Long.parseLong(ts));
            }
            if (!isInPeriod(timestamp, date, null))
                continue;
            ArrayList<String> nameVals = new ArrayList<String>();
            boolean isRecordOK = true;
            int len = propertyNodes.getLength();
            for (int j = 0; j < len; j++) {
                Node propertyNode = propertyNodes.item(j);
                if (propertyNode.getNodeType() == Node.ELEMENT_NODE) {
                    // Get nested TEXT node's value
                    Node nodeValue = propertyNode.getFirstChild();
                    if (nodeValue != null) {
                        nameVals.add(propertyNode.getNodeName());
                        nameVals.add(nodeValue.getNodeValue());
                    } else
                        isRecordOK = false;
                }
            }
            // if we found a broken record - just skip it
            if (!isRecordOK)
                continue;
            String[] propertyNames = new String[nameVals.size() / 2];
            String[] propertyValues = new String[propertyNames.length];
            for (int j = 0; j < propertyNames.length; j++) {
                propertyNames[j] = nameVals.get(j * 2);
                propertyValues[j] = nameVals.get(j * 2 + 1);
            }
            HistoryRecord record = new HistoryRecord(propertyNames, propertyValues, timestamp);
            result.add(record);
            leftCount--;
        }
        currentFile++;
    }
    return new OrderedQueryResultSet<HistoryRecord>(result);
}
From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java
private TreeSet collectNamespaces(String sCanInfo, TreeSet tsOtherAttr) {
    TreeSet ts = new TreeSet();
    // find element header
    int p1 = -1, p2 = -1;
    p1 = sCanInfo.indexOf('>');
    if (p1 != -1) {
        String sHdr = sCanInfo.substring(0, p1);
        if (m_logger.isDebugEnabled())
            m_logger.debug("Header: " + sHdr);
        String[] toks = sHdr.split(" ");
        for (int i = 0; (toks != null) && (i < toks.length); i++) {
            String tok = toks[i];
            if (tok != null && tok.trim().length() > 0 && tok.charAt(0) != '<') {
                if (tok.indexOf("xmlns") != -1)
                    ts.add(tok);
                else
                    tsOtherAttr.add(tok);
            }
        }
    }
    return ts;
}
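The SAXDigiDocFactory helpers (addNamespaceIfMissing and collectNamespaces above) use a raw TreeSet of String, so add falls back on String's natural ordering: duplicate xmlns declarations collapse to a single entry and iteration returns the declarations in lexicographic order, which is convenient when reassembling a canonicalized header. A small sketch of that effect (the default namespace URI is made up for illustration):

import java.util.TreeSet;

// Adding namespace declaration strings to a TreeSet deduplicates them and
// yields them in lexicographic order on iteration.
public class NamespaceSetDemo {
    public static void main(String[] args) {
        TreeSet<String> ts = new TreeSet<String>();
        ts.add("xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\"");
        ts.add("xmlns=\"http://example.org/default\"");            // hypothetical default namespace
        ts.add("xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\""); // duplicate, ignored
        for (String decl : ts) {
            System.out.println(decl);
        }
        // Prints the ds declaration first (':' sorts before '='), then the default one;
        // the duplicate was dropped, so ts.size() is 2.
    }
}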
From source file:asterix.parser.classad.ClassAd.java
public boolean privateGetExternalReferences(ExprTree expr, ClassAd ad, EvalState state, TreeSet<String> refs,
        boolean fullNames) throws HyracksDataException {
    if (expr.isTreeHolder()) {
        expr = ((ExprTreeHolder) expr).getInnerTree();
    }
    switch (expr.getKind()) {
        case LITERAL_NODE:
            // no external references here
            return (true);

        case ATTRREF_NODE: {
            ClassAd start = new ClassAd();
            ExprTreeHolder tree = new ExprTreeHolder();
            ExprTreeHolder result = new ExprTreeHolder();
            AMutableCharArrayString attr = new AMutableCharArrayString();
            Value val = new Value();
            MutableBoolean abs = new MutableBoolean();
            ((AttributeReference) expr).getComponents(tree, attr, abs);
            // establish starting point for attribute search
            if (tree.getInnerTree() == null) {
                start = abs.booleanValue() ? state.getRootAd() : state.getCurAd();
                if (abs.booleanValue() && (start == null)) { // NAC - circularity so no root
                    return false; // NAC
                } // NAC
            } else {
                if (!tree.publicEvaluate(state, val)) {
                    return (false);
                }
                // if the tree evals to undefined, the external references
                // are in the tree part
                if (val.isUndefinedValue()) {
                    if (fullNames) {
                        AMutableCharArrayString fullName = new AMutableCharArrayString();
                        if (tree.getInnerTree() != null) {
                            ClassAdUnParser unparser = new PrettyPrint();
                            unparser.unparse(fullName, tree);
                            fullName.appendChar('.');
                        }
                        fullName.appendString(attr);
                        refs.add(fullName.toString());
                        return true;
                    } else {
                        if (state.getDepthRemaining() <= 0) {
                            return false;
                        }
                        state.decrementDepth();
                        boolean ret = privateGetExternalReferences(tree, ad, state, refs, fullNames);
                        state.incrementDepth();
                        return ret;
                    }
                }
                // otherwise, if the tree didn't evaluate to a classad,
                // we have a problem
                if (!val.isClassAdValue(start)) {
                    return (false);
                }
            }
            // lookup for attribute
            ClassAd curAd = state.getCurAd();
            switch (start.lookupInScope(attr.toString(), result, state)) {
                case EVAL_ERROR_Int:
                    // some error
                    return (false);
                case EVAL_UNDEF_Int:
                    // attr is external
                    refs.add(attr.toString());
                    state.setCurAd(curAd);
                    return (true);
                case EVAL_OK_Int: {
                    // attr is internal; find external refs in result
                    if (state.getDepthRemaining() <= 0) {
                        state.setCurAd(curAd);
                        return false;
                    }
                    state.decrementDepth();
                    boolean rval = privateGetExternalReferences(result, ad, state, refs, fullNames);
                    state.incrementDepth();
                    state.setCurAd(curAd);
                    return (rval);
                }
                case EVAL_FAIL_Int:
                default:
                    // enh??
                    return (false);
            }
        }

        case OP_NODE: {
            // recurse on subtrees
            AMutableInt32 opKind = new AMutableInt32(0);
            ExprTreeHolder t1 = new ExprTreeHolder();
            ExprTreeHolder t2 = new ExprTreeHolder();
            ExprTreeHolder t3 = new ExprTreeHolder();
            ((Operation) expr).getComponents(opKind, t1, t2, t3);
            if (t1.getInnerTree() != null && !privateGetExternalReferences(t1, ad, state, refs, fullNames)) {
                return (false);
            }
            if (t2.getInnerTree() != null && !privateGetExternalReferences(t2, ad, state, refs, fullNames)) {
                return (false);
            }
            if (t3.getInnerTree() != null && !privateGetExternalReferences(t3, ad, state, refs, fullNames)) {
                return (false);
            }
            return (true);
        }

        case FN_CALL_NODE: {
            // recurse on subtrees
            AMutableCharArrayString fnName = new AMutableCharArrayString();
            ExprList args = new ExprList();
            ((FunctionCall) expr).getComponents(fnName, args);
            for (ExprTree tree : args.getExprList()) {
                if (!privateGetExternalReferences(tree, ad, state, refs, fullNames)) {
                    return (false);
                }
            }
            return (true);
        }

        case CLASSAD_NODE: {
            // recurse on subtrees
            Map<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
            ((ClassAd) expr).getComponents(attrs);
            for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
                if (state.getDepthRemaining() <= 0) {
                    return false;
                }
                state.decrementDepth();
                boolean ret = privateGetExternalReferences(entry.getValue(), ad, state, refs, fullNames);
                state.incrementDepth();
                if (!ret) {
                    return (false);
                }
            }
            return (true);
        }

        case EXPR_LIST_NODE: {
            // recurse on subtrees
            ExprList exprs = new ExprList();
            ((ExprList) expr).getComponents(exprs);
            for (ExprTree exprTree : exprs.getExprList()) {
                if (state.getDepthRemaining() <= 0) {
                    return false;
                }
                state.decrementDepth();
                boolean ret = privateGetExternalReferences(exprTree, ad, state, refs, fullNames);
                state.incrementDepth();
                if (!ret) {
                    return (false);
                }
            }
            return (true);
        }

        default:
            return false;
    }
}
From source file:org.gvsig.framework.web.service.impl.OGCInfoServiceImpl.java
public WMTSInfo getCapabilitiesFromWMTS(String urlServerWMTS, TreeSet<String> listCrs, boolean useCrsSelected)
        throws ServerGeoException {
    TreeSet<String> formatsSupported = new TreeSet<String>();
    TreeSet<String> crsSupported = new TreeSet<String>();
    boolean isFormatsSupported = false;
    WMTSInfo wmtsInfo = new WMTSInfo();
    // put the url on the WMTSInfo object
    wmtsInfo.setServiceUrl(urlServerWMTS);
    // create a hashmap to add the retrieved layers to the WMTSInfo object
    Map<String, org.gvsig.framework.web.ogc.WMTSLayer> layersMap = new HashMap<String, org.gvsig.framework.web.ogc.WMTSLayer>();
    // get WMTS manager
    WMTSOGCManager wmtsMan = WMTSOGCLocator.getManager();
    try {
        WMTSClient wmtsClient = wmtsMan.createWMTSClient(urlServerWMTS);
        wmtsClient.connect(true, null);
        WMTSServiceIdentification wmtsServIden = wmtsClient.getServiceIdentification();
        // set server info
        wmtsInfo.setServiceAbstract(wmtsServIden.getAbstract());
        wmtsInfo.setServiceTitle(wmtsServIden.getTitle());
        wmtsInfo.setVersion(wmtsServIden.getServiceTypeVersion());
        wmtsInfo.setServiceType(wmtsServIden.getServiceType());
        // set the id of the WMTS request (service title + calendar)
        int hashCode = (wmtsServIden.getTitle() + Calendar.getInstance()).hashCode();
        wmtsInfo.setId(hashCode);
        // set tile matrices and check whether they support the crs of the map
        List<String> patternList = new ArrayList<String>();
        if (!listCrs.isEmpty()) {
            for (String crs : listCrs) {
                String[] crsSplit = crs.split(":");
                String pattern = "(.*)(:?)".concat(crsSplit[0]).concat("((:)(.*)(:)").concat(crsSplit[1])
                        .concat("|(:)").concat(crsSplit[1]).concat(")");
                patternList.add(pattern);
            }
        }
        // hashmap with: identifier of tile matrix, supported crs
        Map<String, String> tileMatrixCrsSupported = new HashMap<String, String>();
        TreeSet<String> tileMatrixSelectedId = new TreeSet<String>();
        List<WMTSTileMatrixSet> tileMatrixSet = wmtsClient.getTileMatrixSet();
        for (int i = 0; i < tileMatrixSet.size(); i++) {
            WMTSTileMatrixSet tileMatrix = tileMatrixSet.get(i);
            String identifier = tileMatrix.getIdentifier();
            String supportedCRS = tileMatrix.getSupportedCRS();
            crsSupported.add(supportedCRS);
            // add the tile matrix with its supported crs to the map
            tileMatrixCrsSupported.put(identifier, supportedCRS);
            if (!listCrs.isEmpty()) {
                if (listCrs.contains(supportedCRS)) {
                    tileMatrixSelectedId.add(identifier);
                } else {
                    // check supportedCRS against the regular expressions generated from the
                    // list of crs passed in
                    for (String expReg : patternList) {
                        if (supportedCRS.matches(expReg)) {
                            tileMatrixSelectedId.add(identifier);
                        }
                    }
                }
            }
        }
        // add the map of tile matrices and the selected tile matrices to the WMTSInfo object
        wmtsInfo.setTileMatrixCrsSupported(tileMatrixCrsSupported);
        wmtsInfo.setTileMatrixSelectedId(tileMatrixSelectedId);
        // only set layers if there is a tile matrix supporting a crs of the map, or crs is null
        WMTSThemes layerListAsThemes = wmtsClient.getLayerListAsThemes();
        // create tree with layer values
        List<TreeNode> tree = new ArrayList<TreeNode>();
        // create children layers
        for (int i = 0; i < layerListAsThemes.getChildCount(); i++) {
            WMTSTheme wmtsTheme = layerListAsThemes.getChildren(i);
            WMTSLayer layer = wmtsTheme.getLayer();
            TreeSet<String> wmtsLinkSelected = new TreeSet<String>();
            TreeSet<String> wmtsLinkSupported = new TreeSet<String>();
            // check crs
            List<WMTSTileMatrixSetLink> tileMatrixSetLink = layer.getTileMatrixSetLink();
            for (int j = 0; j < tileMatrixSetLink.size(); j++) {
                WMTSTileMatrixSetLink wmtsLink = tileMatrixSetLink.get(j);
                wmtsLinkSupported.add(wmtsLink.getTileMatrixSetId());
                if (!tileMatrixSelectedId.isEmpty()
                        && tileMatrixSelectedId.contains(wmtsLink.getTileMatrixSetId())) {
                    wmtsLinkSelected.add(wmtsLink.getTileMatrixSetId());
                }
            }
            // check format
            TreeSet<String> setFormats = new TreeSet<String>();
            setFormats.addAll(layer.getFormat());
            String format = getFirstFormatSupported(setFormats);
            formatsSupported.addAll(setFormats);
            if ((!wmtsLinkSelected.isEmpty() || listCrs.isEmpty()) && format != null) {
                isFormatsSupported = true;
                TreeNode node = new TreeNode(layer.getIdentifier());
                node.setTitle(layer.getTitle());
                node.setFolder(false);
                tree.add(node);
                // add layer to layer map
                org.gvsig.framework.web.ogc.WMTSLayer wmtsLayer = new org.gvsig.framework.web.ogc.WMTSLayer();
                TreeSet<String> crsSet = new TreeSet<String>();
                crsSet.addAll(layer.getSrsList());
                wmtsLayer.setCrs(crsSet);
                wmtsLayer.setName(layer.getIdentifier());
                wmtsLayer.setTitle(layer.getTitle());
                wmtsLayer.setFormatSelected(format);
                wmtsLayer.setFormatsSupported(setFormats);
                if (listCrs.isEmpty()) {
                    wmtsLayer.setTileMatrixSelected(wmtsLinkSupported);
                } else {
                    wmtsLayer.setTileMatrixSelected(wmtsLinkSelected);
                }
                layersMap.put(layer.getIdentifier(), wmtsLayer);
            }
        }
        wmtsInfo.setFormatsSupported(formatsSupported);
        wmtsInfo.setLayersTree(tree);
        wmtsInfo.setLayers(layersMap);
        wmtsInfo.setIsFormatsSupported(isFormatsSupported);
        wmtsInfo.setCrsSupported(crsSupported);
    } catch (Exception exc) {
        logger.error("Exception on getCapabilitiesFromWMTS", exc);
        throw new ServerGeoException();
    }
    return wmtsInfo;
}