List of usage examples for java.util TreeSet add
public boolean add(E e)
From source file:gemlite.shell.admin.dao.AdminDao.java
public String prB(String regionName) { Map param = new HashMap(); param.put("beanName", "PrService"); Map args = new HashMap(); args.put("REGIONPATH", regionName); param.put("userArgs", args); Execution execution = FunctionService.onServers(clientPool).withArgs(param); ResultCollector rc = execution.execute("REMOTE_ADMIN_FUNCTION"); ArrayList rs = (ArrayList) rc.getResult(); StringBuilder sb = new StringBuilder(); int pNum = 0, rNum = 0, tNum = 0; // ???ip+node?TreeSet TreeSet<String> ipNodeSet = new TreeSet<String>(); TreeSet<String> ipSet = new TreeSet<String>(); // ip HashMap<String, Set<String>> nodeMap = new HashMap<String, Set<String>>(); // ?ipnode // ?HashMap,k:ip+node+? v:bucket?,bucketId? HashMap<String, HashMap<Integer, String>> data = new HashMap<String, HashMap<Integer, String>>(); if (rs != null) { for (Object obj : rs) { ArrayList list = (ArrayList) obj; for (Object o : list) { if (!(o instanceof Map)) { System.out.println(o.toString()); continue; }/*w w w .ja v a 2s. c o m*/ HashMap map = (HashMap) o; // ??,?bucket? 
String host = (String) map.get("host"); String node = (String) map.get("node"); Integer BucketId = (Integer) map.get("BucketId"); if (!ipSet.contains(host)) ipSet.add(host); Set<String> nodeSet = nodeMap.get(host); if (nodeSet == null) { nodeSet = new TreeSet<String>(); nodeSet.add(node); nodeMap.put(host, nodeSet); } else { if (!nodeSet.contains(node)) nodeSet.add(node); } String hostAndNode = host + node; String singleHostNode = hostAndNode; tNum = (Integer) map.get("TotalNumBuckets"); ipNodeSet.add(hostAndNode); // if ("primary".equals(map.get("type"))) { singleHostNode = primary + singleHostNode; pNum++; } else { singleHostNode = redundant + singleHostNode; rNum++; } if (data.containsKey(singleHostNode)) { HashMap<Integer, String> buckets = data.get(singleHostNode); buckets.put(BucketId, BucketId + "\t" + map.get("Bytes") + "\t" + map.get("Size")); } else { HashMap<Integer, String> buckets = new HashMap<Integer, String>(); buckets.put(BucketId, BucketId + "\t" + map.get("Bytes") + "\t" + map.get("Size")); data.put(singleHostNode, buckets); } } } } // ?,ip,ipset?? Iterator<String> it = ipNodeSet.iterator(); int i = 0; while (it.hasNext()) { i++; String host = it.next(); // ?bucket? // ,?? String p = primary + host; sb.append(i + ". " + p).append("\n"); sb.append(paraseSingleNode(data, p)); // ?bucket? // ,?? String r = redundant + host; sb.append(i + ". " + r).append("\n"); sb.append(paraseSingleNode(data, r)); } // ?? sb.append("Primary Bucket Count:" + pNum).append("\n"); sb.append("Redundant Bucket Count:" + rNum).append("\n"); sb.append("total-num-buckets (max):" + tNum).append("\n"); // bucket? checkPr(ipSet, nodeMap, data, sb); return sb.toString(); //System.out.println(sb.toString()); }
From source file:com.ichi2.anki.CardEditor.java
private void actualizeTagDialog(StyledDialog ad) { TreeSet<String> tags = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER); for (String tag : mCol.getTags().all()) { tags.add(tag); }//from w w w .j a v a2s. co m tags.addAll(selectedTags); int len = tags.size(); allTags = new String[len]; boolean[] checked = new boolean[len]; int i = 0; for (String t : tags) { allTags[i++] = t; if (selectedTags.contains(t)) { checked[i - 1] = true; } } ad.setMultiChoiceItems(allTags, checked, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface arg0, int which) { String tag = allTags[which]; if (selectedTags.contains(tag)) { Log.i(AnkiDroidApp.TAG, "unchecked tag: " + tag); selectedTags.remove(tag); } else { Log.i(AnkiDroidApp.TAG, "checked tag: " + tag); selectedTags.add(tag); } } }); }
From source file:fr.gouv.finances.dgfip.xemelios.importers.archives.ArchiveImporter.java
/**
 * Imports a Xemelios archive (a ZIP containing a manifest plus volume files):
 * reads the embedded manifest, applies the first import-rule section whose
 * predicate matches the archive's properties, executes the section's
 * import/delete actions, then records the outcome (result element + history
 * event) into the manifest and persists it.
 *
 * @return the accumulated {@code Errors}; never null — failures are reported
 *         as SEVERITY_ERROR/SEVERITY_WARNING entries rather than thrown.
 */
public Errors doImport() {
    Errors errors = new Errors();
    try {
        // Read the manifest out of the ZIP and tag it with the archive name.
        ZipFile zipArchive = new ZipFile(fileToImport);
        ZipEntry manifesteEntry = zipArchive.getEntry(MANIFESTE_FILE_NAME);
        archiveManifeste = getManisfesteFromArchive(zipArchive.getInputStream(manifesteEntry));
        archiveManifeste.getRootElement()
                .addAttribute(new Attribute("archive-name", getArchiveName(fileToImport)));
        zipArchive.close();
        HashMap<String, Object> importProperties = extractPropertiesFromArchiveManifeste(archiveManifeste);
        // Warn about document types the current data layer cannot import ("PJ" excepted).
        for (String docType : (String[]) importProperties.get("archiveDocumentTypes")) {
            if (!docType.equals("PJ")
                    && !DataLayerManager.getImplementation().canImportDocument(docType, getUser())) {
                errors.addError(Errors.SEVERITY_WARNING, "Impossible d'importer ce type de document (" + docType
                        + "), la base de donne doit d'abord tre mise jour.");
            }
        }
        // Manifest from a previous import of the same archive, if any.
        importedArchiveManifeste = DataLayerManager.getImplementation()
                .getManifesteFromArchive(importProperties.get("archiveName").toString(), getUser());
        definePropertiesFromImportedManifeste(importedArchiveManifeste, importProperties);
        Element historique = null;
        if (importedArchiveManifeste != null) {
            historique = (Element) importedArchiveManifeste.query("/m:manifeste/m:evenements", getNamespaceCtx())
                    .get(0);
            // Copy, so we get an element without a parent that can be attached anywhere.
            historique = new Element(historique);
        } else {
            historique = new Element("evenements");
        }
        archiveManifeste.getRootElement().appendChild(historique);
        boolean sectionApplied = false;
        // Apply the first rule section whose predicate matches this archive.
        for (SectionModel section : rules.getSections()) {
            SectionModel theSection = section;
            if (theSection.getPredicat().matches(importProperties)) {
                logger.info("Application de la rgle " + theSection.getName());
                int globalOverwriteRule = OVERWRITE_RULE_UNSET;
                if (theSection.getActions().isUsePreviousSection()) {
                    // A previously imported manifest necessarily exists: reuse its section.
                    Element sectionElement = (Element) importedArchiveManifeste
                            .query("/m:manifeste/rul:section", getNamespaceCtx()).get(0);
                    if (sectionElement == null)
                        throw new ImportException(new Errors.Error(Errors.SEVERITY_ERROR, "la section "
                                + theSection.getName()
                                + " impose l'application de la section du prcdente import, mais celui-ci n'a pas t trouv."),
                                null);
                    theSection = new SectionModel(sectionElement);
                    // And drop every document previously imported from this archive.
                    HashMap<String, DocumentModel> docsToDrop = new HashMap<String, DocumentModel>();
                    for (String docId : (String[]) importProperties.get("archiveImportedDocumentTypes")) {
                        docsToDrop.put(docId, documentsModel.getDocumentById(docId));
                    }
                    DataLayerManager.getImplementation().removeArchive(docsToDrop,
                            importProperties.get("archiveName").toString(), getUser());
                    Nodes deleteActions = importedArchiveManifeste.query("/m:manifeste/m:on-delete/m:action",
                            getNamespaceCtx());
                    for (int i = 0; i < deleteActions.size(); i++) {
                        Element action = (Element) deleteActions.get(i);
                        doApplyAction(action);
                    }
                    // At this point no manifest update is needed: all documents were removed.
                } else {
                    if (importedArchiveManifeste != null) {
                        // Carry over each document's import status/result from the previous manifest.
                        Nodes importedDocuments = importedArchiveManifeste.query("//m:document",
                                getNamespaceCtx());
                        for (int i = 0; i < importedDocuments.size(); i++) {
                            Element importedDoc = (Element) importedDocuments.get(i);
                            Element thisDoc = getElement(
                                    archiveManifeste.query("/manifeste/documents/document[@path='"
                                            + importedDoc.getAttributeValue("path") + "']"));
                            if (thisDoc != null) {
                                String __imported = importedDoc.getAttributeValue("imported");
                                thisDoc.addAttribute(new Attribute("imported", __imported));
                                if ("Oui".equals(__imported)) {
                                    Element result = getElement(
                                            importedDoc.query("m:resultatimport", getNamespaceCtx()));
                                    if (result != null)
                                        thisDoc.appendChild(new Element(result));
                                }
                            }
                        }
                    }
                }
                if (theSection.getOverwriteRule() != null) {
                    if (EtatImporteur.OVERWRITE_RULE_ALWAYS.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_OVERWRITE;
                    else if (EtatImporteur.OVERWRITE_RULE_NEVER.equals(theSection.getOverwriteRule()))
                        globalOverwriteRule = OVERWRITE_RULE_SKIP;
                }
                // Collect the documents to import so we can check their volumes are available.
                List<Element> documentsToImport = theSection.getDocumentsToImport(archiveManifeste,
                        applicationProperties);
                if (documentsToImport.size() > 0) {
                    TreeSet<String> volumesRequired = new TreeSet<String>();
                    for (Element filePlannedToImport : documentsToImport) {
                        volumesRequired.add(filePlannedToImport.getAttributeValue("volume"));
                    }
                    // NOTE(review): last() works here only if volume numbers compare correctly
                    // as strings (e.g. all single-digit) — confirm for archives with >9 volumes.
                    int maxVolumes = Integer.parseInt(volumesRequired.last());
                    File[] volumes = new File[maxVolumes + 1];
                    for (String volume : volumesRequired) {
                        String fichierVolume = archiveManifeste
                                .query("/manifeste/volumes/volume[@num=" + volume + "]/@fichier").get(0)
                                .getValue();
                        File f = new File(fileToImport.getParentFile(), fichierVolume);
                        if (!f.exists()) {
                            errors.addError(Errors.SEVERITY_ERROR, f.getAbsolutePath() + " non trouv");
                        }
                        volumes[Integer.parseInt(volume)] = f;
                    }
                    if (!errors.containsError()) {
                        logger.info("displayProgress(" + (documentsToImport.size() + 1) + ")");
                        // Extension hook: lets ArchiveImporteur subclasses adjust things before import.
                        preImport(theSection, archiveManifeste);
                        importServiceProvider.displayProgress(documentsToImport.size() + 1);
                        boolean doImport = false;
                        boolean doDelete = false;
                        // Execute the section's actions (imports and/or deletions).
                        for (XomDefinitionable dd : theSection.getActions().getActions()) {
                            if (dd instanceof ImportModel) {
                                //ImportModel im = (ImportModel)dd;
                                // The list of files to import is already known; import each one.
                                for (Element documentToImport : documentsToImport) {
                                    int vol = Integer.parseInt(documentToImport.getAttributeValue("volume"));
                                    try {
                                        FileInfo fileInfo = doImportDocument(documentToImport, volumes[vol],
                                                importProperties, globalOverwriteRule);
                                        if (fileInfo.getInProcessException() != null)
                                            errors.addError(Errors.SEVERITY_ERROR,
                                                    fileInfo.getInProcessException().getMessage());
                                        if (__fileInfo == null)
                                            __fileInfo = fileInfo;
                                        else
                                            __fileInfo.merge(fileInfo);
                                        if (fileInfo.getGlobalCount() == 0) {
                                            // Nothing was imported, probably because the overwrite rule
                                            // said not to import; leave the manifest untouched.
                                        } else {
                                            Element result = new Element("resultatimport");
                                            result.addAttribute(new Attribute("Duree",
                                                    DateUtils.durationToString(fileInfo.getDurationImport())));
                                            result.addAttribute(new Attribute("Debut", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getDebutImport()))));
                                            result.addAttribute(new Attribute("Fin", DateUtils
                                                    .formatXsDateTime(new Date(fileInfo.getFinImport()))));
                                            // Drop any previous import result before recording this one.
                                            Nodes previousResults = documentToImport.query(
                                                    "resultatimport | m:resultatimport", getNamespaceCtx());
                                            for (int i = previousResults.size() - 1; i >= 0; i--) {
                                                Element __res = (Element) previousResults.get(i);
                                                documentToImport.removeChild(__res);
                                            }
                                            documentToImport.insertChild(result, 0);
                                            documentToImport.addAttribute(new Attribute("imported", "Oui"));
                                        }
                                        // Apply any manifest actions that depend on this document.
                                        Nodes actions = archiveManifeste.query("/manifeste/action[@depends-on='"
                                                + documentToImport.getAttributeValue("path") + "']");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo actFileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = actFileInfo;
                                                else
                                                    __fileInfo.merge(actFileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    } catch (Exception ex) {
                                        logger.error("while importing "
                                                + documentToImport.getAttributeValue("path"), ex);
                                        documentToImport.addAttribute(
                                                new Attribute("imported", "Erreur: " + ex.getMessage()));
                                    }
                                }
                                doImport = true;
                            } else if (dd instanceof DeleteModel) {
                                importServiceProvider.startLongWait();
                                DeleteModel dm = (DeleteModel) dd;
                                if (dm.getArchive() != null) {
                                    String archiveName = null;
                                    if ("archiveName".equals(dm.getArchive())) {
                                        // Replace the placeholder with this archive's name; deleting
                                        // a different archive is not allowed for now.
                                        archiveName = importProperties.get("archiveName").toString();
                                        HashMap<String, DocumentModel> map = new HashMap<String, DocumentModel>();
                                        for (String s : (String[]) importProperties
                                                .get("archiveDocumentTypes")) {
                                            map.put(s, documentsModel.getDocumentById(s));
                                        }
                                        DataLayerManager.getImplementation().removeArchive(map, archiveName,
                                                getUser());
                                        Nodes documents = archiveManifeste
                                                .query("/manifeste/documents/document");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(doc.query(
                                                    "m:resultatimport | resultatimport", getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                        }
                                        // Apply every on-delete action, since all documents were removed.
                                        Nodes actions = archiveManifeste
                                                .query("/manifeste/on-delete/action[@depends-on]");
                                        for (int i = 0; i < actions.size(); i++) {
                                            Element action = (Element) actions.get(i);
                                            try {
                                                FileInfo fileInfo = doApplyAction(action);
                                                if (__fileInfo == null)
                                                    __fileInfo = fileInfo;
                                                else
                                                    __fileInfo.merge(fileInfo);
                                            } catch (Exception ex) {
                                                logger.error("while applying " + action.toXML(), ex);
                                            }
                                        }
                                    }
                                } else if (dm.getTypeDoc() != null) {
                                    if (dm.getCollectivite() != null) {
                                        if (dm.getBudget() != null) {
                                            if (dm.getFileName() != null) {
                                                // Delete one specific file of one budget.
                                                DataLayerManager.getImplementation().removeDocument(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                        new Pair(dm.getBudget(), dm.getBudget()),
                                                        dm.getFileName(), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "' and @buCode='"
                                                                + dm.getBudget() + "' and ends-with(@path,'"
                                                                + dm.getFileName() + "')]");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                }
                                                // Apply the on-delete actions tied to that file.
                                                Nodes actions = archiveManifeste.query(
                                                        "/manifeste/on-delete/action[ends-with(@depends-on,'"
                                                                + dm.getFileName() + "')]");
                                                for (int i = 0; i < actions.size(); i++) {
                                                    Element action = (Element) actions.get(i);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            } else {
                                                // Delete one whole budget of a collectivity.
                                                DataLayerManager.getImplementation().removeBudget(
                                                        documentsModel.getDocumentById(dm.getTypeDoc()),
                                                        new Pair(dm.getCollectivite(), dm.getCollectivite()),
                                                        new Pair(dm.getBudget(), dm.getBudget()), user);
                                                Nodes documents = archiveManifeste
                                                        .query("/manifeste/documents/document[@type='"
                                                                + dm.getTypeDoc() + "' and @buIdCol='"
                                                                + dm.getCollectivite() + "' and @buCode='"
                                                                + dm.getBudget() + "']");
                                                for (int i = 0; i < documents.size(); i++) {
                                                    Element doc = (Element) documents.get(i);
                                                    Element resultImport = getElement(
                                                            doc.query("m:resultatimport | resultatimport",
                                                                    getNamespaceCtx()));
                                                    if (resultImport != null)
                                                        doc.removeChild(resultImport);
                                                    doc.addAttribute(new Attribute("imported", "Non"));
                                                    // Apply each document's own on-delete actions.
                                                    Nodes actions = archiveManifeste.query(
                                                            "/manifeste/on-delete/action[@depends-on='"
                                                                    + doc.getAttributeValue("path") + "']");
                                                    for (int a = 0; a < actions.size(); a++) {
                                                        Element action = (Element) actions.get(a);
                                                        try {
                                                            FileInfo fileInfo = doApplyAction(action);
                                                            if (__fileInfo == null)
                                                                __fileInfo = fileInfo;
                                                            else
                                                                __fileInfo.merge(fileInfo);
                                                        } catch (Exception ex) {
                                                            logger.error("while applying " + action.toXML(), ex);
                                                        }
                                                    }
                                                }
                                            }
                                        } else {
                                            // No budget given: delete the whole collectivity for this type.
                                            DataLayerManager.getImplementation().removeCollectivite(
                                                    documentsModel.getDocumentById(dm.getTypeDoc()),
                                                    new Pair(dm.getCollectivite(), dm.getCollectivite()), user);
                                            Nodes documents = archiveManifeste
                                                    .query("/manifeste/documents/document[@type='"
                                                            + dm.getTypeDoc() + "' and @buIdCol='"
                                                            + dm.getCollectivite() + "']");
                                            for (int i = 0; i < documents.size(); i++) {
                                                Element doc = (Element) documents.get(i);
                                                Element resultImport = getElement(
                                                        doc.query("m:resultatimport | resultatimport",
                                                                getNamespaceCtx()));
                                                if (resultImport != null)
                                                    doc.removeChild(resultImport);
                                                doc.addAttribute(new Attribute("imported", "Non"));
                                                // Apply each document's own on-delete actions.
                                                Nodes actions = archiveManifeste
                                                        .query("/manifeste/on-delete/action[@depends-on='"
                                                                + doc.getAttributeValue("path") + "']");
                                                for (int a = 0; a < actions.size(); a++) {
                                                    Element action = (Element) actions.get(a);
                                                    try {
                                                        FileInfo fileInfo = doApplyAction(action);
                                                        if (__fileInfo == null)
                                                            __fileInfo = fileInfo;
                                                        else
                                                            __fileInfo.merge(fileInfo);
                                                    } catch (Exception ex) {
                                                        logger.error("while applying " + action.toXML(), ex);
                                                    }
                                                }
                                            }
                                        }
                                    } else {
                                        // No collectivity given: delete the whole document type.
                                        DataLayerManager.getImplementation().removeDocumentModel(
                                                documentsModel.getDocumentById(dm.getTypeDoc()), user);
                                        Nodes documents = archiveManifeste
                                                .query("/manifeste/documents/document[@type='"
                                                        + dm.getTypeDoc() + "']");
                                        for (int i = 0; i < documents.size(); i++) {
                                            Element doc = (Element) documents.get(i);
                                            Element resultImport = getElement(
                                                    doc.query("m:resultatimport | resultatimport",
                                                            getNamespaceCtx()));
                                            if (resultImport != null)
                                                doc.removeChild(resultImport);
                                            doc.addAttribute(new Attribute("imported", "Non"));
                                            // Apply each document's own on-delete actions.
                                            Nodes actions = archiveManifeste
                                                    .query("/manifeste/on-delete/action[@depends-on='"
                                                            + doc.getAttributeValue("path") + "']");
                                            for (int a = 0; a < actions.size(); a++) {
                                                Element action = (Element) actions.get(a);
                                                try {
                                                    FileInfo fileInfo = doApplyAction(action);
                                                    if (__fileInfo == null)
                                                        __fileInfo = fileInfo;
                                                    else
                                                        __fileInfo.merge(fileInfo);
                                                } catch (Exception ex) {
                                                    logger.error("while applying " + action.toXML(), ex);
                                                }
                                            }
                                        }
                                    }
                                }
                                doDelete = true;
                            }
                            importServiceProvider.endLongWait();
                        }
                        if (doImport) {
                            // Pre-2011 archive compatibility: apply all actions that are neither
                            // on-delete nor bound to a document via depends-on.
                            Nodes actions = archiveManifeste.query("/manifeste/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doImport) {
                            // For edmn patches, apply the declared actions.
                            Nodes actions = archiveManifeste.query("/manifeste/actions/action");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                        if (doDelete) {
                            // Pre-2011 compatibility for on-delete actions without depends-on.
                            Nodes actions = archiveManifeste
                                    .query("/manifeste/on-delete/action[not(@depends-on)]");
                            for (int i = 0; i < actions.size(); i++) {
                                Element action = (Element) actions.get(i);
                                try {
                                    FileInfo fileInfo = doApplyAction(action);
                                    if (__fileInfo == null)
                                        __fileInfo = fileInfo;
                                    else
                                        __fileInfo.merge(fileInfo);
                                } catch (Exception ex) {
                                    logger.error("while applying " + action.toXML(), ex);
                                }
                            }
                        }
                    }
                    // Record whether this archive actually contributed any data.
                    if (archiveManifeste.query("/manifeste/documents/document[@imported='Oui']").size() > 0)
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Oui"));
                    else
                        archiveManifeste.getRootElement()
                                .addAttribute(new Attribute("added:archive", Constants.ADDED_NS_URI, "Non"));
                    // Store the applied section in the manifest, so the actions can be replayed later.
                    archiveManifeste.getRootElement().appendChild(theSection.getXomDefinition());
                    sectionApplied = true;
                    break;
                } else {
                    // Nothing to import, but the section still counts as applied.
                    sectionApplied = true;
                    break;
                }
            }
        }
        if (sectionApplied) {
            // All sections processed; maybe nothing actually happened.
            boolean somethingHasBeenImported = false;
            Nodes nodes = archiveManifeste.query("//document[@imported]");
            somethingHasBeenImported = nodes.size() > 0;
            // Mark every untouched document as not imported.
            nodes = archiveManifeste.query("//document[not(@imported)]");
            for (int i = 0; i < nodes.size(); i++) {
                Element el = (Element) nodes.get(i);
                el.addAttribute(new Attribute("imported", "Non"));
            }
            archiveManifeste.getRootElement()
                    .addAttribute(new Attribute("imported", Boolean.toString(somethingHasBeenImported)));
            Element result = new Element("resultatimport");
            if (result != null) { // always true; original "just in case" guard kept as-is
                if (__fileInfo != null) {
                    result.addAttribute(
                            new Attribute("Duree", DateUtils.durationToString(__fileInfo.getDurationImport())));
                    result.addAttribute(new Attribute("Debut",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getDebutImport()))));
                    result.addAttribute(new Attribute("Fin",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getFinImport()))));
                    result.addAttribute(new Attribute("User", getUser().getId()));
                    result.addAttribute(new Attribute("LastModify",
                            DateUtils.formatXsDateTime(new Date(__fileInfo.getLastModify()))));
                    result.appendChild(__fileInfo.toXomXml(documentsModel));
                }
            }
            archiveManifeste.getRootElement().appendChild(result);
            // Import history: prepend an event describing this run.
            Element event = new Element("evenement");
            event.addAttribute(new Attribute("date", DateUtils.formatXsDateTime(new Date())));
            event.addAttribute(new Attribute("user", getUser().getId()));
            event.addAttribute(new Attribute("section", archiveManifeste
                    .query("/manifeste/rul:section/@name", getNamespaceCtx()).get(0).getValue()));
            String version = archiveManifeste.getRootElement().getAttributeValue("version");
            if (version == null || version.length() == 0)
                version = "1";
            event.addAttribute(new Attribute("version-archive", version));
            historique.insertChild(event, 0);
            doImportManifeste(archiveManifeste, importProperties.get("archiveName").toString());
            DataLayerManager.getImplementation()
                    .declareArchiveImported(importProperties.get("archiveName").toString(), user);
            // System.out.println(archiveManifeste.toXML());
        } else {
            errors.addError(Errors.SEVERITY_WARNING,
                    "Cette archive ne peut tre importe par aucune des rgles de cette configuration.");
        }
    } catch (XPathExpressionException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (IOException ioEx) {
        logger.error(fileToImport.getAbsolutePath() + ": " + ioEx.getMessage(), ioEx);
        errors.addError(Errors.SEVERITY_ERROR, ioEx.getMessage());
    } catch (ImportException iEx) {
        Errors.Error error = iEx.error;
        errors.addError(error);
    } catch (DataConfigurationException dcEx) {
        logger.error(dcEx.getMessage(), dcEx);
        errors.addError(Errors.SEVERITY_ERROR, dcEx.getMessage());
    } catch (DataAccessException daEx) {
        logger.error(daEx.getMessage(), daEx);
        errors.addError(Errors.SEVERITY_ERROR, daEx.getMessage());
    } catch (UnauthorizedException ex) {
        logger.error(ex.getMessage(), ex);
        errors.addError(Errors.SEVERITY_ERROR, ex.getMessage());
    } catch (Throwable t) {
        t.printStackTrace();
        errors.addError(Errors.SEVERITY_ERROR, t.getMessage());
    } finally {
        // try { zipArchive.close(); } catch(Exception ex) {}
    }
    return errors;
}
From source file:com.sjc.cc.login.service.impl.LoginServiceImpl.java
/** * /*from ww w.j av a2 s . c o m*/ * Description: Date:Mar 28, 2013 4:39:45 PM * * @param list * @return * @return List<Map<MenuVO,HashSet<MenuVO>>> * */ @SuppressWarnings("unchecked") public Map<MenuPO, TreeSet<MenuPO>> getMenuMap(List<MenuPO> list) { Map optionMap = applicationCacheService.getCacheList("376"); TreeSet<MenuPO> parentSet = new TreeSet<MenuPO>(); TreeSet<MenuPO> childSet = new TreeSet<MenuPO>(); for (MenuPO po : list) { if (po.getPid() != null) { if (optionMap.get(po.getPid().toString()) != null) { parentSet.add(po); } else { childSet.add(po); } } } Map<MenuPO, TreeSet<MenuPO>> map = new TreeMap<MenuPO, TreeSet<MenuPO>>(); for (MenuPO pvo : parentSet) { TreeSet<MenuPO> set = new TreeSet<MenuPO>(); for (MenuPO cvo : childSet) { if (pvo.getPrivilegeId().longValue() == cvo.getPid().longValue()) { set.add(cvo); } } map.put(pvo, set); } return map; }
From source file:net.sf.mzmine.modules.peaklistmethods.alignment.ransac.RansacAlignerTask.java
/**
 * Builds the best-score mapping between rows of {@code peakList} and rows of
 * the aligned peak list. A RANSAC-derived polynomial corrects retention
 * times, candidate pairs are scored, and pairs are then accepted greedily in
 * score order so that each source row and each aligned row is used at most
 * once.
 *
 * @param peakList the peak list to align against {@code alignedPeakList}
 * @return mapping from rows of {@code peakList} to rows of the aligned list;
 *         empty when the aligned list has no rows; null when scoring fails
 *         (status is set to ERROR in that case)
 */
private HashMap<PeakListRow, PeakListRow> getAlignmentMap(PeakList peakList) {
    // Create a table of mappings for best scores
    HashMap<PeakListRow, PeakListRow> alignmentMapping = new HashMap<PeakListRow, PeakListRow>();
    if (alignedPeakList.getNumberOfRows() < 1) {
        return alignmentMapping;
    }
    // Create a sorted set of scores matching
    TreeSet<RowVsRowScore> scoreSet = new TreeSet<RowVsRowScore>();
    // RANSAC algorithm
    List<AlignStructMol> list = ransacPeakLists(alignedPeakList, peakList);
    PolynomialFunction function = this.getPolynomialFunction(list);
    PeakListRow allRows[] = peakList.getRows();
    for (PeakListRow row : allRows) {
        // Calculate limits for a row with which the row can be aligned
        Range mzRange = mzTolerance.getToleranceRange(row.getAverageMZ());
        double rt;
        try {
            rt = function.value(row.getAverageRT());
        } catch (NullPointerException e) {
            // No usable model: fall back to the uncorrected retention time.
            rt = row.getAverageRT();
        }
        if (Double.isNaN(rt) || rt == -1) {
            rt = row.getAverageRT();
        }
        Range rtRange = rtToleranceAfter.getToleranceRange(rt);
        // Get all rows of the aligned peaklist within parameter limits
        PeakListRow candidateRows[] = alignedPeakList.getRowsInsideScanAndMZRange(rtRange, mzRange);
        for (PeakListRow candidate : candidateRows) {
            RowVsRowScore score;
            if (sameChargeRequired && (!PeakUtils.compareChargeState(row, candidate))) {
                continue;
            }
            try {
                score = new RowVsRowScore(row, candidate, mzRange.getSize() / 2, rtRange.getSize() / 2, rt);
                scoreSet.add(score);
                errorMessage = score.getErrorMessage();
            } catch (Exception e) {
                // Scoring failure aborts the whole alignment; caller must handle null.
                e.printStackTrace();
                setStatus(TaskStatus.ERROR);
                return null;
            }
        }
        processedRows++;
    }
    // Iterate scores by descending order, accepting each pair only if neither
    // side has already been mapped (greedy best-first matching).
    Iterator<RowVsRowScore> scoreIterator = scoreSet.iterator();
    while (scoreIterator.hasNext()) {
        RowVsRowScore score = scoreIterator.next();
        // Check if the row is already mapped
        if (alignmentMapping.containsKey(score.getPeakListRow())) {
            continue;
        }
        // Check if the aligned row is already filled
        if (alignmentMapping.containsValue(score.getAlignedRow())) {
            continue;
        }
        alignmentMapping.put(score.getPeakListRow(), score.getAlignedRow());
    }
    return alignmentMapping;
}
From source file:io.hops.hopsworks.common.security.CertificateMaterializer.java
private TreeSet<ReentrantReadWriteLock> acquireWriteLocks(Map<MaterialKey, ReentrantReadWriteLock> lockSet) { TreeSet<ReentrantReadWriteLock> acquiredLocks = new TreeSet<>(new Comparator<ReentrantReadWriteLock>() { @Override/*w w w. ja va2 s . c o m*/ public int compare(ReentrantReadWriteLock t0, ReentrantReadWriteLock t1) { if (t0.hashCode() < t1.hashCode()) { return -1; } else if (t0.hashCode() > t1.hashCode()) { return 1; } return 0; } }); lockSet.values().stream().forEach(l -> { l.writeLock().lock(); acquiredLocks.add(l); }); return acquiredLocks; }
From source file:gdsc.smlm.ij.plugins.TraceMolecules.java
private int[] convert(double[] intervals) { TreeSet<Integer> set = new TreeSet<Integer>(); for (double d : intervals) set.add((int) Math.round(d)); set.remove(0); // Do not allow zero int[] values = new int[set.size()]; int i = 0;/* w w w . j a v a 2 s .com*/ for (Integer ii : set) values[i++] = ii; Arrays.sort(values); return values; }
From source file:net.sourceforge.fenixedu.domain.phd.PhdIndividualProgramProcess.java
public PrecedentDegreeInformation getLatestPrecedentDegreeInformation() { TreeSet<PrecedentDegreeInformation> degreeInformations = new TreeSet<PrecedentDegreeInformation>( Collections.reverseOrder(PrecedentDegreeInformation.COMPARATOR_BY_EXECUTION_YEAR)); ExecutionYear currentExecutionYear = ExecutionYear.readCurrentExecutionYear(); for (PrecedentDegreeInformation pdi : getPrecedentDegreeInformationsSet()) { if (!pdi.getExecutionYear().isAfter(currentExecutionYear)) { degreeInformations.add(pdi); }//from ww w .jav a2s . c o m } degreeInformations.addAll(getPrecedentDegreeInformationsSet()); return (degreeInformations.iterator().hasNext()) ? degreeInformations.iterator().next() : null; }
From source file:com.jaspersoft.jasperserver.test.ReportSchedulingTestTestNG.java
/** * doPersistenceTest/*from ww w . j ava 2 s .co m*/ */ @Test() public void doPersistenceTest() { m_logger.info("ReportSchedulingTestTestNG => doPersistenceTest() called"); // REPORT JOB 1 ReportJobSource source = new ReportJobSource(); source.setReportUnitURI("/test/reportURI"); Map params = new HashMap(); params.put("param1", new Integer(5)); params.put("param2", "value2"); source.setParametersMap(params); Date startDate = new Date(); ReportJobSimpleTrigger trigger = new ReportJobSimpleTrigger(); trigger.setStartDate(startDate); trigger.setOccurrenceCount(20); trigger.setRecurrenceInterval(10); trigger.setRecurrenceIntervalUnit(ReportJobSimpleTrigger.INTERVAL_DAY); ReportJobRepositoryDestination repositoryDestination = new ReportJobRepositoryDestination(); repositoryDestination.setFolderURI("/test/scheduled"); repositoryDestination.setOutputDescription("report output"); repositoryDestination.setSequentialFilenames(true); repositoryDestination.setTimestampPattern("yyyyMMdd"); repositoryDestination.setDefaultReportOutputFolderURI("/default/report_output/folder"); repositoryDestination.setUsingDefaultReportOutputFolderURI(true); ReportJobMailNotification mailNotification = new ReportJobMailNotification(); mailNotification.addTo("john@smith.com"); mailNotification.setSubject("Scheduled report"); mailNotification.setMessageText("Executed report"); ReportJob job_01 = new ReportJob(); job_01.setLabel("foo"); job_01.setDescription("bar"); job_01.setSource(source); job_01.setTrigger(trigger); job_01.setBaseOutputFilename("foo"); job_01.addOutputFormat(ReportJob.OUTPUT_FORMAT_PDF); job_01.addOutputFormat(ReportJob.OUTPUT_FORMAT_RTF); job_01.setContentRepositoryDestination(repositoryDestination); job_01.setMailNotification(mailNotification); job_01 = m_reportJobsPersistenceService.saveJob(m_executionContext, job_01); m_logger.info("saved job_01 " + job_01.getId() + " has version=" + job_01.getVersion()); assertNotNull(job_01); long jobId_01 = job_01.getId(); String 
userName = job_01.getUsername(); // Report Job 02 source = new ReportJobSource(); source.setReportUnitURI("/test/A_ReportURI"); params = new HashMap(); params.put("param1", new Integer(5)); params.put("param2", "value2"); source.setParametersMap(params); startDate = new Date(); ReportJobCalendarTrigger trigger2 = new ReportJobCalendarTrigger(); trigger2.setMinutes("0"); trigger2.setHours("0"); trigger2.setDaysTypeCode(trigger2.DAYS_TYPE_ALL); TreeSet months = new TreeSet(); months.add(new Byte((byte) 1)); months.add(new Byte((byte) 2)); months.add(new Byte((byte) 3)); trigger2.setMonthDays(""); trigger2.setMonths(months); trigger2.setTimezone("America/Los_Angeles"); trigger2.setStartType(trigger2.START_TYPE_NOW); repositoryDestination = new ReportJobRepositoryDestination(); repositoryDestination.setFolderURI("/test/scheduled"); repositoryDestination.setOutputDescription("report output"); repositoryDestination.setSequentialFilenames(false); repositoryDestination.setTimestampPattern("yyyyMMdd"); repositoryDestination.setSaveToRepository(false); repositoryDestination.setOutputLocalFolder("c:/tmp"); FTPInfo ftpInfo = new FTPInfo(); ftpInfo.setUserName("JohnSmith"); HashMap<String, String> map = new HashMap<String, String>(); map.put("PORT", "27"); ftpInfo.setPropertiesMap(map); repositoryDestination.setOutputFTPInfo(ftpInfo); mailNotification = new ReportJobMailNotification(); mailNotification.addTo("john@smith.com"); mailNotification.addTo("peter@pan.com"); mailNotification.setSubject("Scheduled report"); mailNotification.setMessageText("Executed report"); mailNotification.setSkipNotificationWhenJobFails(true); ReportJobAlert alert = new ReportJobAlert(); alert.setRecipient(ReportJobAlert.Recipient.ADMIN); alert.setMessageText("CUSTOMIZE MESSAGE"); alert.setJobState(ReportJobAlert.JobState.FAIL_ONLY); ArrayList<String> to_Addresses = new ArrayList<String>(); to_Addresses.add("peterpan@jaspersoft.com"); to_Addresses.add("peter.pan@gmail.com"); 
alert.setToAddresses(to_Addresses); ReportJob job_02 = new ReportJob(); job_02.setLabel("A_ReportJob_2"); job_02.setDescription("bar"); job_02.setSource(source); job_02.setTrigger(trigger2); job_02.setBaseOutputFilename("aReportJob_2_OUTPUT"); job_02.addOutputFormat(ReportJob.OUTPUT_FORMAT_PDF); job_02.addOutputFormat(ReportJob.OUTPUT_FORMAT_RTF); job_02.setContentRepositoryDestination(repositoryDestination); job_02.setAlert(alert); boolean exceptionCaught = false; try { job_02.setMailNotification(mailNotification); } catch (Exception ex) { exceptionCaught = true; } assertTrue(exceptionCaught); mailNotification.setResultSendTypeCode(mailNotification.RESULT_SEND_ATTACHMENT); job_02.setMailNotification(mailNotification); job_02 = m_reportJobsPersistenceService.saveJob(m_executionContext, job_02); assertEquals(false, job_02.getContentRepositoryDestination().isSaveToRepository()); assertNotNull(job_02); long jobId_02 = job_02.getId(); // test creation date assertTrue((System.currentTimeMillis() - job_01.getCreationDate().getTime()) < 60000); assertTrue((System.currentTimeMillis() - job_02.getCreationDate().getTime()) < 60000); // test default report output of user assertEquals("/default/report_output/folder", job_02.getContentRepositoryDestination().getDefaultReportOutputFolderURI()); assertEquals("/default/report_output/folder", job_01.getContentRepositoryDestination().getDefaultReportOutputFolderURI()); assertEquals(true, job_01.getContentRepositoryDestination().isUsingDefaultReportOutputFolderURI()); assertEquals(false, job_02.getContentRepositoryDestination().isUsingDefaultReportOutputFolderURI()); // test alert testAlert(job_02.getAlert()); // test output destination assertNull(job_01.getContentRepositoryDestination().getOutputLocalFolder()); assertTrue((job_01.getContentRepositoryDestination().getOutputFTPInfo() == null) || (job_01.getContentRepositoryDestination().getOutputFTPInfo().getFolderPath() == null)); assertEquals("c:/tmp", 
job_02.getContentRepositoryDestination().getOutputLocalFolder()); assertEquals("JohnSmith", job_02.getContentRepositoryDestination().getOutputFTPInfo().getUserName()); Map<String, String> ftpProperties = job_02.getContentRepositoryDestination().getOutputFTPInfo() .getPropertiesMap(); assertEquals("27", ftpProperties.get("PORT")); assertNotNull(job_02.getContentRepositoryDestination().getOutputFTPInfo().getPassword()); //password was encrypted. Unencrypted pwd is never stored. // test mail notification assertTrue(!job_01.getMailNotification().isSkipNotificationWhenJobFails()); assertTrue(job_02.getMailNotification().isSkipNotificationWhenJobFails()); boolean deleted = true; try { job_01 = m_reportJobsPersistenceService.loadJob(m_executionContext, new ReportJobIdHolder(jobId_01)); m_logger.info("retrieved job_01 " + job_01.getId() + " has version=" + job_01.getVersion()); assertNotNull(job_01); assertEquals("foo", job_01.getLabel()); Set outputFormats = job_01.getOutputFormatsSet(); assertNotNull(outputFormats); assertEquals(2, outputFormats.size()); assertTrue(outputFormats.contains(new Byte(ReportJob.OUTPUT_FORMAT_PDF))); assertTrue(outputFormats.contains(new Byte(ReportJob.OUTPUT_FORMAT_RTF))); source = job_01.getSource(); assertNotNull(source); assertEquals("/test/reportURI", source.getReportUnitURI()); params = source.getParametersMap(); assertNotNull(params); assertEquals(2, params.size()); assertTrue(params.containsKey("param1")); assertEquals(new Integer(5), params.get("param1")); assertTrue(params.containsKey("param2")); assertEquals("value2", params.get("param2")); assertNotNull(job_01.getTrigger()); assertTrue(job_01.getTrigger() instanceof ReportJobSimpleTrigger); trigger = (ReportJobSimpleTrigger) job_01.getTrigger(); assertEquals(20, trigger.getOccurrenceCount()); assertNotNull(trigger.getRecurrenceIntervalUnit()); assertEquals(ReportJobSimpleTrigger.INTERVAL_DAY, trigger.getRecurrenceIntervalUnit().byteValue()); repositoryDestination = 
job_01.getContentRepositoryDestination(); assertNotNull(repositoryDestination); assertEquals("/test/scheduled", repositoryDestination.getFolderURI()); assertEquals("report output", repositoryDestination.getOutputDescription()); assertTrue(repositoryDestination.isSequentialFilenames()); assertEquals("yyyyMMdd", repositoryDestination.getTimestampPattern()); assertFalse(repositoryDestination.isOverwriteFiles()); mailNotification = job_01.getMailNotification(); assertNotNull(mailNotification); assertEquals("Scheduled report", mailNotification.getSubject()); List toAddresses = mailNotification.getToAddresses(); assertNotNull(toAddresses); assertEquals(1, toAddresses.size()); assertEquals("john@smith.com", toAddresses.get(0)); long origJobId = job_01.getId(); int origJobVersion = job_01.getVersion(); long origTriggerId = trigger.getId(); int origTriggerVersion = trigger.getVersion(); long origMailId = mailNotification.getId(); int origMailVersion = mailNotification.getVersion(); job_01.setDescription("updated"); mailNotification.setSubject("updated subject"); mailNotification.addTo("joan@smith.com"); mailNotification.addCc("mary@smith.com"); m_reportJobsPersistenceService.updateJob(m_executionContext, job_01); job_01 = m_reportJobsPersistenceService.loadJob(m_executionContext, new ReportJobIdHolder(jobId_01)); assertNotNull(job_01); assertEquals("foo", job_01.getLabel()); assertEquals("updated", job_01.getDescription()); assertEquals(origJobId, job_01.getId()); assertEquals(origJobVersion + 1, job_01.getVersion()); assertNotNull(job_01.getTrigger()); assertTrue(job_01.getTrigger() instanceof ReportJobSimpleTrigger); trigger = (ReportJobSimpleTrigger) job_01.getTrigger(); assertEquals(origTriggerId, trigger.getId()); assertEquals(origTriggerVersion, trigger.getVersion()); mailNotification = job_01.getMailNotification(); assertNotNull(mailNotification); assertEquals(origMailId, mailNotification.getId()); assertEquals(origMailVersion + 1, mailNotification.getVersion()); 
assertEquals("updated subject", mailNotification.getSubject()); toAddresses = mailNotification.getToAddresses(); assertEquals(2, toAddresses.size()); assertEquals("john@smith.com", toAddresses.get(0)); assertEquals("joan@smith.com", toAddresses.get(1)); List ccAddresses = mailNotification.getCcAddresses(); assertNotNull(ccAddresses); assertEquals(1, ccAddresses.size()); assertEquals("mary@smith.com", ccAddresses.get(0)); List jobs = m_reportJobsPersistenceService.listJobs(m_executionContext, "/test/reportURI"); assertNotNull(jobs); assertTrue(1 <= jobs.size()); boolean found = false; for (Iterator it = jobs.iterator(); it.hasNext();) { Object element = it.next(); assertTrue(element instanceof ReportJobSummary); ReportJobSummary summary = (ReportJobSummary) element; if (summary.getId() == jobId_01) { found = true; assertEquals("foo", summary.getLabel()); break; } } assertTrue(found); // test listJobs function by filtering Report Job Criteria model testListJobs(jobId_01, jobId_02, userName); // test sorting feature testSorting(jobId_01, jobId_02); testSortingDES(jobId_01, jobId_02); testSortingNONEDES(jobId_01, jobId_02); // test pagination feature testPagination(jobId_01, jobId_02); // test bulk update feature by using report job model testUpdateJobsByIDFAIL(jobId_01, jobId_02); // test bulk update feature by using report job model testUpdateJobsByIDFAILInvalidID(jobId_01, jobId_02); // test bulk update feature by using report job model testUpdateJobsByIDFAILInvalidPath(jobId_01, jobId_02); // testUpdateJobsByID(jobId_01, jobId_02); // testUpdateJobsByReference(job_01, job_02); m_reportJobsPersistenceService.deleteJob(m_executionContext, new ReportJobIdHolder(jobId_01)); m_reportJobsPersistenceService.deleteJob(m_executionContext, new ReportJobIdHolder(jobId_02)); deleted = true; job_01 = m_reportJobsPersistenceService.loadJob(m_executionContext, new ReportJobIdHolder(jobId_01)); assertNull(job_01); job_02 = m_reportJobsPersistenceService.loadJob(m_executionContext, 
new ReportJobIdHolder(jobId_02)); assertNull(job_02); } finally { if (!deleted) { m_reportJobsPersistenceService.deleteJob(m_executionContext, new ReportJobIdHolder(jobId_01)); m_reportJobsPersistenceService.deleteJob(m_executionContext, new ReportJobIdHolder(jobId_02)); } } }