List of usage examples for java.util.SortedSet#size()
int size();
From source file:com.wavemaker.studio.StudioService.java
/**
 * Lists the names of all known projects.
 *
 * @return the project names, in the iteration (sorted) order of the
 *         underlying {@link SortedSet}
 * @throws FileAccessException if the project store cannot be read
 * @see ProjectManager#listProjects()
 */
@ExposeToClient
public String[] listProjects() throws FileAccessException {
    SortedSet<String> projects = this.projectManager.listProjects();
    // The original copied the set into an intermediate ArrayList before
    // converting; SortedSet.toArray produces the array directly and
    // preserves the sorted iteration order.
    return projects.toArray(new String[projects.size()]);
}
From source file:edu.utah.further.core.api.collections.SortedSetComparator.java
/**
 * Compares two sorted sets lexicographically: corresponding elements are
 * compared pair-wise in iteration order, and if every shared pair is equal
 * the sets are ordered by size (the smaller set first). Both sets must use
 * natural element ordering (a {@code null} comparator); this is validated
 * up front.
 *
 * @param o1
 *            left operand; must use natural ordering
 * @param o2
 *            right operand; must use natural ordering
 * @return result of comparison of <code>o1</code> and <code>o2</code>
 * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
 */
@Override
public int compare(final SortedSet<E> o1, final SortedSet<E> o2) {
    ValidationUtil.validateIsTrue(o1.comparator() == null, "First set must have natural ordering");
    ValidationUtil.validateIsTrue(o2.comparator() == null, "Second set must have natural ordering");
    final CompareToBuilder builder = new CompareToBuilder();
    // Compare the first corresponding min(o1.size(),o2.size()) element pairs
    final Iterator<E> iterator2 = o2.iterator();
    for (final E element1 : o1) {
        if (!iterator2.hasNext()) {
            // o2.size() < o1.size()
            break;
        }
        // Pair exists, add to comparison
        builder.append(element1, iterator2.next());
    }
    // If we're still tied, compare by set sizes
    return builder.append(o1.size(), o2.size()).toComparison();
}
From source file:org.torproject.ernie.db.ArchiveWriter.java
/**
 * Dumps statistics on the completeness of written relay descriptors to the
 * logs on level INFO.
 *
 * Walks outputDirectory/consensus to find the three most recent non-empty
 * consensus files; for each one it parses the referenced votes ("vote-digest"
 * lines), server descriptors ("r" lines) and the extra-info descriptors those
 * server descriptors reference, checking which of them exist on disk. One
 * "V" summary line is appended per vote and one "C" line per consensus, each
 * reporting found/total counts and percentages.
 *
 * NOTE(review): the BufferedReader {@code br} opened per consensus file is
 * never closed (vbr/sbr are closed, br is not) -- consider try-with-resources;
 * behavior left unchanged here. Parse and I/O failures abort the dump and are
 * logged at WARNING.
 */
public void dumpStats() { StringBuilder sb = new StringBuilder("Finished writing relay " + "descriptors to disk.\n"); sb.append(intermediateStats.toString()); sb.append("Statistics on the completeness of written relay " + "descriptors of the last 3 consensuses (Consensus/Vote, " + "valid-after, votes, server descriptors, extra-infos):"); try { SimpleDateFormat validAfterFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); validAfterFormat.setTimeZone(TimeZone.getTimeZone("UTC")); SimpleDateFormat consensusVoteFormat = new SimpleDateFormat("yyyy/MM/dd/yyyy-MM-dd-HH-mm-ss"); consensusVoteFormat.setTimeZone(TimeZone.getTimeZone("UTC")); SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/"); descriptorFormat.setTimeZone(TimeZone.getTimeZone("UTC")); SortedSet<File> consensuses = new TreeSet<File>(); Stack<File> leftToParse = new Stack<File>(); leftToParse.add(new File(outputDirectory + "/consensus")); while (!leftToParse.isEmpty()) { File pop = leftToParse.pop(); if (pop.isDirectory()) { for (File f : pop.listFiles()) { leftToParse.add(f); } } else if (pop.length() > 0) { consensuses.add(pop); } while (consensuses.size() > 3) { consensuses.remove(consensuses.first()); } } for (File f : consensuses) { BufferedReader br = new BufferedReader(new FileReader(f)); String line = null, validAfterTime = null, voteFilenamePrefix = null, dirSource = null; int allVotes = 0, foundVotes = 0, allServerDescs = 0, foundServerDescs = 0, allExtraInfos = 0, foundExtraInfos = 0; while ((line = br.readLine()) != null) { if (line.startsWith("valid-after ")) { validAfterTime = line.substring("valid-after ".length()); long validAfter = validAfterFormat.parse(validAfterTime).getTime(); voteFilenamePrefix = outputDirectory + "/vote/" + consensusVoteFormat.format(new Date(validAfter)) + "-vote-"; } else if (line.startsWith("dir-source ")) { dirSource = 
line.split(" ")[2]; } else if (line.startsWith("vote-digest ")) { allVotes++; File voteFile = new File(voteFilenamePrefix + dirSource + "-" + line.split(" ")[1]); if (voteFile.exists()) { foundVotes++; BufferedReader vbr = new BufferedReader(new FileReader(voteFile)); String line3 = null; int voteAllServerDescs = 0, voteFoundServerDescs = 0, voteAllExtraInfos = 0, voteFoundExtraInfos = 0; while ((line3 = vbr.readLine()) != null) { if (line3.startsWith("r ")) { voteAllServerDescs++; String digest = Hex .encodeHexString(Base64.decodeBase64(line3.split(" ")[3] + "=")) .toLowerCase(); long published = validAfterFormat .parse(line3.split(" ")[4] + " " + line3.split(" ")[5]).getTime(); String filename = outputDirectory + "/server-descriptor/" + descriptorFormat.format(new Date(published)) + digest.substring(0, 1) + "/" + digest.substring(1, 2) + "/" + digest; if (new File(filename).exists()) { BufferedReader sbr = new BufferedReader(new FileReader(new File(filename))); String line2 = null; while ((line2 = sbr.readLine()) != null) { if (line2.startsWith("opt extra-info-digest ") || line2.startsWith("extra-info-digest ")) { voteAllExtraInfos++; String extraInfoDigest = line2.startsWith("opt ") ? 
line2.split(" ")[2].toLowerCase() : line2.split(" ")[1].toLowerCase(); String filename2 = outputDirectory + "/extra-info/" + descriptorFormat.format(new Date(published)) + extraInfoDigest.substring(0, 1) + "/" + extraInfoDigest.substring(1, 2) + "/" + extraInfoDigest; if (new File(filename2).exists()) { voteFoundExtraInfos++; } } } sbr.close(); voteFoundServerDescs++; } } } vbr.close(); sb.append(String.format("%nV, %s, NA, %d/%d (%.1f%%), " + "%d/%d (%.1f%%)", validAfterTime, voteFoundServerDescs, voteAllServerDescs, 100.0D * (double) voteFoundServerDescs / (double) voteAllServerDescs, voteFoundExtraInfos, voteAllExtraInfos, 100.0D * (double) voteFoundExtraInfos / (double) voteAllExtraInfos)); } } else if (line.startsWith("r ")) { allServerDescs++; String digest = Hex.encodeHexString(Base64.decodeBase64(line.split(" ")[3] + "=")) .toLowerCase(); long published = validAfterFormat.parse(line.split(" ")[4] + " " + line.split(" ")[5]) .getTime(); String filename = outputDirectory + "/server-descriptor/" + descriptorFormat.format(new Date(published)) + digest.substring(0, 1) + "/" + digest.substring(1, 2) + "/" + digest; if (new File(filename).exists()) { BufferedReader sbr = new BufferedReader(new FileReader(new File(filename))); String line2 = null; while ((line2 = sbr.readLine()) != null) { if (line2.startsWith("opt extra-info-digest ") || line2.startsWith("extra-info-digest ")) { allExtraInfos++; String extraInfoDigest = line2.startsWith("opt ") ? 
line2.split(" ")[2].toLowerCase() : line2.split(" ")[1].toLowerCase(); String filename2 = outputDirectory + "/extra-info/" + descriptorFormat.format(new Date(published)) + extraInfoDigest.substring(0, 1) + "/" + extraInfoDigest.substring(1, 2) + "/" + extraInfoDigest; if (new File(filename2).exists()) { foundExtraInfos++; } } } sbr.close(); foundServerDescs++; } } } sb.append(String.format("%nC, %s, %d/%d (%.1f%%), " + "%d/%d (%.1f%%), %d/%d (%.1f%%)", validAfterTime, foundVotes, allVotes, 100.0D * (double) foundVotes / (double) allVotes, foundServerDescs, allServerDescs, 100.0D * (double) foundServerDescs / (double) allServerDescs, foundExtraInfos, allExtraInfos, 100.0D * (double) foundExtraInfos / (double) allExtraInfos)); } this.logger.info(sb.toString()); } catch (IOException e) { this.logger.log(Level.WARNING, "Could not dump statistics to disk.", e); } catch (ParseException e) { this.logger.log(Level.WARNING, "Could not dump statistics to disk.", e); } }
From source file:org.jasig.schedassist.impl.owner.SpringJDBCAvailableScheduleDaoImplTest.java
/** * /*from w ww.j a v a2 s . c o m*/ * @throws Exception */ @Test public void testAddToScheduleOverrideMeetingLocation() throws Exception { AvailableBlock single = AvailableBlockBuilder.createBlock("20091102-1330", "20091102-1400", 1, "alternate location"); AvailableSchedule schedule = availableScheduleDao.addToSchedule(sampleOwners[0], single); SortedSet<AvailableBlock> stored = schedule.getAvailableBlocks(); Assert.assertTrue(stored.contains(single)); schedule = availableScheduleDao.retrieve(sampleOwners[0]); stored = schedule.getAvailableBlocks(); Assert.assertTrue(stored.contains(single)); Assert.assertEquals(1, stored.size()); Assert.assertEquals("alternate location", stored.first().getMeetingLocation()); }
From source file:org.eclipse.skalli.core.rest.admin.StatisticsConverter.java
/**
 * Totals the sizes of all member sets contained in the given map.
 *
 * @param uniqueMembers map whose values are the sorted member sets to count
 * @return the sum of the sizes of all value sets
 */
private int countUniqueMembers(Map<String, SortedSet<Member>> uniqueMembers) {
    int total = 0;
    for (Map.Entry<String, SortedSet<Member>> entry : uniqueMembers.entrySet()) {
        total += entry.getValue().size();
    }
    return total;
}
From source file:org.sakaiproject.status.StatusServlet.java
protected void reportToolDetails(String toolId, HttpServletResponse response) throws Exception { PrintWriter pw = response.getWriter(); ToolManager tm = (ToolManager) ComponentManager.get("org.sakaiproject.tool.api.ActiveToolManager"); if (tm == null) { throw new Exception("Could not get ToolManager bean."); }//from ww w. j a va2 s. c om Tool tool = tm.getTool(toolId); if (tool == null) { pw.print("ERROR: no such tool ID\n"); return; } pw.print("id: " + tool.getId() + "\n"); pw.print("title: " + tool.getTitle() + "\n"); pw.print("description: " + tool.getDescription() + "\n"); Properties regProps = tool.getRegisteredConfig(); Enumeration propNames = regProps.propertyNames(); SortedSet sortedPropNames = new TreeSet(); while (propNames.hasMoreElements()) { sortedPropNames.add((String) propNames.nextElement()); } if (sortedPropNames.size() > 0) { pw.print("registered_properties:\n"); for (Object pName : sortedPropNames) { String propertyName = (String) pName; String value = regProps.getProperty(propertyName); pw.print(" " + propertyName + ": " + value + "\n"); } } Properties mutableProps = tool.getMutableConfig(); propNames = mutableProps.propertyNames(); sortedPropNames = new TreeSet(); while (propNames.hasMoreElements()) { sortedPropNames.add((String) propNames.nextElement()); } if (sortedPropNames.size() > 0) { pw.print("mutable_properties:\n"); for (Object pName : sortedPropNames) { String propertyName = (String) pName; String value = mutableProps.getProperty(propertyName); pw.print(" " + propertyName + ": " + value + "\n"); } } Properties finalProps = tool.getFinalConfig(); propNames = finalProps.propertyNames(); sortedPropNames = new TreeSet(); while (propNames.hasMoreElements()) { sortedPropNames.add((String) propNames.nextElement()); } if (sortedPropNames.size() > 0) { pw.print("final_properties:\n"); for (Object pName : sortedPropNames) { String propertyName = (String) pName; String value = finalProps.getProperty(propertyName); pw.print(" " + propertyName + 
": " + value + "\n"); } } Set keywords = tool.getKeywords(); if (keywords != null) { if (keywords.size() > 0) { pw.print("keywords:\n"); for (Object keyword : keywords) { pw.print(" - " + keyword + "\n"); } } } Set categories = tool.getCategories(); if (categories != null) { if (categories.size() > 0) { pw.print("categories:\n"); for (Object category : categories) { pw.print(" - " + category + "\n"); } } } }
From source file:org.mitre.mpf.wfm.camel.operations.detection.trackmerging.TrackMergingProcessor.java
/**
 * Camel handler that merges detection tracks for one stage of a job.
 *
 * The exchange in-body is a serialized TrackMergingContext. For every action
 * in the addressed stage a TrackMergingPlan is built from the action's
 * properties; when merging is enabled, the tracks of each non-failed media
 * are read from Redis, merged via combine(...) using the plan's sampling
 * interval and the configured overlap threshold, and written back to Redis.
 * Failed media and actions without merging requested are skipped with a
 * debug log. The unchanged context is re-serialized into the out-body.
 *
 * Preconditions (enforced with asserts): a non-null, byte[]-convertible
 * body, a deserializable context, and a job retrievable from Redis.
 */
@Override public void wfmProcess(Exchange exchange) throws WfmProcessingException { assert exchange.getIn().getBody() != null : "The body must not be null."; assert exchange.getIn().getBody(byte[].class) != null : "The body must be convertible to a String."; TrackMergingContext trackMergingContext = jsonUtils.deserialize(exchange.getIn().getBody(byte[].class), TrackMergingContext.class); assert trackMergingContext != null : "The TrackMergingContext instance must never be null."; TransientJob transientJob = redis.getJob(trackMergingContext.getJobId()); assert transientJob != null : String.format("Redis failed to retrieve a job with ID %d.", trackMergingContext.getJobId()); TransientStage transientStage = transientJob.getPipeline().getStages() .get(trackMergingContext.getStageIndex()); for (int actionIndex = 0; actionIndex < transientStage.getActions().size(); actionIndex++) { TransientAction transientAction = transientStage.getActions().get(actionIndex); TrackMergingPlan trackMergingPlan = createTrackMergingPlan(transientAction.getProperties()); if (trackMergingPlan.isMergeTracks()) { for (TransientMedia transientMedia : transientJob.getMedia()) { if (!transientMedia.isFailed()) { SortedSet<Track> tracks = redis.getTracks(trackMergingContext.getJobId(), transientMedia.getId(), trackMergingContext.getStageIndex(), actionIndex); SortedSet<Track> newTracks = new TreeSet<Track>(combine(tracks, trackMergingPlan.getSamplingInterval(), propertiesUtil.getTrackOverlapThreshold())); log.debug("[Job {}|{}|{}] Merging {} tracks down to {} in Media {}.", trackMergingContext.getJobId(), trackMergingContext.getStageIndex(), actionIndex, tracks.size(), newTracks.size(), transientMedia.getId()); redis.setTracks(trackMergingContext.getJobId(), transientMedia.getId(), trackMergingContext.getStageIndex(), actionIndex, newTracks); } else { log.debug(
"[Job {}|{}|{}] Media {} is in an error state and is not a candidate for merging.", trackMergingContext.getJobId(), trackMergingContext.getStageIndex(), actionIndex, transientMedia.getId()); } } } else { log.debug("[Job {}|{}|{}] Track merging has not been requested for this action.", trackMergingContext.getJobId(), trackMergingContext.getStageIndex(), actionIndex); } } exchange.getOut().setBody(jsonUtils.serialize(trackMergingContext)); }
From source file:info.rmapproject.core.rmapservice.impl.openrdf.ORMapDiSCOMgr.java
/**
 * Gets the IRI of the next version of a DiSCO.
 *
 * The event date of the given DiSCO's version is located, and the version
 * whose event date comes immediately after it (if any) is returned.
 *
 * @param discoID IRI of DiSCO
 * @param event2disco map from events to all versions of DiSCOs
 * @param date2event map from dates to events associated with versions of the DiSCO;
 *                   built from the triplestore when null
 * @param ts the triplestore instance
 * @return IRI of next version of DiSCO, or null if none found
 * @throws RMapException the RMap exception
 * @throws RMapObjectNotFoundException the RMap object not found exception
 * @throws RMapDefectiveArgumentException the RMap defective argument exception
 */
protected IRI getNextIRI(IRI discoID, Map<IRI, IRI> event2disco, Map<Date, IRI> date2event,
        SesameTriplestore ts) throws RMapException, RMapObjectNotFoundException, RMapDefectiveArgumentException {
    if (discoID == null) {
        throw new RMapDefectiveArgumentException("null DiSCO id");
    }
    if (event2disco == null) {
        throw new RMapDefectiveArgumentException("Null event2disco map");
    }
    Map<IRI, IRI> disco2event = Utils.invertMap(event2disco);
    if (date2event == null) {
        date2event = eventmgr.getDate2EventMap(event2disco.keySet(), ts);
    }
    Map<IRI, Date> event2date = Utils.invertMap(date2event);
    Date currentDate = event2date.get(disco2event.get(discoID));
    SortedSet<Date> allDates = new TreeSet<Date>(date2event.keySet());
    // tailSet() is inclusive, so the first entry is this version's own event
    // date and the successor (if any) is the second entry.
    SortedSet<Date> fromCurrent = allDates.tailSet(currentDate);
    IRI nextDiscoId = null;
    if (fromCurrent.size() > 1) {
        Iterator<Date> later = fromCurrent.iterator();
        later.next(); // skip the current version's event date
        nextDiscoId = event2disco.get(date2event.get(later.next()));
    }
    return nextDiscoId;
}
From source file:info.rmapproject.core.rmapservice.impl.openrdf.ORMapDiSCOMgr.java
/**
 * Gets the IRI of the previous version of a DiSCO.
 *
 * The event date of the given DiSCO's version is located, and the version
 * whose event date comes immediately before it (if any) is returned.
 *
 * @param discoID IRI of DiSCO
 * @param event2disco map from events to all versions of DiSCOs
 * @param date2event map from dates to events associated with versions of the DiSCO;
 *                   built from the triplestore when null
 * @param ts the triplestore instance
 * @return IRI of previous version of this DiSCO, or null if none found
 * @throws RMapException the RMap exception
 * @throws RMapObjectNotFoundException the RMap object not found exception
 * @throws RMapDefectiveArgumentException the RMap defective argument exception
 */
protected IRI getPreviousIRI(IRI discoID, Map<IRI, IRI> event2disco, Map<Date, IRI> date2event,
        SesameTriplestore ts) throws RMapException, RMapObjectNotFoundException, RMapDefectiveArgumentException {
    if (discoID == null) {
        throw new RMapDefectiveArgumentException("null DiSCO id");
    }
    if (event2disco == null) {
        throw new RMapDefectiveArgumentException("Null event2disco map");
    }
    Map<IRI, IRI> disco2event = Utils.invertMap(event2disco);
    if (date2event == null) {
        date2event = eventmgr.getDate2EventMap(event2disco.keySet(), ts);
    }
    Map<IRI, Date> event2date = Utils.invertMap(date2event);
    Date currentDate = event2date.get(disco2event.get(discoID));
    SortedSet<Date> allDates = new TreeSet<Date>(date2event.keySet());
    // headSet() is exclusive, so it holds only strictly earlier event dates;
    // the immediate predecessor is its last element.
    SortedSet<Date> earlier = allDates.headSet(currentDate);
    IRI prevDiscoId = null;
    if (earlier.size() > 0) {
        prevDiscoId = event2disco.get(date2event.get(earlier.last()));
    }
    return prevDiscoId;
}
From source file:com.gisgraphy.domain.repository.OpenStreetMapDaoTest.java
@Test public void testSaveCascadeHousenumber() { HouseNumber houseNumber = GisgraphyTestHelper.createHouseNumber(); OpenStreetMap street = GisgraphyTestHelper.createOpenStreetMapForJohnKenedyStreet(); //houseNumber.setStreet(street); street.addHouseNumber(houseNumber);//from w w w . j a v a2s . c o m street = openStreetMapDao.save(street); Assert.assertNotNull(houseNumber.getId()); OpenStreetMap retrievedStreet = openStreetMapDao.get(street.getId()); SortedSet<HouseNumber> houseNumbers = retrievedStreet.getHouseNumbers(); Assert.assertNotNull(houseNumbers); Assert.assertEquals("the street should have the housenumber associated", 1, houseNumbers.size()); }