List of usage examples for java.util.SortedSet.first()
E first();
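Before the project examples below, a minimal, self-contained sketch (not taken from any of the projects on this page) of the method's basic contract: first() returns the lowest element according to the set's ordering, and throws NoSuchElementException when the set is empty.

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetFirstExample {
    public static void main(String[] args) {
        // TreeSet is the standard SortedSet implementation; elements are kept
        // in natural ordering (or in the order of a supplied Comparator).
        SortedSet<String> names = new TreeSet<>();
        names.add("charlie");
        names.add("alice");
        names.add("bob");

        // first() returns the lowest element according to the set's ordering.
        System.out.println(names.first()); // prints "alice"

        // first() throws NoSuchElementException on an empty set, so check
        // isEmpty() before calling it when the set may be empty.
    }
}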
From source file:uk.co.flax.biosolr.ontology.core.ols.OLSOntologyHelper.java
/**
 * Find the IRIs of all terms referenced by a related URL.
 *
 * @param baseUrl the base URL to look up, from a Link or similar query-type URL.
 * @return a set of IRIs referencing the terms found for the given URL.
 * @throws OntologyHelperException if problems occur accessing the web service.
 */
protected Set<String> queryWebServiceForTerms(String baseUrl) throws OntologyHelperException {
    updateLastCallTime();
    Set<String> retList;

    // Build URL for first page
    List<String> urls = buildPageUrls(baseUrl, 0, 1);

    // Sort returned calls by page number
    SortedSet<RelatedTermsResult> results = new TreeSet<>(
            (RelatedTermsResult r1, RelatedTermsResult r2) -> r1.getPage().compareTo(r2.getPage()));
    results.addAll(olsClient.callOLS(urls, RelatedTermsResult.class));

    if (results.size() == 0) {
        retList = Collections.emptySet();
    } else {
        Page page = results.first().getPage();
        if (page.getTotalPages() > 1) {
            // Get remaining pages
            urls = buildPageUrls(baseUrl, page.getNumber() + 1, page.getTotalPages());
            results.addAll(olsClient.callOLS(urls, RelatedTermsResult.class));
        }

        retList = new HashSet<>(page.getTotalSize());
        for (RelatedTermsResult result : results) {
            result.getTerms().forEach(t -> {
                terms.put(t.getIri(), t);
                retList.add(t.getIri());
            });
        }
    }

    return retList;
}
From source file:org.cloudata.core.tabletserver.TabletMapFile.java
public MapFileIndexRecord findNearest(Row.Key targetRowKey, Cell.Key cellKey) throws IOException {
    SortedSet<MapFileIndexRecord> tailMap = mapFileIndexRecords
            .tailSet(new MapFileIndexRecord(targetRowKey, cellKey));
    if (tailMap.size() > 0) {
        return tailMap.first();
    } else {
        return null;
    }
}
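The findNearest() method above relies on tailSet(probe).first() yielding the least element greater than or equal to the probe. A minimal standalone sketch of that lookup with Integer keys standing in for MapFileIndexRecord, plus the equivalent NavigableSet.ceiling() call (TreeSet implements NavigableSet):

import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;

public class NearestLookupSketch {
    public static void main(String[] args) {
        TreeSet<Integer> index = new TreeSet<>(Arrays.asList(10, 20, 30));

        // tailSet(probe) is the view of elements >= probe; its first()
        // is therefore the nearest element at or above the probe.
        SortedSet<Integer> tail = index.tailSet(15);
        Integer nearest = tail.isEmpty() ? null : tail.first();
        System.out.println(nearest); // 20

        // NavigableSet.ceiling() returns the same element directly,
        // or null when no element is >= the probe.
        System.out.println(index.ceiling(15)); // 20
    }
}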
From source file:org.jasig.schedassist.impl.owner.SpringJDBCAvailableScheduleDaoImplTest.java
/**
 * @throws ParseException
 * @throws InputFormatException
 */
@Test
public void testAvailable104() throws InputFormatException, ParseException {
    // get owner with meeting durations preference of 20 minutes
    IScheduleOwner owner = sampleOwners[3];

    SimpleDateFormat dateFormat = CommonDateOperations.getDateFormat();
    SortedSet<AvailableBlock> blocks = AvailableBlockBuilder.createBlocks("9:00 AM", "11:40 AM", "MW",
            dateFormat.parse("20100830"), dateFormat.parse("20100903"), 1);
    availableScheduleDao.addToSchedule(owner, blocks);

    AvailableSchedule stored = availableScheduleDao.retrieve(owner);
    SortedSet<AvailableBlock> storedBlocks = stored.getAvailableBlocks();
    Assert.assertEquals(2, storedBlocks.size());
    Assert.assertEquals(CommonDateOperations.getDateTimeFormat().parse("20100830-0900"),
            storedBlocks.first().getStartTime());
    Assert.assertEquals(CommonDateOperations.getDateTimeFormat().parse("20100830-1140"),
            storedBlocks.first().getEndTime());
    Assert.assertEquals(CommonDateOperations.getDateTimeFormat().parse("20100901-0900"),
            storedBlocks.last().getStartTime());
    Assert.assertEquals(CommonDateOperations.getDateTimeFormat().parse("20100901-1140"),
            storedBlocks.last().getEndTime());

    SortedSet<AvailableBlock> expanded = AvailableBlockBuilder.expand(storedBlocks, 20);
    Assert.assertEquals(16, expanded.size());
    Date originalStart = CommonDateOperations.getDateTimeFormat().parse("20100830-0900");
    Date currentStart = originalStart;
    for (AvailableBlock e : expanded) {
        if (!DateUtils.isSameDay(e.getStartTime(), currentStart)) {
            currentStart = DateUtils.addDays(originalStart, 2);
        }
        Assert.assertEquals(currentStart, e.getStartTime());
        currentStart = DateUtils.addMinutes(currentStart, 20);
        Assert.assertEquals(currentStart, e.getEndTime());
    }
}
From source file:org.evosuite.junit.naming.variables.ExplanatoryNamingTestVisitor.java
public Map<VariableReference, String> getAllVariableNames() {
    Map<VariableReference, String> variableNames = new HashMap<>();
    for (Map.Entry<VariableReference, SortedSet<CandidateName>> varEntry : varNamesCandidates.entrySet()) {
        VariableReference key = varEntry.getKey();
        SortedSet<CandidateName> candidates = varEntry.getValue(); // ordered list of candidates
        variableNames.put(key, getFinalVariableName(candidates.first().getName()));
    }
    return variableNames;
}
From source file:net.sourceforge.fenixedu.domain.degreeStructure.DegreeModule.java
public ExecutionSemester getMinimumExecutionPeriod() {
    if (isRoot()) {
        return isBolonhaDegree() ? getBeginBolonhaExecutionPeriod()
                : getFirstExecutionPeriodOfFirstExecutionDegree();
    }
    final SortedSet<ExecutionSemester> executionSemesters = new TreeSet<ExecutionSemester>();
    for (final Context context : getParentContextsSet()) {
        executionSemesters.add(context.getBeginExecutionPeriod());
    }
    return executionSemesters.first();
}
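getMinimumExecutionPeriod() above is an instance of a simple idiom: collect elements into a TreeSet and take first() to obtain the minimum. A minimal standalone sketch with Strings standing in for ExecutionSemester, alongside the single-pass Collections.min() alternative when only the minimum is needed:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

public class MinimumViaFirstSketch {
    public static void main(String[] args) {
        List<String> semesters = Arrays.asList("2010/1", "2008/2", "2009/1");

        // Pattern used above: load everything into a TreeSet, then take first().
        SortedSet<String> sorted = new TreeSet<>(semesters);
        System.out.println(sorted.first()); // 2008/2

        // Equivalent result without building a sorted set.
        System.out.println(Collections.min(semesters)); // 2008/2
    }
}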
From source file:relationalFramework.agentObservations.NonRedundantBackgroundKnowledge.java
/**
 * Adds background knowledge to the current knowledge set if it represents a
 * unique, non-redundant rule. If the knowledge is able to be added, it may
 * result in other knowledge being removed.
 *
 * @param bckKnow
 *            The knowledge to add.
 * @return True if the knowledge was added, false otherwise.
 */
public boolean addBackgroundKnowledge(BackgroundKnowledge bckKnow) {
    try {
        SortedSet<RelationalPredicate> nonPreferredFacts = new TreeSet<RelationalPredicate>(
                bckKnow.getNonPreferredFacts());
        SortedSet<RelationalPredicate> preferredFacts = new TreeSet<RelationalPredicate>(
                bckKnow.getPreferredFacts());
        String[] factStrings = formFactsKeys(preferredFacts, nonPreferredFacts);

        // If an implication rule
        if (!bckKnow.isEquivalence()) {
            for (String equivPostString : equivalencePostConds_) {
                // If any equivalent post conditions are in this implication
                // rule, return false
                if (factStrings[0].contains(equivPostString) || factStrings[0].contains(equivPostString))
                    return false;
            }

            // Rule isn't present, can add freely
            addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
            return true;
        } else {
            // Equivalence rule
            if (currentKnowledge_.containsKey(factStrings[0])) {
                // If the background knowledge rule is an equivalence rule,
                // it may be redundant
                SortedSet<BackgroundKnowledge> existingRules = currentKnowledge_.getSortedSet(factStrings[0]);
                // If the existing rules are only an equivalence rule, this
                // rule is redundant
                if (existingRules.size() == 1 && existingRules.first().isEquivalence()) {
                    return false;
                }
            }

            if (currentKnowledge_.containsKey(factStrings[1])) {
                // Fact already exists in another rule - it may be redundant
                SortedSet<BackgroundKnowledge> existingRules = currentKnowledge_.getSortedSet(factStrings[1]);
                if (existingRules.size() > 1 || !existingRules.first().isEquivalence()) {
                    // If the existing rules are inference rules, this rule
                    // trumps them all
                    removeRules(factStrings[1]);
                    addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
                    return true;
                } else {
                    // Check if this rule's preconditions are more general
                    // than the existing equivalence rule's
                    if (bckKnow.compareTo(existingRules.first()) == -1) {
                        removeRules(factStrings[1]);
                        addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
                        return true;
                    }
                }
                return false;
            }

            // Rule isn't present, can add freely
            addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
            return true;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return false;
}
From source file:org.mzd.shap.domain.Feature.java
/**
 * A verbose header for fasta output.
 *
 * When a persistence layer is involved (eg. Hibernate), this method should
 * only be used when extra detail is necessary. It navigates associations
 * which will result in additional queries.
 *
 * @return a verbose header for fasta output.
 */
public String verboseFastaHeader() {
    // Create a header from the available fields
    StringBuffer header = new StringBuffer();
    header.append(getQueryId());

    // Select the top annotation by rank and then alphabetically.
    if (getAnnotations().size() > 0) {
        SortedSet<Annotation> sortedAnno = new TreeSet<Annotation>(new Comparator<Annotation>() {
            public int compare(Annotation a1, Annotation a2) {
                Annotator ar1 = a1.getAnnotator();
                Annotator ar2 = a2.getAnnotator();
                return ar2.getRank().compareTo(ar1.getRank());
            }
        });
        sortedAnno.addAll(getAnnotations());
        Annotation topAnno = sortedAnno.first();
        header.append(" " + Fasta.formatAttribute("acc", topAnno.getAccession()));
        header.append(" " + Fasta.formatAttribute("desc", topAnno.getDescription()));
        header.append(" " + Fasta.formatAttribute("anno",
                topAnno.getAnnotator().getName() + "," + topAnno.getConfidence().toString()));
    }

    if (getAliases().size() > 0) {
        StringBuffer aliases = new StringBuffer();
        for (Iterator<String> it = getAliases().iterator(); it.hasNext();) {
            aliases.append(it.next());
            if (it.hasNext()) {
                aliases.append(",");
            }
        }
        header.append(" " + Fasta.formatAttribute("aliases", aliases.toString()));
    }

    header.append(" " + Fasta.formatAttribute("coords",
            getLocation().getStart() + ".." + getLocation().getEnd()));
    header.append(" " + Fasta.formatAttribute("strand", getLocation().getStrand().toString()));
    header.append(" " + Fasta.formatAttribute("orf_conf", getConfidence().toString()));
    header.append(" " + Fasta.formatAttribute("partial", isPartial().toString()));

    return header.toString();
}
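verboseFastaHeader() above picks the "top" annotation by building a TreeSet with a descending Comparator, so first() yields the highest-ranked element. A minimal standalone sketch of that idiom with plain Strings and Comparator.reverseOrder() standing in for the rank-based Comparator:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

public class TopByComparatorSketch {
    public static void main(String[] args) {
        List<String> annotators = Arrays.asList("blast", "hmmer", "prodigal");

        // A reversing Comparator makes first() the maximum under the
        // original ordering (here, simply the lexicographically largest).
        SortedSet<String> ranked = new TreeSet<>(Comparator.reverseOrder());
        ranked.addAll(annotators);
        System.out.println(ranked.first()); // prodigal
    }
}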
From source file:org.easyrec.plugin.arm.impl.AssocRuleMiningServiceImpl.java
@Override
public Collection<SortedSet<ItemAssocVO<Integer, Integer>>> createBestRules(List<TupleVO> tuples,
        TObjectIntHashMap<ItemVO<Integer, Integer>> L1, ARMConfigurationInt configuration, ARMStatistics stats,
        Double minConfidence) {
    // Integer h1, h2;
    Double dh1, dh2;
    Integer sup1, sup2;
    Double dsup1, dsup2, assocValue1, assocValue2;
    Double baskets = new Double(stats.getNrBaskets());
    stats.setMetricType(configuration.getMetricType());
    //Vector<ItemAssocVO<Integer,Integer>> ret = new Vector<ItemAssocVO<Integer,Integer>>();
    Map<ItemVO<Integer, Integer>, SortedSet<ItemAssocVO<Integer, Integer>>> ret = new HashMap<>();

    for (TupleVO tuple : tuples) {
        sup1 = L1.get(tuple.getItem1());
        dsup1 = new Double(sup1);
        sup2 = L1.get(tuple.getItem2());
        dsup2 = new Double(sup2);
        if (sup1 == null || sup2 == null) {
            continue;
        }

        // confidence
        // h1 = (tuple.getSupport() * 100) / sup1;
        // h2 = (tuple.getSupport() * 100) / sup2;
        // confidence
        dh1 = (tuple.getSupport() * 100) / dsup1;
        dh2 = (tuple.getSupport() * 100) / dsup2;
        // lift
        Double lift = tuple.getSupport() / (dsup1 * dsup2);
        // conviction
        Double conviction1 = (1 - (dsup2 / baskets)) / (100 - dh1);
        Double conviction2 = (1 - (dsup1 / baskets)) / (100 - dh2);
        // ltc
        Double ltc1 = dsup1 * Math.log10(dsup1 / dsup2);
        Double ltc2 = dsup2 * Math.log10(dsup2 / dsup1);

        switch (configuration.getMetricType()) {
        case CONFIDENCE:
            assocValue1 = dh1;
            assocValue2 = dh2;
            break;
        case CONVICTION:
            assocValue1 = conviction1;
            assocValue2 = conviction2;
            break;
        case LIFT:
            assocValue1 = lift;
            assocValue2 = lift;
            break;
        case LONGTAIL:
            assocValue1 = ltc1;
            assocValue2 = ltc2;
            break;
        default:
            assocValue1 = dh1;
            assocValue2 = dh2;
            break;
        }

        // public ItemAssocVO(T tenant, ItemVO<T, I, IT> itemFrom, AT assocType,
        //         Double assocValue, ItemVO<T, I, IT> itemTo, ST sourceType,
        //         String sourceInfo, VT viewType, Boolean active)
        if (dh1 >= (minConfidence)) {
            SortedSet<ItemAssocVO<Integer, Integer>> bestRules = ret.get(tuple.getItem1());
            if (bestRules == null) {
                bestRules = new TreeSet<>();
            }
            if ((bestRules.size() < configuration.getMaxRulesPerItem())
                    || (assocValue1 > bestRules.first().getAssocValue())) {
                // no need to create objects if limit already reached and rule shows worse quality
                String comment1 = null;
                if (configuration.getStoreAlternativeMetrics()) {
                    comment1 = new StringBuilder("conf=").append(String.format("%04f", dh1)).append(" lift=")
                            .append(String.format("%04f", lift)).append(" convic=")
                            .append(String.format("%04f", conviction1)).append(" ltc=")
                            .append(String.format("%04f", ltc1)).append(" sup1=")
                            .append(String.format("%04f", dsup1)).append(" sup2=")
                            .append(String.format("%04f", dsup2)).append(" tsup=").append(tuple.getSupport())
                            .toString();
                }
                ItemAssocVO<Integer, Integer> rule = new ItemAssocVO<>(configuration.getTenantId(),
                        tuple.getItem1(), configuration.getAssocType(), assocValue1 /*new Double(h1)*/,
                        tuple.getItem2(),
                        typeMappingService.getIdOfSourceType(configuration.getTenantId(),
                                ARMGenerator.ID.toString() + "/" + ARMGenerator.VERSION),
                        comment1,
                        typeMappingService.getIdOfViewType(configuration.getTenantId(),
                                TypeMappingService.VIEW_TYPE_COMMUNITY),
                        true, stats.getStartDate());
                bestRules.add(rule);
                if (bestRules.size() > configuration.getMaxRulesPerItem()) {
                    bestRules.remove(bestRules.first());
                }
                ret.put(tuple.getItem1(), bestRules);
            }
        }

        if (dh2 >= (minConfidence)) {
            SortedSet<ItemAssocVO<Integer, Integer>> bestRules = ret.get(tuple.getItem2());
            if (bestRules == null) {
                bestRules = new TreeSet<>();
            }
            if ((bestRules.size() < configuration.getMaxRulesPerItem())
                    || (assocValue2 > bestRules.first().getAssocValue())) {
                // no need to create objects if limit already reached and rule shows worse quality
                String comment2 = null;
                if (configuration.getStoreAlternativeMetrics()) {
                    comment2 = new StringBuilder("conf=").append(String.format("%04f", dh2)).append(" lift=")
                            .append(String.format("%04f", lift)).append(" convic=")
                            .append(String.format("%04f", conviction2)).append(" ltc=")
                            .append(String.format("%04f", ltc2)).append(" sup2=")
                            .append(String.format("%04f", dsup2)).append(" sup1=")
                            .append(String.format("%04f", dsup1)).append(" tsup=").append(tuple.getSupport())
                            .toString();
                }
                ItemAssocVO<Integer, Integer> rule = new ItemAssocVO<>(configuration.getTenantId(),
                        tuple.getItem2(), configuration.getAssocType(), assocValue2 /*new Double(h2)*/,
                        tuple.getItem1(),
                        typeMappingService.getIdOfSourceType(configuration.getTenantId(),
                                ARMGenerator.ID.toString() + "/" + ARMGenerator.VERSION),
                        comment2,
                        typeMappingService.getIdOfViewType(configuration.getTenantId(),
                                TypeMappingService.VIEW_TYPE_COMMUNITY),
                        true, stats.getStartDate());
                bestRules.add(rule);
                if (bestRules.size() > configuration.getMaxRulesPerItem()) {
                    bestRules.remove(bestRules.first());
                }
                ret.put(tuple.getItem2(), bestRules);
            }
        }
    }
    return ret.values();
}
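createBestRules() above uses a common bounded "top N" idiom: keep candidates in a TreeSet, compare new candidates against first() (the current weakest entry), and evict first() whenever the set grows past the limit. A minimal standalone sketch of that idiom with Double scores in place of ItemAssocVO; maxRulesPerItem is a stand-in for configuration.getMaxRulesPerItem():

import java.util.SortedSet;
import java.util.TreeSet;

public class BoundedTopNSketch {
    public static void main(String[] args) {
        int maxRulesPerItem = 3;
        SortedSet<Double> bestScores = new TreeSet<>();

        for (double score : new double[] { 0.2, 0.9, 0.4, 0.7, 0.1, 0.8 }) {
            // Only pay the insertion cost when the candidate can make the cut.
            if (bestScores.size() < maxRulesPerItem || score > bestScores.first()) {
                bestScores.add(score);
                // first() is the weakest entry; evict it once the bound is exceeded.
                if (bestScores.size() > maxRulesPerItem) {
                    bestScores.remove(bestScores.first());
                }
            }
        }
        System.out.println(bestScores); // [0.7, 0.8, 0.9]
    }
}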
From source file:org.torproject.ernie.db.ArchiveWriter.java
/**
 * Dump some statistics on the completeness of descriptors to the logs
 * on level INFO.
 */
public void dumpStats() {
    StringBuilder sb = new StringBuilder("Finished writing relay " + "descriptors to disk.\n");
    sb.append(intermediateStats.toString());
    sb.append("Statistics on the completeness of written relay "
            + "descriptors of the last 3 consensuses (Consensus/Vote, "
            + "valid-after, votes, server descriptors, extra-infos):");
    try {
        SimpleDateFormat validAfterFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        validAfterFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        SimpleDateFormat consensusVoteFormat = new SimpleDateFormat("yyyy/MM/dd/yyyy-MM-dd-HH-mm-ss");
        consensusVoteFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        SimpleDateFormat descriptorFormat = new SimpleDateFormat("yyyy/MM/");
        descriptorFormat.setTimeZone(TimeZone.getTimeZone("UTC"));

        SortedSet<File> consensuses = new TreeSet<File>();
        Stack<File> leftToParse = new Stack<File>();
        leftToParse.add(new File(outputDirectory + "/consensus"));
        while (!leftToParse.isEmpty()) {
            File pop = leftToParse.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    leftToParse.add(f);
                }
            } else if (pop.length() > 0) {
                consensuses.add(pop);
            }
            while (consensuses.size() > 3) {
                consensuses.remove(consensuses.first());
            }
        }
        for (File f : consensuses) {
            BufferedReader br = new BufferedReader(new FileReader(f));
            String line = null, validAfterTime = null, voteFilenamePrefix = null, dirSource = null;
            int allVotes = 0, foundVotes = 0, allServerDescs = 0, foundServerDescs = 0, allExtraInfos = 0,
                    foundExtraInfos = 0;
            while ((line = br.readLine()) != null) {
                if (line.startsWith("valid-after ")) {
                    validAfterTime = line.substring("valid-after ".length());
                    long validAfter = validAfterFormat.parse(validAfterTime).getTime();
                    voteFilenamePrefix = outputDirectory + "/vote/"
                            + consensusVoteFormat.format(new Date(validAfter)) + "-vote-";
                } else if (line.startsWith("dir-source ")) {
                    dirSource = line.split(" ")[2];
                } else if (line.startsWith("vote-digest ")) {
                    allVotes++;
                    File voteFile = new File(voteFilenamePrefix + dirSource + "-" + line.split(" ")[1]);
                    if (voteFile.exists()) {
                        foundVotes++;
                        BufferedReader vbr = new BufferedReader(new FileReader(voteFile));
                        String line3 = null;
                        int voteAllServerDescs = 0, voteFoundServerDescs = 0, voteAllExtraInfos = 0,
                                voteFoundExtraInfos = 0;
                        while ((line3 = vbr.readLine()) != null) {
                            if (line3.startsWith("r ")) {
                                voteAllServerDescs++;
                                String digest = Hex
                                        .encodeHexString(Base64.decodeBase64(line3.split(" ")[3] + "="))
                                        .toLowerCase();
                                long published = validAfterFormat
                                        .parse(line3.split(" ")[4] + " " + line3.split(" ")[5]).getTime();
                                String filename = outputDirectory + "/server-descriptor/"
                                        + descriptorFormat.format(new Date(published)) + digest.substring(0, 1)
                                        + "/" + digest.substring(1, 2) + "/" + digest;
                                if (new File(filename).exists()) {
                                    BufferedReader sbr = new BufferedReader(new FileReader(new File(filename)));
                                    String line2 = null;
                                    while ((line2 = sbr.readLine()) != null) {
                                        if (line2.startsWith("opt extra-info-digest ")
                                                || line2.startsWith("extra-info-digest ")) {
                                            voteAllExtraInfos++;
                                            String extraInfoDigest = line2.startsWith("opt ")
                                                    ? line2.split(" ")[2].toLowerCase()
                                                    : line2.split(" ")[1].toLowerCase();
                                            String filename2 = outputDirectory + "/extra-info/"
                                                    + descriptorFormat.format(new Date(published))
                                                    + extraInfoDigest.substring(0, 1) + "/"
                                                    + extraInfoDigest.substring(1, 2) + "/" + extraInfoDigest;
                                            if (new File(filename2).exists()) {
                                                voteFoundExtraInfos++;
                                            }
                                        }
                                    }
                                    sbr.close();
                                    voteFoundServerDescs++;
                                }
                            }
                        }
                        vbr.close();
                        sb.append(String.format("%nV, %s, NA, %d/%d (%.1f%%), " + "%d/%d (%.1f%%)",
                                validAfterTime, voteFoundServerDescs, voteAllServerDescs,
                                100.0D * (double) voteFoundServerDescs / (double) voteAllServerDescs,
                                voteFoundExtraInfos, voteAllExtraInfos,
                                100.0D * (double) voteFoundExtraInfos / (double) voteAllExtraInfos));
                    }
                } else if (line.startsWith("r ")) {
                    allServerDescs++;
                    String digest = Hex.encodeHexString(Base64.decodeBase64(line.split(" ")[3] + "="))
                            .toLowerCase();
                    long published = validAfterFormat.parse(line.split(" ")[4] + " " + line.split(" ")[5])
                            .getTime();
                    String filename = outputDirectory + "/server-descriptor/"
                            + descriptorFormat.format(new Date(published)) + digest.substring(0, 1) + "/"
                            + digest.substring(1, 2) + "/" + digest;
                    if (new File(filename).exists()) {
                        BufferedReader sbr = new BufferedReader(new FileReader(new File(filename)));
                        String line2 = null;
                        while ((line2 = sbr.readLine()) != null) {
                            if (line2.startsWith("opt extra-info-digest ")
                                    || line2.startsWith("extra-info-digest ")) {
                                allExtraInfos++;
                                String extraInfoDigest = line2.startsWith("opt ")
                                        ? line2.split(" ")[2].toLowerCase()
                                        : line2.split(" ")[1].toLowerCase();
                                String filename2 = outputDirectory + "/extra-info/"
                                        + descriptorFormat.format(new Date(published))
                                        + extraInfoDigest.substring(0, 1) + "/" + extraInfoDigest.substring(1, 2)
                                        + "/" + extraInfoDigest;
                                if (new File(filename2).exists()) {
                                    foundExtraInfos++;
                                }
                            }
                        }
                        sbr.close();
                        foundServerDescs++;
                    }
                }
            }
            sb.append(String.format("%nC, %s, %d/%d (%.1f%%), " + "%d/%d (%.1f%%), %d/%d (%.1f%%)",
                    validAfterTime, foundVotes, allVotes, 100.0D * (double) foundVotes / (double) allVotes,
                    foundServerDescs, allServerDescs,
                    100.0D * (double) foundServerDescs / (double) allServerDescs, foundExtraInfos,
                    allExtraInfos, 100.0D * (double) foundExtraInfos / (double) allExtraInfos));
        }
        this.logger.info(sb.toString());
    } catch (IOException e) {
        this.logger.log(Level.WARNING, "Could not dump statistics to disk.", e);
    } catch (ParseException e) {
        this.logger.log(Level.WARNING, "Could not dump statistics to disk.", e);
    }
}
From source file:org.jasig.schedassist.impl.owner.SpringJDBCAvailableScheduleDaoImplTest.java
/**
 * @throws Exception
 */
@Test
public void testAddToScheduleOverrideMeetingLocation() throws Exception {
    AvailableBlock single = AvailableBlockBuilder.createBlock("20091102-1330", "20091102-1400", 1,
            "alternate location");
    AvailableSchedule schedule = availableScheduleDao.addToSchedule(sampleOwners[0], single);
    SortedSet<AvailableBlock> stored = schedule.getAvailableBlocks();
    Assert.assertTrue(stored.contains(single));

    schedule = availableScheduleDao.retrieve(sampleOwners[0]);
    stored = schedule.getAvailableBlocks();
    Assert.assertTrue(stored.contains(single));
    Assert.assertEquals(1, stored.size());
    Assert.assertEquals("alternate location", stored.first().getMeetingLocation());
}