List of usage examples for java.util TreeSet add
public boolean add(E e)
From source file:com.enonic.cms.business.core.content.ContentStorer.java
private Set<BinaryDataKey> findBinariesToRemove(final ContentVersionEntity persistedVersion, final ContentVersionEntity newVersionToPersist, final UpdateContentCommand updateContentCommand) { // Old usage of updateContentCommand to set removeable binaries if (updateContentCommand.useCommandsBinaryDataToRemove()) { return updateContentCommand.getBinaryDataToRemove(); }/* www . j a v a 2 s . co m*/ // New usage of contentdata to decide what to use TreeSet<BinaryDataKey> binariesToRemove = new TreeSet<BinaryDataKey>(); if (newVersionToPersist.getContentData() instanceof CustomContentData) { ArrayList<BinaryDataKey> binariesFromContentData = new ArrayList<BinaryDataKey>(); CustomContentData customContentData = (CustomContentData) newVersionToPersist.getContentData(); for (BinaryDataEntry binaryDataEntry : customContentData.getBinaryDataEntryList()) { if (binaryDataEntry.hasExistingBinaryKey()) { binariesFromContentData.add(new BinaryDataKey(binaryDataEntry.getExistingBinaryKey())); } } for (ContentBinaryDataEntity persistedCBD : persistedVersion.getContentBinaryData()) { BinaryDataKey binaryDataKey = persistedCBD.getBinaryData().getBinaryDataKey(); if (!binariesFromContentData.contains(binaryDataKey)) // do not add those to be removed { binariesToRemove.add(binaryDataKey); } } } return binariesToRemove; }
From source file:com.appeligo.search.actions.SearchResults.java
public List<SearchResult> getSearchResults(int startIndex) { initializeStatics();//from w ww . j a va 2 s. c o m hasMoreResults = false; try { IndexSearcher searcher = null; try { searcher = newIndexSearcher(); IndexReader reader = searcher.getIndexReader(); Query luceneQuery = generateLuceneQuery(searcher); luceneQuery = luceneQuery.rewrite(reader); Hits hits = searcher.search(luceneQuery); usingSuggestedQuery = false; suggestedQuery = null; if ((didYouMeanParser != null) && ((hits.length() < minimumHits) || (calcScore(searcher, getQuery()) < minimumScore))) { if (log.isDebugEnabled()) { log.debug("Need to suggest because either num hits " + hits.length() + " < " + minimumHits + "\n or top hit score " + (hits.length() > 0 ? hits.score(0) : "[NO HITS]") + " < " + minimumScore); } IndexSearcher compositeSearcher = new IndexSearcher(compositeIndexLocation); try { log.debug("calling suggest() with query=" + getQuery() + " and composite index from " + compositeIndexLocation); //Query didYouMean = didYouMeanParser.suggest(getQuery(), compositeSearcher.getIndexReader()); Query suggestedQueries[] = didYouMeanParser.getSuggestions(getQuery(), compositeSearcher.getIndexReader()); TreeSet<Suggestion> suggestions = new TreeSet<Suggestion>(); if (suggestedQueries != null) { for (int i = 0; i < suggestedQueries.length; i++) { log.debug("trying suggested query: " + suggestedQueries[i].toString(defaultField)); String suggestedQueryString = suggestedQueries[i].toString(defaultField); String constrainedQueryString = suggestedQueryString; if (constrainedQueryString.indexOf('"') < 0 && constrainedQueryString.indexOf('\'') < 0) { constrainedQueryString = "\"" + constrainedQueryString + "\"~5"; // proximity/distance query (within 5 words of each other) } Query suggestedLuceneQuery = generateLuceneQuery(constrainedQueryString, searcher); suggestedLuceneQuery = suggestedLuceneQuery.rewrite(reader); Hits suggestedHits = searcher.search(suggestedLuceneQuery); float score = 
calcScore(suggestedQueryString, suggestedHits); log.debug("========================================="); log.debug("SCORE = " + score); log.debug("========================================="); suggestions.add( new Suggestion(suggestedQueryString, suggestedLuceneQuery, suggestedHits, score, ((i == 0) ? didYouMeanParser.includesOriginal() : false))); log.debug("hits=" + suggestedHits.length() + ", score=" + score); } } Suggestion best = null; if (suggestions.size() > 0) { best = suggestions.last(); } if (best != null && !best.isOriginal()) { suggestedQuery = best.getQueryString(); if (suggestedQuery != null && suggestedQuery.indexOf('+') >= 0 && getQuery().indexOf('+') < 0) { suggestedQuery = suggestedQuery.replaceAll("\\+", ""); } if (hits.length() == 0) { if (best.getHits().length() > 0) { // Requery probably required because we added proximity before String suggestedQueryString = best.getQueryString(); luceneQuery = generateLuceneQuery(suggestedQueryString, searcher); luceneQuery = luceneQuery.rewrite(reader); hits = searcher.search(luceneQuery); //hits = best.getHits(); //luceneQuery = best.getLuceneQuery(); usingSuggestedQuery = true; } } log.debug("DidYouMeanParser suggested " + suggestedQuery); } else { if (best != null && best.isOriginal()) { log.debug("The suggestion was the original query after all"); } log.debug("DidYouMeanParser did not suggest anything"); } } finally { compositeSearcher.close(); } } /* if (hits.length() == 0 && suggestedQuery != null) { // If we didn't find anything at all, go ahead and show them what the suggested query // will give them Query suggestedLuceneQuery = generateLuceneQuery(suggestedQuery, searcher); suggestedLuceneQuery = suggestedLuceneQuery.rewrite(reader); Hits suggestedHits = searcher.search(suggestedLuceneQuery); if (suggestedHits.length() > 0) { hits = suggestedHits; luceneQuery = suggestedLuceneQuery; usingSuggestedQuery = true; } } */ totalHits = hits.length(); //Get the genere matches: try { BitSetFacetHitCounter 
facetHitCounter = new BitSetFacetHitCounter(); facetHitCounter.setSearcher(searcher); String baseQueryString = (isUsingSuggestedQuery() ? suggestedQuery : query); String quotedQueryString = baseQueryString; if (quotedQueryString.indexOf('"') == -1 && quotedQueryString.indexOf(' ') > -1) { quotedQueryString = "\"" + quotedQueryString + "\""; } facetHitCounter.setBaseQuery(luceneQuery, baseQueryString); List<HitCount> subQueries = new ArrayList<HitCount>(); for (Map.Entry<String, Query> entry : genreQueries.entrySet()) { subQueries.add( new HitCount(entry.getKey(), entry.getValue(), entry.getValue().toString(), 0)); } facetHitCounter.setSubQueries(subQueries); genreCounts = facetHitCounter.getFacetHitCounts(true); whatMatchedCounts = new ArrayList<HitCount>(); whatMatchedCounts .add(new HitCount("Title", getFieldQuery(baseQueryString, "programTitle", searcher), "programTitle:" + quotedQueryString, 0)); whatMatchedCounts.add( new HitCount("Episode Title", getFieldQuery(baseQueryString, "episodeTitle", searcher), "episodeTitle:" + quotedQueryString, 0)); whatMatchedCounts.add( new HitCount("Description", getFieldQuery(baseQueryString, "description", searcher), "description:" + quotedQueryString, 0)); whatMatchedCounts.add(new HitCount("Content", getFieldQuery(baseQueryString, "text", searcher), "text:" + quotedQueryString, 0)); whatMatchedCounts .add(new HitCount("Credits", getFieldQuery(baseQueryString, "credits", searcher), "credits:" + quotedQueryString, 0)); facetHitCounter.setSubQueries(whatMatchedCounts); whatMatchedCounts = facetHitCounter.getFacetHitCounts(true); //Program Count -- Not sure if there is a better way to do this. 
HashSet<String> programTitles = new HashSet<String>(); programCounts = new ArrayList<HitCount>(); for (int i = 0; i < hits.length() && programCounts.size() < 5; i++) { String title = hits.doc(i).get("programTitle"); if (!programTitles.contains(title)) { String queryTitle = title; queryTitle = QueryParser.escape(title); if (queryTitle.indexOf('"') > -1) { queryTitle.replace("\"", "\\\""); } if (queryTitle.indexOf(' ') > -1) { queryTitle = "\"" + queryTitle + "\""; } programCounts .add(new HitCount(title, getFieldQuery(queryTitle, "programTitle", searcher), "programTitle:" + queryTitle, 0)); programTitles.add(title); } } facetHitCounter.setSubQueries(programCounts); programCounts = facetHitCounter.getFacetHitCounts(false); } catch (Exception e) { e.printStackTrace(); } results = new ArrayList<SearchResult>(); programToSearchResult.clear(); Query userQuery = getContentQuery(query, searcher); userQuery.rewrite(reader); Highlighter highlighter = new Highlighter(new TermFormatter(), new QueryScorer(userQuery, "text")); log.debug("#hits=" + hits.length()); EPGProvider epgProvider = DefaultEpg.getInstance(); boolean missingWebPaths = false; // We added this to the index midstream, so some do and some don't. // Next index rebuild, and they'll all have it. for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) { if (hits.doc(i + startIndex).get("webPath") == null) { missingWebPaths = true; break; } } Program[] programs = null; if (missingWebPaths) { List<String> programIds = new ArrayList<String>(pageSize); for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) { programIds.add(hits.doc(i + startIndex).get("programID")); } programs = DefaultEpg.getInstance().getProgramList(programIds); } for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) { addDocument(hits.doc(i + startIndex), hits.score(i + startIndex), epgProvider, highlighter, analyzer, null, null, (programs == null ? 
null : programs[i])); } if (results.size() + startIndex < hits.length()) { hasMoreResults = true; } } finally { if (searcher != null) { searcher.close(); } } } catch (IOException e) { log.error("Error searching index", e); } catch (ParseException e) { log.error("Error searching index", e); } return results; }
From source file:net.sourceforge.fenixedu.domain.student.Student.java
public PersonalIngressionData getLatestPersonalIngressionData() { TreeSet<PersonalIngressionData> personalInformations = new TreeSet<PersonalIngressionData>( Collections.reverseOrder(PersonalIngressionData.COMPARATOR_BY_EXECUTION_YEAR)); ExecutionYear currentExecutionYear = ExecutionYear.readCurrentExecutionYear(); for (PersonalIngressionData pid : getPersonalIngressionsDataSet()) { if (!pid.getExecutionYear().isAfter(currentExecutionYear)) { personalInformations.add(pid); }// w ww . j a va 2 s .c o m } if (personalInformations.isEmpty()) { return null; } return personalInformations.iterator().next(); }
From source file:net.java.sip.communicator.impl.history.HistoryReaderImpl.java
/**
 * Returns the supplied number of recent messages containing all <tt>keywords</tt>.
 *
 * Walks the history files from newest to oldest, collecting up to {@code count}
 * matching records, and returns them ordered by {@code HistoryRecordComparator}.
 *
 * @param count messages count
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @param caseSensitive is keywords search case sensitive
 * @return the found records
 * @throws RuntimeException
 */
public synchronized QueryResultSet<HistoryRecord> findLast(int count, String[] keywords, String field, boolean caseSensitive) throws RuntimeException {
    // the files are supposed to be ordered from oldest to newest
    Vector<String> filelist = filterFilesByDate(this.historyImpl.getFileList(), null, null);
    TreeSet<HistoryRecord> result = new TreeSet<HistoryRecord>(new HistoryRecordComparator());
    int leftCount = count; // how many records we still need
    int currentFile = filelist.size() - 1; // start from the newest file
    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    while (leftCount > 0 && currentFile >= 0) {
        Document doc = this.historyImpl.getDocumentForFile(filelist.get(currentFile));
        if (doc == null) {
            // unreadable/missing document — skip to the next older file
            currentFile--;
            continue;
        }
        // will get nodes and construct a List of nodes
        // so we can easily get sublist of it
        List<Node> nodes = new ArrayList<Node>();
        NodeList nodesList = doc.getElementsByTagName("record");
        for (int i = 0; i < nodesList.getLength(); i++) {
            nodes.add(nodesList.item(i));
        }
        List<Node> lNodes = null;
        if (nodes.size() > leftCount) {
            // only the newest `leftCount` records of this file are needed
            lNodes = nodes.subList(nodes.size() - leftCount, nodes.size());
            leftCount = 0;
        } else {
            lNodes = nodes;
            leftCount -= nodes.size();
        }
        Iterator<Node> i = lNodes.iterator();
        while (i.hasNext()) {
            Node node = i.next();
            NodeList propertyNodes = node.getChildNodes();
            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp").getNodeValue();
            try {
                timestamp = sdf.parse(ts);
            } catch (ParseException e) {
                // fallback: older records store the timestamp as epoch milliseconds
                timestamp = new Date(Long.parseLong(ts));
            }
            // null when the record does not match all keywords in the given field
            HistoryRecord record = filterByKeyword(propertyNodes, timestamp, keywords, field, caseSensitive);
            if (record != null) {
                result.add(record);
            }
        }
        currentFile--;
    }
    return new OrderedQueryResultSet<HistoryRecord>(result);
}
From source file:com.l2jfree.gameserver.datatables.SkillTable.java
/**
 * Builds the skill table from the XML-defined skill templates and cross-checks the
 * skill-enchant data between the XML files and the database.
 *
 * Steps: load skills, size the per-id level arrays, (re)populate the id/level lookup
 * tables, detect duplicate skill UIDs, collect learned skills, and finally validate
 * enchant routes (XML vs database) logging every mismatch.
 */
private SkillTable() {
    final List<L2Skill> skills = DocumentEngine.loadSkills();
    _log.info("SkillTable: Loaded " + skills.size() + " skill templates from XML files.");
    // Find the highest skill id to size the per-id arrays.
    int highestId = 0;
    for (L2Skill skill : skills)
        if (highestId < skill.getId())
            highestId = skill.getId();
    _maxLevels = new int[highestId + 1];
    int[] highestLevels = new int[highestId + 1];
    for (L2Skill skill : skills) {
        if (highestLevels[skill.getId()] < skill.getLevel())
            highestLevels[skill.getId()] = skill.getLevel();
        // _maxLevels tracks the highest *base* level; levels >= 100 encode enchants.
        if (_maxLevels[skill.getId()] < skill.getLevel() && skill.getLevel() < 100)
            _maxLevels[skill.getId()] = skill.getLevel();
    }
    // clear previously stored skills (this constructor may run on a reload)
    for (SkillInfo[] infos : SKILL_INFOS)
        if (infos != null)
            for (SkillInfo info : infos)
                if (info != null)
                    info._skill = null;
    _skillTable = new L2Skill[highestId + 1][];
    SKILL_INFOS = Arrays.copyOf(SKILL_INFOS, Math.max(SKILL_INFOS.length, highestId + 1));
    // Allocate (or grow) the per-id level arrays.
    for (int i = 0; i < highestLevels.length; i++) {
        final int highestLevel = highestLevels[i];
        if (highestLevel < 1)
            continue;
        _skillTable[i] = new L2Skill[highestLevel + 1];
        if (SKILL_INFOS[i] == null)
            SKILL_INFOS[i] = new SkillInfo[highestLevel + 1];
        else
            SKILL_INFOS[i] = Arrays.copyOf(SKILL_INFOS[i], Math.max(SKILL_INFOS[i].length, highestLevel + 1));
    }
    // Fill the lookup tables, indexed by [id][level].
    for (L2Skill skill : skills) {
        _skillTable[skill.getId()][skill.getLevel()] = skill;
        if (SKILL_INFOS[skill.getId()][skill.getLevel()] == null)
            SKILL_INFOS[skill.getId()][skill.getLevel()] = new SkillInfo(skill.getId(), skill.getLevel());
        SKILL_INFOS[skill.getId()][skill.getLevel()]._skill = skill;
    }
    // Report how many array slots the table occupies.
    int length = _skillTable.length;
    for (L2Skill[] array : _skillTable)
        if (array != null)
            length += array.length;
    _log.info("SkillTable: Occupying arrays for " + length + ".");
    SingletonHolder.INSTANCE = this;
    // Detect skills that map to the same UID, and validate each skill.
    Map<Integer, L2Skill> skillsByUID = new HashMap<Integer, L2Skill>();
    for (L2Skill skill : skills) {
        try {
            L2Skill old = skillsByUID.put(SkillTable.getSkillUID(skill), skill);
            if (old != null)
                _log.warn("Overlapping UIDs for: " + old + ", " + skill, new IllegalStateException());
            skill.validate();
        } catch (Exception e) {
            _log.warn(skill, e);
        }
    }
    // Register the skills granted by "learn skill" skills.
    for (L2Skill skill0 : skills) {
        if (!(skill0 instanceof L2SkillLearnSkill))
            continue;
        L2SkillLearnSkill skill = (L2SkillLearnSkill) skill0;
        for (int i = 0; i < skill.getNewSkillId().length; i++) {
            final L2Skill learnedSkill = getInfo(skill.getNewSkillId()[i], skill.getNewSkillLvl()[i]);
            if (learnedSkill != null)
                _learnedSkills.add(learnedSkill);
        }
    }
    // checking for skill enchantment mismatch
    // in XMLs
    final TreeSet<String> skillEnchantsInXMLs = new TreeSet<String>();
    // reusing this map (and its inner sets) across skill ids to avoid reallocation
    final Map<Integer, Set<Integer>> enchantLevelsByEnchantType = new HashMap<Integer, Set<Integer>>();
    for (int skillId = 0; skillId < _skillTable.length; skillId++) {
        final L2Skill[] skillsById = _skillTable[skillId];
        if (skillsById == null)
            continue;
        for (final L2Skill skill : skillsById) {
            if (skill == null || skill.getLevel() < 100)
                continue;
            // Levels >= 100 encode enchants: type = level / 100, enchant level = level % 100.
            final int enchantType = skill.getLevel() / 100;
            final int enchantLevel = skill.getLevel() % 100;
            Set<Integer> enchantLevels = enchantLevelsByEnchantType.get(enchantType);
            if (enchantLevels == null)
                enchantLevelsByEnchantType.put(enchantType, enchantLevels = new FastSet<Integer>(30));
            enchantLevels.add(enchantLevel);
        }
        // A complete route has levels 1..30 (or 1..15); anything else is logged.
        for (Map.Entry<Integer, Set<Integer>> entry : enchantLevelsByEnchantType.entrySet()) {
            final int enchantType = entry.getKey();
            final Set<Integer> enchantLevels = entry.getValue();
            if (enchantLevels.isEmpty())
                continue;
            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + enchantType + " - Levels: "
                    + enchantLevels.size();
            boolean valid = true;
            for (int skillLvl = 1; skillLvl <= 30; skillLvl++) {
                if (!enchantLevels.remove(skillLvl)) {
                    // A route ending exactly at 15 is also accepted.
                    if (skillLvl == 16 && enchantLevels.isEmpty())
                        break;
                    _log.warn("Missing skill enchant level in XMLs for " + s + " - Level: " + skillLvl);
                    valid = false;
                }
            }
            if (!enchantLevels.isEmpty())
                _log.warn("Extra skill enchant levels in XMLs for " + s + " - Levels: " + enchantLevels);
            else if (valid)
                skillEnchantsInXMLs.add(s);
            // reusing the set for the next skill id
            enchantLevels.clear();
        }
    }
    // in database
    final TreeSet<String> skillEnchantsInDatabase = new TreeSet<String>();
    for (L2EnchantSkillLearn skillLearn : SkillTreeTable.getInstance().getSkillEnchantments()) {
        final int skillId = skillLearn.getId();
        final List<EnchantSkillDetail>[] details = skillLearn.getEnchantRoutes();
        if (details.length == 0)
            _log.warn("Invalid skill enchant data in database for Skill ID: " + skillId);
        for (int indexingEnchantType = 0; indexingEnchantType < details.length; indexingEnchantType++) {
            final List<EnchantSkillDetail> route = details[indexingEnchantType];
            if (route == null)
                continue;
            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + (indexingEnchantType + 1)
                    + " - Levels: " + route.size();
            if (route.size() != 30 && route.size() != 15)
                _log.warn("Invalid skill enchant data in database for " + s);
            else
                skillEnchantsInDatabase.add(s);
        }
    }
    // comparing the results: each side should mirror the other exactly
    for (String skillEnchant : skillEnchantsInXMLs)
        if (!skillEnchantsInDatabase.remove(skillEnchant))
            _log.warn("Missing skill enchant data in database for " + skillEnchant);
    for (String skillEnchant : skillEnchantsInDatabase)
        _log.warn("Missing skill enchant data in XMLs for " + skillEnchant);
    // just validation of the database routes' internal consistency
    for (L2EnchantSkillLearn skillLearn : SkillTreeTable.getInstance().getSkillEnchantments()) {
        final int skillId = skillLearn.getId();
        final List<EnchantSkillDetail>[] details = skillLearn.getEnchantRoutes();
        final int maxLevel = getMaxLevel(skillId);
        if (skillLearn.getBaseLevel() != maxLevel)
            _log.warn("Invalid `base_lvl` skill enchant data in database for Skill ID: " + skillId);
        for (int indexingEnchantType = 0; indexingEnchantType < details.length; indexingEnchantType++) {
            final List<EnchantSkillDetail> route = details[indexingEnchantType];
            if (route == null)
                continue;
            final String s = "Skill ID: " + skillId + " - EnchantType: enchant" + (indexingEnchantType + 1)
                    + " - Levels: " + route.size();
            // Each route step must be numbered consecutively and chain its min skill level
            // from the previous step (starting at the skill's max base level).
            int index = 1;
            int expectedMinSkillLevel = maxLevel;
            for (EnchantSkillDetail detail : route) {
                if (detail.getLevel() % 100 != index)
                    _log.warn("Invalid `level` skill enchant data in database for " + s);
                if (detail.getMinSkillLevel() != expectedMinSkillLevel)
                    _log.warn("Invalid `min_skill_lvl` skill enchant data in database for " + s);
                index++;
                expectedMinSkillLevel = detail.getLevel();
            }
        }
    }
}
From source file:net.spfbl.core.User.java
public static TreeSet<User> dropAll() { TreeSet<User> userSet = new TreeSet<User>(); for (User user : getSet()) { String email = user.getEmail(); user = drop(email);//from w w w . ja v a 2 s .c o m if (email != null) { userSet.add(user); } } return userSet; }
From source file:com.hichinaschool.flashcards.libanki.Sched.java
private TreeSet<Object[]> _groupChildren(ArrayList<Object[]> grps) { TreeSet<Object[]> set = new TreeSet<Object[]>(new DeckNameCompare()); // first, split the group names into components for (Object[] g : grps) { set.add(new Object[] { ((String) g[0]).split("::"), g[1], g[2], g[3], g[4] }); }/*from ww w .j a v a 2 s . c o m*/ return _groupChildrenMain(set); }
From source file:edu.duke.cabig.c3pr.domain.StudySubject.java
@Transient public StudyVersion getFirstConsentedStudyVersion() { TreeSet uniqueStudyVersions = new TreeSet(); List<StudySubjectConsentVersion> allStudySubjectConsentVersions = getAllConsents(); for (StudySubjectConsentVersion studySubjectConsentVersion : allStudySubjectConsentVersions) { uniqueStudyVersions.add(studySubjectConsentVersion.getConsent().getStudyVersion()); }//w ww . j ava 2 s . c om return uniqueStudyVersions.isEmpty() ? null : (StudyVersion) uniqueStudyVersions.first(); }
From source file:edu.duke.cabig.c3pr.domain.StudySubject.java
/**
 * Returns the latest study version this subject has consented to (by the natural
 * ordering of {@code StudyVersion}), or {@code null} if the subject has no consents.
 *
 * Fix: the original used a raw {@code TreeSet} plus a client-side cast; the set is now
 * properly generified, which removes the unchecked warning and the cast. Behavior is
 * unchanged — the raw set already relied on {@code StudyVersion}'s natural ordering.
 *
 * @return the last consented {@code StudyVersion}, or {@code null}
 */
@Transient
public StudyVersion getLastConsentedStudyVersion() {
    // TreeSet de-duplicates versions and keeps them in natural order.
    TreeSet<StudyVersion> uniqueStudyVersions = new TreeSet<StudyVersion>();
    List<StudySubjectConsentVersion> allStudySubjectConsentVersions = getAllConsents();
    for (StudySubjectConsentVersion studySubjectConsentVersion : allStudySubjectConsentVersions) {
        uniqueStudyVersions.add(studySubjectConsentVersion.getConsent().getStudyVersion());
    }
    return uniqueStudyVersions.isEmpty() ? null : uniqueStudyVersions.last();
}
From source file:com.hichinaschool.flashcards.libanki.Sched.java
/**
 * Recursively groups deck rows (name already split into path components) into a tree,
 * tallying child counts into each parent and capping counts at the deck's daily limits,
 * then flattens the tree back into a single ordered set.
 *
 * Row layout: { String[] titleParts, long did, int newCount, int lrnCount, int revCount }
 * (plus, transiently, a children set in slot 5 while building the tree).
 *
 * @param grps  rows sorted by DeckNameCompare, so siblings sharing a prefix are adjacent
 * @param depth the path-component index used to decide which rows share a parent
 * @return flattened rows (without the children slot), ordered by DeckNameCompare
 */
private TreeSet<Object[]> _groupChildrenMain(TreeSet<Object[]> grps, int depth) {
    TreeSet<Object[]> tree = new TreeSet<Object[]>(new DeckNameCompare());
    // group and recurse
    Iterator<Object[]> it = grps.iterator();
    // `tmp` carries over the first row of the NEXT group, since the inner loop has to
    // consume it from the iterator to detect the group boundary.
    Object[] tmp = null;
    while (tmp != null || it.hasNext()) {
        Object[] head;
        if (tmp != null) {
            head = tmp;
            tmp = null;
        } else {
            head = it.next();
        }
        String[] title = (String[]) head[0];
        long did = (Long) head[1];
        int newCount = (Integer) head[2];
        int lrnCount = (Integer) head[3];
        int revCount = (Integer) head[4];
        // Collect all subsequent rows that share this row's component at `depth`.
        TreeSet<Object[]> children = new TreeSet<Object[]>(new DeckNameCompare());
        while (it.hasNext()) {
            Object[] o = it.next();
            if (((String[]) o[0])[depth].equals(title[depth])) {
                // add to children
                children.add(o);
            } else {
                // proceed with this as head
                tmp = o;
                break;
            }
        }
        children = _groupChildrenMain(children, depth + 1);
        // tally up children counts, but skip deeper sub-decks
        for (Object[] ch : children) {
            if (((String[]) ch[0]).length == ((String[]) head[0]).length + 1) {
                newCount += (Integer) ch[2];
                lrnCount += (Integer) ch[3];
                revCount += (Integer) ch[4];
            }
        }
        // limit the counts to the deck's limits (non-dynamic decks only)
        JSONObject conf = mCol.getDecks().confForDid(did);
        JSONObject deck = mCol.getDecks().get(did);
        try {
            if (conf.getInt("dyn") == 0) {
                // remaining allowance = perDay limit minus what was already done today
                revCount = Math.max(0, Math.min(revCount,
                        conf.getJSONObject("rev").getInt("perDay") - deck.getJSONArray("revToday").getInt(1)));
                newCount = Math.max(0, Math.min(newCount,
                        conf.getJSONObject("new").getInt("perDay") - deck.getJSONArray("newToday").getInt(1)));
            }
        } catch (JSONException e) {
            throw new RuntimeException(e);
        }
        tree.add(new Object[] { title, did, newCount, lrnCount, revCount, children });
    }
    // Flatten: emit each node (dropping the children slot) followed by its subtree.
    TreeSet<Object[]> result = new TreeSet<Object[]>(new DeckNameCompare());
    for (Object[] t : tree) {
        result.add(new Object[] { t[0], t[1], t[2], t[3], t[4] });
        result.addAll((TreeSet<Object[]>) t[5]); // unchecked cast: slot 5 is always the children set built above
    }
    return result;
}