List of usage examples for java.util.SortedMap.get(Object key)
V get(Object key);
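Before the project-specific examples below, here is a minimal standalone sketch of the basic call pattern. It uses only JDK classes; the map contents are invented for illustration.

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapGetDemo {
    public static void main(String[] args) {
        // TreeMap is the standard SortedMap implementation; keys are kept in ascending order.
        SortedMap<String, Integer> wordCounts = new TreeMap<>();
        wordCounts.put("apple", 3);
        wordCounts.put("banana", 1);

        // get returns the mapped value, or null when the key is absent.
        Integer apples = wordCounts.get("apple");    // 3
        Integer cherries = wordCounts.get("cherry"); // null
        System.out.println(apples + ", " + cherries);
    }
}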
From source file:com.aurel.track.report.dashboard.StatusOverTimeGraph.java
/**
 * Create a map of hierarchical data with TWorkItemBeans in the periods
 * - key: year
 * - value: map
 *   - key: period
 *   - Set of TStateChangeBeans, one for each workItem
 * @param timeInterval
 * @return
 */
private static SortedMap<Integer, SortedMap<Integer, List<TWorkItemBean>>> getNewWorkItemsMap(
        List workItemList, int timeInterval, Date dateFrom, Date dateTo) {
    SortedMap<Integer, SortedMap<Integer, List<TWorkItemBean>>> yearToIntervalToWorkItemBeans = new TreeMap();
    int yearValue;
    int intervalValue;
    if (workItemList != null) {
        Calendar calendarCreated = Calendar.getInstance();
        Iterator iterator = workItemList.iterator();
        int calendarInterval = getCalendarInterval(timeInterval);
        while (iterator.hasNext()) {
            TWorkItemBean workItemBean = (TWorkItemBean) iterator.next();
            Date createDate = workItemBean.getCreated();
            if (createDate == null) {
                continue;
            }
            if (dateFrom != null && dateFrom.after(createDate) || dateTo != null && dateTo.before(createDate)) {
                continue;
            }
            calendarCreated.setTime(workItemBean.getCreated());
            yearValue = calendarCreated.get(Calendar.YEAR);
            intervalValue = calendarCreated.get(calendarInterval);
            if (Calendar.WEEK_OF_YEAR == calendarInterval) {
                //avoid adding the first week of the new year as the first week of the old year,
                //because it can be that the year is the old one but the last days of the year belong to the first week of the next year
                //and that would add an entry with the first week of the old year
                int monthValue = calendarCreated.get(Calendar.MONTH);
                if (monthValue >= 11 && intervalValue == 1) {
                    yearValue = yearValue + 1;
                }
            }
            SortedMap<Integer, List<TWorkItemBean>> intervalToWorkItemBeans = yearToIntervalToWorkItemBeans
                    .get(new Integer(yearValue));
            if (intervalToWorkItemBeans == null) {
                yearToIntervalToWorkItemBeans.put(new Integer(yearValue), new TreeMap());
                intervalToWorkItemBeans = yearToIntervalToWorkItemBeans.get(new Integer(yearValue));
            }
            List<TWorkItemBean> workItemBeansForInterval = intervalToWorkItemBeans
                    .get(new Integer(intervalValue));
            if (workItemBeansForInterval == null) {
                intervalToWorkItemBeans.put(new Integer(intervalValue), new ArrayList());
                workItemBeansForInterval = intervalToWorkItemBeans.get(new Integer(intervalValue));
            }
            workItemBeansForInterval.add(workItemBean);
        }
    }
    return yearToIntervalToWorkItemBeans;
}
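The get-null-put-get sequence above (look up the inner map, create and insert it when get returns null, then look it up again) is a common SortedMap.get idiom for nested grouping maps. On Java 8+ the same pattern can be written with Map.computeIfAbsent; the sketch below is an illustration with made-up names, not part of the original project.

import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

public class GroupingSketch {
    public static void main(String[] args) {
        SortedMap<Integer, SortedMap<Integer, List<String>>> yearToIntervalToItems = new TreeMap<>();
        int yearValue = 2024;      // hypothetical values standing in for the example's year/interval
        int intervalValue = 7;
        String workItem = "TICKET-42"; // stand-in for a TWorkItemBean

        // computeIfAbsent collapses the get-null-put-get dance into one call per level.
        yearToIntervalToItems
                .computeIfAbsent(yearValue, y -> new TreeMap<>())
                .computeIfAbsent(intervalValue, i -> new ArrayList<>())
                .add(workItem);

        System.out.println(yearToIntervalToItems); // {2024={7=[TICKET-42]}}
    }
}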
From source file:freemarker.ext.dump.DumpDirectiveTest.java
@Test
public void dumpStringToObjectMap() {
    String varName = "stuff";
    Map<String, Object> dataModel = new HashMap<String, Object>();

    Map<String, Object> mixedMap = new HashMap<String, Object>();
    String myString = "apples";
    mixedMap.put("myString", myString);
    boolean myBool = true;
    mixedMap.put("myBoolean", myBool);
    int myInt = 4;
    mixedMap.put("myNumber", myInt);
    Date myDate = new Date();
    mixedMap.put("myDate", myDate);
    List<String> myList = new ArrayList<String>();
    myList.add("apples");
    myList.add("bananas");
    myList.add("oranges");
    mixedMap.put("myList", myList);
    Map<String, String> myMap = new HashMap<String, String>();
    myMap.put("Great Expectations", "Charles Dickens");
    myMap.put("Pride and Prejudice", "Jane Austen");
    myMap.put("Middlemarch", "George Eliot");
    myMap.put("Jude the Obscure", "Thomas Hardy");
    mixedMap.put("myMap", myMap);
    dataModel.put(varName, mixedMap);

    Map<String, Object> expectedDumpValue = new HashMap<String, Object>();
    expectedDumpValue.put(Key.TYPE.toString(), Type.HASH_EX);
    SortedMap<String, Object> mixedMapExpectedDump = new TreeMap<String, Object>();

    Map<String, Object> myStringExpectedDump = new HashMap<String, Object>();
    myStringExpectedDump.put(Key.TYPE.toString(), Type.STRING);
    myStringExpectedDump.put(Key.VALUE.toString(), myString);
    mixedMapExpectedDump.put("myString", myStringExpectedDump);

    Map<String, Object> myBooleanExpectedDump = new HashMap<String, Object>();
    myBooleanExpectedDump.put(Key.TYPE.toString(), Type.BOOLEAN);
    myBooleanExpectedDump.put(Key.VALUE.toString(), myBool);
    mixedMapExpectedDump.put("myBoolean", myBooleanExpectedDump);

    Map<String, Object> myIntExpectedDump = new HashMap<String, Object>();
    myIntExpectedDump.put(Key.TYPE.toString(), Type.NUMBER);
    myIntExpectedDump.put(Key.VALUE.toString(), myInt);
    mixedMapExpectedDump.put("myNumber", myIntExpectedDump);

    Map<String, Object> myDateExpectedDump = new HashMap<String, Object>();
    myDateExpectedDump.put(Key.TYPE.toString(), Type.DATE);
    myDateExpectedDump.put(Key.DATE_TYPE.toString(), DateType.UNKNOWN);
    myDateExpectedDump.put(Key.VALUE.toString(), myDate);
    mixedMapExpectedDump.put("myDate", myDateExpectedDump);

    Map<String, Object> myListExpectedDump = new HashMap<String, Object>();
    myListExpectedDump.put(Key.TYPE.toString(), Type.SEQUENCE);
    List<Map<String, Object>> myListItemsExpectedDump = new ArrayList<Map<String, Object>>(myList.size());
    for (String item : myList) {
        Map<String, Object> itemDump = new HashMap<String, Object>();
        itemDump.put(Key.TYPE.toString(), Type.STRING);
        itemDump.put(Key.VALUE.toString(), item);
        myListItemsExpectedDump.add(itemDump);
    }
    myListExpectedDump.put(Key.VALUE.toString(), myListItemsExpectedDump);
    mixedMapExpectedDump.put("myList", myListExpectedDump);

    Map<String, Object> myMapExpectedDump = new HashMap<String, Object>();
    myMapExpectedDump.put(Key.TYPE.toString(), Type.HASH_EX);
    SortedMap<String, Object> myMapItemsExpectedDump = new TreeMap<String, Object>();
    for (String key : myMap.keySet()) {
        Map<String, Object> itemDump = new HashMap<String, Object>();
        itemDump.put(Key.TYPE.toString(), Type.STRING);
        itemDump.put(Key.VALUE.toString(), myMap.get(key));
        myMapItemsExpectedDump.put(key, itemDump);
    }
    myMapExpectedDump.put(Key.VALUE.toString(), myMapItemsExpectedDump);
    mixedMapExpectedDump.put("myMap", myMapExpectedDump);

    expectedDumpValue.put(Key.VALUE.toString(), mixedMapExpectedDump);
    Map<String, Object> expectedDump = new HashMap<String, Object>();
    expectedDump.put(varName, expectedDumpValue);

    Map<String, Object> dump = getDump(varName, dataModel);
    assertEquals(expectedDump, dump);

    // Test the sorting of the outer map
    List<String> expectedDumpValueKeys = new ArrayList<String>(mixedMapExpectedDump.keySet());
    @SuppressWarnings("unchecked")
    Map<String, Object> actualDumpValue = (Map<String, Object>) dump.get(varName);
    @SuppressWarnings("unchecked")
    SortedMap<String, Object> mixedMapActualDump = (SortedMap<String, Object>) actualDumpValue
            .get(Key.VALUE.toString());
    List<String> actualDumpValueKeys = new ArrayList<String>(mixedMapActualDump.keySet());
    assertEquals(expectedDumpValueKeys, actualDumpValueKeys);

    // Test the sorting of the inner map
    List<String> myMapItemsExpectedDumpKeys = new ArrayList<String>(myMapItemsExpectedDump.keySet());
    @SuppressWarnings("unchecked")
    Map<String, Object> myMapActualDump = (Map<String, Object>) mixedMapActualDump.get("myMap");
    @SuppressWarnings("unchecked")
    SortedMap<String, Object> myMapItemsActualDump = (SortedMap<String, Object>) myMapActualDump
            .get(Key.VALUE.toString());
    List<String> myMapItemsActualDumpKeys = new ArrayList<String>(myMapItemsActualDump.keySet());
    assertEquals(myMapItemsExpectedDumpKeys, myMapItemsActualDumpKeys);
}
From source file:fr.aliacom.obm.common.calendar.CalendarBindingImpl.java
@VisibleForTesting
void inheritsParticipationOnExceptions(Event before, Event event) {
    Set<Event> beforeExceptions = before.getEventsExceptions();
    SortedMap<Event, Event> eventExceptions = buildSortedMap(event.getEventsExceptions());
    for (Event beforeException : beforeExceptions) {
        if (eventExceptions.containsKey(beforeException)) {
            inheritsParticipationFromExistingEventForObmUsers(beforeException,
                    eventExceptions.get(beforeException));
        }
    }
}
From source file:org.nuxeo.launcher.connect.ConnectBroker.java
protected String getBestIdForNameInList(String pkgName, List<? extends Package> pkgList) {
    String foundId = null;
    SortedMap<Version, String> foundPkgs = new TreeMap<>();
    SortedMap<Version, String> matchingPkgs = new TreeMap<>();
    for (Package pkg : pkgList) {
        if (pkg.getName().equals(pkgName)) {
            foundPkgs.put(pkg.getVersion(), pkg.getId());
            if (Arrays.asList(pkg.getTargetPlatforms()).contains(targetPlatform)) {
                matchingPkgs.put(pkg.getVersion(), pkg.getId());
            }
        }
    }
    if (matchingPkgs.size() != 0) {
        foundId = matchingPkgs.get(matchingPkgs.lastKey());
    } else if (foundPkgs.size() != 0) {
        foundId = foundPkgs.get(foundPkgs.lastKey());
    }
    return foundId;
}
From source file:org.kuali.coeus.common.budget.impl.calculator.BudgetCalculationServiceImpl.java
/**
 * This method is to get list of line items based on budget category type grouped by cost element.
 */
private SortedMap<CostElement, List<BudgetLineItem>> getBudgetSummaryUniqueLineItemCostElementsForBudgetCategory(
        List<BudgetLineItem> budgetLineItems, String budgetCategoryTypeCode) {
    SortedMap<CostElement, List<BudgetLineItem>> uniqueLineItemCostElements = new TreeMap<>();
    for (BudgetLineItem budgetLineItem : budgetLineItems) {
        CostElement costElement = budgetLineItem.getCostElementBO();
        String costElementBudgetCategoryTypeCode = costElement.getBudgetCategory().getBudgetCategoryTypeCode();
        if (costElementBudgetCategoryTypeCode.equalsIgnoreCase(budgetCategoryTypeCode)) {
            if (!uniqueLineItemCostElements.containsKey(costElement)) {
                uniqueLineItemCostElements.put(costElement, new ArrayList<>());
            }
            uniqueLineItemCostElements.get(costElement).add(budgetLineItem);
        }
    }
    return uniqueLineItemCostElements;
}
From source file:org.apache.hadoop.hbase.regionserver.transactional.TransactionalHLogManager.java
/**
 * @param reconstructionLog
 * @param maxSeqID
 * @param reporter
 * @return map of batch updates
 * @throws UnsupportedEncodingException
 * @throws IOException
 */
public Map<Long, List<BatchUpdate>> getCommitsFromLog(final Path reconstructionLog, final long maxSeqID,
        final Progressable reporter) throws UnsupportedEncodingException, IOException {
    if (reconstructionLog == null || !fileSystem.exists(reconstructionLog)) {
        // Nothing to do.
        return null;
    }
    // Check its not empty.
    FileStatus[] stats = fileSystem.listStatus(reconstructionLog);
    if (stats == null || stats.length == 0) {
        LOG.warn("Passed reconstruction log " + reconstructionLog + " is zero-length");
        return null;
    }
    SortedMap<Long, List<BatchUpdate>> pendingTransactionsById = new TreeMap<Long, List<BatchUpdate>>();
    SortedMap<Long, List<BatchUpdate>> commitedTransactionsById = new TreeMap<Long, List<BatchUpdate>>();
    Set<Long> abortedTransactions = new HashSet<Long>();

    SequenceFile.Reader logReader = new SequenceFile.Reader(fileSystem, reconstructionLog, conf);
    try {
        HLogKey key = new HLogKey();
        HLogEdit val = new HLogEdit();
        long skippedEdits = 0;
        long totalEdits = 0;
        long startCount = 0;
        long writeCount = 0;
        long abortCount = 0;
        long commitCount = 0;
        // How many edits to apply before we send a progress report.
        int reportInterval = conf.getInt("hbase.hstore.report.interval.edits", 2000);

        while (logReader.next(key, val)) {
            LOG.debug("Processing edit: key: " + key.toString() + " val: " + val.toString());
            if (key.getLogSeqNum() < maxSeqID) {
                skippedEdits++;
                continue;
            }
            // TODO: Change all below so we are not doing a getRow and getColumn
            // against a KeyValue. Each invocation creates a new instance. St.Ack.

            // Check this edit is for me.
            byte[] column = val.getKeyValue().getColumn();
            Long transactionId = val.getTransactionId();
            if (!val.isTransactionEntry() || HLog.isMetaColumn(column)
                    || !Bytes.equals(key.getRegionName(), regionInfo.getRegionName())) {
                continue;
            }

            List<BatchUpdate> updates = pendingTransactionsById.get(transactionId);
            switch (val.getOperation()) {
            case START:
                if (updates != null || abortedTransactions.contains(transactionId)
                        || commitedTransactionsById.containsKey(transactionId)) {
                    LOG.error("Processing start for transaction: " + transactionId
                            + ", but have already seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                updates = new LinkedList<BatchUpdate>();
                pendingTransactionsById.put(transactionId, updates);
                startCount++;
                break;
            case WRITE:
                if (updates == null) {
                    LOG.error("Processing edit for transaction: " + transactionId
                            + ", but have not seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                BatchUpdate tranUpdate = new BatchUpdate(val.getKeyValue().getRow());
                if (val.getKeyValue().getValue() != null) {
                    tranUpdate.put(val.getKeyValue().getColumn(), val.getKeyValue().getValue());
                } else {
                    tranUpdate.delete(val.getKeyValue().getColumn());
                }
                updates.add(tranUpdate);
                writeCount++;
                break;
            case ABORT:
                if (updates == null) {
                    LOG.error("Processing abort for transaction: " + transactionId
                            + ", but have not seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                abortedTransactions.add(transactionId);
                pendingTransactionsById.remove(transactionId);
                abortCount++;
                break;
            case COMMIT:
                if (updates == null) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but have not seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                if (abortedTransactions.contains(transactionId)) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but also have abort message");
                    throw new IOException("Corrupted transaction log");
                }
                if (updates.size() == 0) {
                    LOG.warn("Transaciton " + transactionId + " has no writes in log. ");
                }
                if (commitedTransactionsById.containsKey(transactionId)) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but have already commited transaction with that id");
                    throw new IOException("Corrupted transaction log");
                }
                pendingTransactionsById.remove(transactionId);
                commitedTransactionsById.put(transactionId, updates);
                commitCount++;
            }
            totalEdits++;

            if (reporter != null && (totalEdits % reportInterval) == 0) {
                reporter.progress();
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Read " + totalEdits + " tranasctional operations (skipped " + skippedEdits
                    + " because sequence id <= " + maxSeqID + "): " + startCount + " starts, " + writeCount
                    + " writes, " + abortCount + " aborts, and " + commitCount + " commits.");
        }
    } finally {
        logReader.close();
    }

    if (pendingTransactionsById.size() > 0) {
        LOG.info("Region log has " + pendingTransactionsById.size()
                + " unfinished transactions. Going to the transaction log to resolve");
        throw new RuntimeException("Transaction log not yet implemented");
    }

    return commitedTransactionsById;
}
From source file:business.services.FileService.java
public File uploadPart(User user, String name, File.AttachmentType type, MultipartFile file, Integer chunk,
        Integer chunks, String flowIdentifier) {
    try {
        String identifier = user.getId().toString() + "_" + flowIdentifier;
        String contentType = MediaType.APPLICATION_OCTET_STREAM_VALUE;
        log.info("File content-type: " + file.getContentType());
        try {
            contentType = MediaType.valueOf(file.getContentType()).toString();
            log.info("Media type: " + contentType);
        } catch (InvalidMediaTypeException e) {
            log.warn("Invalid content type: " + e.getMediaType());
            //throw new FileUploadError("Invalid content type: " + e.getMediaType());
        }
        InputStream input = file.getInputStream();

        // Create temporary file for chunk
        Path path = fileSystem.getPath(uploadPath).normalize();
        if (!path.toFile().exists()) {
            Files.createDirectory(path);
        }
        name = URLEncoder.encode(name, "utf-8");
        String prefix = getBasename(name);
        String suffix = getExtension(name);
        Path f = Files.createTempFile(path, prefix, suffix + "." + chunk + ".chunk").normalize();
        // filter path names that point to places outside the upload path.
        // E.g., to prevent that in cases where clients use '../' in the filename
        // arbitrary locations are reachable.
        if (!Files.isSameFile(path, f.getParent())) {
            // Path f is not in the upload path. Maybe 'name' contains '..'?
            throw new FileUploadError("Invalid file name");
        }
        log.info("Copying file to " + f.toString());

        // Copy chunk to temporary file
        Files.copy(input, f, StandardCopyOption.REPLACE_EXISTING);

        // Save chunk location in chunk map
        SortedMap<Integer, Path> chunkMap;
        synchronized (uploadChunks) {
            // FIXME: perhaps use a better identifier? Not sure if this one
            // is unique enough...
            chunkMap = uploadChunks.get(identifier);
            if (chunkMap == null) {
                chunkMap = new TreeMap<Integer, Path>();
                uploadChunks.put(identifier, chunkMap);
            }
        }
        chunkMap.put(chunk, f);
        log.info("Chunk " + chunk + " saved to " + f.toString());

        // Assemble complete file if all chunks have been received
        if (chunkMap.size() == chunks.intValue()) {
            uploadChunks.remove(identifier);
            Path assembly = Files.createTempFile(path, prefix, suffix).normalize();
            // filter path names that point to places outside the upload path.
            // E.g., to prevent that in cases where clients use '../' in the filename
            // arbitrary locations are reachable.
            if (!Files.isSameFile(path, assembly.getParent())) {
                // Path assembly is not in the upload path. Maybe 'name' contains '..'?
                throw new FileUploadError("Invalid file name");
            }
            log.info("Assembling file " + assembly.toString() + " from " + chunks + " chunks...");
            OutputStream out = Files.newOutputStream(assembly, StandardOpenOption.CREATE,
                    StandardOpenOption.APPEND);

            // Copy chunks to assembly file, delete chunk files
            for (int i = 1; i <= chunks; i++) {
                //log.info("Copying chunk " + i + "...");
                Path source = chunkMap.get(new Integer(i));
                if (source == null) {
                    log.error("Cannot find chunk " + i);
                    throw new FileUploadError("Cannot find chunk " + i);
                }
                Files.copy(source, out);
                Files.delete(source);
            }

            // Save assembled file name to database
            log.info("Saving attachment to database...");
            File attachment = new File();
            attachment.setName(URLDecoder.decode(name, "utf-8"));
            attachment.setType(type);
            attachment.setMimeType(contentType);
            attachment.setDate(new Date());
            attachment.setUploader(user);
            attachment.setFilename(assembly.getFileName().toString());
            attachment = fileRepository.save(attachment);
            return attachment;
        }
        return null;
    } catch (IOException e) {
        log.error(e);
        throw new FileUploadError(e.getMessage());
    }
}
From source file:org.omnaest.utils.table.TableTest.java
@Test
public void testIndexOfArbitraryKeyExtractor() {
    Table<String> table = this.filledTable(100, 5);

    KeyExtractor<Integer, RowDataReader<String>> keyExtractor = new KeyExtractor<Integer, RowDataReader<String>>() {
        private static final long serialVersionUID = -4201644938610833630L;

        @Override
        public Integer extractKey(RowDataReader<String> rowDataReader) {
            String[] elements = rowDataReader.getElements();
            String[] tokens = elements[1].split(":");
            return Integer.valueOf(tokens[0]);
        }
    };
    SortedMap<Integer, Set<Row<String>>> sortedMap = table.index().of(keyExtractor);
    {
        assertNotNull(sortedMap);
        assertEquals(table.rowSize(), sortedMap.size());
        assertTrue(sortedMap.containsKey(0));
    }

    table.removeRow(0);
    {
        assertFalse(sortedMap.containsKey(0));
        assertTrue(sortedMap.containsKey(1));
        assertFalse(sortedMap.containsKey(101));

        table.setElement(0, 1, "101:88");
        assertTrue(sortedMap.containsKey(101));

        Set<Row<String>> rowSet = sortedMap.get(101);
        assertEquals(1, rowSet.size());
    }
    {
        assertSame(sortedMap, table.index().of(keyExtractor));
    }

    table.setRowElements(1, "0:0", "200:0");
    {
        assertTrue(sortedMap.containsKey(200));
    }
    {
        SortedMap<Integer, Set<Row<String>>> tailMap = sortedMap.tailMap(90);
        assertEquals(100 - 90 + 2, tailMap.size());
        assertEquals(90, tailMap.firstKey().intValue());
        assertEquals(200, tailMap.lastKey().intValue());
    }
    {
        SortedMap<Integer, Set<Row<String>>> headMap = sortedMap.headMap(10);
        assertEquals(9 - 2, headMap.size());
        assertEquals(3, headMap.firstKey().intValue());
        assertEquals(9, headMap.lastKey().intValue());
    }
    {
        table.clear();
        assertTrue(sortedMap.isEmpty());
    }
}
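The test above exercises a project-specific live index (table.index().of(...)), but the get, tailMap, and headMap calls are ordinary SortedMap operations. A minimal standalone sketch with a plain TreeMap, using invented data, shows the same calls in isolation.

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapViewsDemo {
    public static void main(String[] args) {
        SortedMap<Integer, String> byId = new TreeMap<>();
        for (int i = 1; i <= 10; i++) {
            byId.put(i, "row-" + i);
        }

        System.out.println(byId.get(5));                // row-5
        System.out.println(byId.tailMap(8));            // keys >= 8: {8=row-8, 9=row-9, 10=row-10}
        System.out.println(byId.headMap(3));            // keys <  3: {1=row-1, 2=row-2}
        System.out.println(byId.tailMap(8).firstKey()); // 8

        // The views are backed by the original map, so later changes show through.
        byId.remove(9);
        System.out.println(byId.tailMap(8));            // {8=row-8, 10=row-10}
    }
}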
From source file:com.aurel.track.report.dashboard.AverageTimeToCloseItem.java
private SortedMap<Integer, SortedMap<Integer, ArrayList<ReportBeanWithHistory>>> createYearToIntervalToReportBeanListMap(
        int timeInterval, Set<Integer> finalStates) {
    SortedMap<Integer, SortedMap<Integer, ArrayList<ReportBeanWithHistory>>> yearToIntervalToReportBeanList = new TreeMap<Integer, SortedMap<Integer, ArrayList<ReportBeanWithHistory>>>();
    if (reportBeanWithHistoryList != null) {
        Calendar calendar = Calendar.getInstance();
        Calendar calendarEndDate = Calendar.getInstance();
        int calendarInterval = getCalendarInterval(timeInterval);
        for (ReportBeanWithHistory reportBean : reportBeanWithHistoryList) {
            TWorkItemBean workItemBean = reportBean.getWorkItemBean();
            calendar.setTime(dateFrom);
            calendarEndDate.setTime(dateTo);
            int yearValue = calendar.get(Calendar.YEAR);
            int intervalValue = calendar.get(calendarInterval);
            boolean isFirst = true;
            while (calendar.before(calendarEndDate) || isFirst) {
                if (isFirst) {
                    isFirst = false;
                } else {
                    calendar.add(calendarInterval, 1);
                }
                yearValue = calendar.get(Calendar.YEAR);
                intervalValue = calendar.get(calendarInterval);
                SortedMap<Integer, ArrayList<ReportBeanWithHistory>> intervalToReportBeans = yearToIntervalToReportBeanList
                        .get(Integer.valueOf(yearValue));
                if (intervalToReportBeans == null) {
                    yearToIntervalToReportBeanList.put(new Integer(yearValue),
                            new TreeMap<Integer, ArrayList<ReportBeanWithHistory>>());
                    intervalToReportBeans = yearToIntervalToReportBeanList.get(Integer.valueOf(yearValue));
                }
                ArrayList<ReportBeanWithHistory> reportBeanList = intervalToReportBeans
                        .get(Integer.valueOf(intervalValue));
                if (reportBeanList == null) {
                    reportBeanList = new ArrayList<ReportBeanWithHistory>();
                    intervalToReportBeans.put(Integer.valueOf(intervalValue), reportBeanList);
                }
                Integer stateID = getReportBeanStateID(reportBean);
                Date lastStateChangeDate = getReportBeanLastStateChange(reportBean);
                if (stateID == null || lastStateChangeDate == null) {
                    continue;
                }
                if (finalStates.contains(stateID)) {
                    if (timeInterval == TIME_INTERVAL.DAY) {
                        if (DateTimeUtils.compareTwoDatesWithoutTimeValue(workItemBean.getCreated(),
                                calendar.getTime()) == 0
                                && DateTimeUtils.compareTwoDatesWithoutTimeValue(lastStateChangeDate,
                                        calendar.getTime()) == 0) {
                            reportBeanList.add(reportBean);
                        }
                    } else {
                        Calendar actualReportinIntervalEndCalendar = Calendar.getInstance();
                        actualReportinIntervalEndCalendar.setTime(calendar.getTime());
                        actualReportinIntervalEndCalendar.add(calendarInterval, 1);
                        if (DateTimeUtils.greaterOrEqual(workItemBean.getCreated(), calendar.getTime())
                                && DateTimeUtils.greater(lastStateChangeDate, calendar.getTime())
                                && DateTimeUtils.lessOrEqual(lastStateChangeDate,
                                        actualReportinIntervalEndCalendar.getTime())) {
                            reportBeanList.add(reportBean);
                        }
                    }
                }
            }
        }
    }
    return yearToIntervalToReportBeanList;
}
From source file:org.apereo.portal.portlets.search.SearchPortletController.java
/**
 * Return the search results in a sorted map based on priority of the search result type
 * @param resultsMap Search results map
 * @return Sorted map of search results ordered on search result type priority
 */
private SortedMap<Integer, List<AutocompleteResultsModel>> getCleanedAndSortedMapResults(
        ConcurrentMap<String, List<Tuple<SearchResult, String>>> resultsMap, int maxTextLength) {
    SortedMap<Integer, List<AutocompleteResultsModel>> prioritizedResultsMap = createAutocompletePriorityMap();

    // Put the results into the map of <priority,list>
    for (Map.Entry<String, List<Tuple<SearchResult, String>>> entry : resultsMap.entrySet()) {
        for (Tuple<SearchResult, String> tupleSearchResult : entry.getValue()) {
            SearchResult searchResult = tupleSearchResult.getFirst();
            List<String> resultTypes = searchResult.getType();
            // If the search result doesn't have a type defined, use the undefined result type.
            if (resultTypes.size() == 0) {
                resultTypes = UNDEFINED_SEARCH_RESULT_TYPE;
            }
            for (String category : resultTypes) {
                // Exclude the result if it is a result type that's in the ignore list.
                if (!autocompleteIgnoreResultTypes.contains(category)) {
                    int priority = calculatePriorityFromCategory(category);
                    AutocompleteResultsModel result = new AutocompleteResultsModel(
                            cleanAndTrimString(searchResult.getTitle(), maxTextLength),
                            cleanAndTrimString(searchResult.getSummary(), maxTextLength),
                            tupleSearchResult.getSecond(), category);
                    prioritizedResultsMap.get(priority).add(result);
                }
            }
        }
    }
    return prioritizedResultsMap;
}