List of usage examples for java.util.Collections.reverseOrder()
@SuppressWarnings("unchecked")
public static <T> Comparator<T> reverseOrder()
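Before the project examples below, a minimal self-contained sketch of both overloads (the class and variable names here are illustrative, not taken from any of the projects that follow):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

// Illustrative demo only; names are not from the examples below.
public class ReverseOrderDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(List.of("carol", "alice", "bob"));

        // No-arg overload: the reverse of natural ordering.
        // Elements must implement Comparable, or sorting throws ClassCastException.
        names.sort(Collections.reverseOrder());
        System.out.println(names); // [carol, bob, alice]

        // One-arg overload: the reverse of a given comparator (longest string first).
        names.sort(Collections.reverseOrder(Comparator.comparingInt(String::length)));
        System.out.println(names); // [carol, alice, bob]
    }
}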
From source file:org.nuxeo.ecm.platform.syndication.SyndicationActionsBean.java
/**
 * Called by RSS readers for document-based syndication.
 */
@Begin(id = "#{conversationIdGenerator.currentOrNewMainConversationId}", join = true)
public void getSyndicationDocument() throws ClientException {
    if (docRef == null || "".equals(docRef)) {
        throw new IllegalArgumentException("Missing docRef");
    }
    if (feedType == null || "".equals(feedType)) {
        feedType = DEFAULT_TYPE;
    }

    /*
     * Parse docRef into serverLocation and docId
     */
    String[] split = docRef.split("/", 2);
    if (split.length != 2) {
        throw new IllegalArgumentException("Invalid docRef");
    }
    String serverLocation = split[0];
    String docId = split[1];
    if (docId.startsWith("1:")) {
        // deprecated docRef syntax, with DocumentRef type (IdRef assumed)
        docId = docId.substring(2);
        docRef = serverLocation + '/' + docId;
    }
    IdRef idRef = new IdRef(docId);

    // Create a navigationContext from scratch with the proper server location
    NavigationContext navigationContext = (NavigationContext) Component.getInstance("navigationContext", true);
    navigationContext.setCurrentServerLocation(new RepositoryLocation(serverLocation));
    CoreSession documentManager = navigationContext.getOrCreateDocumentManager();
    DocumentModel doc;
    try {
        doc = documentManager.getDocument(idRef);
    } catch (DocumentSecurityException e) {
        sendForbidden();
        return;
    }

    /*
     * Feed definition
     */
    SyndFeed feed = new SyndFeedImpl();
    feed.setFeedType(feedType);
    String title = (String) doc.getProperty("dublincore", "title");
    if (title == null || "".equals(title)) {
        title = " ";
    }
    feed.setTitle(title);
    String description = (String) doc.getProperty("dublincore", "description");
    if (description == null || "".equals(description)) {
        description = " ";
    }
    feed.setDescription(description);
    feed.setLink(getFeedUrl(DOCUMENT_SYNDICATION_PATH, DOCREF_KEY, docRef, feedType));

    /*
     * Feed entries
     */
    // skip deleted documents
    // TODO do standard folder children search to check permissions too
    DocumentModelList children = new DocumentModelListImpl();
    for (DocumentModel child : documentManager.getChildren(idRef)) {
        if (LifeCycleConstants.DELETED_STATE.equals(child.getCurrentLifeCycleState())) {
            continue;
        }
        children.add(child);
    }
    List<FeedItem> feedItems = getFeedItems(children);
    // Sort items by update date or, if absent, by publication date
    Collections.sort(feedItems, Collections.reverseOrder());
    feed.setEntries(feedItems);
    writeFeed(feed);
}
From source file:cz.muni.fi.mir.db.service.impl.StatisticsServiceImpl.java
@Override
@Transactional(readOnly = true)
public Map<Long, DateTime> getStatisticsMap() {
    List<Object[]> results = entityManager
            .createQuery("SELECT s.id,s.calculationDate FROM statistics s")
            .getResultList();
    // TreeMap with a reverse-order comparator keeps keys descending,
    // so the highest ids come first.
    Map<Long, DateTime> resultMap = new TreeMap<>(Collections.reverseOrder());
    for (Object[] result : results) {
        resultMap.put((Long) result[0], (DateTime) result[1]);
    }
    return resultMap;
}
From source file:org.openmrs.module.encounteralerts.api.impl.EncounterAlertsServiceImpl.java
@Override
public List<EvaluatedEncounter> evaluateCurrentUserEncounterAlert(EncounterAlert alert) {
    // TODO - Filter by patient
    List<EvaluatedEncounter> encounters = new ArrayList<EvaluatedEncounter>();
    SerializationService soService = Context.getSerializationService();
    EncounterQuery upQuery;
    EncounterQuery downQuery;
    try {
        upQuery = soService.deserialize(alert.getUpQuery().getSerializedData(), EncounterQuery.class,
                ReportingSerializer.class);
        EncounterQueryService eqService = Context.getService(EncounterQueryService.class);
        EncounterQueryResult upResult = eqService.evaluate(upQuery, null);

        if (alert.getDownQuery() != null) {
            downQuery = soService.deserialize(alert.getDownQuery().getSerializedData(), EncounterQuery.class,
                    ReportingSerializer.class);
            EncounterQueryResult downResult = eqService.evaluate(downQuery, null);
            downResult.getMemberIds().retainAll(upResult.getMemberIds());
            upResult.getMemberIds().removeAll(downResult.getMemberIds());

            for (Integer i : downResult.getMemberIds()) {
                // TODO - Improve method
                if (i != null) {
                    Encounter e = Context.getEncounterService().getEncounter(i);
                    if (e != null) {
                        encounters.add(new EvaluatedEncounter(e, EvaluatedEncounter.CHECKED));
                    }
                }
            }
        }

        for (Integer i : upResult.getMemberIds()) {
            if (i != null) {
                Encounter e = Context.getEncounterService().getEncounter(i);
                if (e != null) {
                    encounters.add(new EvaluatedEncounter(e, EvaluatedEncounter.TO_BE_CHECKED));
                }
            }
        }

        // Order the list (descending) before returning
        Collections.sort(encounters, Collections.reverseOrder());
        return encounters;
    } catch (SerializationException e) {
        e.printStackTrace();
    } catch (EvaluationException e) {
        e.printStackTrace();
    }
    return null;
}
From source file:sadl.run.datagenerators.AlgoWeaknessesDataGenerator.java
private Pair<Integer, Integer> samplePair(PDRTAState s) {
    final List<AlphIn> transitions = new ArrayList<>();
    for (int i = 0; i < s.getPDRTA().getAlphSize(); i++) {
        final Collection<Interval> ins = s.getIntervals(i).values();
        for (final Interval in : ins) {
            final double transProb = s.getProbabilityTrans(i, in);
            if (transProb > 0.0) {
                transitions.add(new AlphIn(i, in, transProb));
            }
        }
    }
    transitions.add(new AlphIn(-1, null, s.getSequenceEndProb()));
    // Sort transitions in descending natural order
    Collections.sort(transitions, Collections.reverseOrder());
    final int idx = drawInstance(
            transitions.stream().map(a -> new Double(a.prob)).collect(Collectors.toList()));
    final AlphIn trans = transitions.get(idx);
    if (trans.symIdx == -1) {
        return null;
    }
    return Pair.of(new Integer(trans.symIdx),
            new Integer(chooseUniform(trans.in.getBegin(), trans.in.getEnd())));
}
From source file:semanticsco.servicecomposition.ServiceDiscoverer.java
public List<Service> findResolvingServices() {
    // Initialize List "foundServices"
    this.foundServices = new LinkedList<>();
    try {
        RegistryInquiry jr = new RegistryInquiry();

        // Try to authenticate user in service registry
        String authentication = jr.authenticate(juddiUser);

        // If user is authenticated
        if (authentication != null) {
            // Find services semantically annotated with corresponding output
            // and add them to List "foundServices"
            for (Iterator it = this.oSearchValues.iterator(); it.hasNext();) {
                String outputConcept = (String) it.next();
                LinkedList<String> outputConcepts = new LinkedList<>();
                outputConcepts.add(outputConcept);
                foundServices.addAll(jr.findBusinessServicesByData("output", (LinkedList<String>) outputConcepts));
            }

            // Calculate semantic similarity
            this.setSemanticSimilarity("output");

            // Sort List "foundServices" in descending order
            Collections.sort(foundServices, Collections.reverseOrder());

            // Logout from service registry
            jr.logout();
        }
    } catch (ConfigurationException | TransportException ex) {
    }
    return foundServices;
}
From source file:com.liato.bankdroid.banking.banks.Steam.java
@Override
public void update() throws BankException, LoginException, BankChoiceException {
    super.update();
    if (username == null || password == null || username.length() == 0 || password.length() == 0) {
        throw new LoginException(res.getText(R.string.invalid_username_password).toString());
    }
    urlopen = login();
    Matcher matcher = reBalance.matcher(response);
    if (matcher.find()) {
        /*
         * Capture groups:
         * GROUP      EXAMPLE DATA
         * 1: Amount  0,--€
         */
        String amount = Html.fromHtml(matcher.group(1)).toString().trim().replace("--", "00");
        Account account = new Account("Wallet", Helpers.parseBalance(amount), "1");
        String currency = Helpers.parseCurrency(amount, "USD");
        this.setCurrency(currency);
        account.setCurrency(currency);
        balance = balance.add(Helpers.parseBalance(amount));

        ArrayList<Transaction> transactions = new ArrayList<Transaction>();
        matcher = reTransactions.matcher(response);
        while (matcher.find()) {
            /*
             * Capture groups:
             * GROUP        EXAMPLE DATA
             * 1: Date      18 Oct 2007
             * 2: Amount    0,99€
             * 3: Event     Purchase
             * 4: Item      Team Fortress 2
             * 5: Sub item  Mann Co. Supply Crate Key
             */
            SimpleDateFormat sdfFrom = new SimpleDateFormat("d MMM yyyy");
            SimpleDateFormat sdfTo = new SimpleDateFormat("yyyy-MM-dd");
            Date transactionDate;
            try {
                transactionDate = sdfFrom.parse(matcher.group(1).trim());
                String strDate = sdfTo.format(transactionDate);
                BigDecimal price = Helpers
                        .parseBalance(Html.fromHtml(matcher.group(2)).toString().trim().replace("--", "00"));
                if ("Purchase".equalsIgnoreCase(matcher.group(3).trim())) {
                    price = price.negate();
                }
                transactions.add(new Transaction(strDate,
                        Html.fromHtml(matcher.group(4)).toString().trim()
                                + (Html.fromHtml(matcher.group(5)).toString().trim().length() > 1
                                        ? " (" + Html.fromHtml(matcher.group(5)).toString().trim() + ")"
                                        : ""),
                        price,
                        Helpers.parseCurrency(Html.fromHtml(matcher.group(2)).toString().trim(), "USD")));
            } catch (ParseException e) {
                Log.e(TAG, "Unable to parse date: " + matcher.group(1).trim());
            }
        }
        Collections.sort(transactions, Collections.reverseOrder());
        account.setTransactions(transactions);
        accounts.add(account);
    }
    if (accounts.isEmpty()) {
        throw new BankException(res.getText(R.string.no_accounts_found).toString());
    }
    super.updateComplete();
}
From source file:net.tsquery.DataEndpoint.java
@SuppressWarnings("unchecked") private JSONObject PlotToDygraphJSON(Plot plot, long tsFrom, long tsTo, int topN) { final JSONObject plotObject = new JSONObject(); final JSONArray nameArray = new JSONArray(); final JSONArray dataArray = new JSONArray(); final int dpCount = plot.getDataPointsSize(); final TreeMap<Long, double[]> tsMap = new TreeMap<>(); final double[] weight = new double[dpCount]; int dpIndex = 0; for (DataPoints dataPoints : plot.getDataPoints()) { for (DataPoint point : dataPoints) { long timestamp = point.timestamp(); if (timestamp < tsFrom || timestamp > tsTo) continue; long tsMSec = timestamp * 1000; if (!tsMap.containsKey(tsMSec)) { double[] values = new double[dpCount]; values[dpIndex] = getValue(point); tsMap.put(tsMSec, values); weight[dpIndex] += ((values[dpIndex]) / 1000000.0); } else { //noinspection MismatchedReadAndWriteOfArray double[] values = tsMap.get(tsMSec); values[dpIndex] = getValue(point); weight[dpIndex] += ((values[dpIndex]) / 1000000.0); }// w w w .j a v a 2s . c o m } dpIndex++; } HashMap<Integer, Boolean> includeMap = null; // are we performing a topN lookup? if (topN > 0) { includeMap = new HashMap<>(topN); TreeMap<Double, Integer> weightMap = new TreeMap<>(Collections.reverseOrder()); for (int i = 0; i < dpCount; i++) { while (weightMap.containsKey(weight[i])) weight[i] -= 0.00000001; weightMap.put(weight[i], i); } int series = 0; for (Map.Entry<Double, Integer> entry : weightMap.entrySet()) { includeMap.put(entry.getValue(), true); ++series; if (series >= topN) break; } } for (Map.Entry<Long, double[]> entry : tsMap.entrySet()) { JSONArray entryArray = new JSONArray(); entryArray.add(entry.getKey()); final double[] points = entry.getValue(); for (dpIndex = 0; dpIndex < dpCount; dpIndex++) { if ((topN <= 0) || (topN > 0 && includeMap.containsKey(dpIndex))) { entryArray.add(points[dpIndex]); } } dataArray.add(entryArray); } // First column is always the Date nameArray.add("Date"); int index = -1; for (DataPoints dataPoints : plot.getDataPoints()) { index++; // if we are in a topN query and the current index is not included, skip this iteration if (topN > 0 && !includeMap.containsKey(index)) continue; StringBuilder nameBuilder = new StringBuilder(); nameBuilder.append(dataPoints.metricName()).append(":"); Map<String, String> tags = dataPoints.getTags(); for (String s : tags.keySet()) { nameBuilder.append(String.format(" %s=%s", s, tags.get(s))); } nameArray.add(nameBuilder.toString()); } plotObject.put("labels", nameArray); plotObject.put("values", dataArray); return plotObject; }
From source file:fr.gael.dhus.service.SystemService.java
@PreAuthorize("hasRole('ROLE_SYSTEM_MANAGER')")
public List<Date> getDumpDatabaseList() {
    List<Date> timestamps = new ArrayList<Date>();
    File path_file = new File(cfgManager.getDatabaseConfiguration().getDumpPath());
    File[] lst = path_file.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith("dump-");
        }
    });
    if (lst == null) {
        return timestamps;
    }
    for (File f : lst) {
        String stimesamp = f.getName().replaceAll("dump-(.*)", "$1");
        long timestamp = Long.parseLong(stimesamp);
        Date date = new Date(timestamp);
        timestamps.add(date);
    }
    Collections.sort(timestamps, Collections.reverseOrder());
    return timestamps;
}
From source file:org.tinymediamanager.scraper.MediaArtwork.java
public ImageSizeAndUrl getBiggestArtwork() {
    if (imageSizes.size() > 0) {
        List<ImageSizeAndUrl> descImageSizes = new ArrayList<MediaArtwork.ImageSizeAndUrl>(imageSizes);
        // sort descending
        Collections.sort(descImageSizes, Collections.reverseOrder());
        ImageSizeAndUrl biggestImage = descImageSizes.get(0);
        if (biggestImage != null) {
            return biggestImage;
        }
    }
    return null;
}
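A note on the design: copying and fully sorting the list just to read its first element does more work than a single-pass maximum. Assuming ImageSizeAndUrl implements Comparable (which the reverse-order sort above already requires), an equivalent sketch:

// Equivalent single-pass lookup; assumes ImageSizeAndUrl is Comparable.
// Returns null for an empty list, like the original.
public ImageSizeAndUrl getBiggestArtwork() {
    return imageSizes.isEmpty() ? null : Collections.max(imageSizes);
}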
From source file:uk.co.flax.ukmp.services.TermsManagerThread.java
/**
 * Extract a list of terms, sorted by popularity (descending), from a list
 * of tweets.
 * @param tweets the tweets whose text should be analysed.
 * @return a complete list of terms in the tweets, filtered by the stopwords
 * list, and with usernames, links and words with punctuation filtered out.
 */
private List<Term> extractTerms(List<Tweet> tweets) {
    Map<String, Integer> termMap = new HashMap<String, Integer>();

    for (Tweet tweet : tweets) {
        // Split text into words, breaking on whitespace
        String[] words = tweet.getText().split("\\s+");
        for (String w : words) {
            // Skip all Twitter handles by default
            if (!w.startsWith("@")) {
                // Split individual words by punctuation (except hyphens)
                String[] unpunctuated = w.split("[^-A-Za-z0-9]");
                // Ignore anything that has split into more than one term - should
                // cut out URLs
                if (unpunctuated.length == 1) {
                    // Force word into lower case
                    String word = unpunctuated[0].toLowerCase();
                    if (!isWordInStopWords(word)) {
                        if (!termMap.containsKey(word)) {
                            termMap.put(word, 0);
                        }
                        termMap.put(word, termMap.get(word) + 1);
                    }
                }
            }
        }
    }

    LOGGER.trace("Extracted {} terms from {} tweets", termMap.size(), tweets.size());

    // Convert the map into a set of terms in reverse order (ie. most popular first)
    Set<Term> termSet = new TreeSet<Term>(Collections.reverseOrder());
    for (String word : termMap.keySet()) {
        Term term = new Term(word, termMap.get(word));
        termSet.add(term);
    }

    // Convert the set into a List and return it
    return new ArrayList<Term>(termSet);
}
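One caveat with the TreeSet usage above: a TreeSet treats elements that compare as equal as duplicates, so if Term's compareTo (not shown here) compared only the occurrence count, distinct words with the same frequency would be silently collapsed. The effect is easy to reproduce with plain Integers:

import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;

// Illustrative demo of TreeSet + reverseOrder() deduplication behaviour.
public class ReverseOrderSetDemo {
    public static void main(String[] args) {
        // A set ordered by the reverse of natural ordering.
        Set<Integer> counts = new TreeSet<>(Collections.reverseOrder());
        Collections.addAll(counts, 3, 1, 4, 1, 5);
        // Descending, and the duplicate 1 was collapsed:
        System.out.println(counts); // [5, 4, 3, 1]
    }
}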