Usage examples for java.util.Collections.reverseOrder()
@SuppressWarnings("unchecked") public static <T> Comparator<T> reverseOrder()
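Before the project-specific examples, here is a minimal, self-contained sketch (not taken from any of the sources below) of what the returned comparator does: it imposes the reverse of the elements' natural ordering, so sorting with it produces descending order.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ReverseOrderBasics {
    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<>(Arrays.asList(3, 1, 4, 1, 5, 9));
        // Natural order would be ascending; reverseOrder() flips it to descending
        numbers.sort(Collections.reverseOrder());
        System.out.println(numbers); // [9, 5, 4, 3, 1, 1]
    }
}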
From source file:com.cloudera.oryx.als.common.lsh.LocationSensitiveHashIT.java
private static List<Long> findTopRecommendations(LongObjectMap<float[]> Y, float[] userVec) {
    // reverseOrder() makes the TreeMap keep the highest dot-product scores first
    SortedMap<Double, Long> allScores = Maps.newTreeMap(Collections.reverseOrder());
    for (LongObjectMap.MapEntry<float[]> entry : Y.entrySet()) {
        double dot = SimpleVectorMath.dot(entry.getValue(), userVec);
        allScores.put(dot, entry.getKey());
    }
    List<Long> topRecommendations = Lists.newArrayList();
    for (Map.Entry<Double, Long> entry : allScores.entrySet()) {
        topRecommendations.add(entry.getValue());
        if (topRecommendations.size() == NUM_RECS) {
            return topRecommendations;
        }
    }
    return topRecommendations;
}
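Reduced to a standalone sketch with hypothetical keys and values, the pattern above relies on a TreeMap built with Collections.reverseOrder() keeping its keys in descending order, so iteration visits the highest scores first. Note that, as in the method above, two identical scores collide because they map to the same key.

import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;

public class DescendingScoreMap {
    public static void main(String[] args) {
        // Keys (scores) are kept from highest to lowest because of the reverse-order comparator
        Map<Double, String> scores = new TreeMap<>(Collections.reverseOrder());
        scores.put(0.42, "itemA");
        scores.put(0.97, "itemB");
        scores.put(0.15, "itemC");
        for (Map.Entry<Double, String> e : scores.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue()); // 0.97, 0.42, 0.15
        }
    }
}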
From source file:com.joeyturczak.jtscanner.ui.FileListFragment.java
private void loadFileList() {
    mFileListAdapter.clear();
    File externalDir = Environment.getExternalStorageDirectory();
    String externalDirPath = externalDir.getPath();
    File scannerDir = new File(externalDirPath + getString(R.string.file_directory));
    long today = Utility.normalizeDate(System.currentTimeMillis());
    int daysToKeep = Utility.getDaysToKeepFiles(getActivity());
    long daysToKeepMilliseconds = ONE_DAY * daysToKeep;
    long todayMinusDaysToKeep = today - daysToKeepMilliseconds;
    File[] files = scannerDir.listFiles();
    if (files != null) {
        // File's natural order compares pathnames, so this sorts the array in descending pathname order
        Arrays.sort(files, Collections.reverseOrder());
        for (File file : files) {
            String fileName = file.getName();
            fileName = fileName.replace("JTS_", "");
            fileName = fileName.replace(".xls", "");
            long fileDate = Utility.dateToMilliseconds(fileName, "MM-dd-yyyy");
            if (fileDate < todayMinusDaysToKeep) {
                file.delete();
                Utility.trimCache(getActivity());
            } else {
                mFileListAdapter.add(file.getName());
            }
        }
    }
}
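A minimal sketch of the same idea, assuming the array elements are mutually Comparable (the file names here are hypothetical): Arrays.sort with Collections.reverseOrder() sorts an object array in descending natural order. This does not apply to primitive arrays, since reverseOrder() only works with objects.

import java.util.Arrays;
import java.util.Collections;

public class DescendingArraySort {
    public static void main(String[] args) {
        String[] names = { "JTS_01-15-2024.xls", "JTS_03-02-2024.xls", "JTS_02-20-2024.xls" };
        // Sorts by the elements' natural (lexicographic) order, reversed
        Arrays.sort(names, Collections.reverseOrder());
        System.out.println(Arrays.toString(names));
        // [JTS_03-02-2024.xls, JTS_02-20-2024.xls, JTS_01-15-2024.xls]
    }
}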
From source file:com.cloudera.oryx.als.common.candidate.LocationSensitiveHashIT.java
private static List<Long> findTopRecommendations(LongObjectMap<float[]> Y, float[] userVec) {
    SortedMap<Double, Long> allScores = new TreeMap<>(Collections.reverseOrder());
    for (LongObjectMap.MapEntry<float[]> entry : Y.entrySet()) {
        double dot = SimpleVectorMath.dot(entry.getValue(), userVec);
        allScores.put(dot, entry.getKey());
    }
    List<Long> topRecommendations = new ArrayList<>();
    for (Map.Entry<Double, Long> entry : allScores.entrySet()) {
        topRecommendations.add(entry.getValue());
        if (topRecommendations.size() == NUM_RECS) {
            return topRecommendations;
        }
    }
    return topRecommendations;
}
From source file:org.apache.hadoop.hdfs.server.diskbalancer.command.ReportCommand.java
private void handleTopReport(final CommandLine cmd, final StrBuilder result, final String nodeFormat) {
    Collections.sort(getCluster().getNodes(), Collections.reverseOrder());

    /* extract value that identifies top X DataNode(s) */
    setTopNodes(parseTopNodes(cmd, result));

    /*
     * Reporting volume information of top X DataNode(s) in summary
     */
    final String outputLine = String.format(
            "Reporting top %d DataNode(s) benefiting from running DiskBalancer.", getTopNodes());
    recordOutput(result, outputLine);

    ListIterator<DiskBalancerDataNode> li = getCluster().getNodes().listIterator();
    for (int i = 0; i < getTopNodes() && li.hasNext(); i++) {
        DiskBalancerDataNode dbdn = li.next();
        result.appendln(String.format(nodeFormat, i + 1, getTopNodes(), dbdn.getDataNodeName(),
                dbdn.getDataNodeIP(), dbdn.getDataNodePort(), dbdn.getDataNodeUUID(),
                dbdn.getVolumeCount(), dbdn.getNodeDataDensity()));
    }
}
From source file:com.textocat.textokit.commons.util.CorpusUtils.java
/**
 * Partition corpus files specified by filters.
 *
 * @param corpusDir          corpus base directory
 * @param corpusFileFilter   filter for corpus files
 * @param corpusSubDirFilter filter for corpus subdirectories. If null subdirectories will
 *                           be ignored.
 * @param partitionsNumber
 * @return list of file sets (partitions)
 */
public static List<Set<File>> partitionCorpusByFileSize(File corpusDir, IOFileFilter corpusFileFilter,
        IOFileFilter corpusSubDirFilter, int partitionsNumber) {
    log.info("Partitioning corpus {} with file filter {} and subdir filter {}...",
            new Object[] { corpusDir.getAbsolutePath(), corpusFileFilter, corpusSubDirFilter });
    // TODO implement an algorithm that is more robust to different file sizes
    // e.g. it should handle the case when there is no more files to include into the last partition
    if (partitionsNumber <= 0) {
        throw new IllegalArgumentException(
                String.format("Illegal number of partitions: %s", partitionsNumber));
    }
    if (!corpusDir.isDirectory()) {
        throw new IllegalArgumentException(String.format("%s is not existing directory", corpusDir));
    }
    final Deque<File> corpusFilesDeq;
    {
        List<File> corpusFiles = Lists
                .newArrayList(FileUtils.listFiles(corpusDir, corpusFileFilter, corpusSubDirFilter));
        // sort by decreasing size to smooth differences between parts
        Collections.sort(corpusFiles, SizeFileComparator.SIZE_REVERSE);
        corpusFilesDeq = Lists.newLinkedList(corpusFiles);
    }
    int totalSize = 0;
    for (File cf : corpusFilesDeq) {
        totalSize += cf.length();
    }
    log.info("Corpus total size (bytes): {}", totalSize);
    List<FileBucket> buckets = Lists.newArrayListWithExpectedSize(partitionsNumber);
    // create empty parts
    for (int i = 0; i < partitionsNumber; i++) {
        buckets.add(new FileBucket());
    }
    while (!corpusFilesDeq.isEmpty()) {
        File cf = corpusFilesDeq.pop();
        buckets.get(0).add(cf);
        // resort: make the least bucket first
        Collections.sort(buckets);
    }
    // resort: make the largest bucket first
    Collections.sort(buckets, Collections.reverseOrder());
    // log
    log.info("Corpus {} has been partitioned by file sizes. Result partitions:\n{}", corpusDir,
            Joiner.on('\n').join(buckets));
    // transform
    List<Set<File>> result = Lists.newArrayList();
    for (FileBucket b : buckets) {
        result.add(b.getFiles());
    }
    // sanity checks
    if (result.size() != partitionsNumber || result.get(result.size() - 1).isEmpty()) {
        throw new IllegalStateException(
                "Illegal corpus partitioning result. Check previous log messages for details.");
    }
    return result;
}
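The core of the method above, stripped of the corpus-specific types, is a greedy balancing loop: drop the next (largest remaining) item into the currently smallest bucket, then, once everything is placed, use Collections.reverseOrder() to report the largest bucket first. A rough standalone sketch with hypothetical names and plain longs instead of files:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class GreedyPartitionSketch {

    // A bucket whose natural order is its total size, so Collections.sort can rank buckets
    static final class Bucket implements Comparable<Bucket> {
        final List<Long> items = new ArrayList<>();
        long total = 0;
        void add(long size) { items.add(size); total += size; }
        @Override public int compareTo(Bucket o) { return Long.compare(total, o.total); }
        @Override public String toString() { return items + " (total=" + total + ")"; }
    }

    public static void main(String[] args) {
        List<Long> sizes = Arrays.asList(90L, 70L, 40L, 30L, 20L, 10L); // already sorted descending
        List<Bucket> buckets = new ArrayList<>(Arrays.asList(new Bucket(), new Bucket(), new Bucket()));
        for (long size : sizes) {
            buckets.get(0).add(size);  // give the item to the currently smallest bucket
            Collections.sort(buckets); // re-sort so the smallest bucket is first again
        }
        Collections.sort(buckets, Collections.reverseOrder()); // report the largest bucket first
        buckets.forEach(System.out::println);
    }
}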
From source file:org.libreplan.business.common.ProportionalDistributor.java
private void assignRemaining(int[] result, Fraction[] currentProportions, int remaining) {
    List<FractionWithPosition> transform = FractionWithPosition.transform(difference(currentProportions));
    Collections.sort(transform, Collections.reverseOrder());
    for (int i = 0; i < remaining; i++) {
        FractionWithPosition proportionWithPosition = transform.get(i % currentProportions.length);
        result[proportionWithPosition.position] = result[proportionWithPosition.position] + 1;
    }
}
From source file:org.magicdgs.popgenlib.utils.FrequencyUtils.java
/**
 * Sort the frequencies from major to minor.
 *
 * @param frequencies the frequencies to sort.
 *
 * @return a new list with the sorted frequencies.
 *
 * @throws IllegalFrequencyException if frequencies are invalid.
 */
public static List<Double> sortFrequencies(final List<Double> frequencies) {
    validateFrequencies(frequencies);
    return frequencies.stream().sorted(Collections.reverseOrder()).collect(Collectors.toList());
}
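The stream form works for any Comparable element type; a minimal sketch with hypothetical data, sorting a copy of a list in descending order without touching the original:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

public class DescendingStreamSort {
    public static void main(String[] args) {
        List<Double> frequencies = Arrays.asList(0.2, 0.5, 0.3);
        // sorted(Collections.reverseOrder()) orders the stream from largest to smallest
        List<Double> majorToMinor = frequencies.stream()
                .sorted(Collections.reverseOrder())
                .collect(Collectors.toList());
        System.out.println(majorToMinor); // [0.5, 0.3, 0.2]
    }
}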
From source file:org.openddr.simpleapi.oddr.identificator.CachedDeviceIdentificator.java
@Override
public Device get(UserAgent userAgent, int confidenceTreshold) {
    List<Device> foundDevices = new ArrayList<Device>();

    // check if the device is in the cache
    Device foundDevice = (Device) getFromCache(userAgent);

    // if not, build the device
    if (foundDevice == null) {
        if (isUaNotFound(userAgent)) {
            return null;
        }
        for (DeviceBuilder deviceBuilder : builders) {
            if (deviceBuilder.canBuild(userAgent)) {
                Device device = (Device) deviceBuilder.build(userAgent, confidenceTreshold);
                if (device != null) {
                    String parentId = device.getParentId();
                    Device parentDevice = null;
                    Set propertiesSet = null;
                    Iterator it = null;
                    while (!"root".equals(parentId)) {
                        parentDevice = (Device) devices.get(parentId);
                        propertiesSet = parentDevice.getPropertiesMap().entrySet();
                        it = propertiesSet.iterator();
                        while (it.hasNext()) {
                            Map.Entry entry = (Map.Entry) it.next();
                            if (!device.containsProperty((String) entry.getKey())) {
                                device.putProperty((String) entry.getKey(), (String) entry.getValue());
                            }
                        }
                        parentId = parentDevice.getParentId();
                    }
                    foundDevices.add(device);
                    if (device.getConfidence() >= confidenceTreshold) {
                        foundDevice = device;
                        break;
                    }
                }
            }
        }
        if (foundDevice != null) {
            // add the device to the cache
            addToCache(userAgent, foundDevice);
        } else {
            if (foundDevices.isEmpty()) {
                addNotFoundUa(userAgent);
                return null;
            }
            Collections.sort(foundDevices, Collections.reverseOrder());
            foundDevice = foundDevices.get(0);
            // add the device to the cache
            addToCache(userAgent, foundDevice);
        }
    }
    return foundDevice;
}
From source file:MSUmpire.PSMDataStructure.ProtID.java
public float GetAbundanceByTopPepFrag(int toppep, int topfrag, float pepweight) {
    if (PeptideID.isEmpty()) {
        return 0;
    }
    // reverseOrder() turns the PriorityQueue into a max-heap, so poll() returns the largest abundance first
    PriorityQueue<Float> TopQueue = new PriorityQueue<>(PeptideID.size(), Collections.reverseOrder());
    for (PepIonID peptide : PeptideID.values()) {
        if (peptide.FilteringWeight > pepweight) {
            TopQueue.add(peptide.GetPepAbundanceByTopFragments(topfrag));
        }
    }
    float totalabundance = 0f;
    int num = Math.min(toppep, TopQueue.size());
    for (int i = 0; i < num; i++) {
        totalabundance += TopQueue.poll();
    }
    return totalabundance / num;
}
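Passing reverseOrder() as the PriorityQueue comparator is a common way to get a max-heap, so repeated poll() calls return the largest remaining element; a standalone sketch of the top-N-sum idea with hypothetical values:

import java.util.Collections;
import java.util.PriorityQueue;

public class TopNWithMaxHeap {
    public static void main(String[] args) {
        // reverseOrder() turns the default min-heap into a max-heap
        PriorityQueue<Float> heap = new PriorityQueue<>(Collections.reverseOrder());
        Collections.addAll(heap, 3.5f, 9.1f, 0.7f, 4.2f, 8.8f);

        int topN = Math.min(3, heap.size());
        float sum = 0f;
        for (int i = 0; i < topN; i++) {
            sum += heap.poll(); // 9.1, then 8.8, then 4.2
        }
        System.out.println("Sum of top " + topN + ": " + sum); // ~22.1 (float arithmetic)
    }
}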
From source file:org.codice.ddf.catalog.ui.metacard.impl.BaseLocator.java
protected <T extends ServiceProperties> List<T> findServices(Class<T> clazz, String filter,
        Predicate<T> servicePropertiesFilter) {
    BundleContext bundleContext = getContext();

    if (bundleContext == null) {
        LOGGER.debug("Unable to get the OSGi bundle context.");
        return Collections.emptyList();
    }

    Collection<ServiceReference<T>> refs = findServices(clazz, filter, bundleContext);

    if (refs != null) {
        return refs.stream().sorted(Collections.reverseOrder()).map(bundleContext::getService)
                .filter(servicePropertiesFilter).collect(Collectors.toList());
    }

    return Collections.emptyList();
}