Example usage for java.util Comparator comparing

Introduction

This page collects example usages of java.util.Comparator.comparing from a variety of open-source projects.

Prototype

public static <T, U extends Comparable<? super U>> Comparator<T> comparing(
        Function<? super T, ? extends U> keyExtractor) 

Document

Accepts a function that extracts a java.lang.Comparable sort key from a type T, and returns a Comparator<T> that compares by that sort key.
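
As a quick orientation before the project examples, here is a minimal self-contained sketch; the class and variable names are illustrative, not taken from any project below:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class ComparingDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("pear", "fig", "banana");

        // Extract a Comparable key (string length, boxed to Integer) from each
        // element and compare by it.
        words.sort(Comparator.comparing(String::length));
        System.out.println(words); // [fig, pear, banana]

        // The explicit (String w) parameter type is needed when reversed() is
        // chained, because the chained call blocks lambda type inference.
        words.sort(Comparator.comparing((String w) -> w.charAt(0)).reversed());
        System.out.println(words); // [pear, fig, banana]
    }
}

When the key is a primitive int, long, or double, the specialized comparingInt, comparingLong, and comparingDouble variants avoid boxing.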

Usage

From source file:org.matsim.contrib.drt.optimizer.insertion.ParallelMultiVehicleInsertionProblem.java

Taking the minimum of a parallel stream: comparing extracts each candidate insertion's cost, and Stream.min returns the cheapest one.

@Override
public Optional<BestInsertion> findBestInsertion(DrtRequest drtRequest, Collection<Entry> vEntries) {
    DetourLinksProvider detourLinksProvider = new DetourLinksProvider(drtCfg, timer, drtRequest);
    detourLinksProvider.findInsertionsAndLinks(forkJoinPool, vEntries);

    detourLinksStats.updateStats(vEntries, detourLinksProvider);
    Map<Entry, List<Insertion>> filteredInsertions = detourLinksProvider.getFilteredInsertions();
    if (filteredInsertions.isEmpty()) {
        return Optional.empty();
    }

    pathDataProvider.precalculatePathData(drtRequest, detourLinksProvider.getDetourLinksSet());

    return forkJoinPool.submit(() -> filteredInsertions.entrySet().parallelStream()//
            .map(e -> new SingleVehicleInsertionProblem(pathDataProvider, insertionCostCalculator)
                    .findBestInsertion(drtRequest, e.getKey(), e.getValue()))//
            .filter(Optional::isPresent)//
            .map(Optional::get)//
            .min(Comparator.comparing(i -> i.cost)))//
            .join();
}

From source file:pt.ist.applications.admissions.ui.ApplicationsAdmissionsController.java

Sorting contests by begin date, newest first; the explicit (Contest c) parameter type is required because the chained reversed() call prevents lambda type inference.

@RequestMapping(method = RequestMethod.GET)
public String home(final Model model) {
    final JsonArray contests = Bennu.getInstance().getContestSet().stream()
            .sorted(Comparator.comparing((Contest c) -> c.getBeginDate()).reversed()).map(this::toJsonObject)
            .collect(Utils.toJsonArray());
    model.addAttribute("contests", contests);
    return "applications-admissions/home";
}

From source file:aiai.ai.launchpad.snippet.SnippetService.java

The simplest form: sorting a list by a single property via a method reference.

public void sortSnippetsByType(List<ExperimentSnippet> snippets) {
    snippets.sort(Comparator.comparing(ExperimentSnippet::getType));
}

From source file:com.evolveum.midpoint.task.quartzimpl.work.workers.WorkersManager.java

Mapping each task to an integer rank inside the key extractor, so workers sort by execution state; the boxed Integer returned by the lambda serves as the Comparable sort key.

public void reconcileWorkers(String coordinatorTaskOid, WorkersReconciliationOptions options,
        OperationResult result) throws SchemaException, ObjectNotFoundException, ObjectAlreadyExistsException {
    Task coordinatorTask = taskManager.getTask(coordinatorTaskOid, result);
    if (coordinatorTask.getKind() != TaskKindType.COORDINATOR) {
        throw new IllegalArgumentException("Task is not a coordinator task: " + coordinatorTask);
    }
    List<Task> currentWorkers = new ArrayList<>(coordinatorTask.listSubtasks(true, result));
    Map<WorkerKey, WorkerTasksPerNodeConfigurationType> perNodeConfigurationMap = new HashMap<>();
    MultiValuedMap<String, WorkerKey> shouldBeWorkers = createWorkerKeys(coordinatorTask,
            perNodeConfigurationMap, result);

    int startingWorkersCount = currentWorkers.size();
    int startingShouldBeWorkersCount = shouldBeWorkers.size();

    currentWorkers.sort(Comparator.comparing(t -> {
        if (t.getExecutionStatus() == TaskExecutionStatus.RUNNABLE) {
            if (t.getNodeAsObserved() != null) {
                return 0;
            } else if (t.getNode() != null) {
                return 1;
            } else {
                return 2;
            }
        } else if (t.getExecutionStatus() == TaskExecutionStatus.SUSPENDED) {
            return 3;
        } else if (t.getExecutionStatus() == TaskExecutionStatus.CLOSED) {
            return 4;
        } else {
            return 5;
        }
    }));

    LOGGER.trace("Before reconciliation:\nCurrent workers: {}\nShould be workers: {}", currentWorkers,
            shouldBeWorkers);

    int matched = matchWorkers(currentWorkers, shouldBeWorkers);
    int renamed = renameWorkers(currentWorkers, shouldBeWorkers, result);
    int closedExecuting = closeExecutingWorkers(currentWorkers, result);
    MovedClosed movedClosed = moveWorkers(currentWorkers, shouldBeWorkers, result);
    int created = createWorkers(coordinatorTask, shouldBeWorkers, perNodeConfigurationMap, result);

    TaskWorkStateType workState = coordinatorTask.getWorkState();
    Integer closedBecauseDone = null;
    if (isCloseWorkersOnWorkDone(options) && workState != null
            && Boolean.TRUE.equals(workState.isAllWorkComplete())) {
        closedBecauseDone = closeAllWorkers(coordinatorTask, result);
    }
    result.recordStatus(OperationResultStatus.SUCCESS,
            "Worker reconciliation finished. " + "Original workers: " + startingWorkersCount + ", should be: "
                    + startingShouldBeWorkersCount + ", matched: " + matched + ", renamed: " + renamed
                    + ", closed because executing: " + closedExecuting + ", moved: " + movedClosed.moved
                    + ", closed because superfluous: " + movedClosed.closed + ", created: " + created
                    + " worker task(s)."
                    + (closedBecauseDone != null && closedBecauseDone > 0
                            ? " Closed " + closedBecauseDone + " workers because the work is done."
                            : ""));
}

From source file:org.polymap.rhei.batik.engine.PageStackLayout.java

Sorting pages from highest to lowest priority by extracting an inverted key (Integer.MAX_VALUE - page.priority).

@Override
protected void layout(Composite composite, boolean flushCache) {
    assert pageStack == composite;

    if (pageStack.getPages().isEmpty()) {
        return;
    }

    Rectangle clientArea = pageStack.getClientArea();
    log.debug("layout(): clientArea=" + clientArea);

    Rectangle displayArea = composite.getDisplay().getBounds();
    if (clientArea.width > displayArea.width) {
        log.debug("Invalid client area: " + clientArea + ", display width: " + displayArea.width
                + ", flushCache: " + flushCache);
        return;
    }

    if (!flushCache && clientArea.equals(cachedClientArea)) {
        log.info("Ignoring cachedClientArea: " + cachedClientArea + ", flushCache: " + flushCache);
        return;
    }
    cachedClientArea = clientArea;

    pageStack.preUpdateLayout();

    // available size
    int availWidth = clientArea.width - margins.getMarginLeft() - margins.getMarginRight();

    // top down sorted pages
    Collection<Page> pages = pageStack.getPages();
    List<Page> topDown = pages.stream().sorted(Comparator.comparing(page -> Integer.MAX_VALUE - page.priority))
            .collect(Collectors.toList());

    // 1: minimum width: max pages visible
    int filledWidth = 0;
    boolean pageVisible = true;
    for (Page page : topDown) {
        page.isShown = false;

        if (pageVisible) {
            // right most: always displayed; preferred width
            if (filledWidth == 0) {
                int pageWidth = min(page.size.preferred(), availWidth);
                page.bounds = new Rectangle(0, 0, pageWidth, clientArea.height);
                page.isShown = true;
                filledWidth = pageWidth;
            }
            // others: min width
            else {
                int pageWidth = min(page.size.min(), availWidth);
                if (filledWidth + pageWidth + margins.getSpacing() <= availWidth) {
                    page.bounds = new Rectangle(0, 0, pageWidth, clientArea.height);
                    page.isShown = true;
                    filledWidth += pageWidth + margins.getSpacing();
                } else {
                    pageVisible = false;
                }
            }
        }
    }

    // 2: preferred width: distribute remaining (actually set bounds)
    int panelX = availWidth - margins.getMarginRight();
    int remainWidth = availWidth - filledWidth;
    for (Page page : topDown) {
        page.control.setVisible(page.isShown);
        if (page.isShown) {
            // does page want more width?
            int preferred = min(page.size.preferred(), page.bounds.width + remainWidth);
            if (preferred > page.bounds.width) {
                remainWidth -= (preferred - page.bounds.width);
                page.bounds.width = preferred;
            }
            page.bounds.x = panelX - page.bounds.width;
            page.control.setBounds(page.bounds);
            //                page.control.layout( true /*flushCache*/ );

            panelX -= page.bounds.width + margins.getSpacing();
        }
    }

    // 3: maximum: still space remaining
    if (remainWidth > 0) {
        panelX = availWidth - margins.getMarginRight();
        for (Page page : topDown) {
            if (page.isShown) {
                // does page want more width?
                int max = min(page.size.max(), page.bounds.width + remainWidth);
                if (max > page.bounds.width) {
                    remainWidth -= (max - page.bounds.width);
                    page.bounds.width = max;
                }
                page.bounds.x = panelX - page.bounds.width;
                page.control.setBounds(page.bounds);

                panelX -= page.bounds.width + margins.getSpacing();
            }
        }
    }

    // 4: over maximum: distribute remaining over all pages
    if (remainWidth > 0) {
        long delta = remainWidth / topDown.stream().filter(p -> p.isShown).count();
        panelX = availWidth - margins.getMarginRight();
        for (Page page : topDown) {
            if (page.isShown) {
                page.bounds.width += delta;

                page.bounds.x = panelX - page.bounds.width;
                page.control.setBounds(page.bounds);

                panelX -= page.bounds.width + margins.getSpacing();
            }
        }
    }

    //
    log.debug("available: " + availWidth);
    for (Page page : topDown) {
        if (page.isShown) {
            log.debug("    panel width: " + page.bounds.width);
        } else {
            log.debug("    panel: invisible");
        }
    }

    pageStack.postUpdateLayout();
}

From source file:org.tightblog.service.ThemeManager.java

Alphabetizing a list of themes by name with a method reference.

@PostConstruct
public void initialize() {
    String blogThemePath = "/blogthemes";

    Set<String> paths = servletContext.getResourcePaths(blogThemePath);

    if (paths != null && paths.size() > 0) {
        log.info("{} shared blog themes detected, loading...", paths.size());
        for (String path : paths) {
            try {
                SharedTheme theme = loadThemeData(path);
                themeMap.put(theme.getId(), theme);
            } catch (Exception unexpected) {
                // shouldn't happen, so let's learn why it did
                log.error("Exception processing theme {}, will be skipped", path, unexpected);
            }
        }

        // for convenience create an alphabetized list also
        themeList = new ArrayList<>(this.themeMap.values());
        themeList.sort(Comparator.comparing(SharedTheme::getName));
        log.info("Successfully loaded {} shared blog themes.", themeList.size());
    } else {
        log.info("No shared blog themes detected at path {}, none will be loaded", blogThemePath);
    }
}

From source file:com.simiacryptus.mindseye.applications.ObjectLocationBase.java

Ranking category indices by descending classification score by negating the extracted key.

/**
 * Run.
 *
 * @param log the log
 */
public void run(@Nonnull final NotebookOutput log) {
    //    @Nonnull String logName = "cuda_" + log.getName() + ".log";
    //    log.p(log.file((String) null, logName, "GPU Log"));
    //    CudaSystem.addLog(new PrintStream(log.file(logName)));

    ImageClassifierBase classifier = getClassifierNetwork();
    Layer classifyNetwork = classifier.getNetwork();

    ImageClassifierBase locator = getLocatorNetwork();
    Layer locatorNetwork = locator.getNetwork();
    ArtistryUtil.setPrecision((DAGNetwork) classifyNetwork, Precision.Float);
    ArtistryUtil.setPrecision((DAGNetwork) locatorNetwork, Precision.Float);

    Tensor[][] inputData = loadImages_library();
    //    Tensor[][] inputData = loadImage_Caltech101(log);
    double alphaPower = 0.8;

    final AtomicInteger index = new AtomicInteger(0);
    Arrays.stream(inputData).limit(10).forEach(row -> {
        log.h3("Image " + index.getAndIncrement());
        final Tensor img = row[0];
        log.p(log.image(img.toImage(), ""));
        Result classifyResult = classifyNetwork.eval(new MutableResult(row));
        Result locationResult = locatorNetwork.eval(new MutableResult(row));
        Tensor classification = classifyResult.getData().get(0);
        List<CharSequence> categories = classifier.getCategories();
        int[] sortedIndices = IntStream.range(0, categories.size()).mapToObj(x -> x)
                .sorted(Comparator.comparing(i -> -classification.get(i))).mapToInt(x -> x).limit(10).toArray();
        logger.info(Arrays.stream(sortedIndices)
                .mapToObj(
                        i -> String.format("%s: %s = %s%%", i, categories.get(i), classification.get(i) * 100))
                .reduce((a, b) -> a + "\n" + b).orElse(""));
        LinkedHashMap<CharSequence, Tensor> vectors = new LinkedHashMap<>();
        List<CharSequence> predictionList = Arrays.stream(sortedIndices).mapToObj(categories::get)
                .collect(Collectors.toList());
        Arrays.stream(sortedIndices).limit(6).forEach(category -> {
            CharSequence name = categories.get(category);
            log.h3(name);
            Tensor alphaTensor = renderAlpha(alphaPower, img, locationResult, classification, category);
            log.p(log.image(img.toRgbImageAlphaMask(0, 1, 2, alphaTensor), ""));
            vectors.put(name, alphaTensor.unit());
        });

        Tensor avgDetection = vectors.values().stream().reduce((a, b) -> a.add(b)).get()
                .scale(1.0 / vectors.size());
        Array2DRowRealMatrix covarianceMatrix = new Array2DRowRealMatrix(predictionList.size(),
                predictionList.size());
        for (int x = 0; x < predictionList.size(); x++) {
            for (int y = 0; y < predictionList.size(); y++) {
                Tensor l = vectors.get(predictionList.get(x));
                Tensor r = vectors.get(predictionList.get(y));

                covarianceMatrix.setEntry(x, y,
                        null == l || null == r ? 0 : (l.minus(avgDetection)).dot(r.minus(avgDetection)));
            }
        }
        @Nonnull
        final EigenDecomposition decomposition = new EigenDecomposition(covarianceMatrix);

        for (int objectVector = 0; objectVector < 10; objectVector++) {
            log.h3("Eigenobject " + objectVector);
            double eigenvalue = decomposition.getRealEigenvalue(objectVector);
            RealVector eigenvector = decomposition.getEigenvector(objectVector);
            Tensor detectionRegion = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                Tensor tensor = vectors.get(predictionList.get(i));
                return null == tensor ? null : tensor.scale(eigenvector.getEntry(i));
            }).filter(x -> null != x).reduce((a, b) -> a.add(b)).get();
            detectionRegion = detectionRegion.scale(255.0 / detectionRegion.rms());
            CharSequence categorization = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                CharSequence category = predictionList.get(i);
                double component = eigenvector.getEntry(i);
                return String.format("<li>%s = %.4f</li>", category, component);
            }).reduce((a, b) -> a + "" + b).get();
            log.p(String.format("Object Detected: <ol>%s</ol>", categorization));
            log.p("Object Eigenvalue: " + eigenvalue);
            log.p("Object Region: " + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion), ""));
            log.p("Object Region Compliment: "
                    + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion.scale(-1)), ""));
        }

        //      final int[] orderedVectors = IntStream.range(0, 10).mapToObj(x -> x)
        //        .sorted(Comparator.comparing(x -> -decomposition.getRealEigenvalue(x))).mapToInt(x -> x).toArray();
        //      IntStream.range(0, orderedVectors.length)
        //        .mapToObj(i -> {
        //            //double realEigenvalue = decomposition.getRealEigenvalue(orderedVectors[i]);
        //            return decomposition.getEigenvector(orderedVectors[i]).toArray();
        //          }
        //        ).toArray(i -> new double[i][]);

        log.p(String.format(
                "<table><tr><th>Cosine Distance</th>%s</tr>%s</table>", Arrays.stream(sortedIndices).limit(10)
                        .mapToObj(col -> "<th>" + categories.get(col) + "</th>").reduce((a, b) -> a + b).get(),
                Arrays.stream(sortedIndices).limit(10).mapToObj(r -> {
                    return String.format("<tr><td>%s</td>%s</tr>", categories.get(r),
                            Arrays.stream(sortedIndices).limit(10).mapToObj(col -> {
                                Tensor l = vectors.get(categories.get(r));
                                Tensor r2 = vectors.get(categories.get(col));
                                return String.format("<td>%.4f</td>",
                                        (null == l || null == r2) ? 0 : Math.acos(l.dot(r2)));
                            }).reduce((a, b) -> a + b).get());
                }).reduce((a, b) -> a + b).orElse("")));
    });

    log.setFrontMatterProperty("status", "OK");
}

From source file:org.apache.geode.management.internal.cli.util.LogExporterTest.java

Sorting exported files by name so that the test's assertions see a deterministic order.

@Test
public void exportBuildsZipCorrectlyWithTwoLogFiles() throws Exception {
    File logFile1 = new File(workingDir, "server1.log");
    FileUtils.writeStringToFile(logFile1, "some log for server1 \n some other log line");
    File logFile2 = new File(workingDir, "server2.log");
    FileUtils.writeStringToFile(logFile2, "some log for server2 \n some other log line");

    File notALogFile = new File(workingDir, "foo.txt");
    FileUtils.writeStringToFile(notALogFile, "some text");

    Path zippedExport = logExporter.export();

    File unzippedExportDir = temporaryFolder.newFolder("unzippedExport");
    ZipUtils.unzip(zippedExport.toString(), unzippedExportDir.getCanonicalPath());

    assertThat(unzippedExportDir.listFiles()).hasSize(2);
    List<File> exportedFiles = Stream.of(unzippedExportDir.listFiles())
            .sorted(Comparator.comparing(File::getName)).collect(toList());

    assertThat(exportedFiles.get(0)).hasSameContentAs(logFile1);
    assertThat(exportedFiles.get(1)).hasSameContentAs(logFile2);
}

From source file:org.apache.nifi.minifi.c2.cache.s3.S3CacheFileInfoImpl.java

Sorting version/object pairs by version, descending; the cast inside the key extractor is needed because the chained reversed() call prevents type inference for the lambda parameter.

@Override
public Stream<WriteableConfiguration> getCachedConfigurations() throws IOException {

    Iterable<S3ObjectSummary> objectSummaries = S3Objects.withPrefix(s3, bucket, prefix);
    Stream<S3ObjectSummary> objectStream = StreamSupport.stream(objectSummaries.spliterator(), false);

    return objectStream.map(p -> {
        Integer version = getVersionIfMatch(p.getKey());
        if (version == null) {
            return null;
        }
        return new Pair<>(version, p);
    }).filter(Objects::nonNull)
            .sorted(Comparator.comparing(pair -> ((Pair<Integer, S3ObjectSummary>) pair).getFirst()).reversed())
            .map(pair -> new S3WritableConfiguration(s3, pair.getSecond(), Integer.toString(pair.getFirst())));

}

From source file:alfio.manager.system.ConfigurationManager.java

Selecting the most specific configuration by sorting on path level in descending order and taking the first element.

/**
 * Select the most "precise" configuration in the given list.
 *
 * @param conf the configurations to choose from
 * @return the configuration with the most specific (highest) path level, or {@code null} if the list is empty
 */
private Configuration selectPath(List<Configuration> conf) {
    return conf.size() == 1 ? conf.get(0)
            : conf.stream().sorted(Comparator.comparing(Configuration::getConfigurationPathLevel).reversed())
                    .findFirst().orElse(null);
}