Example usage for java.util.Comparator.comparing

List of usage examples for java.util.Comparator.comparing

Introduction

On this page you can find example usage of java.util.Comparator.comparing.

Prototype

public static <T, U extends Comparable<? super U>> Comparator<T> comparing(
        Function<? super T, ? extends U> keyExtractor) 

Document

Accepts a function that extracts a java.lang.Comparable sort key from a type T, and returns a Comparator<T> that compares by that sort key.
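
Before the real-world examples below, here is a minimal, self-contained sketch of the basic pattern; the Person class and the sample data are hypothetical and exist only for this illustration.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class ComparingBasics {

    // Hypothetical value class, present only for this sketch.
    static class Person {
        private final String name;

        Person(String name) {
            this.name = name;
        }

        String getName() {
            return name;
        }
    }

    public static void main(String[] args) {
        List<Person> people = Arrays.asList(new Person("Carol"), new Person("Alice"), new Person("Bob"));

        // comparing(Person::getName) extracts the String key and orders by its natural order.
        people.sort(Comparator.comparing(Person::getName));

        people.forEach(p -> System.out.println(p.getName())); // Alice, Bob, Carol
    }
}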

Usage

From source file:com.webtide.jetty.load.generator.jenkins.LoadGeneratorProjectAction.java

public void doGcUsage(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {

    List<GcUsage> gcUsages = new ArrayList<>();

    for (Run run : this.builds) {
        LoadGeneratorBuildAction buildAction = run.getAction(LoadGeneratorBuildAction.class);
        if (buildAction != null) {
            Map<String, Object> monitoringResultMap = buildAction.getMonitoringResultMap();
            if (monitoringResultMap == null) {
                continue;
            }
            GcUsage gcUsage = new GcUsage();
            Map<String, Object> resultsMap = (Map) monitoringResultMap.get("results");

            if (resultsMap == null) {
                continue;
            }

            Map<String, Object> gcResult = (Map) resultsMap.get("gc");

            gcUsage.youngCount = ObjectUtils.toString(gcResult.get("youngCount"));

            Map<String, Object> youngTime = (Map) gcResult.get("youngTime");
            if (youngTime != null) {
                gcUsage.youngTime = ObjectUtils.toString(youngTime.get("value"));
            }
            gcUsage.oldCount = ObjectUtils.toString(gcResult.get("oldCount"));
            Map<String, Object> oldTime = (Map) gcResult.get("oldTime");
            if (oldTime != null) {
                gcUsage.oldTime = ObjectUtils.toString(oldTime.get("value"));
            }

            Map<String, Object> youngGarbage = (Map) gcResult.get("youngGarbage");
            if (youngGarbage != null) {
                gcUsage.youngGarbage = ObjectUtils.toString(youngGarbage.get("value"));
            }

            Map<String, Object> oldGarbage = (Map) gcResult.get("oldGarbage");
            if (oldGarbage != null) {
                gcUsage.oldGarbage = ObjectUtils.toString(oldGarbage.get("value"));
            }

            gcUsages.add(gcUsage);
        }
    }

    Collections.sort(gcUsages, Comparator.comparing(GcUsage::getBuildId));

    StringWriter stringWriter = new StringWriter();

    new ObjectMapper().writeValue(stringWriter, gcUsages);

    rsp.getWriter().write(stringWriter.toString());
}

From source file:org.ambraproject.rhino.service.taxonomy.impl.TaxonomyClassificationServiceImpl.java

/**
 * Determine the most heavily weighted leaf nodes, then return all terms that have one of those leaf nodes.
 * <p>
 * The returned list is in descending order by weight. The order of terms with equal weight is stably preserved from
 * the input list.
 *
 * @param leafCount     the number of distinct leaf nodes to search for
 * @param weightedTerms all weighted category terms on an article
 * @return a list, in descending order by weight, of all terms whose leaf node is among the most heavily weighted
 */
@VisibleForTesting
static List<WeightedTerm> getDistinctLeafNodes(int leafCount, List<WeightedTerm> weightedTerms) {
    List<WeightedTerm> orderedTerms = weightedTerms.stream()
            .sorted(Comparator.comparing(WeightedTerm::getWeight).reversed()).collect(Collectors.toList());
    Set<String> mostWeightedLeaves = orderedTerms.stream().map(WeightedTerm::getLeafTerm).distinct()
            .limit(leafCount).collect(Collectors.toSet());
    return orderedTerms.stream().filter(term -> mostWeightedLeaves.contains(term.getLeafTerm()))
            .collect(Collectors.toList());
}
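
The descending sort above is the common comparing(...).reversed() idiom. A stripped-down sketch of the same idiom on plain strings (the data is made up for illustration); note that Stream.sorted is stable for ordered streams, which is what preserves the input order of equal-weight terms in the method above.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class ReversedComparingSketch {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("pear", "fig", "banana");

        // comparing(String::length) sorts ascending by length; reversed() flips it to descending.
        List<String> byLengthDesc = words.stream()
                .sorted(Comparator.comparing(String::length).reversed())
                .collect(Collectors.toList());

        System.out.println(byLengthDesc); // [banana, pear, fig]
    }
}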

From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.binary.JoinEmbeddingsNodeTest.java

@Test
public void testExecute() throws Exception {
    GradoopId a = GradoopId.get();
    GradoopId b = GradoopId.get();
    GradoopId c = GradoopId.get();
    GradoopId d = GradoopId.get();
    GradoopId e = GradoopId.get();
    GradoopId f = GradoopId.get();

    EmbeddingMetaData leftInputMetaData = new EmbeddingMetaData();
    leftInputMetaData.setEntryColumn("v1", EntryType.VERTEX, 0);
    leftInputMetaData.setPropertyColumn("v1", "age", 0);

    Embedding embedding1 = createEmbedding(singletonList(Pair.of(a, singletonList(42))));
    Embedding embedding2 = createEmbedding(singletonList(Pair.of(b, singletonList(23))));

    DataSet<Embedding> leftEmbeddings = getExecutionEnvironment().fromElements(embedding1, embedding2);

    /*
     * ----------------------------------
     * |  v1   | e1    | v2    | v2.age |
     * ----------------------------------
     * | id(a) | id(c) | id(e) |  84    | -> Embedding 3
     * ----------------------------------
     * | id(b) | id(d) | id(f) |  77    | -> Embedding 4
     * ----------------------------------
     */
    EmbeddingMetaData rightInputMetaData = new EmbeddingMetaData();
    rightInputMetaData.setEntryColumn("v1", EntryType.VERTEX, 0);
    rightInputMetaData.setEntryColumn("e1", EntryType.EDGE, 1);
    rightInputMetaData.setEntryColumn("v2", EntryType.VERTEX, 2);
    rightInputMetaData.setPropertyColumn("v2", "age", 0);

    Embedding embedding3 = createEmbedding(
            asList(Pair.of(a, emptyList()), Pair.of(c, emptyList()), Pair.of(e, singletonList(84))));
    Embedding embedding4 = createEmbedding(
            asList(Pair.of(b, emptyList()), Pair.of(d, emptyList()), Pair.of(f, singletonList(77))));

    DataSet<Embedding> rightEmbeddings = getExecutionEnvironment().fromElements(embedding3, embedding4);

    MockPlanNode leftChild = new MockPlanNode(leftEmbeddings, leftInputMetaData);
    MockPlanNode rightChild = new MockPlanNode(rightEmbeddings, rightInputMetaData);

    JoinEmbeddingsNode node = new JoinEmbeddingsNode(leftChild, rightChild, singletonList("v1"),
            MatchStrategy.ISOMORPHISM, MatchStrategy.ISOMORPHISM);

    List<Embedding> result = node.execute().collect();
    result.sort(Comparator.comparing(o -> o.getProperty(0))); // sort by property value in column 0

    assertThat(result.size(), is(2));

    assertEmbedding(result.get(0), asList(b, d, f), asList(PropertyValue.create(23), PropertyValue.create(77)));
    assertEmbedding(result.get(1), asList(a, c, e), asList(PropertyValue.create(42), PropertyValue.create(84)));
}

From source file:org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart.java

/**
 * Iterate through the list of tick-marks building a two level structure of
 * replacement tick mark labels. (Visually) upper level has most
 * detailed/highest frequency part of date/time (specific label). Second
 * level has rest of date/time grouped by unchanging part (contextual
 * label).
 *
 * eg:
 *
 * October-30_October-31_September-01_September-02_September-03
 *
 * becomes:
 *
 * _________30_________31___________01___________02___________03
 *
 * _________October___________|_____________September___________
 *
 */
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
protected synchronized void layoutDateLabels() {
    //clear old labels
    contextLabelPane.getChildren().clear();
    specificLabelPane.getChildren().clear();
    //since the tickmarks aren't necessarily in value/position order,
    //make a copy of the list sorted by position along axis
    SortedList<Axis.TickMark<X>> tickMarks = getXAxis().getTickMarks()
            .sorted(Comparator.comparing(Axis.TickMark::getPosition));

    if (tickMarks.isEmpty()) {
        /*
         * Since StackedBarChart does some funky animation/background thread
         * stuff, sometimes there are no tick marks even though there is
         * data. Dispatching another call to layoutDateLabels() allows that
         * stuff time to run before we check again.
         */
        Platform.runLater(this::layoutDateLabels);
    } else {
        //get the spacing between ticks in the underlying axis
        double spacing = getTickSpacing();

        //initialize values from first tick
        TwoPartDateTime dateTime = new TwoPartDateTime(getTickMarkLabel(tickMarks.get(0).getValue()));
        String lastSeenContextLabel = dateTime.context;

        //x-positions (pixels) of the current branch and leaf labels
        double specificLabelX = 0;

        if (dateTime.context.isEmpty()) {
            //if there is only one part to the date (ie only year), just add a label for each tick
            for (Axis.TickMark<X> t : tickMarks) {
                addSpecificLabel(new TwoPartDateTime(getTickMarkLabel(t.getValue())).specifics, spacing,
                        specificLabelX, isTickBold(t.getValue()));
                specificLabelX += spacing; //increment x
            }
        } else {
            //there are two parts so ...
            //initialize additional state
            double contextLabelX = 0;
            double contextLabelWidth = 0;

            for (Axis.TickMark<X> t : tickMarks) {
                //split the label into a TwoPartDateTime
                dateTime = new TwoPartDateTime(getTickMarkLabel(t.getValue()));

                //if we are still in the same context
                if (lastSeenContextLabel.equals(dateTime.context)) {
                    //increment context width
                    contextLabelWidth += spacing;
                } else {// we are on to a new context, so ...
                    addContextLabel(lastSeenContextLabel, contextLabelWidth, contextLabelX);
                    //and then update label, x-pos, and width
                    lastSeenContextLabel = dateTime.context;
                    contextLabelX += contextLabelWidth;
                    contextLabelWidth = spacing;
                }
                //add the specific label (highest frequency part)
                addSpecificLabel(dateTime.specifics, spacing, specificLabelX, isTickBold(t.getValue()));

                //increment specific position
                specificLabelX += spacing;
            }
            //we have reached end so add label for current context
            addContextLabel(lastSeenContextLabel, contextLabelWidth, contextLabelX);
        }
    }
    //request layout since we have modified scene graph structure
    requestParentLayout();
}
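
The comparator above orders tick marks by their position along the axis, a double-valued key that comparing auto-boxes. When the sort key is a primitive, the sibling factories Comparator.comparingDouble, comparingInt, and comparingLong avoid the boxing. A small sketch on made-up data (the Mark class is hypothetical):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class PrimitiveKeySketch {

    // Hypothetical stand-in for an object positioned along an axis.
    static class Mark {
        final double position;

        Mark(double position) {
            this.position = position;
        }

        double getPosition() {
            return position;
        }
    }

    public static void main(String[] args) {
        List<Mark> marks = Arrays.asList(new Mark(0.8), new Mark(0.1), new Mark(0.5));

        // Equivalent orderings; comparingDouble compares the primitive key without boxing.
        marks.sort(Comparator.comparing(Mark::getPosition));        // boxes each key to Double
        marks.sort(Comparator.comparingDouble(Mark::getPosition));  // no boxing

        marks.forEach(m -> System.out.println(m.getPosition())); // 0.1, 0.5, 0.8
    }
}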

From source file:org.cgiar.ccafs.marlo.action.powb.FlagshipPlansAction.java

@Override
public void prepare() throws Exception {
    // Get current CRP
    loggedCrp = (GlobalUnit) this.getSession().get(APConstants.SESSION_CRP);
    loggedCrp = crpManager.getGlobalUnitById(loggedCrp.getId());
    // Check history version
    if (this.getRequest().getParameter(APConstants.TRANSACTION_ID) != null) {
        this.setPowbSynthesisIdHistory();
    } else {
        this.setPowbSynthesisParameters();
    }
    // Validate draft version
    if (powbSynthesis != null) {
        Path path = this.getAutoSaveFilePath();
        if (path.toFile().exists() && this.getCurrentUser().isAutoSave()) {
            this.readJsonAndLoadPowbSynthesis(path);
        } else {
            this.setDraft(false);
            this.createEmptyFlagshipPlan();
        }
    }

    // Get the list of liaison institutions Flagships and PMU.
    liaisonInstitutions = loggedCrp.getLiaisonInstitutions().stream()
            .filter(c -> c.getCrpProgram() != null
                    && c.getCrpProgram().getProgramType() == ProgramType.FLAGSHIP_PROGRAM_TYPE.getValue()
                    && c.isActive())
            .collect(Collectors.toList());
    liaisonInstitutions.addAll(loggedCrp.getLiaisonInstitutions().stream()
            .filter(c -> c.getCrpProgram() == null && c.getAcronym().equals("PMU") && c.isActive())
            .collect(Collectors.toList()));
    liaisonInstitutions.sort(Comparator.comparing(LiaisonInstitution::getAcronym));
    // Base Permission
    String params[] = { loggedCrp.getAcronym(), powbSynthesis.getId() + "" };
    this.setBasePermission(this.getText(Permission.POWB_SYNTHESIS_FLAGSHIPPLANS_BASE_PERMISSION, params));

    if (this.isHttpPost()) {
        if (powbSynthesis.getPowbFlagshipPlans() != null) {
            powbSynthesis.getPowbFlagshipPlans().setFlagshipProgramFile(null);
        }
    }
}
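
The single-argument overload used here orders acronyms by the natural, case-sensitive String order. There is also a two-argument Comparator.comparing(keyExtractor, keyComparator) overload that applies a supplied comparator to the extracted key; a sketch with a hypothetical Unit class and made-up acronyms:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class KeyComparatorSketch {

    // Hypothetical stand-in for an institution with an acronym; not part of the code base above.
    static class Unit {
        final String acronym;

        Unit(String acronym) {
            this.acronym = acronym;
        }

        String getAcronym() {
            return acronym;
        }
    }

    public static void main(String[] args) {
        List<Unit> units = Arrays.asList(new Unit("pmu"), new Unit("FP2"), new Unit("fp1"));

        // Single-argument overload: natural (case-sensitive) String order of the key.
        units.sort(Comparator.comparing(Unit::getAcronym));
        units.forEach(u -> System.out.print(u.getAcronym() + " ")); // FP2 fp1 pmu
        System.out.println();

        // Two-argument overload: the extracted key is compared with the supplied comparator.
        units.sort(Comparator.comparing(Unit::getAcronym, String.CASE_INSENSITIVE_ORDER));
        units.forEach(u -> System.out.print(u.getAcronym() + " ")); // fp1 FP2 pmu
        System.out.println();
    }
}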

From source file:org.dita.dost.ant.PluginInstallTask.java

private Optional<Registry> findPlugin(final Collection<Registry> regs, final SemVerMatch version) {
    if (version == null) {
        return regs.stream().filter(this::matchingPlatformVersion).max(Comparator.comparing(o -> o.vers));
    } else {
        return regs.stream().filter(this::matchingPlatformVersion).filter(reg -> version.contains(reg.vers))
                .findFirst();
    }
}
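
Stream.max (like min) takes a Comparator and returns an Optional, so it pairs naturally with comparing. A self-contained sketch with made-up version strings, comparing only by the parsed major version for illustration:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;

public class MaxByKeySketch {
    public static void main(String[] args) {
        List<String> versions = Arrays.asList("1.0", "10.2", "2.5");

        // Natural String order would put "10.2" before "2.5"; comparing by a numeric key
        // (here just the major version) gives the intended maximum.
        Optional<String> newest = versions.stream()
                .max(Comparator.comparing(v -> Integer.parseInt(v.split("\\.")[0])));

        System.out.println(newest.orElse("none")); // 10.2
    }
}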

From source file:alfio.manager.WaitingQueueManager.java

private void preReserveTickets(Event event, int ticketsNeeded, int eventId, int alreadyReserved) {
    final int toBeGenerated = Math.abs(alreadyReserved - ticketsNeeded);
    EventStatisticView eventStatisticView = eventRepository.findStatisticsFor(eventId);
    Map<Integer, TicketCategoryStatisticView> ticketCategoriesStats = ticketCategoryRepository
            .findStatisticsForEventIdByCategoryId(eventId);
    List<Pair<Integer, TicketCategoryStatisticView>> collectedTickets = ticketCategoryRepository
            .findAllTicketCategories(eventId).stream().filter(tc -> !tc.isAccessRestricted())
            .sorted(Comparator.comparing(t -> t.getExpiration(event.getZoneId())))
            .map(tc -> Pair.of(
                    determineAvailableSeats(ticketCategoriesStats.get(tc.getId()), eventStatisticView),
                    ticketCategoriesStats.get(tc.getId())))
            .collect(new PreReservedTicketDistributor(toBeGenerated));
    MapSqlParameterSource[] candidates = collectedTickets.stream()
            .flatMap(p -> selectTicketsForPreReservation(eventId, p).stream())
            .map(id -> new MapSqlParameterSource().addValue("id", id)).toArray(MapSqlParameterSource[]::new);
    jdbc.batchUpdate(ticketRepository.preReserveTicket(), candidates);
}

From source file:org.gradoop.flink.model.impl.operators.matching.single.cypher.planning.queryplan.binary.ValueJoinNodeTest.java

@Test
public void testExecute() throws Exception {
    GradoopId a = GradoopId.get();
    GradoopId b = GradoopId.get();
    GradoopId c = GradoopId.get();
    GradoopId d = GradoopId.get();
    GradoopId e = GradoopId.get();
    GradoopId f = GradoopId.get();
    GradoopId g = GradoopId.get();
    GradoopId h = GradoopId.get();

    EmbeddingMetaData leftInputMetaData = new EmbeddingMetaData();
    leftInputMetaData.setEntryColumn("v1", EntryType.VERTEX, 0);
    leftInputMetaData.setPropertyColumn("v1", "age", 0);

    Embedding embedding1 = createEmbedding(singletonList(Pair.of(a, singletonList(42))));
    Embedding embedding2 = createEmbedding(singletonList(Pair.of(b, singletonList(21))));

    DataSet<Embedding> leftEmbeddings = getExecutionEnvironment().fromElements(embedding1, embedding2);

    /*
     * ----------------------------------
     * |  v2   | e1    | v3    | v3.age |
     * ----------------------------------
     * | id(c) | id(d) | id(e) |  42    | -> Embedding 3
     * ----------------------------------
     * | id(f) | id(g) | id(h) |  21    | -> Embedding 4
     * ----------------------------------
     */
    EmbeddingMetaData rightInputMetaData = new EmbeddingMetaData();
    rightInputMetaData.setEntryColumn("v2", EntryType.VERTEX, 0);
    rightInputMetaData.setEntryColumn("e1", EntryType.EDGE, 1);
    rightInputMetaData.setEntryColumn("v3", EntryType.VERTEX, 2);
    rightInputMetaData.setPropertyColumn("v3", "age", 0);

    Embedding embedding3 = createEmbedding(
            asList(Pair.of(c, emptyList()), Pair.of(d, emptyList()), Pair.of(e, singletonList(42))));
    Embedding embedding4 = createEmbedding(
            asList(Pair.of(f, emptyList()), Pair.of(g, emptyList()), Pair.of(h, singletonList(21))));

    DataSet<Embedding> rightEmbeddings = getExecutionEnvironment().fromElements(embedding3, embedding4);

    MockPlanNode leftChild = new MockPlanNode(leftEmbeddings, leftInputMetaData);
    MockPlanNode rightChild = new MockPlanNode(rightEmbeddings, rightInputMetaData);

    ValueJoinNode node = new ValueJoinNode(leftChild, rightChild, singletonList(Pair.of("v1", "age")),
            singletonList(Pair.of("v3", "age")), MatchStrategy.ISOMORPHISM, MatchStrategy.ISOMORPHISM);

    List<Embedding> result = node.execute().collect();
    result.sort(Comparator.comparing(o -> o.getProperty(0))); // sort by property value in column 0

    assertThat(result.size(), is(2));

    assertEmbedding(result.get(0), asList(b, f, g, h),
            asList(PropertyValue.create(21), PropertyValue.create(21)));
    assertEmbedding(result.get(1), asList(a, c, d, e),
            asList(PropertyValue.create(42), PropertyValue.create(42)));
}

From source file:org.apache.sysml.hops.codegen.template.CPlanMemoTable.java

public MemoTableEntry getBest(long hopID) {
    List<MemoTableEntry> tmp = get(hopID);
    if (tmp == null || tmp.isEmpty())
        return null;

    //single plan per type, get plan w/ best rank in preferred order
    //but ensure that the plan is valid as a top-level plan
    return tmp.stream().filter(p -> p.isValid()).min(Comparator.comparing(p -> p.type.getRank())).orElse(null);
}
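
Stream.min works the same way with an extracted key. The implicitly typed lambda (p -> p.type.getRank()) compiles only because min(...) supplies a target type for inference; a comparator built standalone and then chained needs an explicit lambda parameter type (or explicit type arguments). A minimal sketch on made-up data:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class MinByKeySketch {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("banana", "fig", "pear");

        // Target typing from min(...) lets the lambda parameter stay implicit.
        String shortest = words.stream()
                .min(Comparator.comparing(w -> w.length()))
                .orElseThrow(IllegalStateException::new);
        System.out.println(shortest); // fig

        // Built standalone and then chained, the comparator needs an explicit parameter type
        // (or Comparator.<String, Integer>comparing(...)) for inference to succeed.
        Comparator<String> byLengthDesc = Comparator.comparing((String w) -> w.length()).reversed();
        System.out.println(words.stream().min(byLengthDesc).orElse("none")); // banana
    }
}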

From source file:org.hawkular.metrics.clients.ptrans.fullstack.CollectdITest.java

private List<Point> getServerData() throws Exception {
    ObjectMapper objectMapper = new ObjectMapper();

    HttpURLConnection urlConnection = (HttpURLConnection) new URL(findNumericMetricsUrl).openConnection();
    urlConnection.connect();
    int responseCode = urlConnection.getResponseCode();
    if (responseCode != HttpURLConnection.HTTP_OK) {
        String msg = "Could not get metrics list from server: %s, %d";
        fail(String.format(Locale.ROOT, msg, findNumericMetricsUrl, responseCode));
    }
    List<String> metricNames;
    try (InputStream inputStream = urlConnection.getInputStream()) {
        TypeFactory typeFactory = objectMapper.getTypeFactory();
        CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricName.class);
        List<MetricName> value = objectMapper.readValue(inputStream, valueType);
        metricNames = value.stream().map(MetricName::getId).collect(toList());
    }

    Stream<Point> points = Stream.empty();

    for (String metricName : metricNames) {
        String[] split = metricName.split("\\.");
        String type = split[split.length - 1];

        urlConnection = (HttpURLConnection) new URL(findNumericDataUrl(metricName)).openConnection();
        urlConnection.connect();
        responseCode = urlConnection.getResponseCode();
        if (responseCode != HttpURLConnection.HTTP_OK) {
            fail("Could not load metric data from server: " + responseCode);
        }

        try (InputStream inputStream = urlConnection.getInputStream()) {
            TypeFactory typeFactory = objectMapper.getTypeFactory();
            CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricData.class);
            List<MetricData> data = objectMapper.readValue(inputStream, valueType);
            Stream<Point> metricPoints = data.stream()
                    .map(metricData -> new Point(type, metricData.timestamp, metricData.value));
            points = Stream.concat(points, metricPoints);
        }
    }

    return points.sorted(Comparator.comparing(Point::getType).thenComparing(Point::getTimestamp))
            .collect(toList());
}