Example usage for java.util Comparator comparing

List of usage examples for java.util Comparator comparing

Introduction

On this page you can find example usages of java.util.Comparator.comparing.

Prototype

public static <T, U extends Comparable<? super U>> Comparator<T> comparing(
        Function<? super T, ? extends U> keyExtractor) 

Document

Accepts a function that extracts a java.lang.Comparable sort key from a type T, and returns a Comparator<T> that compares by that sort key.
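
Before the project examples, here is a minimal, self-contained sketch of the typical pattern (not taken from any of the projects below): extract a Comparable sort key with a method reference or lambda, and optionally chain reversed() for descending order, as several of the examples on this page do.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class ComparingExample {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("pear", "fig", "banana");

        // sort by a derived key (string length), ascending
        List<String> byLength = words.stream()
                .sorted(Comparator.comparing(String::length))
                .collect(Collectors.toList());
        System.out.println(byLength); // [fig, pear, banana]

        // same key, descending order via reversed()
        List<String> byLengthDesc = words.stream()
                .sorted(Comparator.comparing(String::length).reversed())
                .collect(Collectors.toList());
        System.out.println(byLengthDesc); // [banana, pear, fig]
    }
}

When the sort key is a primitive int, long, or double, the specialized Comparator.comparingInt, comparingLong, and comparingDouble variants avoid boxing.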

Usage

From source file:org.apache.sysml.hops.codegen.opt.ReachabilityGraph.java

public ReachabilityGraph(PlanPartition part, CPlanMemoTable memo) {
    //create repository of materialization points
    _matPoints = new HashMap<>();
    for (InterestingPoint p : part.getMatPointsExt())
        _matPoints.put(Pair.of(p._fromHopID, p._toHopID), new NodeLink(p));

    //create reachability graph
    _root = new NodeLink(null);
    HashSet<VisitMarkCost> visited = new HashSet<>();
    for (Long hopID : part.getRoots()) {
        Hop rootHop = memo.getHopRefs().get(hopID);
        addInputNodeLinks(rootHop, _root, part, memo, visited);
    }

    //create candidate cutsets 
    List<NodeLink> tmpCS = _matPoints.values().stream().filter(p -> p._inputs.size() > 0 && p._p != null)
            .sorted().collect(Collectors.toList());

    //short-cut for partitions without cutsets
    if (tmpCS.isEmpty()) {
        _cutSets = new CutSet[0];
        //sort materialization points in decreasing order of their sizes
        //which can improve the pruning efficiency by skipping larger sub-spaces.
        _searchSpace = sortBySize(part.getMatPointsExt(), memo, false);
        return;
    }

    //create composite cutsets 
    ArrayList<ArrayList<NodeLink>> candCS = new ArrayList<>();
    ArrayList<NodeLink> current = new ArrayList<>();
    for (NodeLink node : tmpCS) {
        if (current.isEmpty())
            current.add(node);
        else if (current.get(0).equals(node))
            current.add(node);
        else {
            candCS.add(current);
            current = new ArrayList<>();
            current.add(node);
        }
    }
    if (!current.isEmpty())
        candCS.add(current);

    //evaluate cutsets (single, and duplicate pairs)
    ArrayList<ArrayList<NodeLink>> remain = new ArrayList<>();
    ArrayList<Pair<CutSet, Double>> cutSets = evaluateCutSets(candCS, remain);
    if (!remain.isEmpty() && remain.size() < 5) {
        //second chance: for pairs for remaining candidates
        ArrayList<ArrayList<NodeLink>> candCS2 = new ArrayList<>();
        for (int i = 0; i < remain.size() - 1; i++)
            for (int j = i + 1; j < remain.size(); j++) {
                ArrayList<NodeLink> tmp = new ArrayList<>();
                tmp.addAll(remain.get(i));
                tmp.addAll(remain.get(j));
                candCS2.add(tmp);
            }
        ArrayList<Pair<CutSet, Double>> cutSets2 = evaluateCutSets(candCS2, remain);
        //ensure constructed cutsets are disjoint
        HashSet<InterestingPoint> testDisjoint = new HashSet<>();
        for (Pair<CutSet, Double> cs : cutSets2) {
            if (!CollectionUtils.containsAny(testDisjoint, Arrays.asList(cs.getLeft().cut))) {
                cutSets.add(cs);
                CollectionUtils.addAll(testDisjoint, cs.getLeft().cut);
            }
        }
    }

    //sort and linearize search space according to scores
    _cutSets = cutSets.stream().sorted(Comparator.comparing(p -> p.getRight())).map(p -> p.getLeft())
            .toArray(CutSet[]::new);

    //created sorted order of materialization points
    //(cut sets in predetermined order, other points sorted by size)
    HashMap<InterestingPoint, Integer> probe = new HashMap<>();
    ArrayList<InterestingPoint> lsearchSpace = new ArrayList<>();
    for (CutSet cs : _cutSets) {
        CollectionUtils.addAll(lsearchSpace, cs.cut);
        for (InterestingPoint p : cs.cut)
            probe.put(p, probe.size());
    }
    //sort materialization points in decreasing order of their sizes
    //which can improve the pruning efficiency by skipping larger sub-spaces.
    for (InterestingPoint p : sortBySize(part.getMatPointsExt(), memo, false))
        if (!probe.containsKey(p)) {
            lsearchSpace.add(p);
            probe.put(p, probe.size());
        }
    _searchSpace = lsearchSpace.toArray(new InterestingPoint[0]);

    //finalize cut sets (update positions wrt search space)
    for (CutSet cs : _cutSets)
        cs.updatePositions(probe);

    //final sanity check of interesting points
    if (_searchSpace.length != part.getMatPointsExt().length)
        throw new RuntimeException("Corrupt linearized search space: " + _searchSpace.length + " vs "
                + part.getMatPointsExt().length);
}

From source file:uk.gov.gchq.gaffer.schemabuilder.service.SchemaBuilderService.java

private static List<Class> getSubClasses(final Class<?> clazz) {
    final Set<URL> urls = new HashSet<>();
    for (final String packagePrefix : System
            .getProperty(SystemProperty.PACKAGE_PREFIXES, SystemProperty.PACKAGE_PREFIXES_DEFAULT).split(",")) {
        urls.addAll(ClasspathHelper.forPackage(packagePrefix));
    }

    final List<Class> classes = new ArrayList<Class>(new Reflections(urls).getSubTypesOf(clazz));
    keepPublicConcreteClasses(classes);
    classes.sort(Comparator.comparing(Class::getName));

    return classes;

}

From source file:com.thoughtworks.go.apiv5.plugininfos.PluginInfosControllerV5.java

public String index(Request request, Response response) throws IOException {
    List<CombinedPluginInfo> pluginInfos = new ArrayList<>();
    String pluginType = request.queryParams("type");
    Boolean includeBad = Boolean.valueOf(request.queryParams("include_bad"));

    if (StringUtils.isNotBlank(pluginType)
            && !extensionsRegistry.allRegisteredExtensions().contains(pluginType)) {
        throw new UnprocessableEntityException(
                String.format("Invalid plugin type '%s'. It has to be one of '%s'.", pluginType,
                        String.join(", ", extensionsRegistry.allRegisteredExtensions())));
    }

    Collection<CombinedPluginInfo> validPluginInfos = this.pluginInfoFinder.allPluginInfos(pluginType).stream()
            .filter(pluginInfo -> !hasUnsupportedExtensionType(pluginInfo)).collect(Collectors.toList());

    pluginInfos.addAll(validPluginInfos);

    if (includeBad) {
        List<BadPluginInfo> badPluginInfos = defaultPluginManager.plugins().stream()
                .filter(GoPluginDescriptor::isInvalid).map(BadPluginInfo::new).collect(toList());

        pluginInfos.addAll(badPluginInfos);
    }

    pluginInfos
            .sort(Comparator.comparing((CombinedPluginInfo pluginInfos1) -> pluginInfos1.getDescriptor().id()));
    String etag = etagFor(pluginInfos);

    if (fresh(request, etag)) {
        return notModified(response);
    }
    setEtagHeader(response, etag);
    return writerForTopLevelObject(request, response,
            writer -> PluginInfosRepresenter.toJSON(writer, pluginInfos));

}

From source file:org.wso2.carbon.apimgt.rest.api.store.v1.impl.SubscriptionsApiServiceImpl.java

/**
 * Get all subscriptions that belong to the user, or shared subscriptions of the user's group.
 * <p/>
 * If apiId is specified, this will return the subscribed applications of that API.
 * If applicationId is specified, this will return the API subscriptions of that application.
 *
 * @param apiId         api identifier
 * @param applicationId application identifier
 * @param offset        starting index of the subscription list
 * @param limit         max num of subscriptions returned
 * @param ifNoneMatch   If-None-Match header value
 * @return matched subscriptions as a list of SubscriptionDTOs
 */
@Override
public Response subscriptionsGet(String apiId, String applicationId, String apiType, Integer offset,
        Integer limit, String ifNoneMatch, MessageContext messageContext) {
    String username = RestApiUtil.getLoggedInUsername();
    String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
    Subscriber subscriber = new Subscriber(username);
    Set<SubscribedAPI> subscriptions;
    List<SubscribedAPI> subscribedAPIList = new ArrayList<>();

    //pre-processing
    limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
    offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;

    // currently groupId is taken from the user so that groupId coming as a query parameter is not honored.
    // As an improvement, we can check admin privileges of the user and honor groupId.
    String groupId = RestApiUtil.getLoggedInUserGroupId();

    try {
        APIConsumer apiConsumer = RestApiUtil.getConsumer(username);
        SubscriptionListDTO subscriptionListDTO;
        if (!StringUtils.isEmpty(apiId)) {
            // todo : FIX properly, needs to be done properly with backend-side pagination.
            // todo : getSubscribedIdentifiers() method should NOT be used. Appears to be too slow. 

            // This will fail with an authorization failed exception if user does not have permission to access the API
            API api = apiConsumer.getLightweightAPIByUUID(apiId, tenantDomain);
            subscriptions = apiConsumer.getSubscribedIdentifiers(subscriber, api.getId(), groupId);
            //sort by application name
            subscribedAPIList.addAll(subscriptions);
            subscribedAPIList.sort(Comparator.comparing(o -> o.getApplication().getName()));

            subscriptionListDTO = SubscriptionMappingUtil.fromSubscriptionListToDTO(subscribedAPIList, limit,
                    offset);

            return Response.ok().entity(subscriptionListDTO).build();
        } else if (!StringUtils.isEmpty(applicationId)) {
            Application application = apiConsumer.getApplicationByUUID(applicationId);

            if (application == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_APPLICATION, applicationId,
                        log);
                return null;
            }

            if (!RestAPIStoreUtils.isUserAccessAllowedForApplication(application)) {
                RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_APPLICATION, applicationId,
                        log);
            }

            subscriptions = apiConsumer.getPaginatedSubscribedAPIs(subscriber, application.getName(), offset,
                    limit, groupId);
            subscribedAPIList.addAll(subscriptions);

            subscriptionListDTO = SubscriptionMappingUtil.fromSubscriptionListToDTO(subscribedAPIList, limit,
                    offset);
            return Response.ok().entity(subscriptionListDTO).build();

        } else {
            //neither apiId nor applicationId is given
            RestApiUtil.handleBadRequest("Either applicationId or apiId should be available", log);
            return null;
        }
    } catch (APIManagementException e) {
        if (RestApiUtil.isDueToAuthorizationFailure(e)) {
            RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, log);
        } else if (RestApiUtil.isDueToResourceNotFound(e)) {
            RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
        } else {
            RestApiUtil.handleInternalServerError("Error while getting subscriptions of the user " + username,
                    e, log);
        }
    }
    return null;
}

From source file:org.openhab.io.neeo.internal.servletservices.NeeoBrainSearchService.java

/**
 * Does the search of all things and returns the results
 *
 * @param queryString the non-null, possibly empty query string
 * @param resp        the non-null response to write to
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void doSearch(String queryString, HttpServletResponse resp) throws IOException {
    Objects.requireNonNull(queryString, "queryString cannot be null");
    Objects.requireNonNull(resp, "resp cannot be null");

    final int idx = StringUtils.indexOf(queryString, '=');

    if (idx >= 0 && idx + 1 < queryString.length()) {
        final String search = NeeoUtil.decodeURIComponent(queryString.substring(idx + 1));

        final List<JsonObject> ja = new ArrayList<>();
        search(search).stream().sorted(Comparator.comparing(TokenScoreResult<NeeoDevice>::getScore).reversed())
                .forEach(item -> {
                    final JsonObject jo = (JsonObject) gson.toJsonTree(item);

                    // transfer id from tokenscoreresult to neeodevice (as per NEEO API)
                    final int id = jo.getAsJsonPrimitive("id").getAsInt();
                    jo.remove("id");
                    jo.getAsJsonObject("item").addProperty("id", id);
                    ja.add(jo);
                });

        final String itemStr = gson.toJson(ja);
        logger.debug("Search '{}', response: {}", search, itemStr);
        NeeoUtil.write(resp, itemStr);
    }
}

From source file:org.ballerinalang.docgen.docs.BallerinaDocGenerator.java

/**
 * API to generate Ballerina API documentation.
 *  @param sourceRoot    project root
 * @param output        path to the output directory where the API documentation will be written to.
 * @param packageFilter comma separated list of package names to be filtered from the documentation.
 * @param isNative      whether the given packages are native or not.
 * @param offline       is offline generation
 * @param sources       either the path to the directories where Ballerina source files reside or a
 */
public static void generateApiDocs(String sourceRoot, String output, String packageFilter, boolean isNative,
        boolean offline, String... sources) {
    out.println("docerina: API documentation generation for sources - " + Arrays.toString(sources));
    List<Link> primitives = primitives();

    // generate package docs
    Map<String, PackageDoc> docsMap = generatePackageDocsMap(sourceRoot, packageFilter, isNative, sources,
            offline);

    if (docsMap.size() == 0) {
        out.println("docerina: no module definitions found!");
        return;
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: generating HTML API documentation...");
    }

    // validate output path
    String userDir = System.getProperty("user.dir");
    // If output directory is empty
    if (output == null) {
        output = System.getProperty(BallerinaDocConstants.HTML_OUTPUT_PATH_KEY,
                userDir + File.separator + ProjectDirConstants.TARGET_DIR_NAME + File.separator + "api-docs");
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: creating output directory: " + output);
    }

    try {
        // Create output directory
        Files.createDirectories(Paths.get(output));
    } catch (IOException e) {
        out.println(String.format("docerina: API documentation generation failed. Couldn't create the [output "
                + "directory] %s. Cause: %s", output, e.getMessage()));
        log.error(
                String.format("API documentation generation failed. Couldn't create the [output directory] %s. "
                        + "" + "" + "Cause: %s", output, e.getMessage()),
                e);
        return;
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully created the output directory: " + output);
    }

    // Sort packages by package path
    List<PackageDoc> packageList = new ArrayList<>(docsMap.values());
    packageList.sort(Comparator.comparing(pkg -> pkg.bLangPackage.packageID.toString()));

    // Sort the package names
    List<String> packageNames = new ArrayList<>(docsMap.keySet());
    Collections.sort(packageNames);

    List<Link> packageNameList = PackageName.convertList(packageNames);

    String packageTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TEMPLATE_NAME_KEY, "page");
    String packageToCTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TOC_TEMPLATE_NAME_KEY,
            "toc");

    List<Path> resources = new ArrayList<>();

    //Iterate over the packages to generate the pages
    for (PackageDoc packageDoc : packageList) {

        try {
            BLangPackage bLangPackage = packageDoc.bLangPackage;
            String pkgDescription = packageDoc.description;

            // Sort functions, connectors, structs, type mappers and annotationDefs
            sortPackageConstructs(bLangPackage);

            String packagePath = refinePackagePath(bLangPackage);
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: starting to generate docs for module: " + packagePath);
            }

            // other normal packages
            Page page = Generator.generatePage(bLangPackage, packageNameList, pkgDescription, primitives);
            String filePath = output + File.separator + packagePath + HTML;
            Writer.writeHtmlDocument(page, packageTemplateName, filePath);

            if (ConfigRegistry.getInstance().getAsBoolean(BallerinaDocConstants.GENERATE_TOC)) {
                // generates ToC into a separate HTML - requirement of Central
                out.println(
                        "docerina: generating toc: " + output + File.separator + packagePath + "-toc" + HTML);
                String tocFilePath = output + File.separator + packagePath + "-toc" + HTML;
                Writer.writeHtmlDocument(page, packageToCTemplateName, tocFilePath);
            }

            if (Names.BUILTIN_PACKAGE.getValue().equals(packagePath)) {
                // primitives are in builtin package
                Page primitivesPage = Generator.generatePageForPrimitives(bLangPackage, packageNameList,
                        primitives);
                String primitivesFilePath = output + File.separator + "primitive-types" + HTML;
                Writer.writeHtmlDocument(primitivesPage, packageTemplateName, primitivesFilePath);
            }

            // collect package resources
            resources.addAll(packageDoc.resources);

            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: generated docs for module: " + packagePath);
            }
        } catch (IOException e) {
            out.println(String.format("docerina: API documentation generation failed for module %s: %s",
                    packageDoc.bLangPackage.packageID.toString(), e.getMessage()));
            log.error(String.format("API documentation generation failed for %s",
                    packageDoc.bLangPackage.packageID.toString()), e);
        }
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: copying HTML theme into " + output);
    }
    try {
        BallerinaDocUtils.copyResources("docerina-theme", output);
    } catch (IOException e) {
        out.println(String.format("docerina: failed to copy the docerina-theme resource. Cause: %s",
                e.getMessage()));
        log.error("Failed to copy the docerina-theme resource.", e);
    }
    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully copied HTML theme into " + output);
    }

    if (!resources.isEmpty()) {
        String resourcesDir = output + File.separator + "resources";
        File resourcesDirFile = new File(resourcesDir);
        if (BallerinaDocUtils.isDebugEnabled()) {
            out.println("docerina: copying project resources into " + resourcesDir);
        }
        resources.parallelStream().forEach(path -> {
            try {
                FileUtils.copyFileToDirectory(path.toFile(), resourcesDirFile);
            } catch (IOException e) {
                out.println(String.format(
                        "docerina: failed to copy [resource] %s into [resources directory] " + "%s. Cause: %s",
                        path.toString(), resourcesDir, e.getMessage()));
                log.error(String.format(
                        "docerina: failed to copy [resource] %s into [resources directory] " + "%s. Cause: %s",
                        path.toString(), resourcesDir, e.getMessage()), e);
            }
        });
        if (BallerinaDocUtils.isDebugEnabled()) {
            out.println("docerina: successfully copied project resources into " + resourcesDir);
        }
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: generating the index HTML file.");
    }

    try {
        //Generate the index file with the list of all modules
        String indexTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TEMPLATE_NAME_KEY, "index");
        String indexFilePath = output + File.separator + "index" + HTML;
        Writer.writeHtmlDocument(packageNameList, indexTemplateName, indexFilePath);
    } catch (IOException e) {
        out.println(String.format("docerina: failed to create the index.html. Cause: %s", e.getMessage()));
        log.error("Failed to create the index.html file.", e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully generated the index HTML file.");
        out.println("docerina: generating the module-list HTML file.");
    }

    try {
        // Generate module-list.html file which prints the list of processed packages
        String pkgListTemplateName = System.getProperty(BallerinaDocConstants.MODULE_LIST_TEMPLATE_NAME_KEY,
                "module-list");

        String pkgListFilePath = output + File.separator + "module-list" + HTML;
        Writer.writeHtmlDocument(packageNameList, pkgListTemplateName, pkgListFilePath);
    } catch (IOException e) {
        out.println(
                String.format("docerina: failed to create the module-list.html. Cause: %s", e.getMessage()));
        log.error("Failed to create the module-list.html file.", e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully generated the module-list HTML file.");
    }

    try {
        String zipPath = System.getProperty(BallerinaDocConstants.OUTPUT_ZIP_PATH);
        if (zipPath != null) {
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: generating the documentation zip file.");
            }
            BallerinaDocUtils.packageToZipFile(output, zipPath);
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: successfully generated the documentation zip file.");
            }
        }
    } catch (IOException e) {
        out.println(String.format("docerina: API documentation zip packaging failed for %s: %s", output,
                e.getMessage()));
        log.error(String.format("API documentation zip packaging failed for %s", output), e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: documentation generation is done.");
    }
}

From source file:org.mortbay.jetty.load.generator.jenkins.result.LoadResultProjectAction.java

public void doResponseTimeTrend(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {

    LOGGER.debug("doResponseTimeTrend");

    String jettyVersion = req.getParameter("jettyVersion");
    String[] versions = StringUtils.split(jettyVersion, '|');
    ElasticHost elasticHost = ElasticHost.get(elasticHostName);

    try {
        List<RunInformations> runInformations = new ArrayList<>();
        for (String version : versions) {
            runInformations.addAll(searchRunInformations(version, elasticHost));
        }
        Collections.sort(runInformations, Comparator.comparing(o -> o.getStartTimeStamp()));
        rsp.addHeader("Content-Type", "application/json");
        LoadTestResultPublisher.OBJECT_MAPPER.writeValue(rsp.getWriter(), runInformations);
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
}

From source file:View.Visualize.java

public void getLineChartData(Integer nameColumn, Integer valueColumn, Table table, LineChart lineChart,
        Boolean newSeries, Boolean rowCounter) {
    data.clear();
    ObservableList<XYChart.Data> lineChartData = FXCollections.observableArrayList();
    XYChart.Series series1 = new XYChart.Series();
    if (!newSeries) {
        series1.getData().clear();
        lineChart.getData().clear();

    }
    lineChart.setAnimated(false);//bug fix
    addDataFromTable(table, nameColumn, valueColumn, rowCounter);

    data.entrySet().stream().map(entry -> new XYChart.Data(entry.getKey(), entry.getValue()))
            .forEach(lineChartData::add);

    series1.getData().addAll(lineChartData);
    lineChart.getData().addAll(series1);
    lineChartSeries.add(series1);
    series1.getData().sort(Comparator.comparing(BarChart.Data<String, Double>::getYValue).reversed());
    series1.getNode().setUserData(lineChartSeries.size() - 1);
    addColorChangeOnSeries(series1);
    setupHover(series1);

}

From source file:it.polimi.diceH2020.SPACE4CloudWS.core.CoarseGrainedOptimizer.java

private boolean hillClimbing(SolutionPerJob solPerJob, Technology technology) {
    boolean success = false;
    Pair<Optional<Double>, Long> simulatorResult = dataProcessor.simulateClass(solPerJob);
    Optional<Double> maybeResult = simulatorResult.getLeft();
    if (maybeResult.isPresent()) {
        success = true;

        PerformanceSolver currentSolver = dataProcessor.getPerformanceSolver();
        Function<Double, Double> fromResult = currentSolver.transformationFromSolverResult(solPerJob,
                technology);
        Predicate<Double> feasibilityCheck = currentSolver.feasibilityCheck(solPerJob, technology);
        Consumer<Double> metricUpdater = currentSolver.metricUpdater(solPerJob, technology);

        final double tolerance = settings.getOptimization().getTolerance();

        BiPredicate<Double, Double> incrementCheck;
        Function<Integer, Integer> updateFunction;
        Predicate<Double> stoppingCondition;
        Predicate<Integer> vmCheck;

        double responseTime = fromResult.apply(maybeResult.get());
        if (feasibilityCheck.test(responseTime)) {
            updateFunction = n -> n - 1;
            stoppingCondition = feasibilityCheck.negate();
            vmCheck = n -> n == 1;
            incrementCheck = (prev, curr) -> false;
        } else {
            updateFunction = n -> n + 1;
            stoppingCondition = feasibilityCheck;
            vmCheck = n -> false;
            incrementCheck = (prev, curr) -> Math.abs((prev - curr) / prev) < tolerance;
        }

        List<Triple<Integer, Optional<Double>, Boolean>> resultsList = alterUntilBreakPoint(solPerJob,
                updateFunction, fromResult, feasibilityCheck, stoppingCondition, incrementCheck, vmCheck);
        Optional<Triple<Integer, Optional<Double>, Boolean>> result = resultsList.parallelStream()
                .filter(t -> t.getRight() && t.getMiddle().isPresent())
                .min(Comparator.comparing(Triple::getLeft));
        result.ifPresent(triple -> triple.getMiddle().ifPresent(output -> {
            int nVM = triple.getLeft();
            switch (technology) {
            case HADOOP:
            case SPARK:
                solPerJob.setThroughput(output);
                break;
            case STORM:
                break;
            default:
                throw new RuntimeException("Unexpected technology");
            }
            solPerJob.updateNumberVM(nVM);
            double metric = fromResult.apply(output);
            metricUpdater.accept(metric);
            logger.info(String.format(
                    "class%s-> MakeFeasible ended, result = %f, other metric = %f, obtained with: %d VMs",
                    solPerJob.getId(), output, metric, nVM));
        }));
    } else {
        logger.info("class" + solPerJob.getId() + "-> MakeFeasible ended with ERROR");
        solPerJob.setFeasible(false);
    }
    return success;
}

From source file:org.apache.asterix.common.config.ConfigUsageTest.java

public void generateUsage(String startDelim, String midDelim, String endDelim, EnumMap<Column, Boolean> align,
        PrintStream output) {
    ConfigManager configManager = getConfigManager();
    StringBuilder buf = new StringBuilder();

    final Column[] columns = Column.values();
    for (Section section : getSections(configManager)) {
        for (IOption option : getSectionOptions(configManager, section)) {
            for (Column column : columns) {
                if (align.computeIfAbsent(column, c -> false)) {
                    calculateMaxWidth(option, column);
                }
            }
        }
    }
    // output header
    for (Column column : columns) {
        buf.append(column.ordinal() == 0 ? startDelim : midDelim);
        pad(buf, StringUtils.capitalize(column.name().toLowerCase()),
                align.computeIfAbsent(column, c -> false) ? calculateMaxWidth(column, column.name()) : 0);
    }
    buf.append(endDelim).append('\n');

    StringBuilder sepLine = new StringBuilder();
    for (Column column : columns) {
        sepLine.append(column.ordinal() == 0 ? startDelim : midDelim);
        pad(sepLine, "", maxWidths.getOrDefault(column, 0), '-');
    }
    sepLine.append(endDelim).append('\n');
    buf.append(sepLine.toString().replace(' ', '-'));

    for (Section section : getSections(configManager)) {
        List<IOption> options = new ArrayList<>(getSectionOptions(configManager, section));
        options.sort(Comparator.comparing(IOption::ini));
        for (IOption option : options) {
            for (Column column : columns) {
                buf.append(column.ordinal() == 0 ? startDelim : midDelim);
                if (column == Column.SECTION) {
                    center(buf, extractValue(column, option), maxWidths.getOrDefault(column, 0));
                } else {
                    pad(buf, extractValue(column, option), maxWidths.getOrDefault(column, 0));
                }
            }
            buf.append(endDelim).append('\n');
        }
    }
    output.println(buf);
}