Example usage for java.util List parallelStream

List of usage examples for java.util List parallelStream

Introduction

In this page you can find the example usage for java.util List parallelStream.

Prototype

default Stream<E> parallelStream() 

Source Link

Document

Returns a possibly parallel Stream with this collection as its source.

Usage

From source file:de.steilerdev.myVerein.server.model.User.java

/**
 * This function replaces the set of divisions by the stated divisions. The function guarantees that the inverse membership is handled correctly.
 * @param divisionRepository The division repository needed to save the altered divisions.
 * @param eventRepository The event repository needed to save the altered events.
 * @param divs The new list of divisions for the user.
 *///from   w  w w  . jav a 2  s  . c om
public void replaceDivisions(DivisionRepository divisionRepository, EventRepository eventRepository,
        List<Division> divs) {
    logger.debug("[{}] Replacing division set", this);

    List<Division> finalDivisions = Division.getExpandedSetOfDivisions(divs, divisionRepository);
    List<Division> oldDivisions = divisions;

    if ((finalDivisions == null || finalDivisions.isEmpty())
            && (oldDivisions == null || oldDivisions.isEmpty())) {
        logger.debug("[{}] Division sets before and after are both empty", this);
        divisions = new ArrayList<>();
    } else if (finalDivisions == null || finalDivisions.isEmpty()) {
        logger.debug(
                "[{}] Division set after is empty, before is not. Removing membership subscription from old divisions",
                this);
        oldDivisions.stream().forEach(div -> div.removeMember(this));
        divisionRepository.save(oldDivisions);

        //Updating events, affected by division change
        oldDivisions.parallelStream().forEach(div -> {
            List<Event> changedEvents = eventRepository.findAllByInvitedDivision(div);
            changedEvents.parallelStream().forEach(event -> event.updateInvitedUser(divisionRepository));
            eventRepository.save(changedEvents);
        });
        divisions = new ArrayList<>();
    } else if (oldDivisions == null || oldDivisions.isEmpty()) {
        logger.debug(
                "[{}] Division set before is empty, after is not. Adding membership subscription to new divisions",
                this);
        finalDivisions.stream().forEach(div -> div.addMember(this));
        divisionRepository.save(finalDivisions);

        //Updating events, affected by division change
        finalDivisions.stream().forEach(div -> {
            List<Event> changedEvents = eventRepository.findAllByInvitedDivision(div);
            changedEvents.stream().forEach(event -> event.updateInvitedUser(divisionRepository));
            eventRepository.save(changedEvents);
        });
        divisions = finalDivisions;
    } else {
        logger.debug(
                "[{}] Division set after and before are not empty. Applying changed membership subscriptions",
                this);
        List<Division> intersect = finalDivisions.stream().filter(oldDivisions::contains)
                .collect(Collectors.toList()); //These items are already in the list, and do not need to be modified

        //Collecting changed division for batch save
        List<Division> changedDivisions = Collections.synchronizedList(new ArrayList<>());

        //Removing membership from removed divisions
        oldDivisions.parallelStream().filter(div -> !intersect.contains(div)).forEach(div -> {
            div.removeMember(this);
            changedDivisions.add(div);
        });

        //Adding membership to added divisions
        finalDivisions.parallelStream().filter(div -> !intersect.contains(div)).forEach(div -> {
            div.addMember(this);
            changedDivisions.add(div);
        });

        divisionRepository.save(changedDivisions);

        //Updating events, affected by division change
        changedDivisions.parallelStream().forEach(div -> {
            List<Event> changedEvents = eventRepository.findAllByInvitedDivision(div);
            changedEvents.parallelStream().forEach(event -> event.updateInvitedUser(divisionRepository));
            eventRepository.save(changedEvents);
        });
        divisions = finalDivisions;
    }
}

From source file:com.github.yongchristophertang.reporter.ReporterService.java

/**
 * Submit all results asynchronously to remote storage with location at {@link #getUrl()}.
 * If a set of result fails to transmit, another attempt will be activated. However only one more chance will be
 * tried.//  ww  w  .  j av a2 s  .  c  om
 */
public void generateReport(List<XmlSuite> xmlSuites, List<ISuite> suites, String outputDirectory) {
    List<Tuple2<TestCaseResult, Future<ResponseEntity<String>>>> futureTuples = new ArrayList<>();

    for (ISuite suite : suites) {
        for (IInvokedMethod testCase : suite.getAllInvokedMethods()) {
            ITestNGMethod method = testCase.getTestMethod();
            CasePostProcessor processor = new CasePostProcessor(method);
            TestCaseResult result = new TestCaseResult.TestCaseResultBuilder(method.getMethodName(),
                    testCase.getTestResult().getEndMillis() - testCase.getTestResult().getStartMillis(),
                    testCase.getTestResult().getStatus(), Reporter.getOutput(testCase.getTestResult()))
                            .className(method.getTestClass().getName())
                            .testName(testCase.getTestResult().getTestName()).suiteName(suite.getName())
                            .configuration(testCase.isConfigurationMethod())
                            .caseDescription(processor.getCaseDescription())
                            .expectedResult(processor.getExpectedResult()).bug(processor.getBugInfo())
                            .date(testCase.getDate()).createTestCaseResult();
            futureTuples.add(new Tuple2<>(result, service.submit(new UploadResults(result))));
        }
    }

    // First round attempt to upload results, and for failed cases attempt once more
    List<Future<ResponseEntity<String>>> futures = futureTuples.parallelStream()
            .filter(f -> Try.of(() -> !f._2.get(5, TimeUnit.SECONDS).getStatusCode().is2xxSuccessful())
                    .onFailure(t -> logger.error("Failed to upload results to remote storage.", t))
                    .orElse(true))
            .map(f -> service.submit(new UploadResults(f._1))).collect(Collectors.toList());

    // Check if all attempts succeed, if not, prompt notice of errors
    long count = futures.parallelStream().filter(
            f -> Try.of(() -> !f.get(5, TimeUnit.SECONDS).getStatusCode().is2xxSuccessful()).orElse(true))
            .count();
    if (count > 0) {
        logger.error("There are {} cases failed to upload to remote storage.", count);
    } else {
        logger.info("All test case results have been successfully transmitted to remote storage");
    }

    try {
        service.shutdown();
        if (!service.awaitTermination(2, TimeUnit.SECONDS)) {
            service.shutdownNow();
        }
    } catch (InterruptedException e) {
        logger.error("Reporter service has been interrupted while shutting down", e);
    }
}

From source file:org.evosuite.continuous.persistency.StorageManager.java

/**
 * Compare the results of this CTG run with what was in
 * the database. Keep/update the best results. 
 * /*from w  w w  .ja v  a  2  s  .  c  o  m*/
 * @param
 * @return
 */
public String mergeAndCommitChanges(ProjectStaticData current, String[] cuts) throws NullPointerException {

    if (current == null) {
        throw new NullPointerException("ProjectStaticData 'current' cannot be null");
    }

    Project db = StorageManager.getDatabaseProject();
    String info = "\n\n=== CTG run results ===\n";

    info += removeNoMoreExistentData(db, current);

    List<TestsOnDisk> suites = gatherGeneratedTestsOnDisk();
    info += "\nNew test suites: " + suites.size();

    // identify for which CUTs we failed to generate tests
    Set<String> missingCUTs = new LinkedHashSet<String>();

    db.setTotalNumberOfTestableClasses(BigInteger.valueOf(current.getTotalNumberOfTestableCUTs()));
    for (String cut : current.getClassNames()) {
        if (!current.getClassInfo(cut).isTestable()) {
            // if a class is not testable, we don't need to update any database
            // of that class. and not even counting it as a missing class
            continue;
        }

        TestsOnDisk suite = suites.parallelStream().filter(s -> s.cut.equals(cut)).findFirst().orElse(null);
        if (suite == null && current.getClassInfo(cut).isToTest()) {
            missingCUTs.add(cut);
        }

        LoggingUtils.getEvoLogger().info("* Updating database to " + cut);
        updateDatabase(cut, suite, db, current);
    }

    /*
       * Print out what class(es) EvoSuite failed to generate
       * test cases in this CTG run
       */

    if (!missingCUTs.isEmpty()) {
        if (missingCUTs.size() == 1) {
            info += "\n\nWARN: failed to generate tests for " + missingCUTs.iterator().next();
        } else {
            info += "\n\nMissing classes:";
            for (String missingCUT : missingCUTs) {
                info += "\n" + missingCUT;
            }
            String summary = "\n\nWARN: failed to generate tests for " + missingCUTs.size() + " classes out of "
                    + current.getTotalNumberOfTestableCUTs();
            info += summary;
        }
    }

    commitDatabase(db);
    return info;
}

From source file:se.nrm.dina.inventory.client.controller.ExcelFileHandler_1.java

private void validateObservationSheet() {
    List<String> list = new ArrayList();

    if (fileType == null) {
        errorMsgs.add(CommonMessages.getInstance().UNDEFINED_FILE_TYPE);
        throw new InvalidExcelFileException(CommonMessages.getInstance().INVALID_EXCEL_FILE);
    } else {/*from  w  w  w. j  a v  a 2 s.c  o  m*/
        if (fileType.equals(C_FILE)) {
            list.addAll(observationDataList_cfile);
        } else {
            list.addAll(observationDataList_dfile);
        }

        Optional<String> missingTitle = list.parallelStream().filter(t -> !obsTitleMap.containsKey(t))
                .findAny();

        if (missingTitle.isPresent()) {
            errorMsgs.add(CommonMessages.getInstance().buildMessage(missingTitle.get(),
                    CommonMessages.getInstance().OBSERVATION_COLUMN_NOT_MAPPING,
                    CommonMessages.getInstance().MISSING));
            throw new InvalidExcelFileException(CommonMessages.getInstance().INVALID_EXCEL_FILE);
        }
    }
}

From source file:org.wso2.carbon.status.dashboard.core.impl.MonitoringApiServiceImpl.java

/**
 * Return all realtime statistics of the workers.If worker is not currently reachable then send the last
 * persistant state of that worker./*w w w. jav a  2 s  .  c om*/
 *
 * @return Realtime data and status of workers.
 * @throws NotFoundException
 */
@Override
public Response getAllWorkers(String username) throws NotFoundException {
    boolean isAuthorized = permissionProvider.hasPermission(username,
            new Permission(Constants.PERMISSION_APP_NAME, VIWER_PERMISSION_STRING));
    if (isAuthorized) {
        Map<String, List<WorkerOverview>> groupedWorkers = new HashMap<>();
        List<WorkerConfigurationDetails> workerList = dashboardStore.selectAllWorkers();
        if (!workerList.isEmpty()) {
            workerList.parallelStream().forEach(worker -> {
                try {
                    WorkerOverview workerOverview = new WorkerOverview();
                    feign.Response workerResponse = WorkerServiceFactory.getWorkerHttpsClient(
                            PROTOCOL + generateURLHostPort(worker.getHost(), String.valueOf(worker.getPort())),
                            getUsername(), getPassword()).getWorker();
                    if ((workerResponse != null) && (workerResponse.status() == 200)) {
                        Long timeInMillis = System.currentTimeMillis();
                        String responseBody = workerResponse.body().toString();
                        ServerDetails serverDetails = gson.fromJson(responseBody, ServerDetails.class);
                        String message = serverDetails.getMessage();
                        if (message == null || message.isEmpty()) {
                            workerOverview.setStatusMessage("Success");
                        } else {
                            workerOverview.setStatusMessage(message);
                        }
                        feign.Response activeSiddiAppsResponse = WorkerServiceFactory
                                .getWorkerHttpsClient(PROTOCOL + generateURLHostPort(worker.getHost(),
                                        String.valueOf(worker.getPort())), getUsername(), getPassword())
                                .getSiddhiApps(true);
                        String activeSiddiAppsResponseBody = activeSiddiAppsResponse.body().toString();
                        List<String> activeApps = gson.fromJson(activeSiddiAppsResponseBody,
                                new TypeToken<List<String>>() {
                                }.getType());
                        feign.Response inactiveSiddiAppsResponse = WorkerServiceFactory
                                .getWorkerHttpsClient(PROTOCOL + generateURLHostPort(worker.getHost(),
                                        String.valueOf(worker.getPort())), getUsername(), getPassword())
                                .getSiddhiApps(false);
                        String inactiveSiddiAppsResponseBody = inactiveSiddiAppsResponse.body().toString();
                        List<String> inactiveApps = gson.fromJson(inactiveSiddiAppsResponseBody,
                                new TypeToken<List<String>>() {
                                }.getType());
                        serverDetails.setSiddhiApps(activeApps.size(), inactiveApps.size());
                        WorkerMetricsSnapshot snapshot = new WorkerMetricsSnapshot(serverDetails, timeInMillis);
                        WorkerStateHolder.addMetrics(worker.getWorkerId(), snapshot);
                        workerOverview.setLastUpdate(timeInMillis);
                        workerOverview.setWorkerId(worker.getWorkerId());
                        workerOverview.setServerDetails(serverDetails);
                        //grouping the clusters of the workers
                        List nonClusterList = groupedWorkers.get(Constants.NON_CLUSTERS_ID);
                        String clusterID = serverDetails.getClusterId();
                        List existing = groupedWorkers.get(clusterID);
                        if (serverDetails.getClusterId() == null && (nonClusterList == null)) {
                            List<WorkerOverview> workers = new ArrayList<>();
                            workers.add(workerOverview);
                            groupedWorkers.put(Constants.NON_CLUSTERS_ID, workers);
                        } else if (clusterID == null && (nonClusterList != null)) {
                            nonClusterList.add(workerOverview);
                        } else if (clusterID != null && (existing == null)) {
                            List<WorkerOverview> workers = new ArrayList<>();
                            workers.add(workerOverview);
                            groupedWorkers.put(clusterID, workers);
                        } else if (clusterID != null && (existing != null)) {
                            existing.add(workerOverview);
                        }
                    } else {
                        workerOverview.setWorkerId(worker.getWorkerId());
                        ServerDetails serverDetails = new ServerDetails();
                        serverDetails.setRunningStatus(Constants.NOT_REACHABLE_ID);
                        workerOverview.setStatusMessage(getErrorMessage(workerResponse.status()));
                        workerOverview.setServerDetails(serverDetails);
                        workerOverview.setLastUpdate((long) 0);
                        //grouping the never reached
                        if (groupedWorkers.get(Constants.NEVER_REACHED) == null) {
                            List<WorkerOverview> workers = new ArrayList<>();
                            workers.add(workerOverview);
                            groupedWorkers.put(Constants.NEVER_REACHED, workers);
                        } else {
                            List existing = groupedWorkers.get(Constants.NEVER_REACHED);
                            existing.add(workerOverview);
                        }
                    }
                } catch (feign.RetryableException e) {
                    WorkerMetricsSnapshot lastSnapshot = WorkerStateHolder.getMetrics(worker.getWorkerId());
                    if (lastSnapshot != null) {
                        lastSnapshot.updateRunningStatus(Constants.NOT_REACHABLE_ID);
                        WorkerOverview workerOverview = new WorkerOverview();
                        workerOverview.setLastUpdate(lastSnapshot.getTimeStamp());
                        workerOverview.setWorkerId(worker.getWorkerId());
                        workerOverview.setServerDetails(lastSnapshot.getServerDetails());
                        if (groupedWorkers.get(lastSnapshot.getServerDetails().getClusterId()) != null) {
                            groupedWorkers.get(lastSnapshot.getServerDetails().getClusterId())
                                    .add(workerOverview);
                        } else {
                            List<WorkerOverview> workers = new ArrayList<>();
                            workers.add(workerOverview);
                            groupedWorkers.put(lastSnapshot.getServerDetails().getClusterId(), workers);
                        }
                    } else {
                        WorkerOverview workerOverview = new WorkerOverview();
                        workerOverview.setWorkerId(worker.getWorkerId());
                        ServerDetails serverDetails = new ServerDetails();
                        serverDetails.setRunningStatus(Constants.NOT_REACHABLE_ID);
                        workerOverview.setServerDetails(serverDetails);
                        workerOverview.setLastUpdate((long) 0);
                        //grouping the never reached
                        if (groupedWorkers.get(Constants.NEVER_REACHED) == null) {
                            List<WorkerOverview> workers = new ArrayList<>();
                            workers.add(workerOverview);
                            groupedWorkers.put(Constants.NEVER_REACHED, workers);
                        } else {
                            List existing = groupedWorkers.get(Constants.NEVER_REACHED);
                            existing.add(workerOverview);
                        }
                    }
                }
            });
        }
        String jsonString = new Gson().toJson(groupedWorkers);
        return Response.ok().entity(jsonString).build();
    } else {
        logger.error("Unauthorized for user : " + username);
        return Response.status(Response.Status.FORBIDDEN).entity("Unauthorized for user : " + username).build();
    }
}

From source file:no.imr.stox.functions.acoustic.PgNapesIO.java

public static void convertPgNapesToLuf20(String path, String fileName, String outFileName) {
    try {//from   www . ja va2 s  .c o m
        List<String> acList = Files.readAllLines(Paths.get(path + "/" + fileName + ".txt"));
        List<String> acVList = Files.readAllLines(Paths.get(path + "/" + fileName + "Values.txt"));
        if (acList.isEmpty() || acVList.isEmpty()) {
            return;
        }
        acList.remove(0);
        acVList.remove(0);
        List<DistanceBO> dList = acList.stream().map(s -> {
            DistanceBO d = new DistanceBO();
            String[] str = s.split("\t", 14);
            d.setNation(str[0]);
            d.setPlatform(str[1]);
            d.setCruise(str[2]);
            d.setLog_start(Conversion.safeStringtoDoubleNULL(str[3]));
            d.setStart_time(Date.from(LocalDateTime.of(Conversion.safeStringtoIntegerNULL(str[4]),
                    Conversion.safeStringtoIntegerNULL(str[5]), Conversion.safeStringtoIntegerNULL(str[6]),
                    Conversion.safeStringtoIntegerNULL(str[7]), Conversion.safeStringtoIntegerNULL(str[8]), 0)
                    .toInstant(ZoneOffset.UTC)));
            d.setLat_start(Conversion.safeStringtoDoubleNULL(str[9]));
            d.setLon_start(Conversion.safeStringtoDoubleNULL(str[10]));
            d.setIntegrator_dist(Conversion.safeStringtoDoubleNULL(str[11]));
            FrequencyBO freq = new FrequencyBO();
            d.getFrequencies().add(freq);
            freq.setTranceiver(1); // implicit in pgnapes
            freq.setUpper_interpret_depth(0d);
            freq.setUpper_integrator_depth(0d);
            freq.setDistance(d);
            freq.setFreq(Conversion.safeStringtoIntegerNULL(str[12]));
            freq.setThreshold(Conversion.safeStringtoDoubleNULL(str[13]));
            return d;
        }).collect(Collectors.toList());
        // Fill in sa values
        acVList.forEach(s -> {
            String[] str = s.split("\t", 11);
            String cruise = str[2];
            Double log = Conversion.safeStringtoDoubleNULL(str[3]);
            Integer year = Conversion.safeStringtoIntegerNULL(str[4]);
            Integer month = Conversion.safeStringtoIntegerNULL(str[5]);
            Integer day = Conversion.safeStringtoIntegerNULL(str[6]);
            if (log == null || year == null || month == null || day == null) {
                return;
            }
            DistanceBO d = dList.parallelStream().filter(di -> {
                if (di.getCruise() == null || di.getLog_start() == null || di.getStart_time() == null) {
                    return false;
                }
                LocalDate ld = di.getStart_time().toInstant().atZone(ZoneOffset.UTC).toLocalDate();
                return cruise.equals(di.getCruise()) && log.equals(di.getLog_start())
                        && year.equals(ld.getYear()) && month.equals(ld.getMonthValue())
                        && day.equals(ld.getDayOfMonth());
            }).findFirst().orElse(null);
            if (d == null) {
                return;
            }
            FrequencyBO freq = d.getFrequencies().get(0);

            String species = str[7];
            Integer acocat = PgNapesEchoConvert.getAcoCatFromPgNapesSpecies(species);
            Double chUppDepth = Conversion.safeStringtoDoubleNULL(str[8]);
            Double chLowDepth = Conversion.safeStringtoDoubleNULL(str[9]);
            Double sa = Conversion.safeStringtoDoubleNULL(str[10]);
            if (acocat == null || sa == null || sa == 0d || chLowDepth == null || chUppDepth == null) {
                return;
            }
            if (d.getPel_ch_thickness() == null) {
                d.setPel_ch_thickness(chLowDepth - chUppDepth);
            }
            Integer ch = (int) (chLowDepth / d.getPel_ch_thickness() + 0.5);
            SABO sabo = new SABO();
            sabo.setFrequency(freq);
            freq.getSa().add(sabo);
            sabo.setAcoustic_category(acocat + "");
            sabo.setCh_type("P");
            sabo.setCh(ch);
            sabo.setSa(sa);
        });
        // Calculate number of pelagic channels

        /*dList.stream().forEach(d -> {
        FrequencyBO f = d.getFrequencies().get(0);
        Integer minCh = f.getSa().stream().map(SABO::getCh).min(Integer::compare).orElse(null);
        Integer maxCh = f.getSa().stream().map(SABO::getCh).max(Integer::compare).orElse(null);
        if (maxCh != null && minCh != null) {
            f.setNum_pel_ch(maxCh - minCh + 1);
        }
        });*/

        if (dList.isEmpty()) {
            return;
        }
        DistanceBO d = dList.get(0);
        String cruise = d.getCruise();
        String nation = d.getNation();
        String pl = d.getPlatform();
        ListUser20Writer.export(cruise, nation, pl, path + "/" + cruise + outFileName + ".xml", dList);

    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:qupath.lib.gui.tma.TMASummaryViewer.java

void setTMAEntries(final Collection<TMAEntry> newEntries) {

    //      boolean containsSummaries = newEntries.stream().anyMatch(e -> e instanceof TMASummaryEntry);

    // Turn off use-selected - can be crashy when replacing entries
    if (!newEntries.equals(entriesBase)) {
        useSelectedProperty.set(false);/*from  w  w w . j  a  va2s . co m*/

        // Reset the cache
        imageCache.clear();

        // Try to load small images in a background thread
        List<TMAEntry> duplicateEntries = new ArrayList<>(newEntries);
        ExecutorService service = Executors.newSingleThreadExecutor();
        service.submit(() -> {
            duplicateEntries.parallelStream().forEach(entry -> {
                imageCache.getImage(entry, maxSmallWidth.get());
                imageCache.getOverlay(entry, maxSmallWidth.get());
            });
        });
        service.shutdown();

    }
    this.entriesBase.setAll(newEntries);

    // Store the names of any currently hidden columns
    lastHiddenColumns = table.getColumns().stream().filter(c -> !c.isVisible()).map(c -> c.getText())
            .collect(Collectors.toSet());

    //      this.table.getColumns().clear();

    //      // Useful for a paper, but not generally...
    //      int count = 0;
    //      int nCells = 0;
    //      int nTumor = 0;
    //      for (TMAEntry entry : entriesBase) {
    //         if (!entry.isMissing() && (predicate.get() == null || predicate.get().test(entry))) {
    //            count++;
    //            nCells += (int)(entry.getMeasurement("Num Tumor").doubleValue() + entry.getMeasurement("Num Stroma").doubleValue());
    //            nTumor += (int)(entry.getMeasurement("Num Tumor").doubleValue());
    //         }
    //      }
    //      System.err.println(String.format("Num entries:\t%d\tNum tumor:\t%d\tNum cells:\t%d", count, nTumor, nCells));

    // Update measurement names
    Set<String> namesMeasurements = new LinkedHashSet<>();
    Set<String> namesMetadata = new LinkedHashSet<>();
    //      boolean containsSummaries = false;
    for (TMAEntry entry : newEntries) {
        namesMeasurements.addAll(entry.getMeasurementNames());
        namesMetadata.addAll(entry.getMetadataNames());
        //         containsSummaries = containsSummaries || entry instanceof TMASummaryEntry;
    }

    // Get the available survival columns
    String currentSurvival = getSurvivalColumn();
    survivalColumns.clear();
    if (namesMeasurements.contains(TMACoreObject.KEY_OVERALL_SURVIVAL))
        survivalColumns.add(TMACoreObject.KEY_OVERALL_SURVIVAL);
    if (namesMeasurements.contains(TMACoreObject.KEY_RECURRENCE_FREE_SURVIVAL))
        survivalColumns.add(TMACoreObject.KEY_RECURRENCE_FREE_SURVIVAL);
    if (currentSurvival != null && survivalColumns.contains(currentSurvival))
        comboSurvival.getSelectionModel().select(currentSurvival);
    else if (!survivalColumns.isEmpty())
        comboSurvival.getSelectionModel().select(survivalColumns.get(0));

    //      // Add the count of non-missing cores if we are working with summaries
    //      if (containsSummaries)
    namesMeasurements.add("Available cores");

    // Make sure there are no nulls or other unusable values
    namesMeasurements.remove(null);
    namesMeasurements.remove("");
    //      measurementNames.clear();
    String selectedMainMeasurement = comboMainMeasurement.getSelectionModel().getSelectedItem();
    measurementNames.setAll(namesMeasurements);
    if (namesMeasurements.contains(selectedMainMeasurement))
        comboMainMeasurement.getSelectionModel().select(selectedMainMeasurement);
    else {
        namesMeasurements.remove(TMACoreObject.KEY_UNIQUE_ID);
        namesMeasurements.remove(TMACoreObject.KEY_OVERALL_SURVIVAL);
        namesMeasurements.remove(TMACoreObject.KEY_RECURRENCE_FREE_SURVIVAL);
        namesMeasurements.remove(TMACoreObject.KEY_OS_CENSORED);
        namesMeasurements.remove(TMACoreObject.KEY_RFS_CENSORED);
        namesMeasurements.remove("Censored"); // For historical reasons when there was only one censored column supported...
        if (!namesMeasurements.isEmpty())
            comboMainMeasurement.getSelectionModel().select(0);
    }
    metadataNames.clear();
    metadataNames.addAll(namesMetadata);

    refreshTableData();

    // The next time the table is empty, show a different placeholder 
    // from the original (which is for loading/import)
    table.setPlaceholder(new Text("No data"));
}

From source file:org.ballerinalang.docgen.docs.BallerinaDocGenerator.java

/**
 * API to generate Ballerina API documentation.
 *  @param sourceRoot    project root/*from  w w  w .  j  ava2 s.  c  om*/
 * @param output        path to the output directory where the API documentation will be written to.
 * @param packageFilter comma separated list of package names to be filtered from the documentation.
 * @param isNative      whether the given packages are native or not.
 * @param offline       is offline generation
 * @param sources       either the path to the directories where Ballerina source files reside or a
 */
public static void generateApiDocs(String sourceRoot, String output, String packageFilter, boolean isNative,
        boolean offline, String... sources) {
    out.println("docerina: API documentation generation for sources - " + Arrays.toString(sources));
    List<Link> primitives = primitives();

    // generate package docs
    Map<String, PackageDoc> docsMap = generatePackageDocsMap(sourceRoot, packageFilter, isNative, sources,
            offline);

    if (docsMap.size() == 0) {
        out.println("docerina: no module definitions found!");
        return;
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: generating HTML API documentation...");
    }

    // validate output path
    String userDir = System.getProperty("user.dir");
    // If output directory is empty
    if (output == null) {
        output = System.getProperty(BallerinaDocConstants.HTML_OUTPUT_PATH_KEY,
                userDir + File.separator + ProjectDirConstants.TARGET_DIR_NAME + File.separator + "api-docs");
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: creating output directory: " + output);
    }

    try {
        // Create output directory
        Files.createDirectories(Paths.get(output));
    } catch (IOException e) {
        out.println(String.format("docerina: API documentation generation failed. Couldn't create the [output "
                + "directory] %s. Cause: %s", output, e.getMessage()));
        log.error(
                String.format("API documentation generation failed. Couldn't create the [output directory] %s. "
                        + "" + "" + "Cause: %s", output, e.getMessage()),
                e);
        return;
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully created the output directory: " + output);
    }

    // Sort packages by package path
    List<PackageDoc> packageList = new ArrayList<>(docsMap.values());
    packageList.sort(Comparator.comparing(pkg -> pkg.bLangPackage.packageID.toString()));

    // Sort the package names
    List<String> packageNames = new ArrayList<>(docsMap.keySet());
    Collections.sort(packageNames);

    List<Link> packageNameList = PackageName.convertList(packageNames);

    String packageTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TEMPLATE_NAME_KEY, "page");
    String packageToCTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TOC_TEMPLATE_NAME_KEY,
            "toc");

    List<Path> resources = new ArrayList<>();

    //Iterate over the packages to generate the pages
    for (PackageDoc packageDoc : packageList) {

        try {
            BLangPackage bLangPackage = packageDoc.bLangPackage;
            String pkgDescription = packageDoc.description;

            // Sort functions, connectors, structs, type mappers and annotationDefs
            sortPackageConstructs(bLangPackage);

            String packagePath = refinePackagePath(bLangPackage);
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: starting to generate docs for module: " + packagePath);
            }

            // other normal packages
            Page page = Generator.generatePage(bLangPackage, packageNameList, pkgDescription, primitives);
            String filePath = output + File.separator + packagePath + HTML;
            Writer.writeHtmlDocument(page, packageTemplateName, filePath);

            if (ConfigRegistry.getInstance().getAsBoolean(BallerinaDocConstants.GENERATE_TOC)) {
                // generates ToC into a separate HTML - requirement of Central
                out.println(
                        "docerina: generating toc: " + output + File.separator + packagePath + "-toc" + HTML);
                String tocFilePath = output + File.separator + packagePath + "-toc" + HTML;
                Writer.writeHtmlDocument(page, packageToCTemplateName, tocFilePath);
            }

            if (Names.BUILTIN_PACKAGE.getValue().equals(packagePath)) {
                // primitives are in builtin package
                Page primitivesPage = Generator.generatePageForPrimitives(bLangPackage, packageNameList,
                        primitives);
                String primitivesFilePath = output + File.separator + "primitive-types" + HTML;
                Writer.writeHtmlDocument(primitivesPage, packageTemplateName, primitivesFilePath);
            }

            // collect package resources
            resources.addAll(packageDoc.resources);

            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: generated docs for module: " + packagePath);
            }
        } catch (IOException e) {
            out.println(String.format("docerina: API documentation generation failed for module %s: %s",
                    packageDoc.bLangPackage.packageID.toString(), e.getMessage()));
            log.error(String.format("API documentation generation failed for %s",
                    packageDoc.bLangPackage.packageID.toString()), e);
        }
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: copying HTML theme into " + output);
    }
    try {
        BallerinaDocUtils.copyResources("docerina-theme", output);
    } catch (IOException e) {
        out.println(String.format("docerina: failed to copy the docerina-theme resource. Cause: %s",
                e.getMessage()));
        log.error("Failed to copy the docerina-theme resource.", e);
    }
    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully copied HTML theme into " + output);
    }

    if (!resources.isEmpty()) {
        String resourcesDir = output + File.separator + "resources";
        File resourcesDirFile = new File(resourcesDir);
        if (BallerinaDocUtils.isDebugEnabled()) {
            out.println("docerina: copying project resources into " + resourcesDir);
        }
        resources.parallelStream().forEach(path -> {
            try {
                FileUtils.copyFileToDirectory(path.toFile(), resourcesDirFile);
            } catch (IOException e) {
                out.println(String.format(
                        "docerina: failed to copy [resource] %s into [resources directory] " + "%s. Cause: %s",
                        path.toString(), resourcesDir, e.getMessage()));
                log.error(String.format(
                        "docerina: failed to copy [resource] %s into [resources directory] " + "%s. Cause: %s",
                        path.toString(), resourcesDir, e.getMessage()), e);
            }
        });
        if (BallerinaDocUtils.isDebugEnabled()) {
            out.println("docerina: successfully copied project resources into " + resourcesDir);
        }
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: generating the index HTML file.");
    }

    try {
        //Generate the index file with the list of all modules
        String indexTemplateName = System.getProperty(BallerinaDocConstants.MODULE_TEMPLATE_NAME_KEY, "index");
        String indexFilePath = output + File.separator + "index" + HTML;
        Writer.writeHtmlDocument(packageNameList, indexTemplateName, indexFilePath);
    } catch (IOException e) {
        out.println(String.format("docerina: failed to create the index.html. Cause: %s", e.getMessage()));
        log.error("Failed to create the index.html file.", e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully generated the index HTML file.");
        out.println("docerina: generating the module-list HTML file.");
    }

    try {
        // Generate module-list.html file which prints the list of processed packages
        String pkgListTemplateName = System.getProperty(BallerinaDocConstants.MODULE_LIST_TEMPLATE_NAME_KEY,
                "module-list");

        String pkgListFilePath = output + File.separator + "module-list" + HTML;
        Writer.writeHtmlDocument(packageNameList, pkgListTemplateName, pkgListFilePath);
    } catch (IOException e) {
        out.println(
                String.format("docerina: failed to create the module-list.html. Cause: %s", e.getMessage()));
        log.error("Failed to create the module-list.html file.", e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: successfully generated the module-list HTML file.");
    }

    try {
        String zipPath = System.getProperty(BallerinaDocConstants.OUTPUT_ZIP_PATH);
        if (zipPath != null) {
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: generating the documentation zip file.");
            }
            BallerinaDocUtils.packageToZipFile(output, zipPath);
            if (BallerinaDocUtils.isDebugEnabled()) {
                out.println("docerina: successfully generated the documentation zip file.");
            }
        }
    } catch (IOException e) {
        out.println(String.format("docerina: API documentation zip packaging failed for %s: %s", output,
                e.getMessage()));
        log.error(String.format("API documentation zip packaging failed for %s", output), e);
    }

    if (BallerinaDocUtils.isDebugEnabled()) {
        out.println("docerina: documentation generation is done.");
    }
}