Example usage for javafx.collections ObservableList add

List of usage examples for javafx.collections ObservableList add

Introduction

This page collects example usages of the javafx.collections ObservableList add method, drawn from open-source projects.

Prototype

boolean add(E e);

Document

Appends the specified element to the end of this list (optional operation).
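
Before the project excerpts under Usage, here is a minimal, self-contained sketch of add in action. It is not taken from any of the projects below; the class name and list contents are made up for illustration. It shows that add appends the element to the end of the list and that an attached ListChangeListener is notified of the addition.

import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;

public class ObservableListAddSketch {
    public static void main(String[] args) {
        ObservableList<String> names = FXCollections.observableArrayList();

        // The cast selects the ListChangeListener overload of addListener.
        names.addListener((ListChangeListener<String>) change -> {
            while (change.next()) {
                if (change.wasAdded()) {
                    System.out.println("Added: " + change.getAddedSubList());
                }
            }
        });

        // add(E e) appends to the end of the list and returns true if the list changed.
        boolean changed = names.add("Alice");
        names.add("Bob");

        System.out.println(changed + " " + names); // true [Alice, Bob]
    }
}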

Usage

From source file:org.beryx.viewreka.fxapp.Viewreka.java

public void exportChart() {
    ViewPane<?> viewPane = getCurrentViewPane();
    if (viewPane == null)
        return;

    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle("Export chart");

    List<String> acceptedFormats = Arrays.asList("png", "gif");
    ObservableList<ExtensionFilter> extensionFilters = fileChooser.getExtensionFilters();
    acceptedFormats.forEach(
            ext -> extensionFilters.add(new FileChooser.ExtensionFilter(ext.toUpperCase() + " files", "*." + ext)));
    GuiSettings guiSettings = guiSettingsManager.getSettings();
    String initialDirPath = guiSettings.getProperty(PROP_CHART_EXPORT_DIR,
            guiSettings.getMostRecentProjectDir().getAbsolutePath(), false);
    File initialDir = new File(initialDirPath);

    fileChooser.setInitialDirectory(initialDir);
    File file = fileChooser.showSaveDialog(getScene().getWindow());
    if (file == null)
        return;

    if (file.getParentFile() != null) {
        guiSettings.setProperty(PROP_CHART_EXPORT_DIR, file.getParent());
    }

    String extension = FilenameUtils.getExtension(file.getName()).toLowerCase();
    if (!acceptedFormats.contains(extension)) {
        extension = acceptedFormats.get(0);
    }

    WritableImage chartImage = viewPane.getChartImage();

    try {
        ImageIO.write(SwingFXUtils.fromFXImage(chartImage, null), extension, file);
    } catch (IOException e) {
        Dialogs.error("Chart export failed", "Cannot export the chart image", e);
    }
}

From source file:aajavafx.VisitorScheduleController.java

public ObservableList<String> getVisitor() throws IOException, JSONException {

    ObservableList<String> visitorStrings = FXCollections.observableArrayList();
    //customers.add(new CustomerProperty(1, "Johny", "Walker", "London", "1972-07-01", "7207012222"));
    Visitors visitor = new Visitors();
    //Managers manager = new Managers();
    Gson gson = new Gson();
    ObservableList<CustomerProperty> customersProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();
    JSONArray jsonArray = new JSONArray(IOUtils
            .toString(new URL("http://localhost:8080/MainServerREST/api/visitors"), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        visitor = gson.fromJson(jo.toString(), Visitors.class);
        visitors.add(visitor);
        System.out.println(visitor.getVisId());
        String s = visitor.getVisId() + ". " + visitor.getVisFirstname() + " " + visitor.getVisLastname();
        visitorStrings.add(s);

    }
    visStrings = visitorStrings;
    return visitorStrings;
}

From source file:aajavafx.EmployeeController.java

public ObservableList<EmployeeProperty> getEmployee() throws IOException, JSONException, Exception {
    Employees myEmployee = new Employees();
    Managers manager = new Managers();
    Gson gson = new Gson();
    ObservableList<EmployeeProperty> employeesProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();
    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/employees");

    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);

    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        myEmployee = gson.fromJson(jo.toString(), Employees.class);
        System.out.println(myEmployee.getEmpPhone());
        employeesProperty.add(new EmployeeProperty(myEmployee.getEmpId(), myEmployee.getEmpLastname(),
                myEmployee.getEmpFirstname(), myEmployee.getEmpUsername(), myEmployee.getEmpPassword(),
                myEmployee.getEmpEmail(), myEmployee.getEmpPhone(), myEmployee.getManagersManId().getManId(),
                myEmployee.getEmpRegistered()));
    }
    return employeesProperty;
}

From source file:aajavafx.MedicinesController.java

public ObservableList<Medicines> getMedicines() throws IOException, JSONException {

    ObservableList<Medicines> medicines = FXCollections.observableArrayList();
    //Managers manager = new Managers();
    Gson gson = new Gson();
    JSONObject jo = new JSONObject();

    // SSL update .......
    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/medicines");
    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);
    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    // ...........
    //JSONArray jsonArray = new JSONArray(IOUtils.toString(new URL(MedicineRootURL), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        Medicines medicine = gson.fromJson(jo.toString(), Medicines.class);
        System.out.println("JSON OBJECT #" + i + " " + jo);
        medicines.add(medicine);

    }
    return medicines;
}

From source file:aajavafx.Schedule1Controller.java

public ObservableList<CustomerProperty> getCustomer() throws IOException, JSONException {
    Customers customers = new Customers();
    Gson gson = new Gson();
    ObservableList<CustomerProperty> customerProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();

    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/customers");
    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);

    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        customers = gson.fromJson(jo.toString(), Customers.class);

        customerProperty.add(new CustomerProperty(customers.getCuId(), customers.getCuFirstname(),
                customers.getCuLastname(), customers.getCuPersonnummer()));
    }
    return customerProperty;

}

From source file:com.ggvaidya.scinames.summary.NameStabilityView.java

public void init() {
    Project project = projectView.getProject();

    // What do we actually need to do?
    boolean flag_calculateNameSimilarity = (toCalculate & NAME_SIMILARITY) == NAME_SIMILARITY;
    boolean flag_calculateClusterSimilarity = (toCalculate & CLUSTER_SIMILARITY) == CLUSTER_SIMILARITY;
    boolean flag_calculateCircumscriptionSimilarity = (toCalculate
            & CIRCUMSCRIPTIONAL_SIMILARITY) == CIRCUMSCRIPTIONAL_SIMILARITY;

    // Setup stage.
    stage.setTitle("Name stability between " + project.getDatasets().size() + " datasets");

    // Setup table.
    controller.getTableEditableProperty().set(false);
    //controller.setTableColumnResizeProperty(TableView.CONSTRAINED_RESIZE_POLICY);
    ObservableList<TableColumn> cols = controller.getTableColumnsProperty();
    cols.clear();

    // Precalculating.
    Table<Dataset, String, String> precalc = HashBasedTable.create();

    // Set up columns.
    cols.add(createTableColumnFromPrecalc(precalc, "dataset"));
    cols.add(createTableColumnFromPrecalc(precalc, "date"));
    cols.add(createTableColumnFromPrecalc(precalc, "year"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_binomial"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_monotypic_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "names_added"));
    //cols.add(createTableColumnFromPrecalc(precalc, "names_added_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "names_deleted"));
    //cols.add(createTableColumnFromPrecalc(precalc, "names_deleted_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "species_added"));
    //cols.add(createTableColumnFromPrecalc(precalc, "species_added_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "species_deleted"));
    //cols.add(createTableColumnFromPrecalc(precalc, "species_deleted_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "mean_binomials_per_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "median_binomials_per_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "mode_binomials_per_genera_list"));

    /* All them stability calculations */
    if (flag_calculateNameSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_last"));
    }

    if (flag_calculateClusterSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_last"));
    }

    if (flag_calculateCircumscriptionSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_last"));
    }

    Set<String> recognitionColumns = new HashSet<>();

    // Calculate binomials per dataset.
    Map<Name, Set<Dataset>> datasetsPerName = new HashMap<>();

    // Prepare to loop!
    List<Dataset> checklists = project.getChecklists();

    // BIRD HACK! Include all datasets!
    // checklists = project.getDatasets();

    // Set table items. We're only interested in checklists, because
    // there's no such thing as "name stability" between non-checklist datasets.
    controller.getTableItemsProperty().set(FXCollections.observableArrayList(checklists));

    List<Dataset> prevChecklists = new LinkedList<>();
    Dataset firstChecklist = checklists.get(0);
    Dataset lastChecklist = checklists.get(checklists.size() - 1);

    // TODO: This used to be prevDataset, but prevChecklist makes a lot more sense, since we
    // want to compare checklists with each other, ignoring datasets. Would be nice if someone
    // with copious free time could look over the calculations and make sure they don't assume
    // that the previous checklist is also the previous dataset?
    Dataset prevChecklist = null;

    int index = -1;
    for (Dataset ds : checklists) {
        index++;

        Dataset nextChecklist = (index < (checklists.size() - 1) ? checklists.get(index + 1) : null);

        precalc.put(ds, "dataset", ds.getName());
        precalc.put(ds, "date", ds.getDate().asYYYYmmDD("-"));
        precalc.put(ds, "year", ds.getDate().getYearAsString());

        Set<Name> recognizedBinomials = project.getRecognizedNames(ds).stream().flatMap(n -> n.asBinomial())
                .collect(Collectors.toSet());
        precalc.put(ds, "count_binomial", String.valueOf(recognizedBinomials.size()));

        Set<Name> recognizedGenera = recognizedBinomials.stream().flatMap(n -> n.asGenus())
                .collect(Collectors.toSet());
        precalc.put(ds, "count_genera", String.valueOf(recognizedGenera.size()));
        precalc.put(ds, "mean_binomials_per_genera",
                new BigDecimal(((double) recognizedBinomials.size()) / recognizedGenera.size())
                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());

        Map<Name, List<Name>> countBinomialsPerGenus = recognizedBinomials.stream()
                // Eliminate names that have zero (or more than one?!) genus name.
                .filter(n -> (n.asGenus().count() == 1))
                .collect(Collectors.groupingBy(n -> n.asGenus().findAny().get()));

        /*
        LOGGER.info("Debugging: list of " + recognizedGenera.size() + " genera: " + 
           recognizedGenera.stream().map(n -> n.getFullName()).collect(Collectors.joining(", "))
        );
        */

        precalc.put(ds, "count_monotypic_genera", String.valueOf(countBinomialsPerGenus.entrySet().stream()
                .filter(entry -> new HashSet<>(entry.getValue()).size() == 1).count()));

        /*
        LOGGER.info("Debugging: list of monotypic genera: " + 
           countBinomialsPerGenus.entrySet().stream()
              .filter(entry -> new HashSet<>(entry.getValue()).size() == 1)
              .map(entry -> entry.getKey().getFullName())
              .collect(Collectors.joining(", "))
        );
        */

        // Species added and deleted
        Set<Name> namesAdded = ds.getChanges(project).filter(ch -> ch.getType().equals(ChangeType.ADDITION))
                .flatMap(ch -> ch.getToStream()).collect(Collectors.toSet());
        Set<Name> namesDeleted = ds.getChanges(project).filter(ch -> ch.getType().equals(ChangeType.DELETION))
                .flatMap(ch -> ch.getFromStream()).collect(Collectors.toSet());

        // TODO: This isn't so useful -- the more useful measure would be the number of all species added
        // and all species deleted, making sure there isn't a cluster-al overlap.
        precalc.put(ds, "names_added", String.valueOf(namesAdded.size()));
        //precalc.put(ds, "names_added_list", namesAdded.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(ds, "names_deleted", String.valueOf(namesDeleted.size()));
        //precalc.put(ds, "names_deleted_list", namesDeleted.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));

        // Eliminate names that have been added, but were previously recognized at the species level.
        Set<Name> speciesAdded = namesAdded;
        if (prevChecklist != null) {
            Set<Name> prevRecognizedNames = project.getNameClusterManager()
                    .getClusters(project.getRecognizedNames(prevChecklist)).stream()
                    .flatMap(nc -> nc.getNames().stream()).collect(Collectors.toSet());
            speciesAdded = namesAdded.stream().filter(n -> !prevRecognizedNames.contains(n))
                    .collect(Collectors.toSet());
        }

        // Eliminate names that are still represented in the checklist by a species cluster.
        // (Note that this includes cases where a subspecies is removed, but another subspecies
        // or the nominal species is still recognized!)
        Set<Name> currentlyRecognizedBinomialNames = project.getNameClusterManager()
                .getClusters(project.getRecognizedNames(ds)).stream().flatMap(nc -> nc.getNames().stream())
                .flatMap(n -> n.asBinomial()).collect(Collectors.toSet());
        Set<Name> speciesDeleted = namesDeleted.stream()
                .filter(n -> !n.asBinomial().anyMatch(bn -> currentlyRecognizedBinomialNames.contains(bn)))
                .collect(Collectors.toSet());

        precalc.put(ds, "species_added", String.valueOf(speciesAdded.size()));
        precalc.put(ds, "species_added_list",
                speciesAdded.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(ds, "species_deleted", String.valueOf(speciesDeleted.size()));
        precalc.put(ds, "species_deleted_list",
                speciesDeleted.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));

        // Measures of species per genera
        java.util.Map<String, Set<Name>> binomialsPerGenera = recognizedBinomials.stream()
                .collect(Collectors.toMap(n -> n.getGenus(), n -> {
                    Set<Name> set = new HashSet<Name>();
                    set.add(n);
                    return set;
                }, (a, b) -> {
                    a.addAll(b);
                    return a;
                }));

        List<Integer> binomialsPerGeneraCounts = binomialsPerGenera.values().stream().map(set -> set.size())
                .sorted().collect(Collectors.toList());

        Frequency freq = new Frequency();
        for (String genus : binomialsPerGenera.keySet()) {
            // Blech.
            for (Name binom : binomialsPerGenera.get(genus)) {
                freq.addValue(genus);
            }
        }
        List<Comparable<?>> modeGenera = freq.getMode();
        precalc.put(ds, "mode_binomials_per_genera_list",
                modeGenera.stream().map(o -> o.toString() + ": " + freq.getCount(o) + " binomials")
                        .collect(Collectors.joining("; ")));

        double[] binomialsPerGeneraCountsAsDouble = binomialsPerGeneraCounts.stream()
                .mapToDouble(Integer::doubleValue).toArray();
        Median median = new Median();
        precalc.put(ds, "median_binomials_per_genera",
                String.valueOf(median.evaluate(binomialsPerGeneraCountsAsDouble)));

        if (firstChecklist == null) {
            //            precalc.put(ds, "names_identical_to_first", "NA");
            //            precalc.put(ds, "names_identical_to_first_pc", "NA");
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_first",
                        String.valueOf(getBinomialNamesIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "names_identical_to_first_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_first_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / getBinomialNamesUnion(project, ds, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_first_pc_first",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / getBinomialNamesUnion(project, firstChecklist, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_first",
                        String.valueOf(getBinomialClustersIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "clusters_identical_to_first_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_first_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                        / getBinomialClustersUnion(project, ds, firstChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_first_pc_first", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                / getBinomialClustersUnion(project, firstChecklist, firstChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_first", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_first_pc_this", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_first_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_first_pc_first", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, firstChecklist, firstChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        if (lastChecklist == null) {
            //            precalc.put(ds, "names_identical_to_first", "NA");
            //            precalc.put(ds, "names_identical_to_first_pc", "NA");
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_last",
                        String.valueOf(getBinomialNamesIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "names_identical_to_last_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_last_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / getBinomialNamesUnion(project, ds, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_last_pc_last",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / getBinomialNamesUnion(project, lastChecklist, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_last",
                        String.valueOf(getBinomialClustersIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "clusters_identical_to_last_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_last_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                        / getBinomialClustersUnion(project, ds, lastChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_last_pc_last", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                / getBinomialClustersUnion(project, lastChecklist, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_last", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_last_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_last_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_last_pc_last", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, lastChecklist, lastChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        if (prevChecklist == null) {
            //            precalc.put(ds, "names_identical_to_prev", "NA");
            //            precalc.put(ds, "names_identical_to_prev_pc", "NA");            
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_prev",
                        String.valueOf(getBinomialNamesIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "names_identical_to_prev_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_prev_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / getBinomialNamesUnion(project, ds, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_prev_pc_prev",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / getBinomialNamesUnion(project, prevChecklist, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_prev",
                        String.valueOf(getBinomialClustersIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "clusters_identical_to_prev_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_prev_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                        / getBinomialClustersUnion(project, ds, prevChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_prev_pc_prev", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                / getBinomialClustersUnion(project, prevChecklist, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_prev", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_prev", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, prevChecklist, prevChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            // FYI, getBinomialTaxonConceptsUnion(project, ds, prevChecklist).size() should always be equal to the number of species in the dataset.
        }

        if (nextChecklist == null) {
            //         precalc.put(ds, "names_identical_to_prev", "NA");
            //         precalc.put(ds, "names_identical_to_prev_pc", "NA");            
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_next",
                        String.valueOf(getBinomialNamesIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "names_identical_to_next_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_next_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / getBinomialNamesUnion(project, ds, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_next_pc_next",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / getBinomialNamesUnion(project, nextChecklist, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_next",
                        String.valueOf(getBinomialClustersIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "clusters_identical_to_next_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_next_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                        / getBinomialClustersUnion(project, ds, nextChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_next_pc_next", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                / getBinomialClustersUnion(project, nextChecklist, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_next", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_next_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_next_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_next_pc_next", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, nextChecklist, nextChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        /*
        // For the visualization thingie.
        int total = prevChecklists.size();
        List<Integer> counts = new LinkedList<>();
        for(Name name: recognizedBinomials) {
           int prevRecognized = 0;
                   
           if(!datasetsPerName.containsKey(name)) {
              datasetsPerName.put(name, new HashSet<>());
           } else {
              prevRecognized = datasetsPerName.get(name).size();
           }
                   
           datasetsPerName.get(name).add(ds);
           counts.add(
              (int)(
          ((double)prevRecognized)/total*100
              )
           );
        }
                
        Map<Integer, List<Integer>> countsByPercentage = counts.stream().sorted().collect(Collectors.groupingBy(n -> (int)(n/10)*10));
        for(int percentage: countsByPercentage.keySet()) {
           precalc.put(ds, "previously_recognized_" + percentage + "pc", String.valueOf(countsByPercentage.get(percentage).size()));   
           recognitionColumns.add("previously_recognized_" + percentage + "pc");
        }
        prevChecklists.add(ds);
        */

        // Set up the previous checklist for the next loop.
        prevChecklist = ds;
    }

    /*
    LinkedList<String> recognitionColumnsList = new LinkedList<>(recognitionColumns);
    recognitionColumnsList.sort(null);      
    for(String colName: recognitionColumnsList) {
       cols.add(createTableColumnFromPrecalc(precalc, colName));
    }*/
}

From source file:aajavafx.Schedule1Controller.java

public ObservableList<EmployeeProperty> getEmployee() throws IOException, JSONException {
    Employees myEmployee = new Employees();

    Gson gson = new Gson();
    ObservableList<EmployeeProperty> employeesProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();

    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/employees");
    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);

    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        myEmployee = gson.fromJson(jo.toString(), Employees.class);
        if (myEmployee.getEmpRegistered()) {
            employeesProperty.add(new EmployeeProperty(myEmployee.getEmpId(), myEmployee.getEmpFirstname(),
                    myEmployee.getEmpLastname(), myEmployee.getEmpUsername()));
        }
    }
    return employeesProperty;
}

From source file:com.ggvaidya.scinames.dataset.DatasetSceneController.java

@SuppressWarnings("rawtypes")
private void updateAdditionalData() {
    ObservableList<AdditionalData> addDataItems = FXCollections.observableArrayList();

    // Done!
    additionalDataCombobox.setItems(addDataItems);

    // We can just about get away with doing this for around ADDITIONAL_DATA_CHANGE_COUNT_LIMIT changes.
    if (dataset.getAllChanges().count() > ADDITIONAL_DATA_CHANGE_COUNT_LIMIT)
        return;
    // TODO: fix this by lazy-evaluating these durned lists.

    // 1. Changes by name
    LOGGER.info("Creating changes by name additional data");
    addDataItems.add(createChangesByNameAdditionalData());
    LOGGER.info("Finished changes by name additional data");

    // 2. Data by name
    LOGGER.info("Creating data by name additional data");
    addDataItems.add(createDataByNameAdditionalData());
    LOGGER.info("Finished changes by name additional data");

    // 3. Changes by subname
    LOGGER.info("Creating changes by subnames additional data");
    addDataItems.add(createChangesBySubnamesAdditionalData());
    LOGGER.info("Finished changes by subname additional data");

    // 4. Data in this dataset
    /*
    LOGGER.info("Creating data by name additional data");
    addDataItems.add(createDataAdditionalData());
    LOGGER.info("Finished changes by name additional data");
    */

    // 5. Properties
    LOGGER.info("Creating properties additional data");
    addDataItems.add(createPropertiesAdditionalData());
    LOGGER.info("Finished properties additional data");

    additionalDataCombobox.getSelectionModel().clearAndSelect(0);

    LOGGER.info("Finished updateAdditionalData()");
}

From source file:aajavafx.CustomerController.java

public ObservableList<CustomerProperty> getCustomer() throws IOException, JSONException {

    ObservableList<CustomerProperty> customers = FXCollections.observableArrayList();
    //customers.add(new CustomerProperty(1, "Johny", "Walker", "London", "1972-07-01", "7207012222"));
    Customers myCustomer = new Customers();
    //Managers manager = new Managers();
    Gson gson = new Gson();
    ObservableList<CustomerProperty> customersProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();
    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/customers");
    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);
    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        myCustomer = gson.fromJson(jo.toString(), Customers.class);
        System.out.println(myCustomer.getCuId());
        customersProperty.add(new CustomerProperty(myCustomer.getCuId(), myCustomer.getCuFirstname(),
                myCustomer.getCuLastname(), myCustomer.getCuBirthdate(), myCustomer.getCuAddress(),
                myCustomer.getCuPersonnummer()));
    }
    return customersProperty;
}

From source file:aajavafx.Schedule1Controller.java

public ObservableList<EmployeeScheduleProperty> getSchedule() throws IOException, JSONException {
    EmployeeSchedule mySchedule = new EmployeeSchedule();

    Customers customers = new Customers();
    Employees employee = new Employees();
    Gson gson = new Gson();

    ObservableList<EmployeeScheduleProperty> employeeScheduleProperty = FXCollections.observableArrayList();
    JSONObject jo = new JSONObject();

    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("EMPLOYEE", "password");
    provider.setCredentials(AuthScope.ANY, credentials);
    HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();
    HttpGet get = new HttpGet("http://localhost:8080/MainServerREST/api/employeeschedule");
    HttpResponse response = client.execute(get);
    System.out.println("RESPONSE IS: " + response);

    JSONArray jsonArray = new JSONArray(
            IOUtils.toString(response.getEntity().getContent(), Charset.forName("UTF-8")));
    System.out.println(jsonArray);
    for (int i = 0; i < jsonArray.length(); i++) {
        jo = (JSONObject) jsonArray.getJSONObject(i);
        mySchedule = gson.fromJson(jo.toString(), EmployeeSchedule.class);

        employeeScheduleProperty.add(new EmployeeScheduleProperty(mySchedule.getSchId(),
                mySchedule.getSchDate(), mySchedule.getSchFromTime(), mySchedule.getSchUntilTime(),
                mySchedule.getEmplVisitedCust(), mySchedule.getCustomersCuId().getCuPersonnummer(),
                mySchedule.getEmployeesEmpId().getEmpUsername()));
    }

    return employeeScheduleProperty;
}