Example usage for javafx.collections FXCollections observableArrayList

Introduction

This page collects example usages of javafx.collections.FXCollections.observableArrayList, drawn from the open-source projects listed below.

Prototype

public static <E> ObservableList<E> observableArrayList(Collection<? extends E> col) 

Document

Creates a new observable array list and adds the contents of the collection col to it.
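
For orientation, here is a minimal, self-contained sketch of the call before the project examples. It is an illustration only (class and variable names are made up for this page) and assumes JavaFX is on the classpath:

import java.util.Arrays;
import java.util.List;

import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;

public class ObservableArrayListDemo {

    public static void main(String[] args) {
        // A plain collection whose contents seed the new observable list.
        List<String> names = Arrays.asList("Ada", "Grace", "Linus");

        // observableArrayList(Collection) copies the collection into a new ObservableList;
        // later changes to the ObservableList do not write back to the source collection.
        ObservableList<String> observable = FXCollections.observableArrayList(names);

        // Registered listeners are notified of subsequent modifications.
        observable.addListener(
                (ListChangeListener<String>) change -> System.out.println("Changed: " + change));

        observable.add("Margaret"); // fires the change listener
        System.out.println(observable); // [Ada, Grace, Linus, Margaret]
    }
}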

Usage

From source file:org.dataconservancy.packaging.gui.presenter.impl.EditPackageContentsPresenterImplTest.java

@Test
public void testUnignoreDataItemUnignoresParent() {
    presenter.rebuildTreeView();
    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(collection)), true);

    assertTrue(collection.isIgnored());
    assertTrue(dataItem.isIgnored());
    assertTrue(dataFile.isIgnored());

    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(dataItem)), false);

    //Parent should not be ignored
    assertFalse(collection.isIgnored());

    //Item and its children should not be ignored
    assertFalse(dataItem.isIgnored());
    assertFalse(dataFile.isIgnored());
}

From source file:com.danilafe.sbaccountmanager.StarboundServerAccountManager.java

private void createAccount(ListView<String> to_update) {
    Stage createAccountStage = new Stage();
    createAccountStage.initModality(Modality.APPLICATION_MODAL);

    //Set the stage info
    createAccountStage.setTitle("Add Server Account");

    //Create a layout
    GridPane gp = new GridPane();
    gp.setAlignment(Pos.CENTER);
    gp.setVgap(10);
    gp.setHgap(10);
    gp.setPadding(new Insets(25, 25, 25, 25));

    //Add the important controls
    Text welcome = new Text("Create Server Account");
    welcome.setFont(Font.font("Century Gothic", FontWeight.NORMAL, 20));
    gp.add(welcome, 0, 0, 2, 1);

    Label username = new Label("Username");
    Label password = new Label("Password");
    Label r_password = new Label("Repeat Password");

    TextField usernamefield = new TextField();
    PasswordField passwordfield = new PasswordField();
    PasswordField r_passwordfield = new PasswordField();

    gp.add(username, 0, 1);
    gp.add(password, 0, 2);
    gp.add(r_password, 0, 3);

    gp.add(usernamefield, 1, 1);
    gp.add(passwordfield, 1, 2);
    gp.add(r_passwordfield, 1, 3);

    Text error = new Text("");
    HBox error_box = new HBox(10);
    error_box.setAlignment(Pos.CENTER);
    error_box.getChildren().add(error);
    gp.add(error_box, 0, 4, 2, 1);

    Button finish = new Button("Finish");
    finish.setDisable(true);
    HBox center_button = new HBox(10);
    center_button.setAlignment(Pos.CENTER);
    center_button.getChildren().add(finish);
    gp.add(center_button, 0, 5, 2, 1);

    ChangeListener<String> name = new ChangeListener<String>() {

        @Override
        public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
            finish.setDisable(true);
            if (usernamefield.getText().equals("")) {
                error.setFill(Color.RED);
                error.setText("Username can not be blank!");
            }
            if (!passwordfield.getText().equals(r_passwordfield.getText())) {
                error.setFill(Color.RED);
                error.setText("Passwords do not match!");
            }
            if (passwordfield.getText().equals("") && r_passwordfield.getText().equals("")) {
                error.setFill(Color.RED);
                error.setText("Passwords can not be blank!");
            }
            if (passwordfield.getText().equals(r_passwordfield.getText()) && !usernamefield.getText().equals("")
                    && !passwordfield.getText().equals("")) {
                error.setFill(Color.GREEN);
                error.setText("No issues.");
                finish.setDisable(false);
            }
        }

    };

    usernamefield.textProperty().addListener(name);
    passwordfield.textProperty().addListener(name);
    r_passwordfield.textProperty().addListener(name);

    finish.setOnAction(new EventHandler<ActionEvent>() {

        @Override
        public void handle(ActionEvent e) {

            users.remove(usernamefield.getText());
            users.add(usernamefield.getText());
            userpasswords.put(usernamefield.getText(), passwordfield.getText());
            to_update.setItems(FXCollections.observableArrayList(users));

            createAccountStage.close();
        }

    });

    //Creates the scene
    Scene scene = new Scene(gp, 300, 275);
    scene.getStylesheets().add(this.getClass().getResource("login_css.css").toExternalForm());

    createAccountStage.setScene(scene);
    createAccountStage.show();

}

From source file:net.sourceforge.pmd.util.fxdesigner.XPathPanelController.java

/**
 * Evaluate the contents of the XPath expression area
 * on the given compilation unit. This updates the xpath
 * result panel, and can log XPath exceptions to the
 * event log panel.
 *
 * @param compilationUnit The AST root
 * @param version         The language version
 */
public void evaluateXPath(Node compilationUnit, LanguageVersion version) {

    try {
        String xpath = getXpathExpression();
        if (StringUtils.isBlank(xpath)) {
            invalidateResults(false);
            return;
        }

        ObservableList<Node> results = FXCollections.observableArrayList(xpathEvaluator.evaluateQuery(
                compilationUnit, version, getXpathVersion(), xpath, ruleBuilder.getRuleProperties()));
        xpathResultListView.setItems(
                results.stream().map(parent::wrapNode).collect(Collectors.toCollection(LiveArrayList::new)));
        parent.highlightXPathResults(results);
        violationsTitledPane.setText("Matched nodes\t(" + results.size() + ")");
    } catch (XPathEvaluationException e) {
        invalidateResults(true);
        designerRoot.getLogger().logEvent(new LogEntry(e, Category.XPATH_EVALUATION_EXCEPTION));
    }

    xpathResultListView.refresh();

}

From source file:com.exalttech.trex.ui.views.PacketTableView.java

/**
 * Handle Add packet button clicked
 *
 * @param streamName
 * @param type
 */
public void handleAddPacket(String streamName, StreamBuilderType type) {
    doUpdate = false;
    TableProfileStream newRow = new TableProfileStream();
    newRow.setName(streamName);
    tabledata.getStreamsList().add(newRow);
    Profile newProfile = new Profile();
    newProfile.setName(streamName);
    tabledata.getProfiles().add(newProfile);
    streamPacketTableView.setItems(FXCollections.observableArrayList(tabledata.getStreamsList()));
    streamPacketTableView.getSelectionModel().select(newRow);
    openStreamDialog(type);
}

From source file:org.dataconservancy.packaging.gui.presenter.impl.EditPackageContentsPresenterImplTest.java

@Test
public void testIgnoreDataFile() {
    presenter.rebuildTreeView();
    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(dataFile)), true);

    //Parent should not be ignored
    assertFalse(collection.isIgnored());
    assertFalse(dataItem.isIgnored());

    //Item should be ignored
    assertTrue(dataFile.isIgnored());
}

From source file:com.ggvaidya.scinames.summary.NameStabilityView.java

public void init() {
    Project project = projectView.getProject();

    // What do we actually need to do?
    boolean flag_calculateNameSimilarity = (toCalculate & NAME_SIMILARITY) == NAME_SIMILARITY;
    boolean flag_calculateClusterSimilarity = (toCalculate & CLUSTER_SIMILARITY) == CLUSTER_SIMILARITY;
    boolean flag_calculateCircumscriptionSimilarity = (toCalculate
            & CIRCUMSCRIPTIONAL_SIMILARITY) == CIRCUMSCRIPTIONAL_SIMILARITY;

    // Setup stage.
    stage.setTitle("Name stability between " + project.getDatasets().size() + " datasets");

    // Setup table.
    controller.getTableEditableProperty().set(false);
    //controller.setTableColumnResizeProperty(TableView.CONSTRAINED_RESIZE_POLICY);
    ObservableList<TableColumn> cols = controller.getTableColumnsProperty();
    cols.clear();

    // Precalculating.
    Table<Dataset, String, String> precalc = HashBasedTable.create();

    // Set up columns.
    cols.add(createTableColumnFromPrecalc(precalc, "dataset"));
    cols.add(createTableColumnFromPrecalc(precalc, "date"));
    cols.add(createTableColumnFromPrecalc(precalc, "year"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_binomial"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "count_monotypic_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "names_added"));
    //cols.add(createTableColumnFromPrecalc(precalc, "names_added_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "names_deleted"));
    //cols.add(createTableColumnFromPrecalc(precalc, "names_deleted_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "species_added"));
    //cols.add(createTableColumnFromPrecalc(precalc, "species_added_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "species_deleted"));
    //cols.add(createTableColumnFromPrecalc(precalc, "species_deleted_list"));
    cols.add(createTableColumnFromPrecalc(precalc, "mean_binomials_per_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "median_binomials_per_genera"));
    cols.add(createTableColumnFromPrecalc(precalc, "mode_binomials_per_genera_list"));

    /* All them stability calculations */
    if (flag_calculateNameSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "names_identical_to_last_pc_last"));
    }

    if (flag_calculateClusterSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "clusters_identical_to_last_pc_last"));
    }

    if (flag_calculateCircumscriptionSimilarity) {
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_prev_pc_prev"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_next_pc_next"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_first_pc_first"));

        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_this"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_union"));
        cols.add(createTableColumnFromPrecalc(precalc, "circumscriptions_identical_to_last_pc_last"));
    }

    Set<String> recognitionColumns = new HashSet<>();

    // Calculate binomials per dataset.
    Map<Name, Set<Dataset>> datasetsPerName = new HashMap<>();

    // Prepare to loop!
    List<Dataset> checklists = project.getChecklists();

    // BIRD HACK! Include all datasets!
    // checklists = project.getDatasets();

    // Set table items. We're only interested in checklists, because
    // there's no such thing as "name stability" between non-checklist datasets.
    controller.getTableItemsProperty().set(FXCollections.observableArrayList(checklists));

    List<Dataset> prevChecklists = new LinkedList<>();
    Dataset firstChecklist = checklists.get(0);
    Dataset lastChecklist = checklists.get(checklists.size() - 1);

    // TODO: This used to be prevDataset, but prevChecklist makes a lot more sense, since we
    // want to compare checklists with each other, ignoring datasets. Would be nice if someone
    // with copious free time could look over the calculations and make sure they don't assume
    // that the previous checklist is also the previous dataset?
    Dataset prevChecklist = null;

    int index = -1;
    for (Dataset ds : checklists) {
        index++;

        Dataset nextChecklist = (index < (checklists.size() - 1) ? checklists.get(index + 1) : null);

        precalc.put(ds, "dataset", ds.getName());
        precalc.put(ds, "date", ds.getDate().asYYYYmmDD("-"));
        precalc.put(ds, "year", ds.getDate().getYearAsString());

        Set<Name> recognizedBinomials = project.getRecognizedNames(ds).stream().flatMap(n -> n.asBinomial())
                .collect(Collectors.toSet());
        precalc.put(ds, "count_binomial", String.valueOf(recognizedBinomials.size()));

        Set<Name> recognizedGenera = recognizedBinomials.stream().flatMap(n -> n.asGenus())
                .collect(Collectors.toSet());
        precalc.put(ds, "count_genera", String.valueOf(recognizedGenera.size()));
        precalc.put(ds, "mean_binomials_per_genera",
                new BigDecimal(((double) recognizedBinomials.size()) / recognizedGenera.size())
                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());

        Map<Name, List<Name>> countBinomialsPerGenus = recognizedBinomials.stream()
                // Eliminate names that have zero (or more than one?!) genus name.
                .filter(n -> (n.asGenus().count() == 1))
                .collect(Collectors.groupingBy(n -> n.asGenus().findAny().get()));

        /*
        LOGGER.info("Debugging: list of " + recognizedGenera.size() + " genera: " + 
           recognizedGenera.stream().map(n -> n.getFullName()).collect(Collectors.joining(", "))
        );
        */

        precalc.put(ds, "count_monotypic_genera", String.valueOf(countBinomialsPerGenus.entrySet().stream()
                .filter(entry -> new HashSet<>(entry.getValue()).size() == 1).count()));

        /*
        LOGGER.info("Debugging: list of monotypic genera: " + 
           countBinomialsPerGenus.entrySet().stream()
              .filter(entry -> new HashSet<>(entry.getValue()).size() == 1)
              .map(entry -> entry.getKey().getFullName())
              .collect(Collectors.joining(", "))
        );
        */

        // Species added and deleted
        Set<Name> namesAdded = ds.getChanges(project).filter(ch -> ch.getType().equals(ChangeType.ADDITION))
                .flatMap(ch -> ch.getToStream()).collect(Collectors.toSet());
        Set<Name> namesDeleted = ds.getChanges(project).filter(ch -> ch.getType().equals(ChangeType.DELETION))
                .flatMap(ch -> ch.getFromStream()).collect(Collectors.toSet());

        // TODO: This isn't so useful -- the more useful measure would be the number of all species added
        // and all species deleted, making sure there isn't a cluster-al overlap.
        precalc.put(ds, "names_added", String.valueOf(namesAdded.size()));
        //precalc.put(ds, "names_added_list", namesAdded.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(ds, "names_deleted", String.valueOf(namesDeleted.size()));
        //precalc.put(ds, "names_deleted_list", namesDeleted.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));

        // Eliminate names that have been added, but were previously recognized at the species level.
        Set<Name> speciesAdded = namesAdded;
        if (prevChecklist != null) {
            Set<Name> prevRecognizedNames = project.getNameClusterManager()
                    .getClusters(project.getRecognizedNames(prevChecklist)).stream()
                    .flatMap(nc -> nc.getNames().stream()).collect(Collectors.toSet());
            speciesAdded = namesAdded.stream().filter(n -> !prevRecognizedNames.contains(n))
                    .collect(Collectors.toSet());
        }

        // Eliminate names that are still represented in the checklist by a species cluster.
        // (Note that this includes cases where a subspecies is removed, but another subspecies
        // or the nominal species is still recognized!)
        Set<Name> currentlyRecognizedBinomialNames = project.getNameClusterManager()
                .getClusters(project.getRecognizedNames(ds)).stream().flatMap(nc -> nc.getNames().stream())
                .flatMap(n -> n.asBinomial()).collect(Collectors.toSet());
        Set<Name> speciesDeleted = namesDeleted.stream()
                .filter(n -> !n.asBinomial().anyMatch(bn -> currentlyRecognizedBinomialNames.contains(bn)))
                .collect(Collectors.toSet());

        precalc.put(ds, "species_added", String.valueOf(speciesAdded.size()));
        precalc.put(ds, "species_added_list",
                speciesAdded.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));
        precalc.put(ds, "species_deleted", String.valueOf(speciesDeleted.size()));
        precalc.put(ds, "species_deleted_list",
                speciesDeleted.stream().sorted().map(n -> n.getFullName()).collect(Collectors.joining(", ")));

        // Measures of species per genera
        java.util.Map<String, Set<Name>> binomialsPerGenera = recognizedBinomials.stream()
                .collect(Collectors.toMap(n -> n.getGenus(), n -> {
                    Set<Name> set = new HashSet<Name>();
                    set.add(n);
                    return set;
                }, (a, b) -> {
                    a.addAll(b);
                    return a;
                }));

        List<Integer> binomialsPerGeneraCounts = binomialsPerGenera.values().stream().map(set -> set.size())
                .sorted().collect(Collectors.toList());

        Frequency freq = new Frequency();
        for (String genus : binomialsPerGenera.keySet()) {
            // Blech.
            for (Name binom : binomialsPerGenera.get(genus)) {
                freq.addValue(genus);
            }
        }
        List<Comparable<?>> modeGenera = freq.getMode();
        precalc.put(ds, "mode_binomials_per_genera_list",
                modeGenera.stream().map(o -> o.toString() + ": " + freq.getCount(o) + " binomials")
                        .collect(Collectors.joining("; ")));

        double[] binomialsPerGeneraCountsAsDouble = binomialsPerGeneraCounts.stream()
                .mapToDouble(Integer::doubleValue).toArray();
        Median median = new Median();
        precalc.put(ds, "median_binomials_per_genera",
                String.valueOf(median.evaluate(binomialsPerGeneraCountsAsDouble)));

        if (firstChecklist == null) {
            //            precalc.put(ds, "names_identical_to_first", "NA");
            //            precalc.put(ds, "names_identical_to_first_pc", "NA");
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_first",
                        String.valueOf(getBinomialNamesIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "names_identical_to_first_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_first_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / getBinomialNamesUnion(project, ds, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_first_pc_first",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, firstChecklist).size()
                                / getBinomialNamesUnion(project, firstChecklist, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_first",
                        String.valueOf(getBinomialClustersIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "clusters_identical_to_first_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_first_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                        / getBinomialClustersUnion(project, ds, firstChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_first_pc_first", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, firstChecklist).size()
                                / getBinomialClustersUnion(project, firstChecklist, firstChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_first", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_first_pc_this", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_first_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, firstChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_first_pc_first", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, firstChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, firstChecklist, firstChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        if (lastChecklist == null) {
            //            precalc.put(ds, "names_identical_to_first", "NA");
            //            precalc.put(ds, "names_identical_to_first_pc", "NA");
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_last",
                        String.valueOf(getBinomialNamesIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "names_identical_to_last_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_last_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / getBinomialNamesUnion(project, ds, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_last_pc_last",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, lastChecklist).size()
                                / getBinomialNamesUnion(project, lastChecklist, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_last",
                        String.valueOf(getBinomialClustersIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "clusters_identical_to_last_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_last_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                        / getBinomialClustersUnion(project, ds, lastChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_last_pc_last", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, lastChecklist).size()
                                / getBinomialClustersUnion(project, lastChecklist, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_last", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_last_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_last_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, lastChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_last_pc_last", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, lastChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, lastChecklist, lastChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        if (prevChecklist == null) {
            //            precalc.put(ds, "names_identical_to_prev", "NA");
            //            precalc.put(ds, "names_identical_to_prev_pc", "NA");            
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_prev",
                        String.valueOf(getBinomialNamesIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "names_identical_to_prev_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_prev_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / getBinomialNamesUnion(project, ds, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_prev_pc_prev",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, prevChecklist).size()
                                / getBinomialNamesUnion(project, prevChecklist, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_prev",
                        String.valueOf(getBinomialClustersIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "clusters_identical_to_prev_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_prev_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                        / getBinomialClustersUnion(project, ds, prevChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_prev_pc_prev", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, prevChecklist).size()
                                / getBinomialClustersUnion(project, prevChecklist, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_prev", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, prevChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_prev_pc_prev", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, prevChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, prevChecklist, prevChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            // FYI, getBinomialTaxonConceptsUnion(project, ds, prevChecklist).size() should always be equal to the number of species in the dataset.
        }

        if (nextChecklist == null) {
            //         precalc.put(ds, "names_identical_to_prev", "NA");
            //         precalc.put(ds, "names_identical_to_prev_pc", "NA");            
        } else {
            if (flag_calculateNameSimilarity) {
                precalc.put(ds, "names_identical_to_next",
                        String.valueOf(getBinomialNamesIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "names_identical_to_next_pc_this",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / recognizedBinomials.size() * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN)
                                        .toPlainString());
                precalc.put(ds, "names_identical_to_next_pc_union",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / getBinomialNamesUnion(project, ds, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "names_identical_to_next_pc_next",
                        new BigDecimal((double) getBinomialNamesIntersection(project, ds, nextChecklist).size()
                                / getBinomialNamesUnion(project, nextChecklist, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateClusterSimilarity) {
                int clustersForDataset = project.getNameClusterManager().getClusters(recognizedBinomials)
                        .size();
                if (clustersForDataset != recognizedBinomials.size()) {
                    throw new RuntimeException(
                            "We have " + clustersForDataset + " clusters for this dataset, but "
                                    + recognizedBinomials.size() + " recognized binomials. What?");
                }
                precalc.put(ds, "clusters_identical_to_next",
                        String.valueOf(getBinomialClustersIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "clusters_identical_to_next_pc_this",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                        / getBinomialClustersUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_next_pc_union",
                        new BigDecimal(
                                (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                        / getBinomialClustersUnion(project, ds, nextChecklist).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "clusters_identical_to_next_pc_next", new BigDecimal(
                        (double) getBinomialClustersIntersection(project, ds, nextChecklist).size()
                                / getBinomialClustersUnion(project, nextChecklist, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }

            if (flag_calculateCircumscriptionSimilarity) {
                precalc.put(ds, "circumscriptions_identical_to_next", String
                        .valueOf(getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()));
                precalc.put(ds, "circumscriptions_identical_to_next_pc_this",
                        new BigDecimal(
                                (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                        / getBinomialTaxonConceptsUnion(project, ds, ds).size() * 100)
                                                .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_next_pc_union", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, ds, nextChecklist).size() * 100)
                                        .setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
                precalc.put(ds, "circumscriptions_identical_to_next_pc_next", new BigDecimal(
                        (double) getBinomialTaxonConceptsIntersection(project, ds, nextChecklist).size()
                                / getBinomialTaxonConceptsUnion(project, nextChecklist, nextChecklist).size()
                                * 100).setScale(2, BigDecimal.ROUND_HALF_EVEN).toPlainString());
            }
        }

        /*
        // For the visualization thingie.
        int total = prevChecklists.size();
        List<Integer> counts = new LinkedList<>();
        for(Name name: recognizedBinomials) {
           int prevRecognized = 0;
                   
           if(!datasetsPerName.containsKey(name)) {
              datasetsPerName.put(name, new HashSet<>());
           } else {
              prevRecognized = datasetsPerName.get(name).size();
           }
                   
           datasetsPerName.get(name).add(ds);
           counts.add(
              (int)(
          ((double)prevRecognized)/total*100
              )
           );
        }
                
        Map<Integer, List<Integer>> countsByPercentage = counts.stream().sorted().collect(Collectors.groupingBy(n -> (int)(n/10)*10));
        for(int percentage: countsByPercentage.keySet()) {
           precalc.put(ds, "previously_recognized_" + percentage + "pc", String.valueOf(countsByPercentage.get(percentage).size()));   
           recognitionColumns.add("previously_recognized_" + percentage + "pc");
        }
        prevChecklists.add(ds);
        */

        // Set up the previous checklist for the next loop.
        prevChecklist = ds;
    }

    /*
    LinkedList<String> recognitionColumnsList = new LinkedList<>(recognitionColumns);
    recognitionColumnsList.sort(null);      
    for(String colName: recognitionColumnsList) {
       cols.add(createTableColumnFromPrecalc(precalc, colName));
    }*/
}

From source file:org.dataconservancy.packaging.gui.presenter.impl.EditPackageContentsPresenterImplTest.java

@Test
public void testUnIgnoreDataFile() {
    presenter.rebuildTreeView();
    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(dataFile)), true);

    //Parent should not be ignored
    assertFalse(collection.isIgnored());
    assertFalse(dataItem.isIgnored());

    //Item should be ignored
    assertTrue(dataFile.isIgnored());

    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(dataFile)), false);

    //Parent should not be ignored
    assertFalse(collection.isIgnored());
    assertFalse(dataItem.isIgnored());

    //Item should not be ignored
    assertFalse(dataFile.isIgnored());
}

From source file:acmi.l2.clientmod.l2smr.Controller.java

private void initializeUnr() {
    mapsDirProperty().addListener((observable, oldValue, newValue) -> {
        unrChooser.getSelectionModel().clearSelection();
        unrChooser.getItems().clear();
        unrChooser.setDisable(true);

        if (newValue == null)
            return;

        unrChooser.getItems().addAll(Arrays.stream(newValue.listFiles(MAP_FILE_FILTER)).map(File::getName)
                .collect(Collectors.toList()));

        unrChooser.setDisable(false);

        AutoCompleteComboBox.autoCompleteComboBox(unrChooser, AutoCompleteComboBox.AutoCompleteMode.CONTAINING);
    });
    this.unrChooser.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
        table.getSelectionModel().clearSelection();
        filterPane.setDisable(true);
        actors.set(null);
        actorStaticMeshChooser.getItems().clear();

        System.gc();

        if (newValue == null)
            return;

        try (UnrealPackage up = new UnrealPackage(new File(getMapsDir(), newValue), true)) {
            longTask(progress -> {
                List<UnrealPackage.ImportEntry> staticMeshes = up.getImportTable().parallelStream()
                        .filter(ie -> ie.getFullClassName().equalsIgnoreCase("Engine.StaticMesh"))
                        .sorted((ie1, ie2) -> String.CASE_INSENSITIVE_ORDER
                                .compare(ie1.getObjectInnerFullName(), ie2.getObjectInnerFullName()))
                        .collect(Collectors.toList());
                Platform.runLater(() -> {
                    actorStaticMeshChooser.getItems().setAll(staticMeshes);
                    AutoCompleteComboBox.autoCompleteComboBox(actorStaticMeshChooser,
                            AutoCompleteComboBox.AutoCompleteMode.CONTAINING);
                });

                List<Actor> actors = up
                        .getExportTable().parallelStream().filter(e -> UnrealPackage.ObjectFlag
                                .getFlags(e.getObjectFlags()).contains(UnrealPackage.ObjectFlag.HasStack))
                        .map(entry -> {
                            try {
                                return new Actor(entry.getIndex(), entry.getObjectInnerFullName(),
                                        entry.getObjectRawDataExternally(), up);
                            } catch (Throwable e) {
                                return null;
                            }
                        }).filter(Objects::nonNull)
                        .filter(actor -> actor.getStaticMeshRef() != 0 && actor.getOffsets().location != 0)
                        .collect(Collectors.toList());
                Platform.runLater(() -> Controller.this.actors.set(FXCollections.observableArrayList(actors)));
            }, e -> onException("Import failed", e));
        } catch (Exception e) {
            onException("Read failed", e);
        }

        resetFilter();
        filterPane.setDisable(false);
    });
    this.actorColumn.setCellValueFactory(actorStringCellDataFeatures -> new SimpleStringProperty(
            actorStringCellDataFeatures.getValue().getActorName()));
    this.staticMeshColumn.setCellValueFactory(actorStringCellDataFeatures -> new SimpleStringProperty(
            actorStringCellDataFeatures.getValue().getStaticMesh()));
    this.table.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
    this.table.getSelectionModel().selectedItemProperty().addListener((observable) -> updateSMAPane());

    this.table.setOnMouseClicked(event -> {
        if (event.getButton() == MouseButton.PRIMARY && event.getClickCount() == 2) {
            String obj = table.getSelectionModel().getSelectedItem().getStaticMesh();
            String file = obj.substring(0, obj.indexOf('.')) + ".usx";
            showUmodel(obj, file);
        }
    });
}

From source file:com.danilafe.sbaccountmanager.StarboundServerAccountManager.java

private void createBannedUser(ListView<String> to_update) {
    Stage createBannedUser = new Stage();
    createBannedUser.setTitle("Add Banned User");
    createBannedUser.initModality(Modality.APPLICATION_MODAL);

    GridPane gp = new GridPane();
    gp.setPadding(new Insets(25, 25, 25, 25));
    gp.setAlignment(Pos.CENTER);
    gp.setVgap(10);
    gp.setHgap(10);

    Text title = new Text("Add Banned Username");
    title.setFont(Font.font("Century Gothic", FontWeight.NORMAL, 20));
    gp.add(title, 0, 0, 2, 1);

    Label newusername = new Label("Ban Username");
    TextField username = new TextField();
    gp.add(newusername, 0, 1);
    gp.add(username, 1, 1);

    Button finish = new Button("Finish");
    HBox finish_box = new HBox(10);
    finish_box.setAlignment(Pos.CENTER);
    finish_box.getChildren().add(finish);

    finish.setOnAction(new EventHandler<ActionEvent>() {

        @Override
        public void handle(ActionEvent event) {
            banned_usernames.remove(username.getText());
            banned_usernames.add(username.getText());
            to_update.setItems(FXCollections.observableArrayList(banned_usernames));
            createBannedUser.close();
        }

    });

    gp.add(finish_box, 0, 2, 2, 1);

    Scene sc = new Scene(gp, 300, 175);
    createBannedUser.setScene(sc);
    createBannedUser.show();
}

From source file:org.dataconservancy.packaging.gui.presenter.impl.EditPackageContentsPresenterImplTest.java

@Test
public void testUnIgnoreDataFileUnIgnoresParents() {
    presenter.rebuildTreeView();
    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(collection)), true);

    assertTrue(collection.isIgnored());
    assertTrue(dataItem.isIgnored());
    assertTrue(dataFile.isIgnored());

    presenter.toggleItemIgnore(FXCollections.observableArrayList(presenter.findItem(dataFile)), false);

    //Parents should not be ignored
    assertFalse(collection.isIgnored());
    assertFalse(dataItem.isIgnored());

    //Item should not be ignored
    assertFalse(dataFile.isIgnored());
}