List of usage examples for javafx.collections.FXCollections#observableList
public static <E> ObservableList<E> observableList(List<E> list)
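A note on semantics before the examples: observableList wraps the list you pass in rather than copying it. Mutations made through the returned ObservableList are reported to listeners, but mutations made directly on the backing list are not. A minimal self-contained sketch (class and variable names are illustrative, not from the examples below):

import java.util.ArrayList;
import java.util.List;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;

public class ObservableListDemo {
    public static void main(String[] args) {
        List<String> backing = new ArrayList<>();
        ObservableList<String> observable = FXCollections.observableList(backing);

        observable.addListener((ListChangeListener<String>) change -> {
            while (change.next()) {
                System.out.println("Change: " + change);
            }
        });

        observable.add("alpha"); // reported to the listener
        backing.add("beta");     // mutates the backing list silently; listeners are not notified
    }
}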
From source file:com.chart.SwingChart.java
/**
 * Series edition.
 *
 * @param series Series to edit
 */
void editSeries(final Series series) {
    String[] style = series.getStyle().split(";");
    String strColor = "black";
    final TextField editWidth = new TextField();
    String tempS = "null";
    for (String e : style) {
        if (e.contains("color: ")) {
            strColor = e.replace("color: ", "");
        } else if (e.contains("width: ")) {
            editWidth.setText(e.replace("width: ", ""));
        } else if (e.contains("shape: ")) {
            tempS = e.replace("shape: ", "");
        }
    }
    final String symbol = tempS;

    final List<SeriesShape> symbolList = new ArrayList<>();
    final ObservableList<SeriesShape> symbolListModel;
    final ListView<SeriesShape> comboSymbol = new ListView<>();
    symbolList.add(new SeriesShape("null", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("rectangle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("circle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("triangle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("crux", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("diamond", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("empty rectangle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("empty circle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("empty triangle", javafx.scene.paint.Color.web(strColor)));
    symbolList.add(new SeriesShape("empty diamond", javafx.scene.paint.Color.web(strColor)));
    symbolListModel = FXCollections.observableList(symbolList);
    comboSymbol.setItems(symbolListModel);
    comboSymbol.setCellFactory(new Callback<ListView<SeriesShape>, ListCell<SeriesShape>>() {
        @Override
        public ListCell<SeriesShape> call(ListView<SeriesShape> p) {
            ListCell<SeriesShape> cell = new ListCell<SeriesShape>() {
                @Override
                protected void updateItem(SeriesShape t, boolean bln) {
                    super.updateItem(t, bln);
                    if (t != null) {
                        setText("");
                        setGraphic(t.getShapeGraphic());
                    }
                }
            };
            return cell;
        }
    });
    for (SeriesShape smb : symbolListModel) {
        if (smb.getName().equals(symbol)) {
            comboSymbol.getSelectionModel().select(smb);
        }
    }

    final ColorPicker colorPicker = new ColorPicker(javafx.scene.paint.Color.web(strColor));
    colorPicker.setOnAction((ActionEvent t) -> {
        String sc = colorPicker.getValue().toString();
        symbolListModel.clear();
        symbolListModel.add(new SeriesShape("null", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("rectangle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("circle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("triangle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("crux", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("diamond", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("empty rectangle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("empty circle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("empty triangle", javafx.scene.paint.Color.web(sc)));
        symbolListModel.add(new SeriesShape("empty diamond", javafx.scene.paint.Color.web(sc)));
        comboSymbol.setItems(symbolListModel);
        for (SeriesShape smb : symbolListModel) {
            if (smb.getName().equals(symbol)) {
                comboSymbol.getSelectionModel().select(smb);
            }
        }
    });

    GridPane grid = new GridPane();
    grid.setHgap(10);
    grid.setVgap(10);
    grid.setPadding(new Insets(0, 10, 0, 10));
    grid.add(new Label("Series"), 0, 0);
    grid.add(new Label(series.getKey().toString()), 1, 0);
    grid.add(new Label("Color"), 0, 1);
    grid.add(colorPicker, 1, 1);
    grid.add(new Label("Width"), 0, 2);
    grid.add(editWidth, 1, 2);
    grid.add(new Label("Shape"), 0, 3);
    grid.add(comboSymbol, 1, 3);

    new PseudoModalDialog(skeleton, grid, true) {
        @Override
        public boolean validation() {
            String strColor = colorPicker.getValue().toString();
            String strWidth = editWidth.getText();
            SeriesShape simb = new SeriesShape(
                    comboSymbol.getSelectionModel().getSelectedItem().toString(),
                    javafx.scene.paint.Color.web(strColor));
            // Persist the chosen shape name; the original left this fixed at "null",
            // so the shape selection was never written back to the style string.
            String strSimbolo = simb.getName();
            XYItemRenderer renderer = (XYItemRenderer) plot.getRenderer(series.getAxisIndex());
            renderer.setSeriesPaint(series.getSeriesIndex(), scene2awtColor(colorPicker.getValue()));
            try {
                if (Double.valueOf(strWidth) > 0) {
                    ((XYLineAndShapeRenderer) renderer).setSeriesLinesVisible(series.getSeriesIndex(), true);
                    // BasicStroke takes a float; the original used Integer.valueOf,
                    // which rejected fractional widths such as "1.5".
                    renderer.setSeriesStroke(series.getSeriesIndex(),
                            new BasicStroke(Float.parseFloat(strWidth)));
                } else {
                    ((XYLineAndShapeRenderer) renderer).setSeriesLinesVisible(series.getSeriesIndex(), false);
                }
            } catch (NumberFormatException ex) {
                // Unparseable width: leave the stroke unchanged.
            }
            if (simb.getName().contains("null")) {
                ((XYLineAndShapeRenderer) renderer).setSeriesShapesVisible(series.getSeriesIndex(), false);
                renderer.setSeriesShape(series.getSeriesIndex(), null);
            } else {
                ((XYLineAndShapeRenderer) renderer).setSeriesShapesVisible(series.getSeriesIndex(), true);
                renderer.setSeriesShape(series.getSeriesIndex(), simb.getShapeAWT());
                ((XYLineAndShapeRenderer) renderer).setSeriesShapesFilled(series.getSeriesIndex(),
                        !simb.getName().contains("empty"));
            }
            series.setStyle("color: " + strColor + ";width: " + editWidth.getText()
                    + ";shape: " + strSimbolo + ";");
            for (Node le : legendFrame.getChildren()) {
                if (le instanceof LegendAxis) {
                    for (Node nn : ((LegendAxis) le).getChildren()) {
                        if (nn instanceof Label
                                && ((Label) nn).getText().equals(series.getKey().toString())) {
                            ((Label) nn).setGraphic(simb.getShapeGraphic());
                        }
                    }
                }
            }
            return true;
        }
    }.show();
}
From source file:com.ggvaidya.scinames.ui.DataReconciliatorController.java
private void reconcileDataFromOneDataset() {
    Project project = dataReconciliatorView.getProjectView().getProject();
    String reconciliationMethod = reconcileUsingComboBox.getValue();
    Table<String, String, Set<String>> precalc = HashBasedTable.create();

    Dataset namesDataset = useNamesFromComboBox.getSelectionModel().getSelectedItem();
    List<NameCluster> nameClusters = null;
    List<Name> namesInDataset = null;

    // Set up namesInDataset.
    switch (namesToUseComboBox.getValue()) {
    case USE_NAMES_IN_DATASET_ROWS:
        if (namesDataset == ALL) {
            namesInDataset = project.getDatasets().stream()
                    .flatMap(ds -> ds.getNamesInAllRows().stream())
                    .distinct().sorted().collect(Collectors.toList());
        } else {
            namesInDataset = namesDataset.getNamesInAllRows().stream()
                    .sorted().distinct().collect(Collectors.toList());
        }
        break;

    case USE_ALL_REFERENCED_NAMES:
        if (namesDataset == ALL) {
            namesInDataset = project.getDatasets().stream()
                    .flatMap(ds -> ds.getReferencedNames())
                    .distinct().sorted().collect(Collectors.toList());
        } else {
            namesInDataset = namesDataset.getReferencedNames().sorted().collect(Collectors.toList());
        }
        break;

    case USE_ALL_RECOGNIZED_NAMES:
        if (namesDataset == ALL) {
            namesInDataset = project.getDatasets().stream()
                    .flatMap(ds -> project.getRecognizedNames(ds).stream())
                    .distinct().sorted().collect(Collectors.toList());
        } else {
            namesInDataset = project.getRecognizedNames(namesDataset).stream()
                    .sorted().collect(Collectors.toList());
        }
        break;
    }

    // IMPORTANT NOTE
    // This algorithm now relies on nameClusters and namesInDataset
    // having EXACTLY the same size. So please make sure every combination
    // of logic here lines up exactly.
    boolean flag_nameClustersAreTaxonConcepts = false;

    switch (reconciliationMethod) {
    case RECONCILE_BY_NAME:
        // namesInDataset already has all the names we want.
        nameClusters = createSingleNameClusters(namesDataset, namesInDataset);
        break;

    case RECONCILE_BY_SPECIES_NAME:
        namesInDataset = namesInDataset.stream().filter(n -> n.hasSpecificEpithet())
                .flatMap(n -> n.asBinomial()).distinct().sorted().collect(Collectors.toList());
        nameClusters = createSingleNameClusters(namesDataset, namesInDataset);
        break;

    case RECONCILE_BY_SPECIES_NAME_CLUSTER:
        // nameClusters = project.getNameClusterManager().getSpeciesClustersAfterFiltering(project).collect(Collectors.toList());
        namesInDataset = namesInDataset.stream().filter(n -> n.hasSpecificEpithet())
                .flatMap(n -> n.asBinomial()).distinct().sorted().collect(Collectors.toList());
        nameClusters = project.getNameClusterManager().getClusters(namesInDataset);
        break;

    case RECONCILE_BY_NAME_CLUSTER:
        // Note that this includes genus name clusters!
        nameClusters = project.getNameClusterManager().getClusters(namesInDataset);
        break;

    case RECONCILE_BY_SPECIES_TAXON_CONCEPT:
        /*
         * WARNING: untested! Please test before using!
         */
        List<NameCluster> nameClustersByName = project.getNameClusterManager().getClusters(namesInDataset);
        List<Name> namesInDatasetCorresponding = new LinkedList<>();
        List<NameCluster> nameClustersCorresponding = new LinkedList<>();

        for (int x = 0; x < namesInDataset.size(); x++) {
            // The original indexed with get(0) here, which would repeat the first
            // entry on every iteration; the loop index is what was intended.
            Name name = namesInDataset.get(x);
            NameCluster nameCluster = nameClustersByName.get(x);

            List<TaxonConcept> taxonConcepts;
            if (nameCluster == null) {
                taxonConcepts = new ArrayList<>();
            } else {
                taxonConcepts = nameCluster.getTaxonConcepts(project);
            }

            // Now we need to unwind this data structure: each entry in nameClusters
            // should have a corresponding entry in namesInDataset.
            for (TaxonConcept tc : taxonConcepts) {
                namesInDatasetCorresponding.add(name);
                nameClustersCorresponding.add((NameCluster) tc);
            }
        }

        // All good? Let's swap in those variables to replace their actual counterparts.
        namesInDataset = namesInDatasetCorresponding;
        nameClusters = nameClustersCorresponding;

        // This is special, at least for now. Maybe some day it won't be?
        flag_nameClustersAreTaxonConcepts = true;
        break;

    default:
        LOGGER.log(Level.SEVERE, "Reconciliation method ''{0}'' has not yet been implemented!",
                reconciliationMethod);
        return;
    }

    if (nameClusters == null) {
        dataTableView.setItems(FXCollections.emptyObservableList());
        return;
    }

    LOGGER.info("Name clusters ready to display: " + nameClusters.size() + " clusters");
    LOGGER.info("Based on " + namesInDataset.size() + " names from " + namesDataset + ": " + namesInDataset);

    // What columns do we have from the other dataset?
    Dataset dataDataset = includeDataFromComboBox.getSelectionModel().getSelectedItem();
    List<Dataset> datasets = null;
    if (dataDataset == ALL)
        datasets = project.getDatasets();
    else if (dataDataset == NONE)
        datasets = new ArrayList<>();
    else
        datasets = Arrays.asList(dataDataset);

    // Precalculate.
    List<String> existingColNames = new ArrayList<>();
    existingColNames.add("id");
    existingColNames.add("name");
    existingColNames.add("names_in_dataset");
    existingColNames.add("all_names_in_cluster");
    existingColNames.add("dataset_rows_for_name");
    existingColNames.add("name_cluster_id");
    // existingColNames.add("distinct_dataset_rows_for_name");

    // If these are taxon concepts, there are three other columns we want to emit.
    if (flag_nameClustersAreTaxonConcepts) {
        existingColNames.add("starts_with");
        existingColNames.add("ends_with");
        existingColNames.add("is_ongoing");
    } else {
        existingColNames.add("taxon_concept_count");
        existingColNames.add("taxon_concepts");
        existingColNames.add("trajectory");
        existingColNames.add("trajectory_without_renames");
        existingColNames.add("trajectory_lumps_splits");
    }

    existingColNames.add("first_added_dataset");
    existingColNames.add("first_added_year");
    existingColNames.add("reconciliation_duplicate_of");

    // Precalculate all dataset rows.
    Map<Name, Set<DatasetRow>> datasetRowsByName = new HashMap<>();
    for (Dataset ds : datasets) {
        Map<Name, Set<DatasetRow>> rowsByName = ds.getRowsByName();

        // Merge into the main list.
        for (Name n : rowsByName.keySet()) {
            Set<DatasetRow> rows = rowsByName.get(n);

            if (!reconciliationMethod.equals(RECONCILE_BY_NAME)) {
                // If we're reconciling by binomial names, then
                // we should include binomial names for each row, too.
                Optional<Name> binomialName = n.asBinomial().findAny();
                if (binomialName.isPresent()) {
                    Set<DatasetRow> rowsForBinomial = rowsByName.get(binomialName.get());
                    if (rowsForBinomial != null)
                        rows.addAll(rowsForBinomial);

                    // Don't write this to the sub-binomial name,
                    // just write to the binomial name.
                    n = binomialName.get();
                }
            }

            if (!datasetRowsByName.containsKey(n))
                datasetRowsByName.put(n, new HashSet<>());
            datasetRowsByName.get(n).addAll(rows);
        }
    }

    LOGGER.info("Precalculating all dataset rows");

    // Finally, come up with unique names for every dataset we might have.
    Map<DatasetColumn, String> datasetColumnMap = new HashMap<>();
    existingColNames.addAll(datasets.stream().flatMap(ds -> ds.getColumns().stream()).distinct().map(col -> {
        String colName = col.getName();
        String baseName = colName;

        int uniqueCounter = 0;
        while (existingColNames.contains(colName)) {
            // Duplicate column name! Map it elsewhere.
            uniqueCounter++;
            colName = baseName + "." + uniqueCounter;
        }

        // Where did we map it to?
        datasetColumnMap.put(col, colName);

        // Okay, now return the new column name we need to create.
        return colName;
    }).collect(Collectors.toList()));

    LOGGER.info("Precalculating " + nameClusters.size() + " name clusters");

    // Make sure names and name clusters are unique, otherwise bail.
    // Earlier this was being ensured by keeping namesInDataset as a
    // Set, but since it's a List now, duplicates might sneak in.
    assert (namesInDataset.size() == new HashSet<>(namesInDataset).size());

    // Since it's a list, we can set it up so that it always corresponds to
    // the correct name cluster.
    assert (namesInDataset.size() == nameClusters.size());

    // Now, nameClusters should NOT be de-duplicated: we might have the same
    // cluster appear multiple times! If so, we'll set
    // "reconciliation_duplicate_of" to point to the first reconciliation,
    // so we don't duplicate reconciliations.
    if (nameClusters.size() != new HashSet<>(nameClusters).size()) {
        LOGGER.warning("Clusters not unique: " + nameClusters.size() + " clusters found, but only "
                + new HashSet<>(nameClusters).size() + " are unique.");
    }

    // Track which IDs we use for duplicated name clusters.
    Map<NameCluster, List<String>> clusterIDsPerNameCluster = new HashMap<>();

    int totalClusterCount = nameClusters.size();
    int currentClusterCount = 0;
    List<String> nameClusterIDs = new LinkedList<>();
    for (NameCluster cluster : nameClusters) {
        currentClusterCount++;

        // Probably don't need GUIDs here, right?
        String clusterID = String.valueOf(currentClusterCount);
        nameClusterIDs.add(clusterID);

        LOGGER.info("(" + currentClusterCount + "/" + totalClusterCount + ") Precalculating name cluster: "
                + cluster);

        precalc.put(clusterID, "id", getOneElementSet(clusterID));
        precalc.put(clusterID, "name_cluster_id", getOneElementSet(cluster.getId().toString()));

        // The 'name' should come from namesInDataset.
        precalc.put(clusterID, "name",
                getOneElementSet(namesInDataset.get(currentClusterCount - 1).getFullName()));

        // Okay, here's what we need to do:
        // - If names is ALL, then we can't do better than cluster.getName().
        if (namesDataset == ALL) {
            precalc.put(clusterID, "names_in_dataset",
                    cluster.getNames().stream().map(n -> n.getFullName()).collect(Collectors.toSet()));
        } else {
            // Hey, here's something cool we can do: figure out which name(s)
            // this dataset uses from this cluster!
            Set<Name> namesToFilterTo = new HashSet<>(namesInDataset);
            List<String> namesInCluster = cluster.getNames().stream()
                    .filter(n -> namesToFilterTo.contains(n))
                    .map(n -> n.getFullName()).collect(Collectors.toList());
            precalc.put(clusterID, "names_in_dataset", new HashSet<>(namesInCluster));
        }

        precalc.put(clusterID, "all_names_in_cluster",
                cluster.getNames().stream().map(n -> n.getFullName()).collect(Collectors.toSet()));

        // Is this a duplicate?
        if (clusterIDsPerNameCluster.containsKey(cluster)) {
            List<String> duplicatedRows = clusterIDsPerNameCluster.get(cluster);

            // Only the first one should have the actual data.
            precalc.put(clusterID, "reconciliation_duplicate_of", getOneElementSet(duplicatedRows.get(0)));
            duplicatedRows.add(clusterID);

            // Okay, do no other work on this cluster, since all the actual information is
            // in the other entry.
            continue;
        } else {
            precalc.put(clusterID, "reconciliation_duplicate_of", getOneElementSet("NA"));

            List<String> clusterIds = new LinkedList<>();
            clusterIds.add(clusterID);
            clusterIDsPerNameCluster.put(cluster, clusterIds);
        }

        LOGGER.fine("Cluster calculation began for " + cluster);

        // If it's a taxon concept, precalculate a few more columns.
        if (flag_nameClustersAreTaxonConcepts) {
            TaxonConcept tc = (TaxonConcept) cluster;
            precalc.put(clusterID, "starts_with",
                    tc.getStartsWith().stream().map(ch -> ch.toString()).collect(Collectors.toSet()));
            precalc.put(clusterID, "ends_with",
                    tc.getEndsWith().stream().map(ch -> ch.toString()).collect(Collectors.toSet()));
            precalc.put(clusterID, "is_ongoing", getOneElementSet(tc.isOngoing(project) ? "yes" : "no"));
        } else {
            // If it's a true name cluster, then perhaps people will want
            // to know what taxon concepts are in here? Maybe for some sort
            // of PhD?
            List<TaxonConcept> tcs = cluster.getTaxonConcepts(project);
            precalc.put(clusterID, "taxon_concept_count", getOneElementSet(String.valueOf(tcs.size())));
            precalc.put(clusterID, "taxon_concepts",
                    tcs.stream().map(tc -> tc.toString()).collect(Collectors.toSet()));
        }

        LOGGER.fine("Cluster calculation ended for " + cluster);

        // When was this first added?
        List<Dataset> foundInSorted = cluster.getFoundInSortedWithDates();
        if (!foundInSorted.isEmpty()) {
            precalc.put(clusterID, "first_added_dataset", getOneElementSet(foundInSorted.get(0).getCitation()));
            precalc.put(clusterID, "first_added_year",
                    getOneElementSet(foundInSorted.get(0).getDate().getYearAsString()));
        }

        LOGGER.fine("Trajectory began for " + cluster);

        // For name clusters we can also figure out trajectories!
        if (!flag_nameClustersAreTaxonConcepts) {
            List<String> trajectorySteps = cluster.getFoundInSortedWithDates().stream().map(dataset -> {
                String changes = dataset.getChanges(project)
                        .filter(ch -> cluster.containsAny(ch.getAllNames()))
                        .map(ch -> ch.getType().toString()).collect(Collectors.joining("|"));
                if (!changes.isEmpty())
                    return changes;

                // This can happen when a change is referenced without an explicit addition.
                if (cluster.containsAny(dataset.getReferencedNames().collect(Collectors.toList())))
                    return "referenced";
                else
                    return "missing";
            }).collect(Collectors.toList());

            precalc.put(clusterID, "trajectory", getOneElementSet(String.join(" -> ", trajectorySteps)));
            precalc.put(clusterID, "trajectory_without_renames", getOneElementSet(trajectorySteps.stream()
                    .filter(ch -> !ch.contains("rename")).collect(Collectors.joining(" -> "))));
            precalc.put(clusterID, "trajectory_lumps_splits", getOneElementSet(
                    trajectorySteps.stream().filter(ch -> ch.contains("split") || ch.contains("lump"))
                            .collect(Collectors.joining(" -> "))));
        }

        LOGGER.fine("Trajectory ended for " + cluster);

        // Okay, here's where we reconcile!
        LOGGER.fine("Reconciliation began for " + cluster);

        // Now we need to actually reconcile the data from these unique row objects.
        Set<DatasetRow> allDatasetRowsCombined = new HashSet<>();
        for (Name name : cluster.getNames()) {
            // We don't have to convert cluster names to binomial,
            // because the cluster formation -- or the hacky thing we do
            // for RECONCILE_SPECIES_NAME -- should already have done that!
            //
            // Where necessary, the previous code will automatically
            // set up datasetRowsByName so it matched binomial names.
            Set<DatasetRow> rowsToReconcile = datasetRowsByName.get(name);
            if (rowsToReconcile == null)
                continue;
            allDatasetRowsCombined.addAll(rowsToReconcile);

            Set<DatasetColumn> columns = rowsToReconcile.stream()
                    .flatMap(row -> row.getColumns().stream()).collect(Collectors.toSet());
            for (DatasetColumn col : columns) {
                // We've precalculated column names.
                String colName = datasetColumnMap.get(col);

                // Make sure we get this column down into 'precalc'.
                if (!precalc.contains(clusterID, colName))
                    precalc.put(clusterID, colName, new HashSet<>());

                // Add all values for all rows in this column.
                Set<String> vals = rowsToReconcile.stream().flatMap(row -> {
                    if (!row.hasColumn(col))
                        return Stream.empty();
                    else
                        return Stream.of(row.get(col));
                }).collect(Collectors.toSet());
                precalc.get(clusterID, colName).addAll(vals);

                LOGGER.fine("Added " + vals.size() + " rows under name cluster '" + cluster + "'");
            }
        }

        LOGGER.info("(" + currentClusterCount + "/" + totalClusterCount + ") Reconciliation completed for "
                + cluster);

        precalc.put(clusterID, "dataset_rows_for_name", getOneElementSet(allDatasetRowsCombined.size()));
    }

    // Set up table items.
    dataTableView.setItems(FXCollections.observableList(nameClusterIDs));

    LOGGER.info("Setting up columns: " + existingColNames);

    dataTableView.getColumns().clear();
    for (String colName : existingColNames) {
        dataTableView.getColumns().add(createColumnFromPrecalc(colName, precalc));
    }

    // Get distinct column names.
    Stream<String> colNames = precalc.cellSet().stream().map(set -> set.getColumnKey());

    // Eliminate columns that are in the existingColNames.
    colNames = colNames.filter(colName -> !existingColNames.contains(colName));

    // And add table columns for the rest.
    List<TableColumn<String, String>> cols = colNames.distinct().sorted()
            .map(colName -> createColumnFromPrecalc(colName, precalc)).collect(Collectors.toList());
    dataTableView.getColumns().addAll(cols);
    dataTableView.refresh();

    // Fill in status text field.
    long distinctNameCount = precalc.cellSet().stream().map(cluster -> precalc.get(cluster, "name"))
            .distinct().count();
    String str_duplicates = "";
    if (distinctNameCount != dataTableView.getItems().size()) {
        str_duplicates = " for " + distinctNameCount + " distinct names";
    }
    statusTextField.setText(dataTableView.getItems().size() + " rows across " + cols.size()
            + " reconciled columns" + str_duplicates);

    LOGGER.info("All done!");
}
From source file:com.esri.geoevent.test.performance.ui.FixtureController.java
/**
 * TODO: More finite control of when we connect to producers and consumers
 *
 * @param remoteHost
 */
// private void disconnect(final ConnectableRemoteHost remoteHost)
// {
//
// }

private ObservableList<Protocol> getProducerProtocolList() {
    ArrayList<Protocol> list = new ArrayList<Protocol>();
    list.add(Protocol.ACTIVE_MQ);
    list.add(Protocol.AZURE);
    list.add(Protocol.KAFKA);
    list.add(Protocol.RABBIT_MQ);
    list.add(Protocol.STREAM_SERVICE);
    list.add(Protocol.TCP);
    list.add(Protocol.TCP_SERVER);
    list.add(Protocol.WEBSOCKETS);
    list.add(Protocol.WEBSOCKET_SERVER);
    return FXCollections.observableList(list);
}
From source file:com.esri.geoevent.test.performance.ui.FixtureController.java
private ObservableList<Protocol> getConsumerProtocolList() {
    ArrayList<Protocol> list = new ArrayList<Protocol>();
    list.add(Protocol.ACTIVE_MQ);
    list.add(Protocol.AZURE);
    list.add(Protocol.BDS);
    list.add(Protocol.RABBIT_MQ);
    list.add(Protocol.STREAM_SERVICE);
    list.add(Protocol.TCP);
    list.add(Protocol.TCP_SERVER);
    list.add(Protocol.WEBSOCKETS);
    list.add(Protocol.KAFKA);
    return FXCollections.observableList(list);
}
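The two protocol-list builders above follow the same pattern: fill an ArrayList, then wrap it with observableList. A more concise equivalent is the varargs factory FXCollections.observableArrayList; a sketch under the assumption that the Protocol enum and return type match the methods above:

// Sketch only: builds the same producer list in one expression.
private ObservableList<Protocol> getProducerProtocolList() {
    return FXCollections.observableArrayList(
            Protocol.ACTIVE_MQ, Protocol.AZURE, Protocol.KAFKA,
            Protocol.RABBIT_MQ, Protocol.STREAM_SERVICE, Protocol.TCP,
            Protocol.TCP_SERVER, Protocol.WEBSOCKETS, Protocol.WEBSOCKET_SERVER);
}

The difference from observableList is that observableArrayList creates its own backing list, so there is no external list to mutate behind the ObservableList's back.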
From source file:com.esri.geoevent.test.performance.ui.FixtureController.java
/**
 * FIXME: This is a hard-coded method - we need to get rid of this and
 * introspect the Producer or Consumer for its individual properties.
 */
private void initPropertiesCache() {
    // Producers
    producerPropertiesCache = new HashMap<Protocol, ObservableList<Property>>();
    Property simulationFile = new Property("simulationFile", "");

    // TCP
    ArrayList<Property> props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("hosts", "localhost"));
    props.add(new Property("port", "5565"));
    producerPropertiesCache.put(Protocol.TCP, FXCollections.observableList(props));

    // TCP_SERVER
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("hosts", "localhost"));
    props.add(new Property("port", "5565"));
    producerPropertiesCache.put(Protocol.TCP_SERVER, FXCollections.observableList(props));

    // ACTIVE_MQ
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("providerUrl", ""));
    props.add(new Property("destinationType", ""));
    props.add(new Property("destinationName", ""));
    producerPropertiesCache.put(Protocol.ACTIVE_MQ, FXCollections.observableList(props));

    // KAFKA
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("brokerList", "localhost:9092"));
    props.add(new Property("topic", "default-topic"));
    props.add(new Property("requiredAcks", "1"));
    producerPropertiesCache.put(Protocol.KAFKA, FXCollections.observableList(props));

    // RABBIT_MQ
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("uri", ""));
    props.add(new Property("exchangeName", ""));
    props.add(new Property("queueName", ""));
    props.add(new Property("routingKey", ""));
    producerPropertiesCache.put(Protocol.RABBIT_MQ, FXCollections.observableList(props));

    // STREAM_SERVICE
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("hosts", "localhost"));
    props.add(new Property("port", "6180"));
    props.add(new Property("serviceName", "vehicles"));
    props.add(new Property("connectionCount", "1"));
    producerPropertiesCache.put(Protocol.STREAM_SERVICE, FXCollections.observableList(props));

    // WEBSOCKETS
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("url", ""));
    props.add(new Property("connectionCount", "1"));
    producerPropertiesCache.put(Protocol.WEBSOCKETS, FXCollections.observableList(props));

    // WEBSOCKET_SERVER
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("port", "5565"));
    producerPropertiesCache.put(Protocol.WEBSOCKET_SERVER, FXCollections.observableList(props));

    // AZURE
    props = new ArrayList<Property>();
    props.add(simulationFile);
    props.add(new Property("producerConnectionString", ""));
    producerPropertiesCache.put(Protocol.AZURE, FXCollections.observableList(props));

    // Consumers
    consumerPropertiesCache = new HashMap<Protocol, ObservableList<Property>>();

    // TCP
    props = new ArrayList<Property>();
    props.add(new Property("hosts", "localhost"));
    props.add(new Property("port", "5575"));
    consumerPropertiesCache.put(Protocol.TCP, FXCollections.observableList(props));

    // TCP_SERVER
    props = new ArrayList<Property>();
    props.add(new Property("port", "5775"));
    consumerPropertiesCache.put(Protocol.TCP_SERVER, FXCollections.observableList(props));

    // ACTIVE_MQ
    props = new ArrayList<Property>();
    props.add(new Property("providerUrl", ""));
    props.add(new Property("destinationType", ""));
    props.add(new Property("destinationName", ""));
    consumerPropertiesCache.put(Protocol.ACTIVE_MQ, FXCollections.observableList(props));

    // RABBIT_MQ
    props = new ArrayList<Property>();
    props.add(new Property("uri", ""));
    props.add(new Property("exchangeName", ""));
    props.add(new Property("queueName", ""));
    props.add(new Property("routingKey", ""));
    consumerPropertiesCache.put(Protocol.RABBIT_MQ, FXCollections.observableList(props));

    // STREAM_SERVICE
    props = new ArrayList<Property>();
    props.add(new Property("hosts", "localhost"));
    props.add(new Property("port", "6180"));
    props.add(new Property("serviceName", "vehicles"));
    props.add(new Property("connectionCount", "1"));
    consumerPropertiesCache.put(Protocol.STREAM_SERVICE, FXCollections.observableList(props));

    // WEBSOCKETS
    props = new ArrayList<Property>();
    props.add(new Property("url", ""));
    props.add(new Property("connectionCount", "1"));
    consumerPropertiesCache.put(Protocol.WEBSOCKETS, FXCollections.observableList(props));

    // KAFKA
    props = new ArrayList<Property>();
    props.add(new Property("zookeeper", "localhost:2181"));
    props.add(new Property("consumergroup", "consumergroup1"));
    props.add(new Property("topic", "testtopic"));
    props.add(new Property("numthreads", "1"));
    consumerPropertiesCache.put(Protocol.KAFKA, FXCollections.observableList(props));

    // BDS
    props = new ArrayList<Property>();
    props.add(new Property("msLayerUrl",
            "https://gis.server.com/arcgis/rest/services/Hosted/FAA-Stream/MapServer/0"));
    consumerPropertiesCache.put(Protocol.BDS, FXCollections.observableList(props));

    // AZURE
    props = new ArrayList<Property>();
    props.add(new Property("consumerConnectionUri", ""));
    props.add(new Property("consumerEventHubName", ""));
    props.add(new Property("consumerNumberOfPartitions", ""));
    consumerPropertiesCache.put(Protocol.AZURE, FXCollections.observableList(props));
}
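The cache construction above repeats the same build-a-list-then-wrap step for every protocol. One way to trim the repetition, assuming only the Property(name, defaultValue) constructor already used above, is a small varargs helper; a sketch, not the project's actual API:

// Hypothetical helper: wraps a fixed set of properties as an ObservableList.
private static ObservableList<Property> propertyList(Property... properties) {
    return FXCollections.observableArrayList(properties);
}

// Usage, e.g. for the TCP producer entry:
producerPropertiesCache.put(Protocol.TCP, propertyList(
        new Property("simulationFile", ""),
        new Property("hosts", "localhost"),
        new Property("port", "5565")));

This also avoids reusing the single shared simulationFile Property instance across several lists, which the original does and which could surprise anyone mutating one list's entry.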
From source file:com.ggvaidya.scinames.dataset.BinomialChangesSceneController.java
private void additionalDataUpdateList() {
    // Which AdditionalData and ListOf are we in right now?
    AdditionalData aData = additionalDataCombobox.getSelectionModel().getSelectedItem();

    // No aData? Do nothing!
    if (aData == null)
        return;

    // Object currentSelection = additionalListView.getSelectionModel().getSelectedItem();
    additionalListView.setItems(FXCollections.observableList(aData.getList()));
    additionalListView.getSelectionModel().clearAndSelect(0);

    // This is also the right time to set up columns for the table.
    additionalDataTableView.getColumns().clear();
    additionalDataTableView.getColumns().addAll(aData.getColumns());

    // additionalListView.getSelectionModel().select(prevSelection);
}
From source file:nl.mvdr.umvc3replayanalyser.controller.Umvc3ReplayManagerController.java
/** Loads the replays from storage. */
private void loadReplays() {
    if (replays != null) {
        throw new IllegalStateException("Replays already loaded: " + replays);
    }
    replays = FXCollections.observableList(new ArrayList<Replay>());

    File dataDirectory = new File(this.configuration.getDataDirectoryPath());
    if (!dataDirectory.exists()) {
        throw new IllegalStateException(
                "Not an existing path: " + dataDirectory + ". Check your configuration.");
    }
    if (!dataDirectory.isDirectory()) {
        throw new IllegalStateException("Not a directory: " + dataDirectory + ". Check your configuration.");
    }

    ObjectMapper mapper = new ObjectMapper();
    for (File file : dataDirectory.listFiles()) {
        if (file.getName().endsWith(".replay")) {
            try {
                if (log.isDebugEnabled()) {
                    log.debug("Attempting to load: " + file);
                }
                Replay replay = mapper.readValue(file, Replay.class);
                log.info("Loaded from " + file + ": " + replay.getGame());
                replays.add(replay);
            } catch (IOException e) {
                log.warn("Failed to import replay from " + file, e);
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Skipping file: " + file);
            }
        }
    }

    replays.addListener(new ListChangeListener<Replay>() {
        /** {@inheritDoc} */
        @Override
        public void onChanged(Change<? extends Replay> change) {
            if (log.isDebugEnabled()) {
                log.debug("Replay list changed: " + change);
            }
            updateReplayTable();
        }
    });
}
From source file:nl.mvdr.umvc3replayanalyser.controller.Umvc3ReplayManagerController.java
/** Disables column swapping on the table view. */
// As far as I know there is no easy way to do this directly in the FXML (yet?),
// so we do this using a Java hack.
private void disableColumnSwapping() {
    // First make a copy of the columns.
    final ObservableList<TableColumn<Replay, ?>> columns = FXCollections
            .observableList(new ArrayList<>(replayTableView.getColumns()));
    // Now, whenever the list is changed, reset it to the original.
    replayTableView.getColumns().addListener(new ListChangeListener<TableColumn<Replay, ?>>() {
        /** Used to limit recursion to 1. */
        public boolean suspended;

        /** {@inheritDoc} */
        @Override
        public void onChanged(Change<? extends TableColumn<Replay, ?>> change) {
            change.next();
            if (change.wasReplaced() && !suspended) {
                this.suspended = true;
                replayTableView.getColumns().setAll(columns);
                this.suspended = false;
            }
        }
    });
}
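On JavaFX 10 and later this workaround is unnecessary: TableColumnBase exposes a reorderable property. A minimal sketch, assuming the same replayTableView field (the property did not exist when the code above was written):

// Prevent column reordering directly; no listener hack needed (JavaFX 10+).
replayTableView.getColumns().forEach(column -> column.setReorderable(false));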
From source file:nl.mvdr.umvc3replayanalyser.controller.Umvc3ReplayManagerController.java
/** Initialises the table view. */
private void initTableView() {
    List<Replay> replaysView = new ArrayList<>(replays);
    replayTableView.setItems(FXCollections.observableList(replaysView));

    // Set default sort order.
    replayTableView.getSortOrder().add(replayTableView.getColumns().get(0));

    // Set listener for item selection.
    replayTableView.getSelectionModel().selectedItemProperty().addListener(new ChangeListener<Replay>() {
        /** {@inheritDoc} */
        @Override
        public void changed(ObservableValue<? extends Replay> observable, Replay oldValue, Replay newValue) {
            handleSelectedReplayChanged(newValue);
        }
    });
}
From source file:open.dolphin.client.MainWindowController.java
private ObservableList<ReceptInfo> fetchDataFromServer() {
    List<ReceptInfo> list = new ArrayList<>();
    PVTDelegater delegater = PVTDelegater.getInstance();
    List<PatientVisitModel> pList = delegater.getPvtList();
    if (pList != null) {
        for (PatientVisitModel model : pList) {
            ReceptInfo bean = new ReceptInfo(
                    String.valueOf(model.getNumber()), model.getPvtDate(),
                    model.getPatientModel().getPatientId(), model.getPatientModel().getFullName(),
                    model.getPatientModel().getGender(), model.getHealthInsuranceInfo(),
                    model.getPatientModel().getBirthday(), model.getDoctorName(),
                    model.getDeptName(), model.getAppointment(), model.getMemo(),
                    String.valueOf(model.getState()));
            list.add(bean);
        }
    } else {
        // Dummy data (the original non-ASCII string literals were garbled to "?" in this listing).
        list.add(new ReceptInfo("1", "14:23", "234-48", "??", "", "?", "1900/01/11", "", "", "",
                "email@softbank.com", "0"));
        list.add(new ReceptInfo("2", "09:45", "213-48", "??", "", "?", "1900/12/11", "???", "", "",
                "email@kddi.com", "2"));
        list.add(new ReceptInfo("3", "12:11", "118-48", "?", "", "", "1900/11/11", "", "", "",
                "email@google.com", "3"));
        list.add(new ReceptInfo("4", "01:55", "034-48", "??", "", "", "1900/10/11", "??", "", "",
                "email@microsofut.com", "4"));
        list.add(new ReceptInfo("5", "12:30", "083-48", "??", "", "?", "1900/04/11", "?", "", "",
                "email@apple.com", "8"));
    }
    return FXCollections.observableList(list);
}
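Returning an ObservableList from a fetch method like this lets callers hand the result straight to a control. A typical call site, assuming a TableView<ReceptInfo> field named receptTableView (hypothetical; not shown in the source above):

// Bind the fetched rows directly to the table.
receptTableView.setItems(fetchDataFromServer());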