Example usage for javafx.collections ObservableList stream

List of usage examples for javafx.collections ObservableList stream

Introduction

On this page you can find example usages of javafx.collections ObservableList.stream().

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
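
Before the project excerpts below, here is a minimal, self-contained sketch (the class name and sample data are made up for illustration): because ObservableList extends java.util.List, stream() behaves exactly as it does on any other Collection, so the usual Stream pipeline operations apply.

import javafx.collections.FXCollections;
import javafx.collections.ObservableList;

import java.util.List;
import java.util.stream.Collectors;

public class ObservableListStreamDemo {
    public static void main(String[] args) {
        // An ObservableList can be created and streamed without starting the JavaFX toolkit.
        ObservableList<String> names = FXCollections.observableArrayList("Ada", "Grace", "Linus", "Alan");

        // Filter and collect, just like with any other List.
        List<String> startingWithA = names.stream()
                .filter(name -> name.startsWith("A"))
                .collect(Collectors.toList());

        System.out.println(startingWithA); // prints [Ada, Alan]
    }
}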

Usage

From source file:edu.kit.trufflehog.model.configdata.FilterDataModelTest.java

/**
 * <p>
 *     Tests the database for correct duplicate entry handling. The same entry is added twice, but should only
 *     be there once.
 * </p>
 *
 * @throws Exception Passes any errors that occurred during the test on
 */
// FIXME: 15.05.16
@Test
@Ignore
public void testForDuplicateEntry() throws Exception {
    FilterInput filterInput = generateRandomFilterInput();
    filterDataModel.addFilterToDatabaseAsynchronous(filterInput);
    filterDataModel.addFilterToDatabaseAsynchronous(filterInput);

    // Wait for all threads to finish
    Thread.sleep(1000);

    // Retrieve them
    filterDataModel = new FilterDataModel(fileSystem);
    ObservableList<FilterInput> filterInputFromDB = filterDataModel.getAllFilters();
    Map<String, FilterInput> filterInputMap = new HashMap<>();

    filterInputFromDB.stream().forEach(fIn -> filterInputMap.put(fIn.getName(), fIn));

    assertEquals(1, filterInputFromDB.size());
    assertEquals(true, filterInputMap.containsKey(filterInput.getName()));
}

From source file:edu.kit.trufflehog.model.configdata.FilterDataModelTest.java

/**
 * <p>
 *     Test the add functionality of the database by generating 100 random FilterInputs and adding them to the
 *     database and then checking if they were all added correctly.
 * </p>
 *
 * @throws Exception Passes any errors that occurred during the test on
 */
// FIXME fix this test, it randomly fails (Database file locked)
// TODO undeprecate
@Test
@Ignore
public void testAddAndLoadFilterToDatabase() throws Exception {
    List<FilterInput> filterInputs = new ArrayList<>();
    int size = (int) (Math.random() * 100);

    // Add X FilterInputs into the database
    for (int i = 0; i < size; i++) {
        FilterInput filterInput = generateRandomFilterInput();
        filterInputs.add(filterInput);
        filterDataModel.addFilterToDatabaseAsynchronous(filterInput);
    }

    // Wait for all threads to finish
    Thread.sleep(1000);

    // Retrieve them
    filterDataModel = new FilterDataModel(fileSystem);
    ObservableList<FilterInput> filterInputFromDB = filterDataModel.getAllFilters();
    Map<String, FilterInput> filterInputMap = new HashMap<>();
    filterInputFromDB.stream().forEach(fIn -> filterInputMap.put(fIn.getName(), fIn));

    // Make sure we could retrieve them all correctly
    for (FilterInput filterInput : filterInputs) {
        assertEquals(true, filterInputMap.containsKey(filterInput.getName()));
    }
}

From source file:edu.kit.trufflehog.model.configdata.FilterDataModelTest.java

/**
 * <p>
 *     Tests the remove functionality of the database by adding 100 randomly generated FilterInputs and removing
 *     them again, making sure the database is up-to-date along the way.
 * </p>
 *
 * @throws Exception Passes any errors that occurred during the test on
 */
// FIXME fix this test, it randomly fails (Database file locked)
@Test
public void testRemoveFilterFromDatabase() throws Exception {
    List<FilterInput> filterInputs = new ArrayList<>();
    int size = (int) (Math.random() * 100);

    // Add X FilterInputs into the database
    for (int i = 0; i < size; i++) {
        FilterInput filterInput = generateRandomFilterInput();
        filterInputs.add(filterInput);
        filterDataModel.addFilterToDatabaseAsynchronous(filterInput);
    }

    // Wait for all threads to finish
    Thread.sleep(1000);

    // Retrieve them
    filterDataModel = new FilterDataModel(fileSystem);
    ObservableList<FilterInput> filterInputFromDB = filterDataModel.getAllFilters();

    Map<String, FilterInput> filterInputMap = new HashMap<>();

    filterInputFromDB.stream().forEach(fIn -> filterInputMap.put(fIn.getName(), fIn));

    // Make sure we could retrieve them all correctly
    for (FilterInput filterInput : filterInputs) {
        assertEquals(true, filterInputMap.containsKey(filterInput.getName()));
    }

    // Delete them
    filterInputFromDB.forEach((value) -> filterDataModel.removeFilterFromDatabaseAsynchronous(value));

    // Wait for all threads to finish
    Thread.sleep(5000);

    // Retrieve them
    filterDataModel = new FilterDataModel(fileSystem);
    filterInputFromDB = filterDataModel.getAllFilters();

    // Make sure none were found
    assertEquals(true, filterInputFromDB.isEmpty());
}

From source file:edu.kit.trufflehog.model.configdata.FilterDataModelTest.java

/**
 * <p>
 *     Tests the update functionality of the FilterDataModel by adding FilterInputs, editing them, and then
 *     calling the update function on the database (which removes and re-adds them).
 * </p>
 *
 * @throws Exception Passes any errors that occurred during the test on
 */
// FIXME fix this test, it randomly fails (Database file locked)
// TODO undeprecate
@Test
@Ignore
public void testUpdateAndLoadFilterInDatabase() throws Exception {
    List<FilterInput> filterInputs = new ArrayList<>();
    int size = (int) (Math.random() * 100);

    // Add X FilterInputs into the database
    for (int i = 0; i < size; i++) {
        FilterInput filterInput = generateRandomFilterInput();
        filterInputs.add(filterInput);
        filterDataModel.addFilterToDatabaseAsynchronous(filterInput);
    }

    // Wait for all threads to finish
    Thread.sleep(1000);

    // Retrieve them
    ObservableList<FilterInput> filterInputFromDB = filterDataModel.getAllFilters();
    Map<String, FilterInput> filterInputMap = new HashMap<>();
    filterInputFromDB.stream().forEach(fIn -> filterInputMap.put(fIn.getName(), fIn));

    // Make sure we could retrieve them all correctly
    for (FilterInput filterInput : filterInputs) {
        assertEquals(true, filterInputMap.containsKey(filterInput.getName()));
    }

    // Update them
    List<FilterInput> updatedFilterInputs = new ArrayList<>();
    for (FilterInput filterInput : filterInputs) {
        updatedFilterInputs.add(updateFilterInput(filterInput));
        filterDataModel.updateFilterInDatabase(filterInput);
    }

    // Wait for all threads to finish
    Thread.sleep(5000);

    // Retrieve them
    filterDataModel = new FilterDataModel(fileSystem);
    filterInputFromDB = filterDataModel.getAllFilters();

    // Make sure we could retrieve them all correctly
    for (FilterInput filterInput : updatedFilterInputs) {
        assertEquals(true, filterInputMap.containsKey(filterInput.getName()));
    }
}

From source file:com.github.naoghuman.testdata.abclist.service.ExerciseTermService.java

@Override
protected Task<Void> createTask() {
    return new Task<Void>() {
        {
            updateProgress(0, saveMaxEntities);
        }

        @Override
        protected Void call() throws Exception {
            LoggerFacade.getDefault().deactivate(Boolean.TRUE);

            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();

            final ObservableList<Topic> topics = SqlProvider.getDefault().findAllTopics();
            final ObservableList<Term> terms = SqlProvider.getDefault().findAllTerms();
            final int sizeTerms = terms.size();
            final AtomicInteger index = new AtomicInteger(0);

            final CrudService crudService = DatabaseFacade.getDefault().getCrudService(entityName);
            final AtomicLong id = new AtomicLong(
                    -1_000_000_000L + DatabaseFacade.getDefault().getCrudService().count(entityName));
            topics.stream().forEach(topic -> {
                final ObservableList<Exercise> exercises = SqlProvider.getDefault()
                        .findAllExercisesWithTopicId(topic.getId());
                exercises.stream().filter(exercise -> exercise.isReady()).forEach(exercise -> {
                    final int maxExerciseTerms = TestdataGenerator.RANDOM.nextInt(70) + 10;
                    for (int i = 0; i < maxExerciseTerms; i++) {
                        final Term term = terms.get(TestdataGenerator.RANDOM.nextInt(sizeTerms));
                        final ExerciseTerm exerciseTerm = ModelProvider.getDefault().getExerciseTerm();
                        exerciseTerm.setExerciseId(exercise.getId());
                        exerciseTerm.setId(id.getAndIncrement());
                        exerciseTerm.setTermId(term.getId());

                        crudService.create(exerciseTerm);
                    }
                });

                updateProgress(index.getAndIncrement(), saveMaxEntities);
            });

            LoggerFacade.getDefault().deactivate(Boolean.FALSE);
            stopWatch.split();
            LoggerFacade.getDefault().debug(this.getClass(),
                    "  + " + stopWatch.toSplitString() + " for " + saveMaxEntities + " ExerciseTerms."); // NOI18N
            stopWatch.stop();

            return null;
        }
    };
}

From source file:com.github.naoghuman.testdata.abclist.service.LinkMappingService.java

@Override
protected Task<Void> createTask() {
    return new Task<Void>() {
        {
            updateProgress(0, saveMaxEntities);
        }

        @Override
        protected Void call() throws Exception {
            LoggerFacade.getDefault().deactivate(Boolean.TRUE);

            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();

            /*
             1) Iterate over all links.
             2) If random > 0.005d, do steps 4) and 5).
             3) Otherwise create a LinkMapping without a parent.
             4) Get 1-10 terms and create a LinkMapping for each of them
                - meaning the link is mapped to 1-10 terms.
             5) Get 0-10 topics and create a LinkMapping for each of them
                - meaning the link is mapped to 0-10 topics.
            */

            final ObservableList<Link> links = SqlProvider.getDefault().findAllLinks();
            final ObservableList<Term> terms = SqlProvider.getDefault().findAllTerms();
            final int sizeTerms = terms.size();
            final ObservableList<Topic> topics = SqlProvider.getDefault().findAllTopics();
            final int sizeTopics = topics.size();
            final AtomicInteger index = new AtomicInteger(0);

            final CrudService crudService = DatabaseFacade.getDefault().getCrudService(entityName);
            final AtomicLong id = new AtomicLong(
                    -1_000_000_000L + DatabaseFacade.getDefault().getCrudService().count(entityName));
            links.stream() // 1
                    .forEach(link -> {
                        // 2) Should the [Link] have a parent
                        final double random = TestdataGenerator.RANDOM.nextDouble();
                        if (random > 0.005d) {
                            // 4) Create [Link]s with parent [Term]
                            final int maxTerms = TestdataGenerator.RANDOM.nextInt(10) + 1;
                            for (int i = 0; i < maxTerms; i++) {
                                final LinkMapping lm = ModelProvider.getDefault().getLinkMapping();
                                lm.setId(id.getAndIncrement());

                                final Term term = terms.get(TestdataGenerator.RANDOM.nextInt(sizeTerms));
                                lm.setParentId(term.getId());
                                lm.setParentType(LinkMappingType.TERM);

                                lm.setChildId(link.getId());
                                lm.setChildType(LinkMappingType.LINK);

                                crudService.create(lm);
                            }

                            // 5) Create [Link]s with parent [Topic]
                            final int maxTopics = TestdataGenerator.RANDOM.nextInt(11);
                            for (int i = 0; i < maxTopics; i++) {
                                final LinkMapping lm = ModelProvider.getDefault().getLinkMapping();
                                lm.setId(id.getAndIncrement());

                                final Topic topic = topics.get(TestdataGenerator.RANDOM.nextInt(sizeTopics));
                                lm.setParentId(topic.getId());
                                lm.setParentType(LinkMappingType.TOPIC);

                                lm.setChildId(link.getId());
                                lm.setChildType(LinkMappingType.LINK);

                                crudService.create(lm);
                            }
                        } else {
                            // 3) Some [Link]s have no parent
                            final LinkMapping lm = ModelProvider.getDefault().getLinkMapping();
                            lm.setId(id.getAndIncrement());
                            lm.setParentId(IDefaultConfiguration.DEFAULT_ID);
                            lm.setParentType(LinkMappingType.NOT_DEFINED);
                            lm.setChildId(link.getId());
                            lm.setChildType(LinkMappingType.LINK);

                            crudService.create(lm);
                        }

                        updateProgress(index.getAndIncrement(), saveMaxEntities);
                    });

            LoggerFacade.getDefault().deactivate(Boolean.FALSE);
            stopWatch.split();
            LoggerFacade.getDefault().debug(this.getClass(),
                    "  + " + stopWatch.toSplitString() + " for " + saveMaxEntities + " LinkMappings."); // NOI18N
            stopWatch.stop();

            return null;
        }
    };
}

From source file:com.github.drbookings.ui.controller.MainController.java

private void updateStatusLabelFX() {

    final ObservableList<RoomBean> selectedRooms = RoomBeanSelectionManager.getInstance().selectionProperty();
    final List<BookingEntry> selectedBookings = selectedRooms.stream()
            .flatMap(r -> r.getBookingEntries().stream())
            .filter(new BookingFilter(guestNameFilterInput.getText())).collect(Collectors.toList());
    Range<LocalDate> selectedRange = DateBeanSelectionManager.getInstance().getSelectedDateRange();
    if (selectedRange != null) {
        StringBuilder sb = new StringBuilder();
        sb.append(selectedRange.lowerEndpoint());
        sb.append("  ");
        sb.append(selectedRange.upperEndpoint());
        selectedDatesLabel.setText(sb.toString());
    }

    final BookingsByOrigin<BookingEntry> bo = new BookingsByOrigin<>(selectedBookings);
    final StringBuilder sb = new StringBuilder(new StatusLabelStringFactory(bo).build());

    // sb.append("\tPerformance total:" +
    // StatusLabelStringFactory.DECIMAL_FORMAT.format(pc.getProfit()) + "
    // \t"
    // + "Performance/hour:" +
    // StatusLabelStringFactory.DECIMAL_FORMAT.format(pc.getProfitPerHour()));
    statusLabel.textProperty().set(sb.toString());
}

From source file:com.esri.geoevent.test.performance.ui.FixtureController.java

/**
 * Helper method that sets the fixture object and applies it to all
 * of the UI elements.
 *
 * @param fixture
 */
private void applyFixture(Fixture fixture) {
    if (fixture == null) {
        return;
    }

    if (isDefault) {
        nameField.setEditable(false);
        editNameBtn.setVisible(false);
    } else {
        nameField.setText(fixture.getName());
        nameField.setEditable(true);
        editNameBtn.setVisible(true);
    }

    // apply if necessary
    if (fixture.getConsumerConfig() != null) {
        fixture.getConsumerConfig().apply(fixture.getDefaultConfig());
    }
    if (fixture.getProducerConfig() != null) {
        fixture.getProducerConfig().apply(fixture.getDefaultConfig());
    }

    // get the consumers
    List<RemoteHost> consumers = new ArrayList<RemoteHost>();
    if (fixture.getConsumerConfig() != null) {
        if (fixture.getConsumerConfig().getProtocol() != null
                && fixture.getConsumerConfig().getProtocol() != Protocol.UNKNOWN) {
            consumersProtocolType.setValue(fixture.getConsumerConfig().getProtocol());
            if (fixture.getConsumerConfig().getProperties() != null) {
                ObservableList<Property> propsInCache = consumerPropertiesCache
                        .get(fixture.getConsumerConfig().getProtocol());
                propsInCache.stream().forEach(prop -> {
                    prop.setValue(fixture.getConsumerConfig().getPropertyValue(prop.getName()));
                });
            }
        }
        if (fixture.getConsumerConfig().getConsumers() != null) {
            consumers = fixture.getConsumerConfig().getConsumers();
        } else {
            consumers.add(fixture.getConsumerConfig().getDefaultRemoteHost());
        }
    }
    List<ConnectableRemoteHost> connectableConsumers = convert(consumers);
    consumersTable.getItems().clear();
    consumersTable.getItems().addAll(connectableConsumers);

    // get the producers
    List<RemoteHost> producers = new ArrayList<RemoteHost>();
    if (fixture.getProducerConfig() != null) {
        if (fixture.getProducerConfig().getProtocol() != null
                && fixture.getProducerConfig().getProtocol() != Protocol.UNKNOWN) {
            producersProtocolType.setValue(fixture.getProducerConfig().getProtocol());
            if (fixture.getProducerConfig().getProperties() != null) {
                ObservableList<Property> propsInCache = producerPropertiesCache
                        .get(fixture.getProducerConfig().getProtocol());
                propsInCache.stream().forEach(prop -> {
                    prop.setValue(fixture.getProducerConfig().getPropertyValue(prop.getName()));
                });
            }
        }

        if (fixture.getProducerConfig().getProducers() != null) {
            producers = fixture.getProducerConfig().getProducers();
        } else {
            producers.add(fixture.getProducerConfig().getDefaultRemoteHost());
        }
    }
    List<ConnectableRemoteHost> connectableProducers = convert(producers);
    producersTable.getItems().clear();
    producersTable.getItems().addAll(connectableProducers);

    // set the test
    if (fixture.getSimulation() == null) {
        fixture.setSimulation(new Simulation());
    }
    if (fixture.getSimulation().getTest() == null) {
        fixture.getSimulation().setTest(new TimeTest());
    }

    Test test = fixture.getSimulation().getTest();
    testType.setValue(test.getType());
    switch (test.getType()) {
    case RAMP:
        RampTest rampTest = (RampTest) test;
        if (rampTest.getEventsToAddPerTest() > 0) {
            eventsToAddPerTest.setNumber(new BigDecimal(rampTest.getEventsToAddPerTest()));
        }
        if (rampTest.getMaxEvents() > 0) {
            maxEvents.setNumber(new BigDecimal(rampTest.getMaxEvents()));
        }
        if (rampTest.getMinEvents() > 0) {
            minEvents.setNumber(new BigDecimal(rampTest.getMinEvents()));
        }
        if (rampTest.getExpectedResultCountPerTest() > 0) {
            expectedResultCountPerTest.setNumber(new BigDecimal(rampTest.getExpectedResultCountPerTest()));
        }
        break;

    case STRESS:
        StressTest stressTest = (StressTest) test;
        if (stressTest.getNumOfEvents() > 0) {
            numOfEvents.setNumber(new BigDecimal(stressTest.getNumOfEvents()));
        }
        if (stressTest.getIterations() > 0) {
            iterations.setNumber(new BigDecimal(stressTest.getIterations()));
        }
        if (stressTest.getExpectedResultCount() > 0) {
            expectedResultCount.setNumber(new BigDecimal(stressTest.getExpectedResultCount()));
        }
        break;

    case TIME:
        TimeTest timeTest = (TimeTest) test;
        if (timeTest.getEventsPerSec() > 0) {
            eventsPerSec.setNumber(new BigDecimal(timeTest.getEventsPerSec()));
        }
        if (timeTest.getTotalTimeInSec() > 0) {
            totalTimeInSec.setNumber(new BigDecimal(timeTest.getTotalTimeInSec()));
        }
        if (timeTest.getExpectedResultCountPerSec() > 0) {
            expectedResultCountPerSec.setNumber(new BigDecimal(timeTest.getExpectedResultCountPerSec()));
        }
        if (timeTest.getStaggeringInterval() > 0) {
            staggeringInterval.setNumber(new BigDecimal(timeTest.getStaggeringInterval()));
        }
        break;

    default:
        break;
    }
    toggleTestType(null);
}

From source file:editeurpanovisu.EditeurPanovisu.java

/**
 *
 * @param iNumZone zone number
 * @param points list of the polygon's points
 * @return the polygon's anchors
 */
private static ObservableList<AncreForme> olCreeAncresPourPolygone(int iNumZone,
        final ObservableList<Double> points) {
    ObservableList<AncreForme> olAnchors = FXCollections.observableArrayList();

    for (int i = 0; i < points.size(); i += 2) {
        final int idx = i;

        DoubleProperty xProperty = new SimpleDoubleProperty(points.get(i));
        DoubleProperty yProperty = new SimpleDoubleProperty(points.get(i + 1));

        xProperty.addListener((ObservableValue<? extends Number> ov, Number oldX, Number x) -> {
            points.set(idx, (double) x);
            String chaine = "";
            chaine = points.stream().map((point) -> point.toString() + ",").reduce(chaine, String::concat);
            chaine = chaine.substring(0, chaine.length() - 1);
            zones[iNumZone].setStrCoordonneesZone(chaine);
        });

        yProperty.addListener((ObservableValue<? extends Number> ov, Number oldY, Number y) -> {
            points.set(idx + 1, (double) y);
            String chaine = "";
            chaine = points.stream().map((point) -> point.toString() + ",").reduce(chaine, String::concat);
            chaine = chaine.substring(0, chaine.length() - 1);
            zones[iNumZone].setStrCoordonneesZone(chaine);
        });

        olAnchors.add(new AncreForme(Color.GOLD, xProperty, yProperty));
    }

    return olAnchors;
}

From source file:net.sourceforge.pmd.util.fxdesigner.XPathPanelController.java

/**
 * Evaluate the contents of the XPath expression area
 * on the given compilation unit. This updates the xpath
 * result panel, and can log XPath exceptions to the
 * event log panel.
 *
 * @param compilationUnit The AST root
 * @param version         The language version
 */
public void evaluateXPath(Node compilationUnit, LanguageVersion version) {

    try {
        String xpath = getXpathExpression();
        if (StringUtils.isBlank(xpath)) {
            invalidateResults(false);
            return;
        }

        ObservableList<Node> results = FXCollections.observableArrayList(xpathEvaluator.evaluateQuery(
                compilationUnit, version, getXpathVersion(), xpath, ruleBuilder.getRuleProperties()));
        xpathResultListView.setItems(
                results.stream().map(parent::wrapNode).collect(Collectors.toCollection(LiveArrayList::new)));
        parent.highlightXPathResults(results);
        violationsTitledPane.setText("Matched nodes\t(" + results.size() + ")");
    } catch (XPathEvaluationException e) {
        invalidateResults(true);
        designerRoot.getLogger().logEvent(new LogEntry(e, Category.XPATH_EVALUATION_EXCEPTION));
    }

    xpathResultListView.refresh();

}