List of usage examples for java.util.Collection.stream()
default Stream<E> stream()
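
Collection.stream() returns a sequential Stream with the collection as its source; it is the entry point to the pipeline operations (filter, map, collect, and so on) used throughout the examples below. As a minimal, self-contained sketch of that shape (the list contents are invented for illustration):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class CollectionStreamBasics {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // stream() -> intermediate operations -> terminal operation
        List<String> upper = names.stream()
                .filter(s -> s.length() > 4)      // keep only the longer names
                .map(String::toUpperCase)         // transform each element
                .collect(Collectors.toList());    // materialize the result

        System.out.println(upper); // [ALPHA, GAMMA]
    }
}

The same pipeline can be started from parallelStream() instead when parallel execution is wanted, at the cost of ordering and thread-safety considerations.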
From source file: dk.dma.ais.abnormal.analyzer.analysis.CloseEncounterAnalysis.java
/**
 * In the set of candidateTracks: find the candidateTracks which are near to the nearToTrack - with 'near'
 * defined as
 *
 * - last reported position timestamp within +/- 1 minute of nearToTrack's
 * - last reported position within 1 nm of nearToTrack
 *
 * @param candidateTracks the set of candidate candidateTracks to search among.
 * @param nearToTrack the nearToTrack to find other near-by candidateTracks for.
 * @return the set of nearby candidateTracks
 */
Set<Track> findNearByTracks(Collection<Track> candidateTracks, Track nearToTrack,
        int maxTimestampDeviationMillis, int maxDistanceDeviationMeters) {
    Set<Track> nearbyTracks = Collections.EMPTY_SET;
    TrackingReport positionReport = nearToTrack.getNewestTrackingReport();
    if (positionReport != null) {
        final long timestamp = positionReport.getTimestamp();
        nearbyTracks = candidateTracks.stream()
                .filter(candidateTrack -> candidateTrack.getMmsi() != nearToTrack.getMmsi()
                        && candidateTrack.getTimeOfLastPositionReport() > 0L
                        && candidateTrack.getTimeOfLastPositionReport() > timestamp - maxTimestampDeviationMillis
                        && candidateTrack.getTimeOfLastPositionReport() < timestamp + maxTimestampDeviationMillis
                        && candidateTrack.getPosition().distanceTo(nearToTrack.getPosition(),
                                CoordinateSystem.CARTESIAN) < maxDistanceDeviationMeters)
                .collect(toSet());
    }
    return nearbyTracks;
}
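
The example above boils down to a filter followed by collect(toSet()) (the statically imported Collectors.toSet()). A minimal standalone sketch of that shape, with invented numeric timestamps standing in for the Track objects:

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import static java.util.stream.Collectors.toSet;

public class FilterToSetSketch {
    public static void main(String[] args) {
        List<Integer> timestamps = Arrays.asList(100, 250, 400, 120, 130);
        int reference = 110;
        int maxDeviation = 50;

        // keep only values within +/- maxDeviation of the reference, deduplicated into a Set
        Set<Integer> nearby = timestamps.stream()
                .filter(t -> t > reference - maxDeviation && t < reference + maxDeviation)
                .collect(toSet());

        System.out.println(nearby); // e.g. [100, 120, 130]
    }
}

Collectors.toSet() makes no guarantee about the Set implementation or its iteration order, which is why the printed order may vary.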
From source file: io.pivotal.cla.service.github.MylynGitHubApi.java
private boolean hasObviousFixComment(Collection<? extends Comment> comments, String claUserLogin) {
    Optional<? extends Comment> obviousFixComment = comments.stream()
            .filter(comment -> comment.getUser() != null && !claUserLogin.equals(comment.getUser().getLogin()))
            .filter(comment -> comment.getBody().contains(OBVIOUS_FIX))
            .findFirst();
    return obviousFixComment.isPresent();
}
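
The filter-then-findFirst shape above can be reduced to a few lines; in this sketch plain strings stand in for the GitHub Comment objects and the data is made up:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class FindFirstSketch {
    public static void main(String[] args) {
        List<String> comments = Arrays.asList("looks good", "obvious fix: typo", "thanks");

        // findFirst() short-circuits: it stops consuming the stream once a match is found
        Optional<String> obviousFix = comments.stream()
                .filter(c -> c.contains("obvious fix"))
                .findFirst();

        System.out.println(obviousFix.isPresent()); // true
        obviousFix.ifPresent(System.out::println);  // obvious fix: typo
    }
}

When only presence matters, anyMatch(predicate) expresses the same check more directly than filter(predicate).findFirst().isPresent(); both short-circuit on the first match.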
From source file: delfos.rs.trustbased.WeightedGraph.java
public WeightedGraph(Collection<Node> nodes, Set<PathBetweenNodes<Node>> edges) {
    validateEdges(edges);

    nodesIndex = makeIndex(nodes.stream().sorted().collect(Collectors.toList()));
    nodesByIndex = makeNodesByIndex(nodesIndex);

    double[][] matrix = makeMatrixFromEdges(edges);
    adjMatrixEdgeWeightedDigraph = makeWeightedDiGraph(nodesIndex, matrix);
    validateWeightsGraph(adjMatrixEdgeWeightedDigraph);
}
From source file: delfos.group.experiment.validation.groupformation.GroupFormationTechnique_cache.java
private Collection<GroupOfUsers> actuallyGenerateGroups(DatasetLoader<? extends Rating> datasetLoader,
        GroupFormationTechnique groupFormationTechnique) {
    Collection<GroupOfUsers> groupsGenerated;

    int ratingsDatasetHashCode = datasetLoader.getRatingsDataset().hashCode();
    String datasetLoaderAlias = datasetLoader.getAlias();

    String groupFormationString = "_gftHash=" + groupFormationTechnique.hashCode();
    String datasetLoaderString = "_datasetLoader=" + datasetLoaderAlias + "_DLHash=" + ratingsDatasetHashCode;

    File file = new File(getDirectory().getPath() + File.separator + groupFormationTechnique.getName()
            + groupFormationString + datasetLoaderString);

    try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file))) {
        Collection<Collection<Integer>> groupsByIdUser = (Collection<Collection<Integer>>) ois.readObject();
        if (groupsByIdUser == null) {
            Global.showWarning("The loaded group formation is null. (GroupFormationTechnique: "
                    + groupFormationTechnique.getClass().getName() + ")");
            throw new IllegalStateException("The loaded group formation is null. (GroupFormationTechnique: "
                    + groupFormationTechnique.getClass().getName() + ")");
        }
        groupsGenerated = groupsByIdUser.stream()
                .map(groupOfUsers -> groupOfUsers.stream()
                        .map(idUser -> datasetLoader.getUsersDataset().get(idUser))
                        .collect(Collectors.toList()))
                .map(users -> new GroupOfUsers(users))
                .collect(Collectors.toList());
        return groupsGenerated;
    } catch (NotSerializableException ex) {
        Global.showWarning("The system " + this.getClass() + " has a model not serializable.");
        throw new UnsupportedOperationException(ex);
    } catch (Throwable anyException) {
        Global.showMessageTimestamped("Generating groups: " + file.getAbsolutePath() + "\n");

        final GroupFormationTechniqueProgressListener_default listener =
                new GroupFormationTechniqueProgressListener_default(System.out, 300000);
        if (Global.isInfoPrinted()) {
            groupFormationTechnique.addListener(listener);
        }
        groupsGenerated = groupFormationTechnique.generateGroups(datasetLoader);
        if (Global.isInfoPrinted()) {
            groupFormationTechnique.removeListener(listener);
        }

        // Save groups
        if (FileUtilities.createDirectoriesForFileIfNotExist(file)) {
            Global.showWarning("Created directory path " + file.getAbsoluteFile().getParentFile()
                    + " for recommendation model");
        }
        Collection<Collection<Integer>> groupsGeneratedIDs = groupsGenerated.stream()
                .map(group -> group.getIdMembers())
                .collect(Collectors.toList());
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file))) {
            oos.writeObject(groupsGeneratedIDs);
        } catch (NotSerializableException ex) {
            Global.showWarning("The system " + this.getClass() + " has a model not serializable.");
            throw new UnsupportedOperationException(ex);
        } catch (Throwable ex) {
            throw new IllegalStateException(ex);
        }
    }
    return groupsGenerated;
}
From source file: de.ks.flatadocdb.session.Session.java
private SessionEntry loadSessionEntry(IndexElement indexElement) {
    Objects.requireNonNull(indexElement);
    EntityDescriptor descriptor = metaModel.getEntityDescriptor(indexElement.getEntityClass());

    HashMap<Relation, Collection<String>> relationIds = new HashMap<>();
    descriptor.getAllRelations().forEach(rel -> relationIds.put(rel, new ArrayList<>()));

    EntityPersister persister = descriptor.getPersister();
    Object object = persister.load(repository, descriptor, indexElement.getPathInRepository(), relationIds);
    SessionEntry sessionEntry = new SessionEntry(object, indexElement.getId(), descriptor.getVersion(object),
            indexElement.getNaturalId(), indexElement.getPathInRepository(), descriptor);
    descriptor.writePathInRepo(object, indexElement.getPathInRepository());

    byte[] md5Sum = indexElement.getMd5Sum();
    sessionEntry.setMd5(md5Sum);
    if (md5Sum == null) {
        try (FileInputStream stream = new FileInputStream(indexElement.getPathInRepository().toFile())) {
            sessionEntry.setMd5(DigestUtils.md5(stream));
        } catch (IOException e) {
            log.error("Could not get md5sum from {}", indexElement.getPathInRepository(), e);
        }
    }

    Path rootFolder = descriptor.getFolderGenerator().getFolder(repository, repository.getPath(), object);
    boolean isChild = !rootFolder.equals(sessionEntry.getCompletePath().getParent());
    sessionEntry.setChild(isChild);

    log.trace("Loaded {}", object);
    addToSession(sessionEntry);

    for (Map.Entry<Relation, Collection<String>> entry : relationIds.entrySet()) {
        Relation relation = entry.getKey();
        Collection<String> ids = entry.getValue();
        if (relation.isLazy()) {
            relation.setupLazy(object, ids, this);
        } else {
            List<Object> relatedEntities = ids.stream().sequential()
                    .map(this::findById)
                    .filter(o -> o != null)
                    .collect(Collectors.toList());
            relation.setRelatedEntities(object, relatedEntities);
        }
    }
    return sessionEntry;
}
From source file: com.haulmont.cuba.core.app.importexport.EntityImportExport.java
protected Collection<? extends Entity> reloadEntities(Collection<? extends Entity> entities, View view) {
    List<Object> ids = entities.stream()
            .map(Entity::getId)
            .collect(Collectors.toList());

    MetaClass metaClass = metadata.getClassNN(view.getEntityClass());
    LoadContext.Query query = LoadContext
            .createQuery("select e from " + metaClass.getName() + " e where e.id in :ids")
            .setParameter("ids", ids);

    LoadContext<? extends Entity> ctx = LoadContext.create(view.getEntityClass()).setQuery(query).setView(view);
    return dataManager.loadList(ctx);
}
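
Pulling a single field out of every element with a method reference (Entity::getId above) and collecting it into a List is one of the most common stream() idioms. A standalone sketch of the same pattern, using a small hypothetical Entity class rather than the CUBA Entity type:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class MapToIdsSketch {
    static class Entity {
        private final long id;
        Entity(long id) { this.id = id; }
        long getId() { return id; }
    }

    public static void main(String[] args) {
        List<Entity> entities = Arrays.asList(new Entity(1), new Entity(2), new Entity(3));

        // the method reference extracts one field from each element
        List<Long> ids = entities.stream()
                .map(Entity::getId)
                .collect(Collectors.toList());

        System.out.println(ids); // [1, 2, 3]
    }
}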
From source file: com.github.drbookings.model.data.manager.MainManager.java
private BookingBean findBooking(final LocalDate date, final String roomName) throws MatchException {
    final int maxCount = 100;
    int count = 0;
    LocalDate date2 = date;
    Collection<BookingBean> result2 = null;
    do {
        result2 = bookingEntries.get(date2).stream()
                .filter(b -> b.getRoom().getName().equals(roomName))
                .filter(b -> !b.isCheckIn())
                .map(b -> b.getElement())
                .collect(Collectors.toSet());
        if (result2.stream().anyMatch(b -> b.getCleaning() != null)) {
            if (logger.isDebugEnabled()) {
                logger.debug("Found entry with cleaning, aborting");
            }
            throw new MatchException("Failed to find matching booking for " + date + " and " + roomName);
        }
        result2 = result2.stream().filter(b -> b.getCleaning() == null).collect(Collectors.toSet());
        count++;
        date2 = date2.minusDays(1);
    } while ((result2 == null || result2.isEmpty()) && count < maxCount);
    if (count == maxCount) {
        throw new MatchException("Failed to find matching booking for " + date + " and " + roomName);
    }
    if (result2.size() > 1) {
        throw new MatchException("Found more than one matching booking");
    }
    return result2.iterator().next();
}
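
The anyMatch(...) call inside the loop above is the short-circuiting way to ask whether at least one element satisfies a predicate. A minimal sketch with invented room names:

import java.util.Arrays;
import java.util.List;

public class AnyMatchSketch {
    public static void main(String[] args) {
        List<String> rooms = Arrays.asList("101", "102", null, "201");

        // anyMatch returns as soon as one element matches; allMatch/noneMatch are the counterparts
        boolean hasNull = rooms.stream().anyMatch(r -> r == null);
        boolean allNumeric = rooms.stream().allMatch(r -> r != null && r.matches("\\d+"));

        System.out.println(hasNull);     // true
        System.out.println(allNumeric);  // false
    }
}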
From source file: cz.lbenda.dataman.db.DbStructureFactory.java
private void generatePKColumns(Collection<CatalogDesc> catalogs, DatabaseMetaData dmd) throws SQLException {
    SQLDialect di = dbConfig.getJdbcConfiguration().getDialect();
    StatusHelper.getInstance().progressNextStep(this, STEP_READ_PRIMARY_KEYS,
            catalogs.stream().mapToInt(
                    cat -> cat.getSchemas().stream().mapToInt(schema -> schema.getTables().size()).sum())
                    .sum());
    for (CatalogDesc ch : catalogs) {
        for (SchemaDesc schema : ch.getSchemas()) {
            for (TableDesc td : schema.getTables()) {
                StatusHelper.getInstance().progress(this);
                try (ResultSet rsPk = dmd.getPrimaryKeys(ch.getName(), schema.getName(), td.getName())) {
                    while (rsPk.next()) {
                        ColumnDesc column = td.getColumn(rsPk.getString(di.pkColumnName()));
                        if (column == null) {
                            LOG.error("The primary column not exist in whole column set of table: "
                                    + di.pkColumnName());
                        } else {
                            column.setPK(true);
                        }
                    }
                }
            }
        }
    }
}
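
The nested mapToInt(...).sum() at the top of the method counts every table across all catalogs and schemas before reporting progress. The same shape, reduced to nested lists of invented names:

import java.util.Arrays;
import java.util.List;

public class NestedSumSketch {
    public static void main(String[] args) {
        // outer list ~ catalogs, middle lists ~ schemas, inner lists ~ table names (all invented)
        List<List<List<String>>> catalogs = Arrays.asList(
                Arrays.asList(Arrays.asList("t1", "t2"), Arrays.asList("t3")),
                Arrays.asList(Arrays.asList("t4", "t5", "t6")));

        // the inner mapToInt().sum() counts tables per catalog, the outer one adds the catalogs up
        int totalTables = catalogs.stream()
                .mapToInt(cat -> cat.stream().mapToInt(List::size).sum())
                .sum();

        System.out.println(totalTables); // 6
    }
}

mapToInt switches from a Stream of objects to an IntStream, so sum() is available without boxing each count.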
From source file: com.github.drbookings.ui.controller.UpcomingController.java
private void addEvents(final LocalDate date, final Collection<BookingEntry> upcomingBookings,
        final Collection<CleaningEntry> upcomingCleanings) {
    final VBox box = new VBox(4);
    if (date.equals(LocalDate.now())) {
        box.getStyleClass().add("first-day");
    } else if (date.equals(LocalDate.now().plusDays(1))) {
        box.getStyleClass().add("second-day");
    } else if (date.isAfter(LocalDate.now().plusDays(1))) {
        box.getStyleClass().add("later");
    }
    if (upcomingBookings.stream().filter(b -> b.isCheckIn() || b.isCheckOut()).collect(Collectors.toList())
            .isEmpty() && upcomingCleanings.isEmpty()) {
        final Text t0 = new Text(getDateString(date));
        final Text t1 = new Text(" there are no events.");
        t0.getStyleClass().add("emphasis");
        final TextFlow tf = new TextFlow();
        tf.getChildren().addAll(t0, t1);
        box.getChildren().addAll(tf);
    } else {
        final List<CheckInOutDetails> checkInNotes = Collections.synchronizedList(new ArrayList<>());
        final List<CheckInOutDetails> checkOutNotes = Collections.synchronizedList(new ArrayList<>());
        upcomingBookings.forEach(b -> {
            if (b.isCheckIn()) {
                String note = "";
                if (b.getElement().getCheckInNote() != null) {
                    note = b.getElement().getCheckInNote();
                }
                if (b.getElement().getSpecialRequestNote() != null) {
                    note = note + "\n" + b.getElement().getSpecialRequestNote();
                }
                checkInNotes.add(new CheckInOutDetails(b.getRoom().getName(),
                        b.getElement().getBookingOrigin().getName(), note));
            } else if (b.isCheckOut()) {
                checkOutNotes.add(new CheckInOutDetails(b.getRoom().getName(),
                        b.getElement().getBookingOrigin().getName(), b.getElement().getCheckOutNote()));
            }
        });
        Collections.sort(checkInNotes);
        Collections.sort(checkOutNotes);
        addGeneralSummary(date, box, checkInNotes);
        addCheckOutSummary(date, box, checkOutNotes);
        addCheckOutNotes(date, box, checkOutNotes);
        addCheckInSummary(date, box, checkInNotes);
        addCheckInNotes(date, box, checkInNotes);
        addCleaningSummary(date, box, upcomingCleanings);
        addCleanings(date, box, upcomingCleanings);
    }
    this.box.getChildren().add(box);
}