List of usage examples for java.util.stream.Collectors.groupingBy(classifier, downstream).
public static <T, K, A, D> Collector<T, ?, Map<K, D>> groupingBy(Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream)
From source file:ws.salient.session.Session.java
public Map<String, Long> getFactCount() { return ksession.getFactHandles().stream().map((handle) -> { return ksession.getObject(handle); }).collect(Collectors.groupingBy((o) -> { return o.getClass().getSimpleName(); }, Collectors.counting()));//from www .ja v a2 s.co m }
From source file:com.mycompany.wolf.Room.java
private void notifySomeoneBeVoted() { Collection<String> newlyDead = new LinkedHashSet<>(); List<Map.Entry<String, Long>> top2 = playerVotings.values().stream() .collect(Collectors.groupingBy(playerVoting -> playerVoting.playerId, Collectors.counting())) .entrySet().stream()//from w w w . j a va 2 s . c o m .sorted(Comparator.comparingLong((Map.Entry<String, Long> entry) -> entry.getValue()).reversed()) .limit(2).collect(Collectors.toList()); if (top2.size() == 1 || (top2.size() == 2 && top2.get(0).getValue().compareTo(top2.get(1).getValue()) > 0)) { newlyDead.add(top2.get(0).getKey()); this.dead.add(top2.get(0).getKey()); if (sessions.stream().noneMatch(session -> WOLF.equals(session.getUserProperties().get("role")))) { wolvesLose(); } } Map<String, Object> notifyDead = ImmutableMap.of("code", "notifyDead", "properties", newlyDead); String jsonText = JsonUtils.toString(notifyDead); sessions.stream().forEach(s -> s.getAsyncRemote().sendText(jsonText)); notifyWolvesKillVillagers(); }
From source file:com.ikanow.aleph2.graph.titan.utils.TitanGraphBuildingUtils.java
/** Separates out edges/vertices, groups by key * @param config/* w w w .j a v a 2 s . c o m*/ * @param vertices_and_edges * @return */ protected static Map<ObjectNode, Tuple2<List<ObjectNode>, List<ObjectNode>>> groupNewEdgesAndVertices( final GraphSchemaBean config, final MutableStatsBean stats, final Stream<ObjectNode> vertices_and_edges) { final Map<ObjectNode, Tuple2<List<ObjectNode>, List<ObjectNode>>> nodes_to_get = vertices_and_edges .filter(o -> o.has(GraphAnnotationBean.type)) .<Tuple3<ObjectNode, ObjectNode, Boolean>>flatMap(o -> { final JsonNode type = o.get(GraphAnnotationBean.type); if ((null == type) || !type.isTextual()) return Stream.empty(); if (GraphAnnotationBean.ElementType.edge.toString().equals(type.asText())) { stats.edges_emitted++; // Grab both edges from both ends: return Stream.concat( Optional.ofNullable(o.get(GraphAnnotationBean.inV)) .map(k -> Stream.of(Tuples._3T(convertToObject(k, config), o, false))) .orElse(Stream.empty()), Optional.ofNullable(o.get(GraphAnnotationBean.outV)) .map(k -> Stream.of(Tuples._3T(convertToObject(k, config), o, false))) .orElse(Stream.empty())); } else if (GraphAnnotationBean.ElementType.vertex.toString().equals(type.asText())) { stats.vertices_emitted++; return Optional.ofNullable(o.get(GraphAnnotationBean.id)) .map(k -> Stream.of(Tuples._3T(convertToObject(k, config), o, true))) .orElse(Stream.empty()); } else return Stream.empty(); }).collect(Collectors.groupingBy(t3 -> t3._1() // group by key , Collectors.collectingAndThen( Collectors.<Tuple3<ObjectNode, ObjectNode, Boolean>>partitioningBy(t3 -> t3._3()) // group by edge/vertex , m -> Tuples._2T(m.get(true).stream().map(t3 -> t3._2()).collect(Collectors.toList()) // convert group edge/vertex to pair of lists , m.get(false).stream().map(t3 -> t3._2()).collect(Collectors.toList()))))); return nodes_to_get; }
From source file:nu.yona.server.analysis.service.ActivityService.java
private Map<LocalDate, Set<DayActivity>> getDayActivitiesGroupedByDate(UUID userAnonymizedId, Set<GoalDto> relevantGoals, Interval interval) { List<DayActivity> dayActivityEntities = findAllActivitiesForUserInInterval(userAnonymizedId, relevantGoals, interval);/*from w w w.j a v a 2 s . com*/ return dayActivityEntities.stream() .collect(Collectors.groupingBy(IntervalActivity::getStartDate, Collectors.toSet())); }
From source file:com.ggvaidya.scinames.model.Dataset.java
public String getExplicitChangesCountSummary(Project p) { if (getExplicitChanges(p).count() == 0) return "None"; Map<ChangeType, Long> changeCounts = getExplicitChanges(p) .collect(Collectors.groupingBy(Change::getType, Collectors.counting())); String changes_by_type = changeCounts.entrySet().stream() .sorted((a, b) -> b.getValue().compareTo(a.getValue())).map(e -> e.getValue() + " " + e.getKey()) .collect(Collectors.joining(", ")); return getExplicitChanges(p).count() + " explicit changes (" + changes_by_type + ")"; }
From source file:com.ggvaidya.scinames.model.Dataset.java
public String getImplicitChangesCountSummary(Project p) { if (getImplicitChanges(p).count() == 0) return "None"; Map<ChangeType, Long> implicitChangeCounts = getImplicitChanges(p) .collect(Collectors.groupingBy(Change::getType, Collectors.counting())); String implicit_changes_by_type = implicitChangeCounts.entrySet().stream() .sorted((a, b) -> b.getValue().compareTo(a.getValue())).map(e -> e.getValue() + " " + e.getKey()) .collect(Collectors.joining(", ")); return getImplicitChanges(p).count() + " implicit changes (" + implicit_changes_by_type + ")"; }
From source file:com.uber.hoodie.common.table.view.HoodieTableFileSystemViewTest.java
// Verifies that the file-system view resolves the latest file slices / data
// files / log files in a partition, both as of the latest commit (4) and
// with the max commit time reset to an earlier commit (3).
@Test
public void testStreamLatestVersionInPartition() throws IOException {
    // Put some files in the partition
    String fullPartitionPath = basePath + "/2016/05/01/";
    new File(fullPartitionPath).mkdirs();
    String commitTime1 = "1";
    String commitTime2 = "2";
    String commitTime3 = "3";
    String commitTime4 = "4";
    String fileId1 = UUID.randomUUID().toString();
    String fileId2 = UUID.randomUUID().toString();
    String fileId3 = UUID.randomUUID().toString();
    String fileId4 = UUID.randomUUID().toString();
    // fileId1: data files at commits 1 and 4, two log files at commit 4
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime1, 1, fileId1)).createNewFile();
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime4, 1, fileId1)).createNewFile();
    new File(
            fullPartitionPath + FSUtils.makeLogFileName(fileId1, HoodieLogFile.DELTA_EXTENSION, commitTime4, 0))
                    .createNewFile();
    new File(
            fullPartitionPath + FSUtils.makeLogFileName(fileId1, HoodieLogFile.DELTA_EXTENSION, commitTime4, 1))
                    .createNewFile();
    // fileId2: data files at commits 1..3, one log file at commit 3
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime1, 1, fileId2)).createNewFile();
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime2, 1, fileId2)).createNewFile();
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime3, 1, fileId2)).createNewFile();
    new File(
            fullPartitionPath + FSUtils.makeLogFileName(fileId2, HoodieLogFile.DELTA_EXTENSION, commitTime3, 0))
                    .createNewFile();
    // fileId3: data files at commits 3 and 4
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime3, 1, fileId3)).createNewFile();
    new File(fullPartitionPath + FSUtils.makeDataFileName(commitTime4, 1, fileId3)).createNewFile();
    // fileId4: log file only, no base data file
    new File(
            fullPartitionPath + FSUtils.makeLogFileName(fileId4, HoodieLogFile.DELTA_EXTENSION, commitTime4, 0))
                    .createNewFile();
    // Mark all four commits as completed
    new File(basePath + "/.hoodie/" + commitTime1 + ".commit").createNewFile();
    new File(basePath + "/.hoodie/" + commitTime2 + ".commit").createNewFile();
    new File(basePath + "/.hoodie/" + commitTime3 + ".commit").createNewFile();
    new File(basePath + "/.hoodie/" + commitTime4 + ".commit").createNewFile();
    // Now we list the entire partition
    FileStatus[] statuses = metaClient.getFs().listStatus(new Path(fullPartitionPath));
    assertEquals(11, statuses.length);
    refreshFsView(null);
    // Check files as of lastest commit.
    List<FileSlice> allSlices = rtView.getAllFileSlices("2016/05/01").collect(Collectors.toList());
    assertEquals(8, allSlices.size());
    // slices per file id
    Map<String, Long> fileSliceMap = allSlices.stream()
            .collect(Collectors.groupingBy(slice -> slice.getFileId(), Collectors.counting()));
    assertEquals(2, fileSliceMap.get(fileId1).longValue());
    assertEquals(3, fileSliceMap.get(fileId2).longValue());
    assertEquals(2, fileSliceMap.get(fileId3).longValue());
    assertEquals(1, fileSliceMap.get(fileId4).longValue());
    // Latest data files as of commit 4 (fileId4 has no data file)
    List<HoodieDataFile> dataFileList = roView.getLatestDataFilesBeforeOrOn("2016/05/01", commitTime4)
            .collect(Collectors.toList());
    assertEquals(3, dataFileList.size());
    Set<String> filenames = Sets.newHashSet();
    for (HoodieDataFile status : dataFileList) {
        filenames.add(status.getFileName());
    }
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime4, 1, fileId1)));
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime3, 1, fileId2)));
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime4, 1, fileId3)));
    // Latest log files as of commit 4
    filenames = Sets.newHashSet();
    List<HoodieLogFile> logFilesList = rtView.getLatestFileSlicesBeforeOrOn("2016/05/01", commitTime4)
            .map(slice -> slice.getLogFiles()).flatMap(logFileList -> logFileList).collect(Collectors.toList());
    assertEquals(logFilesList.size(), 4);
    for (HoodieLogFile logFile : logFilesList) {
        filenames.add(logFile.getFileName());
    }
    assertTrue(filenames
            .contains(FSUtils.makeLogFileName(fileId1, HoodieLogFile.DELTA_EXTENSION, commitTime4, 0)));
    assertTrue(filenames
            .contains(FSUtils.makeLogFileName(fileId1, HoodieLogFile.DELTA_EXTENSION, commitTime4, 1)));
    assertTrue(filenames
            .contains(FSUtils.makeLogFileName(fileId2, HoodieLogFile.DELTA_EXTENSION, commitTime3, 0)));
    assertTrue(filenames
            .contains(FSUtils.makeLogFileName(fileId4, HoodieLogFile.DELTA_EXTENSION, commitTime4, 0)));
    // Reset the max commit time
    List<HoodieDataFile> dataFiles = roView.getLatestDataFilesBeforeOrOn("2016/05/01", commitTime3)
            .collect(Collectors.toList());
    assertEquals(dataFiles.size(), 3);
    filenames = Sets.newHashSet();
    for (HoodieDataFile status : dataFiles) {
        filenames.add(status.getFileName());
    }
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime1, 1, fileId1)));
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime3, 1, fileId2)));
    assertTrue(filenames.contains(FSUtils.makeDataFileName(commitTime3, 1, fileId3)));
    // Only fileId2's commit-3 log file is visible as of commit 3
    logFilesList = rtView.getLatestFileSlicesBeforeOrOn("2016/05/01", commitTime3)
            .map(slice -> slice.getLogFiles()).flatMap(logFileList -> logFileList).collect(Collectors.toList());
    assertEquals(logFilesList.size(), 1);
    assertTrue(logFilesList.get(0).getFileName()
            .equals(FSUtils.makeLogFileName(fileId2, HoodieLogFile.DELTA_EXTENSION, commitTime3, 0)));
}
From source file:no.asgari.civilization.server.action.GameAction.java
public List<CivHighscoreDTO> getCivHighscore() { if (!CivSingleton.instance().itemsCache().containsKey(GameType.WAW)) { readItemFromExcel(GameType.WAW, new ItemReader()); }//w ww.j ava2s.c om ItemReader itemReader = CivSingleton.instance().itemsCache().get(GameType.WAW); if (itemReader == null) { return Collections.emptyList(); } List<PBF> pbfs = pbfCollection.find().toArray(); try { Map<String, Long> numberOfCivsWinning = pbfs.stream() .filter(pbf -> !Strings.isNullOrEmpty(pbf.getWinner())).filter(pbf -> !pbf.isActive()) .filter(pbf -> { String playerWhoWon = pbf.getWinner(); return pbf.getPlayers().stream().filter(p -> p.getUsername().equals(playerWhoWon)) .filter(p -> p.getCivilization() != null).findFirst().isPresent(); }).map(pbf -> { String playerWhoWon = pbf.getWinner(); Playerhand playerhand = pbf.getPlayers().stream() .filter(p -> p.getUsername().equals(playerWhoWon)) .filter(p -> p.getCivilization() != null).findFirst().get(); return playerhand.getCivilization().getName(); }).collect(Collectors.groupingBy(e -> e, Collectors.counting())); Map<String, Long> numberOfCivAttempts = pbfs.stream() .filter(pbf -> !Strings.isNullOrEmpty(pbf.getWinner())).filter(pbf -> !pbf.isActive()) .flatMap(pbf -> pbf.getPlayers().stream()).filter(p -> p.getCivilization() != null) .map(p -> p.getCivilization().getName()) .collect(Collectors.groupingBy(e -> e, Collectors.counting())); return itemReader.shuffledCivs.stream().map(civ -> new CivHighscoreDTO(civ.getName(), numberOfCivsWinning.get(civ.getName()), numberOfCivAttempts.get(civ.getName()))).sorted() .collect(toList()); } catch (Exception ex) { ex.printStackTrace(); return Collections.emptyList(); } }
From source file:no.asgari.civilization.server.action.GameAction.java
public List<WinnerDTO> getWinners() { List<PBF> pbfs = pbfCollection.find().toArray(); final ListMultimap<String, String> multimap = ArrayListMultimap.create(); pbfs.stream().filter(pbf -> !Strings.isNullOrEmpty(pbf.getWinner())) .forEach(pbf -> multimap.put(pbf.getWinner(), pbf.getId())); Map<String, Long> attemptsPerUsername = pbfs.stream().filter(pbf -> !Strings.isNullOrEmpty(pbf.getWinner())) .filter(pbf -> !pbf.isActive()).flatMap(pbf -> pbf.getPlayers().stream()) .map(Playerhand::getUsername).collect(Collectors.groupingBy(e -> e, Collectors.counting())); List<Player> allPlayers = playerCollection.find().toArray(); List<WinnerDTO> filteredPlayers = allPlayers.stream() .filter(p -> !multimap.containsKey(p.getUsername()) && p.getUsername() != null).map(p -> { long attempts = attemptsPerUsername.get(p.getUsername()) == null ? 0L : attemptsPerUsername.get(p.getUsername()); WinnerDTO winner = new WinnerDTO(p.getUsername(), 0, attempts); return winner; }).collect(toList());/*from w ww . jav a 2 s . com*/ List<WinnerDTO> winners = multimap.keySet().stream() .map(user -> new WinnerDTO(user, multimap.get(user).size(), attemptsPerUsername.get(user))) .collect(toList()); winners.addAll(filteredPlayers); Collections.sort(winners); return winners; }
From source file:no.imr.stox.functions.acoustic.PgNapesIO.java
/**
 * Exports acoustic distance data to two tab-separated files under
 * {@code path}: "&lt;fileName&gt;.txt" with one row per distance record at
 * the selected frequency, and "&lt;fileName&gt;Values.txt" with SA values
 * pivoted by species and depth channel.
 *
 * @param distances distance records to export
 * @param groupThickness minimum depth-channel thickness used for grouping
 * @param freqFilter frequency to export; may be null only when the data
 *        contains exactly one frequency (otherwise the method logs and returns)
 * @param specFilter optional species filter; null exports all species
 * @param withZeros when true, rows with zero SA are emitted as well
 */
public static void export2(String cruise, String country, String callSignal, String path, String fileName,
        List<DistanceBO> distances, Double groupThickness, Integer freqFilter, String specFilter,
        boolean withZeros) {
    Set<Integer> freqs = distances.stream().flatMap(dist -> dist.getFrequencies().stream())
            .map(FrequencyBO::getFreq).collect(Collectors.toSet());
    // Default the filter when the data is single-frequency.
    if (freqFilter == null && freqs.size() == 1) {
        freqFilter = freqs.iterator().next();
    }
    if (freqFilter == null) {
        System.out.println("Multiple frequencies, specify frequency filter as parameter");
        return;
    }
    Integer freqFilterF = freqFilter; // effectively-final copy for use inside lambdas
    // First file: one row per distance/frequency record.
    final List<String> acList = distances.parallelStream().flatMap(dist -> dist.getFrequencies().stream())
            .filter(fr -> freqFilterF.equals(fr.getFreq())).map(f -> {
                DistanceBO d = f.getDistanceBO();
                LocalDateTime sdt = LocalDateTime.ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                Double intDist = d.getIntegrator_dist();
                String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                String hour = StringUtils.leftPad(sdt.getHour() + "", 2, "0");
                String minute = StringUtils.leftPad(sdt.getMinute() + "", 2, "0");
                String log = Conversion.formatDoubletoDecimalString(d.getLog_start(), "0.0");
                String acLat = Conversion.formatDoubletoDecimalString(d.getLat_start(), "0.000");
                String acLon = Conversion.formatDoubletoDecimalString(d.getLon_start(), "0.000");
                return Stream
                        .of(d.getNation(), d.getPlatform(), d.getCruise(), log, sdt.getYear(), month, day,
                                hour, minute, acLat, acLon, intDist, f.getFreq(), f.getThreshold())
                        .map(o -> o == null ? "" : o.toString()).collect(Collectors.joining("\t")) + "\t";
            }).collect(Collectors.toList());
    String fil1 = path + "/" + fileName + ".txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Hour", "Min",
            "AcLat", "AcLon", "Logint", "Frequency", "Sv_threshold").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil1), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
    acList.clear();
    // Acoustic values
    distances.stream().filter(d -> d.getPel_ch_thickness() != null)
            .flatMap(dist -> dist.getFrequencies().stream()).filter(fr -> freqFilterF.equals(fr.getFreq()))
            .forEachOrdered(f -> {
                try {
                    // Channel thickness used for grouping: at least the record's own thickness.
                    Double groupThicknessF = Math.max(f.getDistanceBO().getPel_ch_thickness(), groupThickness);
                    Map<String, Map<Integer, Double>> pivot = f.getSa().stream()
                            .filter(s -> s.getCh_type().equals("P")).map(s -> new SAGroup(s, groupThicknessF))
                            .filter(s -> s.getSpecies() != null
                                    && (specFilter == null || specFilter.equals(s.getSpecies())))
                            // create pivot table: species (dim1) -> depth interval index (dim2) -> sum sa (group aggregator)
                            .collect(Collectors.groupingBy(SAGroup::getSpecies, Collectors.groupingBy(
                                    SAGroup::getDepthGroupIdx, Collectors.summingDouble(SAGroup::sa))));
                    // With a species filter and zero output requested, keep an empty row set.
                    if (pivot.isEmpty() && specFilter != null && withZeros) {
                        pivot.put(specFilter, new HashMap<>());
                    }
                    Integer maxGroupIdx = pivot.entrySet().stream().flatMap(e -> e.getValue().keySet().stream())
                            .max(Integer::compare).orElse(null);
                    if (maxGroupIdx == null) {
                        return;
                    }
                    // Emit one row per species and depth channel up to the deepest channel seen.
                    acList.addAll(pivot.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getKey))
                            .flatMap(e -> {
                                return IntStream.range(0, maxGroupIdx + 1).boxed().map(groupIdx -> {
                                    Double chUpDepth = groupIdx * groupThicknessF;
                                    Double chLowDepth = (groupIdx + 1) * groupThicknessF;
                                    Double sa = e.getValue().get(groupIdx);
                                    if (sa == null) {
                                        sa = 0d;
                                    }
                                    String res = null;
                                    if (withZeros || sa > 0d) {
                                        DistanceBO d = f.getDistanceBO();
                                        String log = Conversion.formatDoubletoDecimalString(d.getLog_start(),
                                                "0.0");
                                        LocalDateTime sdt = LocalDateTime
                                                .ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                                        String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                                        String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                                        //String sas = String.format(Locale.UK, "%11.5f", sa);
                                        res = Stream
                                                .of(d.getNation(), d.getPlatform(), d.getCruise(), log,
                                                        sdt.getYear(), month, day, e.getKey(), chUpDepth,
                                                        chLowDepth, sa)
                                                .map(o -> o == null ? "" : o.toString())
                                                .collect(Collectors.joining("\t"));
                                    }
                                    return res;
                                }).filter(s -> s != null);
                            }).collect(Collectors.toList()));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
    String fil2 = path + "/" + fileName + "Values.txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Species",
            "ChUppDepth", "ChLowDepth", "SA").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil2), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
}