List of usage examples for java.util.stream.Collectors.toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
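Before the collected examples, here is a minimal, self-contained sketch of what this overload does: it applies keyMapper and valueMapper to each stream element and accumulates the results into a Map. The Person type and sample data below are illustrative only, not drawn from the projects that follow.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapBasics {

    // Illustrative value type, not taken from any of the examples below.
    static class Person {
        private final String name;
        private final int age;
        Person(String name, int age) { this.name = name; this.age = age; }
        String getName() { return name; }
        int getAge() { return age; }
    }

    public static void main(String[] args) {
        List<Person> people = Arrays.asList(new Person("Ada", 36), new Person("Linus", 28));

        // Key mapper + value mapper: name -> age
        Map<String, Integer> ageByName = people.stream()
                .collect(Collectors.toMap(Person::getName, Person::getAge));

        // Key mapper + Function.identity(): index the elements themselves by key
        Map<String, Person> byName = people.stream()
                .collect(Collectors.toMap(Person::getName, Function.identity()));

        System.out.println(ageByName);                  // e.g. {Ada=36, Linus=28} (order unspecified)
        System.out.println(byName.get("Ada").getAge()); // 36
    }
}

The returned Map type and its iteration order are deliberately unspecified, and duplicate keys cause an IllegalStateException; the three- and four-argument overloads (merge function, map supplier) cover those cases, as sketched after the last example below.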
From source file:com.serphacker.serposcope.db.google.GoogleSerpRescanDB.java
public void rescanNonBulk(Integer specificRunId, Collection<GoogleTarget> targets,
        Collection<GoogleSearch> searches, boolean updateSummary) {
    LOG.debug("SERP rescan (non-bulk) : starting");
    long _start = System.currentTimeMillis();
    Run specPrevRun = null;
    Map<Integer, GoogleTargetSummary> specPrevRunSummaryByTarget = new HashMap<>();

    if (specificRunId != null) {
        specPrevRun = runDB.findPrevious(specificRunId);
        if (specPrevRun != null) {
            specPrevRunSummaryByTarget = targetSummaryDB.list(specPrevRun.getId()).stream()
                    .collect(Collectors.toMap(GoogleTargetSummary::getTargetId, Function.identity()));
        }
    }

    for (GoogleTarget target : targets) {
        Map<Integer, GoogleTargetSummary> summaryByRunId = new HashMap<>();
        GoogleTargetSummary specificPreviousSummary = specPrevRunSummaryByTarget.get(target.getId());
        if (specificPreviousSummary != null) {
            summaryByRunId.put(specPrevRun.getId(), specificPreviousSummary);
        }

        for (GoogleSearch search : searches) {
            final MutableInt previousRunId = new MutableInt(0);
            final MutableInt previousRank = new MutableInt(GoogleRank.UNRANKED);
            GoogleBest searchBest = new GoogleBest(target.getGroupId(), target.getId(), search.getId(),
                    GoogleRank.UNRANKED, null, null);

            if (specPrevRun != null) {
                previousRunId.setValue(specPrevRun.getId());
                previousRank.setValue(
                        rankDB.get(specPrevRun.getId(), target.getGroupId(), target.getId(), search.getId()));
                GoogleBest specificBest = rankDB.getBest(target.getGroupId(), target.getId(), search.getId());
                if (specificBest != null) {
                    searchBest = specificBest;
                }
            }
            final GoogleBest best = searchBest;

            serpDB.stream(specificRunId, specificRunId, search.getId(), (GoogleSerp res) -> {
                int rank = GoogleRank.UNRANKED;
                String rankedUrl = null;
                for (int i = 0; i < res.getEntries().size(); i++) {
                    if (target.match(res.getEntries().get(i).getUrl())) {
                        rankedUrl = res.getEntries().get(i).getUrl();
                        rank = i + 1;
                        break;
                    }
                }

                // only update last run
                GoogleRank gRank = new GoogleRank(res.getRunId(), target.getGroupId(), target.getId(),
                        search.getId(), rank, previousRank.shortValue(), rankedUrl);
                rankDB.insert(gRank);

                if (updateSummary) {
                    GoogleTargetSummary summary = summaryByRunId.get(res.getRunId());
                    if (summary == null) {
                        summaryByRunId.put(res.getRunId(), summary = new GoogleTargetSummary(
                                target.getGroupId(), target.getId(), res.getRunId(), 0));
                    }
                    summary.addRankCandidat(gRank);
                }

                if (rank != GoogleRank.UNRANKED && rank <= best.getRank()) {
                    best.setRank((short) rank);
                    best.setUrl(rankedUrl);
                    best.setRunDay(res.getRunDay());
                }

                previousRunId.setValue(res.getRunId());
                previousRank.setValue(rank);
            });

            if (best.getRank() != GoogleRank.UNRANKED) {
                rankDB.insertBest(best);
            }
        }

        // fill previous summary score
        if (updateSummary) {
            TreeMap<Integer, GoogleTargetSummary> summaries = new TreeMap<>(summaryByRunId);
            GoogleTargetSummary previousSummary = null;
            for (Map.Entry<Integer, GoogleTargetSummary> entry : summaries.entrySet()) {
                if (previousSummary != null) {
                    entry.getValue().setPreviousScoreBP(previousSummary.getScoreBP());
                }
                previousSummary = entry.getValue();
            }

            if (specPrevRun != null) {
                summaries.remove(specPrevRun.getId());
            }

            if (!summaries.isEmpty()) {
                targetSummaryDB.insert(summaries.values());
            }
        }
    }

    LOG.debug("SERP rescan : done, duration = {}",
            DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - _start));
}
From source file:com.hortonworks.registries.schemaregistry.state.SchemaVersionLifecycleStateMachine.java
private SchemaVersionLifecycleStateMachine(Map<Byte, SchemaVersionLifecycleState> states,
        Map<SchemaVersionLifecycleStateTransition, SchemaVersionLifecycleStateAction> transitions,
        Map<SchemaVersionLifecycleStateTransition, ConcurrentLinkedQueue<SchemaVersionLifecycleStateTransitionListener>> listeners) {
    this.states = Collections.unmodifiableMap(states);
    this.transitions = Collections.unmodifiableMap(transitions);
    this.listeners = Collections.unmodifiableMap(listeners).entrySet().stream().collect(Collectors.toMap(
            Map.Entry::getKey,
            transitionWithListener -> Lists.newArrayList(transitionWithListener.getValue().iterator())));
}
From source file:com.github.horrorho.inflatabledonkey.args.ArgsManager.java
public Map<Property, String> process(Option... options) {
    return Arrays.asList(options).stream().map(this::value)
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
From source file:io.fd.maintainer.plugin.util.MaintainersIndex.java
public MaintainersIndex(@Nonnull final List<ComponentInfo> maintainers) {
    pathToMaintainersIndex = maintainers.stream()
            .flatMap(maintainersInfo -> maintainersInfo.getPaths().stream()
                    .map(componentPath -> new Tuple2<>(componentPath, maintainersInfo.getMaintainers())))
            .collect(Collectors.toMap(tuple -> tuple.a, tuple -> tuple.b));

    pathToComponentIndex = new HashMap<>();
    maintainers.forEach(maintainersInfo -> maintainersInfo.getPaths().forEach(
            componentPath -> pathToComponentIndex.put(componentPath.getPath(), maintainersInfo.getTitle())));

    maintainerNameToComponentIndex = LinkedListMultimap.create();
    maintainers.forEach(maintainersInfo -> maintainersInfo.getMaintainers()
            .forEach(maintainer -> maintainerNameToComponentIndex.put(maintainer.getName(),
                    maintainersInfo.getTitle())));

    reviewComponentIndex = maintainers.stream().collect(
            Collectors.toMap(ComponentInfo::getTitle, component -> !component.getMaintainers().isEmpty()));
}
From source file:com.ikanow.aleph2.v1.document_db.utils.V1DocumentDbHadoopUtils.java
/**
 * @param input_config - the input settings
 * @return
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IAnalyticsAccessContext<InputFormat> getInputFormat(final String user_id,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input,
        final Optional<ISecurityService> maybe_security, final V1DocDbConfigBean config) {
    //TODO (ALEPH-20): need to perform security in here
    return new IAnalyticsAccessContext<InputFormat>() {
        private LinkedHashMap<String, Object> _mutable_output = null;

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(),
                    this.getAccessConfig().get().entrySet().stream()
                            .filter(kv -> !DESCRIBE_FILTER.contains(kv.getKey()))
                            .collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue())));
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @Override
        public Either<InputFormat, Class<InputFormat>> getAccessService() {
            return Either.right((Class<InputFormat>) (Class<?>) Aleph2V1InputFormat.class);
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessConfig()
         */
        @Override
        public Optional<Map<String, Object>> getAccessConfig() {
            if (null != _mutable_output) {
                return Optional.of(_mutable_output);
            }
            _mutable_output = new LinkedHashMap<>();

            // Parse various inputs:
            final List<String> communities = Arrays
                    .stream(job_input.resource_name_or_id()
                            .substring(BucketUtils.EXTERNAL_BUCKET_PREFIX.length()).split("_"))
                    .collect(Collectors.toList());

            // Validate communities:
            maybe_security.ifPresent(sec -> {
                communities.stream()
                        .filter(cid -> !sec.isUserPermitted(user_id, Tuples._2T("community", cid),
                                Optional.of(ISecurityService.ACTION_READ)))
                        .findAny().ifPresent(cid -> {
                            throw new RuntimeException(ErrorUtils
                                    .get(V1DocumentDbErrorUtils.V1_DOCUMENT_USER_PERMISSIONS, user_id, cid));
                        });
            });

            final String query = _mapper
                    .convertValue(Optional.ofNullable(job_input.filter()).orElse(Collections.emptyMap()),
                            JsonNode.class)
                    .toString();

            final Tuple4<String, Tuple2<Integer, Integer>, BasicDBObject, DBObject> horrible_object = LegacyV1HadoopUtils
                    .parseQueryObject(query, communities);

            final String db_server = config.mongodb_connection();

            // Here's all the fields to fill in
            // 1) Generic MongoDB fields:
            //name of job shown in jobtracker --><name>mongo.job.name</name><value>title
            //run the job verbosely ? --><name>mongo.job.verbose</name><value>true
            //Run the job in the foreground and wait for response, or background it? --><name>mongo.job.background</name><value>false
            //If you are reading from mongo, the URI --><name>mongo.input.uri</name><value>mongodb://"+dbserver+"/"+input
            //The number of documents to limit to for read [OPTIONAL] --><name>mongo.input.limit</name><value>" + nLimit
            //The query, in JSON, to execute [OPTIONAL] --><name>mongo.input.query</name><value>" + StringEscapeUtils.escapeXml(query)
            //The fields, in JSON, to read [OPTIONAL] --><name>mongo.input.fields</name><value>"+( (fields==null) ? ("") : fields )
            //InputFormat Class --><name>mongo.job.input.format</name><value>com.ikanow.infinit.e.data_model.custom.InfiniteMongoInputFormat
            _mutable_output.put("mongo.job.name",
                    Optional.ofNullable(job_input.data_service()).orElse("unknown") + ":"
                            + Optional.ofNullable(job_input.resource_name_or_id()).orElse("unknown")); // (i think this is ignored in fact)
            _mutable_output.put("mongo.job.verbose", "true");
            _mutable_output.put("mongo.job.background", "false");
            _mutable_output.put("mongo.input.uri", "mongodb://" + db_server + "/doc_metadata.metadata");
            _mutable_output.put("mongo.input.query", horrible_object._1());
            _mutable_output.put("mongo.input.fields",
                    Optional.ofNullable(horrible_object._4()).map(o -> o.toString()).orElse(""));
            _mutable_output.put("mongo.input.limit", Optional.ofNullable(job_input.config())
                    .map(cfg -> cfg.test_record_limit_request()).map(o -> o.toString()).orElse("0"));

            // 2) Basic Infinit.e/MongoDB fields:
            //Maximum number of splits [optional] --><name>max.splits</name><value>"+nSplits
            //Maximum number of docs per split [optional] --><name>max.docs.per.split</name><value>"+nDocsPerSplit
            _mutable_output.put("max.splits", horrible_object._2()._1().toString());
            _mutable_output.put("max.docs.per.split", horrible_object._2()._2().toString());

            // 3) Advanced Infinit.e/MongoDB fields:
            //Infinit.e src tags filter [optional] --><name>infinit.e.source.tags.filter</name><value>"+srcTags.toString()
            if (null != horrible_object._3()) {
                _mutable_output.put("infinit.e.source.tags.filter", horrible_object._3().toString());
            }

            return Optional.of(Collections.unmodifiableMap(_mutable_output));
        }
    };
}
From source file:org.workspace7.moviestore.controller.SessionsController.java
@CrossOrigin
@RequestMapping(method = RequestMethod.GET, value = "/sessions", produces = "application/json")
public @ResponseBody String sessions(HttpServletRequest request) {
    final String hostname = System.getenv().getOrDefault("HOSTNAME", "unknown");
    ObjectMapper sessions = new ObjectMapper();
    ObjectNode rootNode = sessions.createObjectNode().put("hostName", hostname);
    String jsonResponse = "{\"message\":\"NO SESSIONS AVAILABLE\"}";
    try {
        AdvancedCache<Object, Object> sessionCache = cacheManager.getCache("moviestore-sessions-cache")
                .getAdvancedCache();
        if (sessionCache != null && !sessionCache.isEmpty()) {
            ArrayNode sessionsArray = rootNode.arrayNode();
            Map<Object, Object> sessionsCacheMap = sessionCache.entrySet().stream().collect(CacheCollectors
                    .serializableCollector(() -> Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
            sessionsCacheMap.forEach((s, o) -> {
                MapSession mapSession = (MapSession) o;
                log.debug("Session Controller Map Session Id {} value : {}", s, mapSession);
                if (log.isDebugEnabled()) {
                    StringBuilder debugMessage = new StringBuilder();
                    mapSession.getAttributeNames().forEach(key -> {
                        debugMessage.append("Attribute :" + s + " Value: " + mapSession.getAttribute(key));
                    });
                    log.debug("Map Session Attributes : {}", debugMessage);
                }
                MovieCart movieCart = mapSession.getAttribute(ShoppingCartController.SESSION_ATTR_MOVIE_CART);
                if (movieCart != null) {
                    ObjectNode movieCartNode = sessions.createObjectNode();
                    movieCartNode.put("sessionId", mapSession.getId());
                    movieCartNode.put("orderId", movieCart.getOrderId());
                    ArrayNode movieItemsNode = movieCartNode.arrayNode();
                    movieCart.getMovieItems().forEach((movieId, qty) -> {
                        ObjectNode movieItem = movieItemsNode.addObject();
                        movieItem.put("movieId", movieId);
                        movieItem.put("orderQuantity", qty);
                    });
                    movieCartNode.set("movies", movieItemsNode);
                    sessionsArray.add(movieCartNode);
                }
            });
            rootNode.set("sessions", sessionsArray);
        }
        jsonResponse = sessions.writeValueAsString(rootNode);
    } catch (Exception e) {
        log.error("Error building JSON response for sessions", e);
    }
    return jsonResponse;
}
From source file:ddf.catalog.transformer.output.rtf.model.ExportCategory.java
@Override
public Map<String, ExportValue> toExportMap(Metacard metacard) {
    return attributes.stream()
            .map(key -> new AbstractMap.SimpleEntry<>(attributeKeyFrom(key),
                    attributeExportValueFrom(key, metacard.getAttribute(key))))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
From source file:com.github.horrorho.inflatabledonkey.requests.Headers.java
public static Map<Headers, Header> headers(Map.Entry<Headers, String>... headers) {
    return Arrays.asList(headers).stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getKey().header(e.getValue())));
}
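Several of these examples (ArgsManager, ExportCategory, and this one) follow the same idiom: build a stream of Map.Entry values, then collect with toMap(Map.Entry::getKey, Map.Entry::getValue). A minimal sketch of just that idiom, using AbstractMap.SimpleEntry so it compiles on Java 8 (on Java 9+, Map.entry(k, v) is a shorter alternative); the service/port data is illustrative:

import java.util.AbstractMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class EntryStreamToMap {
    public static void main(String[] args) {
        // Build the entries however suits the caller (mapping, parsing, zipping...), then collect.
        Map<String, Integer> portByService = Stream.of(
                        new AbstractMap.SimpleEntry<>("http", 80),
                        new AbstractMap.SimpleEntry<>("https", 443))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        System.out.println(portByService); // {http=80, https=443} (iteration order unspecified)
    }
}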
From source file:io.syndesis.model.connection.Action.java
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
default Map<String, ConfigurationProperty> getProperties() {
    ActionDefinition definition = getDefinition();
    return definition != null
            ? definition.getPropertyDefinitionSteps().stream()
                    .flatMap(step -> step.getProperties().entrySet().stream())
                    .collect(Collectors.toMap(Entry::getKey, Entry::getValue))
            : Collections.emptyMap();
}
From source file:alfio.model.modification.EventWithStatistics.java
public EventWithStatistics(Event event, List<EventDescription> eventDescriptions,
        List<TicketCategoryWithStatistic> ticketCategories) {
    this.event = event;
    this.ticketCategories = ticketCategories;
    this.soldTickets = countSoldTickets(ticketCategories);
    this.checkedInTickets = countCheckedInTickets(ticketCategories);
    this.allocatedTickets = ticketCategories.stream().filter(IS_BOUNDED)
            .mapToInt(TicketCategoryWithStatistic::getMaxTickets).sum();
    this.containsUnboundedCategories = ticketCategories.stream().anyMatch(IS_BOUNDED.negate());
    this.description = eventDescriptions.stream()
            .collect(Collectors.toMap(EventDescription::getLocale, EventDescription::getDescription));
}
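One caveat that applies to this example and to the two-argument toMap generally: if two stream elements produce the same key (here, two descriptions for the same locale), the collector throws IllegalStateException. When duplicates are possible, the overload that takes a merge function resolves them explicitly, and the four-argument overload additionally lets you pick the map implementation. A minimal sketch with illustrative locale/text pairs (not taken from the alf.io code above):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToMapWithMerge {
    public static void main(String[] args) {
        // Duplicate key "en": keep the last value instead of throwing IllegalStateException.
        Map<String, String> descriptionByLocale = Stream.of(
                        new String[] { "en", "First english text" },
                        new String[] { "de", "Deutscher Text" },
                        new String[] { "en", "Second english text" })
                .collect(Collectors.toMap(
                        pair -> pair[0],                  // key mapper
                        pair -> pair[1],                  // value mapper
                        (first, second) -> second,        // merge function: last one wins
                        LinkedHashMap::new));             // map supplier: keep encounter order

        System.out.println(descriptionByLocale);
        // {en=Second english text, de=Deutscher Text}
    }
}

The merge function receives the already-stored value and the new value for the colliding key; returning the first, the second, or a combination of both are all valid strategies.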