List of usage examples for java.util.stream.Collectors.toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
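This two-argument overload returns a Collector that accumulates elements into a Map whose keys and values are produced by the given mapping functions; if two elements map to the same key, an IllegalStateException is thrown when the collection is performed. Before the real-world excerpts below, here is a minimal self-contained sketch (the Person record and sample data are illustrative; records require Java 16+):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapDemo {
    record Person(String name, int age) {}

    public static void main(String[] args) {
        List<Person> people = List.of(new Person("Ada", 36), new Person("Alan", 41));

        // The key mapper extracts the map key, the value mapper the map value.
        Map<String, Integer> ageByName = people.stream()
                .collect(Collectors.toMap(Person::name, Person::age));
        System.out.println(ageByName); // {Ada=36, Alan=41} (iteration order unspecified)

        // Function.identity() keeps the stream element itself as the value,
        // a pattern that recurs in several of the examples below.
        Map<String, Person> byName = people.stream()
                .collect(Collectors.toMap(Person::name, Function.identity()));
        System.out.println(byName.get("Alan").age()); // 41
    }
}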
From source file:fr.paris.lutece.portal.web.xsl.XslExportJspBeanTest.java
public void testDoCreateXslExportNoToken() throws AccessDeniedException, IOException {
    MockHttpServletRequest request = new MockHttpServletRequest();
    AdminUser user = new AdminUser();
    user.setRoles(AdminRoleHome.findAll().stream()
            .collect(Collectors.toMap(AdminRole::getKey, Function.identity())));
    Utils.registerAdminUserWithRigth(request, user, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    String randomName = getRandomName();
    Map<String, String[]> parameters = new HashMap<>();
    parameters.put("title", new String[] { randomName });
    parameters.put("description", new String[] { randomName });
    parameters.put("extension", new String[] { randomName });
    Map<String, List<FileItem>> multipartFiles = new HashMap<>();
    List<FileItem> fileItems = new ArrayList<>();
    FileItem item = new DiskFileItemFactory().createItem("id_file", "", false, "xsl");
    item.getOutputStream().write("<?xml version='1.0'?><a/>".getBytes());
    fileItems.add(item);
    multipartFiles.put("id_file", fileItems);
    _instance.init(request, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    try {
        _instance.doCreateXslExport(new MultipartHttpServletRequest(request, multipartFiles, parameters));
        fail("Should have thrown");
    } catch (AccessDeniedException ade) {
        assertFalse(XslExportHome.getList().stream()
                .anyMatch(e -> randomName.equals(e.getTitle()) && randomName.equals(e.getDescription())
                        && randomName.equals(e.getExtension())));
    } finally {
        XslExportHome.getList().stream()
                .filter(e -> randomName.equals(e.getTitle()))
                .forEach(e -> XslExportHome.remove(e.getIdXslExport()));
    }
}
From source file:org.ligoj.app.plugin.prov.aws.in.ProvAwsPriceImportResource.java
private Map<String, ProvStorageType> installStorageTypes(UpdateContext context) throws IOException {
    final Map<String, ProvStorageType> previous = stRepository.findAllBy(BY_NODE, context.getNode().getId())
            .stream().collect(Collectors.toMap(INamableBean::getName, Function.identity()));
    csvForBean.toBean(ProvStorageType.class, "csv/aws-prov-storage-type.csv").forEach(t -> {
        final ProvStorageType entity = previous.computeIfAbsent(t.getName(), n -> {
            t.setNode(context.getNode());
            return t;
        });

        // Merge the storage type details
        entity.setDescription(t.getDescription());
        entity.setInstanceCompatible(t.isInstanceCompatible());
        entity.setIops(t.getIops());
        entity.setLatency(t.getLatency());
        entity.setMaximal(t.getMaximal());
        entity.setMinimal(t.getMinimal());
        entity.setOptimized(t.getOptimized());
        entity.setThroughput(t.getThroughput());
        stRepository.save(entity);
    });
    return previous;
}
From source file:io.yields.math.framework.kpi.ExplorerJsonExporter.java
private Values toDescriptorValues(Stats stats) {
    return new Values(stats.getDescriptorNames().stream()
            .collect(Collectors.toMap(name -> name, name -> stats.getDescriptorAt(name))));
}
From source file:com.yahoo.parsec.filters.RequestResponeLoggingFilterTest.java
private String createLogStringPattern(String requestMethod, String url, String queryString,
        Map<String, Collection<String>> reqHeaders, String reqBodyJson, int responseCode,
        Map<String, Collection<String>> respHeaders, String respBodyJson) throws JsonProcessingException {
    Map<String, String> headers = reqHeaders.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> String.join(",", e.getValue())));
    String reqHeaderString = _OBJECT_MAPPER.writeValueAsString(headers);
    headers = respHeaders.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> String.join(",", e.getValue())));
    String respHeaderString = _OBJECT_MAPPER.writeValueAsString(headers);
    String pattern = String.format("{"
            + "\"time\":\"${json-unit.any-number}\","
            + "\"request\": {"
            + "\"method\": \"%s\","
            + "\"uri\": \"%s\","
            + "\"query\": \"%s\","
            + "\"headers\": %s,"
            + "\"payload\": \"%s\""
            + "},"
            + "\"response\": {"
            + "\"status\": %d,"
            + "\"headers\": %s,"
            + "\"payload\": \"%s\""
            + "},"
            + "\"progress\": {"
            + "\"namelookup_time\": \"${json-unit.any-number}\","
            + "\"connect_time\": \"${json-unit.any-number}\","
            + "\"pretransfer_time\": \"${json-unit.any-number}\","
            + "\"starttransfer_time\": \"${json-unit.any-number}\","
            + "\"total_time\":\"${json-unit.any-number}\""
            + "}"
            + "}",
            requestMethod, url, queryString, reqHeaderString, StringEscapeUtils.escapeJson(reqBodyJson),
            responseCode, respHeaderString, StringEscapeUtils.escapeJson(respBodyJson));
    return pattern;
}
From source file:mtsar.resources.WorkerResource.java
@PATCH
@Path("{worker}/answers/skip")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
public Response postAnswersSkip(@Context Validator validator, @Context UriInfo uriInfo,
        @PathParam("worker") Integer id, @FormParam("tags") List<String> tags,
        @FormParam("datetime") String datetimeParam, @FormParam("tasks") List<Integer> tasks) {
    final Timestamp datetime = (datetimeParam == null) ? DateTimeUtils.now() : Timestamp.valueOf(datetimeParam);
    final Worker worker = fetchWorker(id);
    final Map<Answer, Set<ConstraintViolation<Object>>> answers = tasks.stream().map(taskId -> {
        final Task task = fetchTask(taskId);
        final Answer answer = new Answer.Builder().setStage(stage.getId()).addAllTags(tags)
                .setType(AnswerDAO.ANSWER_TYPE_SKIP).setTaskId(task.getId()).setWorkerId(worker.getId())
                .setDateTime(datetime).build();
        /* Since we are skipping the task, the only constraint we need is #answer-duplicate. */
        final Set<ConstraintViolation<Object>> violations = ParamsUtils.validate(validator,
                new AnswerValidation.Builder().setAnswer(answer).setAnswerDAO(answerDAO).build());
        return Pair.of(answer, violations);
    }).collect(Collectors.toMap(Pair::getLeft, Pair::getRight));
    final Set<ConstraintViolation<Object>> violations = answers.values().stream().flatMap(Set::stream)
            .collect(Collectors.toSet());
    if (!violations.isEmpty())
        throw new ConstraintViolationException(violations);
    final List<Answer> inserted = AnswerDAO.insert(answerDAO, answers.keySet());
    return Response.ok(inserted).build();
}
From source file:com.thinkbiganalytics.feedmgr.nifi.PropertyExpressionResolver.java
/**
 * Resolves the given string with values from the supplied {@code properties}. This will recursively fill out
 * the string, looking back at the properties to get the correct value. NOTE: the property values will be
 * overwritten if a replacement is found!
 */
public ResolvedVariables resolveVariables(String str, List<NifiProperty> properties) {
    ResolvedVariables variables = new ResolvedVariables(str);

    StrLookup resolver = new StrLookup() {
        @Override
        public String lookup(String key) {
            Optional<NifiProperty> optional = properties.stream().filter(prop -> key.equals(prop.getKey()))
                    .findFirst();
            if (optional.isPresent()) {
                NifiProperty property = optional.get();
                String value = StringUtils.isNotBlank(property.getValue()) ? property.getValue().trim() : "";
                variables.getResolvedVariables().put(property.getKey(), value);
                return value;
            } else {
                return null;
            }
        }
    };
    StrSubstitutor ss = new StrSubstitutor(resolver);
    variables.setResolvedString(ss.replace(str));

    Map<String, String> map = variables.getResolvedVariables();
    Map<String, String> vars = map.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> ss.replace(entry.getValue())));
    variables.getResolvedVariables().putAll(vars);
    return variables;
}
From source file:ch.algotrader.service.ib.IBNativeReferenceDataServiceImpl.java
private void retrieveOptions(OptionFamily securityFamily, Set<ContractDetails> contractDetailsSet) {
    // get all current options
    List<Option> allOptions = this.optionDao.findBySecurityFamily(securityFamily.getId());
    Map<String, Option> mapByConid = allOptions.stream().filter(e -> e.getConid() != null)
            .collect(Collectors.toMap(e -> e.getConid(), e -> e));
    Map<String, Option> mapBySymbol = allOptions.stream()
            .collect(Collectors.toMap(e -> e.getSymbol(), e -> e));

    for (ContractDetails contractDetails : contractDetailsSet) {
        Contract contract = contractDetails.m_summary;
        String conid = String.valueOf(contract.m_conId);
        if (!mapByConid.containsKey(conid)) {
            OptionType type = "C".equals(contract.m_right) ? OptionType.CALL : OptionType.PUT;
            BigDecimal strike = RoundUtil.getBigDecimal(contract.m_strike,
                    securityFamily.getScale(Broker.IB.name()));
            LocalDate expirationDate = DATE_FORMAT.parse(contract.m_expiry, LocalDate::from);
            String symbol = OptionSymbol.getSymbol(securityFamily, expirationDate, type, strike,
                    this.commonConfig.getOptionSymbolPattern());
            if (!mapBySymbol.containsKey(symbol)) {
                Option option = Option.Factory.newInstance();

                final String isin = OptionSymbol.getIsin(securityFamily, expirationDate, type, strike);
                String ric = OptionSymbol.getRic(securityFamily, expirationDate, type, strike);

                option.setSymbol(symbol);
                option.setIsin(isin);
                option.setRic(ric);
                option.setConid(conid);
                option.setOptionType(type);
                option.setStrike(strike);
                option.setExpiration(DateTimeLegacy.toLocalDate(expirationDate));
                option.setSecurityFamily(securityFamily);
                option.setUnderlying(securityFamily.getUnderlying());

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Creating option based on IB definition: {} {} {} {}", contract.m_symbol,
                            contract.m_right, contract.m_expiry, contract.m_strike);
                }
                this.optionDao.save(option);
            }
        }
    }
}
From source file:it.damore.solr.importexport.App.java
/**
 * @param client
 * @param outputFile
 * @throws SolrServerException
 * @throws IOException
 */
private static void readAllDocuments(HttpSolrClient client, File outputFile)
        throws SolrServerException, IOException {

    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery("*:*");
    if (config.getFilterQuery() != null) {
        solrQuery.addFilterQuery(config.getFilterQuery());
    }

    solrQuery.setRows(0);

    solrQuery.addSort(config.getUniqueKey(), ORDER.asc); // Pay attention to this line
    String cursorMark = CursorMarkParams.CURSOR_MARK_START;

    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    // objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true);
    // 'yyyy' (calendar year) and 'ss' (seconds) are intended here; the original read
    // "YYYY-MM-dd'T'HH:mm:sss'Z'", where 'YYYY' is the week-based year.
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    objectMapper.setDateFormat(df);
    objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);

    QueryResponse r = client.query(solrQuery);
    long nDocuments = r.getResults().getNumFound();
    logger.info("Found " + nDocuments + " documents");

    if (!config.getDryRun()) {
        logger.info("Creating " + config.getFileName());

        Set<SkipField> skipFieldsEquals = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.EQUAL).collect(Collectors.toSet());
        Set<SkipField> skipFieldsStartWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.STARTS_WITH).collect(Collectors.toSet());
        Set<SkipField> skipFieldsEndWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.ENDS_WITH).collect(Collectors.toSet());

        try (PrintWriter pw = new PrintWriter(outputFile)) {
            solrQuery.setRows(200);
            boolean done = false;
            while (!done) {
                solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
                QueryResponse rsp = client.query(solrQuery);
                String nextCursorMark = rsp.getNextCursorMark();
                for (SolrDocument d : rsp.getResults()) {
                    skipFieldsEquals.forEach(f -> d.removeFields(f.getText()));
                    if (skipFieldsStartWith.size() > 0 || skipFieldsEndWith.size() > 0) {
                        Map<String, Object> collect = d.entrySet().stream()
                                .filter(e -> !skipFieldsStartWith.stream()
                                        .filter(f -> e.getKey().startsWith(f.getText())).findFirst()
                                        .isPresent())
                                .filter(e -> !skipFieldsEndWith.stream()
                                        .filter(f -> e.getKey().endsWith(f.getText())).findFirst()
                                        .isPresent())
                                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
                        pw.write(objectMapper.writeValueAsString(collect));
                    } else {
                        pw.write(objectMapper.writeValueAsString(d));
                    }
                    pw.write("\n");
                }
                if (cursorMark.equals(nextCursorMark)) {
                    done = true;
                }
                cursorMark = nextCursorMark;
            }
        }
    }
}
From source file:io.yields.math.framework.kpi.ExplorerJsonExporter.java
private Values toStatsValues(Stats stats) {
    return new Values(stats.getStatsNames().stream()
            .collect(Collectors.toMap(name -> name, name -> stats.getStats(name))));
}
From source file:it.greenvulcano.gvesb.iam.service.internal.GVUsersManager.java
@Override
public SearchResult searchUsers(SearchCriteria criteria) {
    SearchResult result = new SearchResult();

    Map<UserRepositoryHibernate.Parameter, Object> parameters = criteria.getParameters().keySet().stream()
            .map(UserRepositoryHibernate.Parameter::valueOf).filter(Objects::nonNull)
            .collect(Collectors.toMap(Function.identity(), k -> criteria.getParameters().get(k.name())));

    LinkedHashMap<UserRepositoryHibernate.Parameter, UserRepositoryHibernate.Order> order =
            new LinkedHashMap<>(criteria.getOrder().size());
    for (Entry<String, String> e : criteria.getOrder().entrySet()) {
        Optional.ofNullable(UserRepositoryHibernate.Parameter.get(e.getKey()))
                .ifPresent(p -> order.put(p, UserRepositoryHibernate.Order.get(e.getValue())));
    }

    result.setTotalCount(userRepository.count(parameters));

    if (criteria.getOffset() > result.getTotalCount()) {
        result.setFounds(new HashSet<>());
    } else {
        result.setFounds(userRepository.find(parameters, order, criteria.getOffset(), criteria.getLimit()));
    }

    return result;
}
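Note that every example above uses the two-argument overload, which throws IllegalStateException if two stream elements produce the same key; the code relies on keys (role keys, header names, option conids, and so on) being distinct. When duplicates are possible, the three-argument overload toMap(keyMapper, valueMapper, mergeFunction) resolves collisions instead. A minimal sketch of that overload (the words data is illustrative):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapMergeDemo {
    public static void main(String[] args) {
        List<String> words = List.of("alpha", "ant", "bee", "bear");

        // Without a merge function, the duplicate keys 'a' and 'b' would
        // make toMap throw IllegalStateException.
        Map<Character, String> byInitial = words.stream()
                .collect(Collectors.toMap(
                        w -> w.charAt(0),         // key mapper
                        w -> w,                   // value mapper
                        (a, b) -> a + "," + b));  // merge function for key collisions
        System.out.println(byInitial); // e.g. {a=alpha,ant, b=bee,bear} (order unspecified)
    }
}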