List of usage examples for java.util.Set stream()
default Stream<E> stream()
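Set.stream() is inherited from java.util.Collection and returns a sequential Stream over the set's elements. Before the project examples below, here is a minimal sketch of the common filter/map/collect pipeline; the class name, element values, and printed output are illustrative only and do not come from any of the sources listed afterwards.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class SetStreamSketch {
    public static void main(String[] args) {
        Set<String> names = Set.of("alpha", "beta", "gamma"); // Set.of requires Java 9+
        // Filter the elements, transform the survivors, and collect them into a List.
        List<String> longNames = names.stream()
                .filter(n -> n.length() > 4)
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(longNames); // e.g. [ALPHA, GAMMA]; iteration order of a Set is unspecified
    }
}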
From source file:com.epam.dlab.billing.azure.AzureInvoiceCalculationService.java
/**
 * Constructs service class
 *
 * @param billingConfigurationAzure contains <code>billing-azure</code> module configuration
 * @param billableResources         resources that invoices should be calculated for
 */
public AzureInvoiceCalculationService(BillingConfigurationAzure billingConfigurationAzure,
        Set<AzureDlabBillableResource> billableResources) {
    this.billingConfigurationAzure = billingConfigurationAzure;
    this.billableResources = billableResources.stream()
            .collect(Collectors.toMap(AzureDlabBillableResource::getId, e -> e));
}
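Side note on the pattern above: the identity value mapper e -> e passed to Collectors.toMap can also be written as Function.identity(). A condensed sketch of the same Set-to-Map indexing idiom, using a hypothetical Item record rather than the DLab types from the example:

import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IndexBySketch {
    // Hypothetical element type; any class exposing a key accessor works the same way.
    record Item(String id, String label) {}

    static Map<String, Item> indexById(Set<Item> items) {
        // Keys must be unique, otherwise Collectors.toMap throws IllegalStateException.
        return items.stream()
                .collect(Collectors.toMap(Item::id, Function.identity()));
    }
}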
From source file:com.chadekin.jadys.syntax.where.impl.WhereClauseExtendedBuilderImpl.java
@Override
public void finalizeExpression() {
    String expression = SqlExpressionFactory.newExpression(getSqlExpressionItem()).build();
    Set<String> extractedAlias = extractAlias(expression);
    boolean isParentWithAlias = getParent() != null && !getParent().getAlias().isEmpty();
    if ((isParentWithAlias && !extractedAlias.isEmpty() && isValidAlias(extractedAlias))
            || (!isParentWithAlias && extractedAlias.isEmpty())) {
        extractedAlias.stream().forEach(this::withAlias);
        apply(expression);
    } else if (!expression.isEmpty()) {
        StringBuilder exceptionMessage = new StringBuilder(JadysExceptionMessageKeys.INVALID_ALIAS)
                .append(JadysKeys.COLUMN).append(JadysKeys.SPACE).append(JadysKeys.APOSTROPHE)
                .append(expression).append(JadysKeys.APOSTROPHE);
        throw new IllegalArgumentException(exceptionMessage.toString());
    }
}
From source file:it.damore.solr.importexport.App.java
/**
 * @param client
 * @param outputFile
 * @throws SolrServerException
 * @throws IOException
 */
private static void readAllDocuments(HttpSolrClient client, File outputFile)
        throws SolrServerException, IOException {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery("*:*");
    if (config.getFilterQuery() != null) {
        solrQuery.addFilterQuery(config.getFilterQuery());
    }
    solrQuery.setRows(0);
    solrQuery.addSort(config.getUniqueKey(), ORDER.asc); // Pay attention to this line
    String cursorMark = CursorMarkParams.CURSOR_MARK_START;
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    // objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true);
    DateFormat df = new SimpleDateFormat("YYYY-MM-dd'T'HH:mm:sss'Z'");
    objectMapper.setDateFormat(df);
    objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);
    QueryResponse r = client.query(solrQuery);
    long nDocuments = r.getResults().getNumFound();
    logger.info("Found " + nDocuments + " documents");
    if (!config.getDryRun()) {
        logger.info("Creating " + config.getFileName());
        Set<SkipField> skipFieldsEquals = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.EQUAL).collect(Collectors.toSet());
        Set<SkipField> skipFieldsStartWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.STARTS_WITH).collect(Collectors.toSet());
        Set<SkipField> skipFieldsEndWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.ENDS_WITH).collect(Collectors.toSet());
        try (PrintWriter pw = new PrintWriter(outputFile)) {
            solrQuery.setRows(200);
            boolean done = false;
            while (!done) {
                solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
                QueryResponse rsp = client.query(solrQuery);
                String nextCursorMark = rsp.getNextCursorMark();
                for (SolrDocument d : rsp.getResults()) {
                    skipFieldsEquals.forEach(f -> d.removeFields(f.getText()));
                    if (skipFieldsStartWith.size() > 0 || skipFieldsEndWith.size() > 0) {
                        Map<String, Object> collect = d.entrySet().stream()
                                .filter(e -> !skipFieldsStartWith.stream()
                                        .filter(f -> e.getKey().startsWith(f.getText())).findFirst()
                                        .isPresent())
                                .filter(e -> !skipFieldsEndWith.stream()
                                        .filter(f -> e.getKey().endsWith(f.getText())).findFirst().isPresent())
                                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
                        pw.write(objectMapper.writeValueAsString(collect));
                    } else {
                        pw.write(objectMapper.writeValueAsString(d));
                    }
                    pw.write("\n");
                }
                if (cursorMark.equals(nextCursorMark)) {
                    done = true;
                }
                cursorMark = nextCursorMark;
            }
        }
    }
}
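The Set.stream() usage worth isolating from this dense example is twofold: partitioning one configured set into subsets by a property, then using those subsets as predicates inside an entrySet().stream() pipeline. A condensed sketch with hypothetical Rule and MatchType types standing in for the SkipField machinery above:

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class FieldFilterSketch {
    enum MatchType { EQUAL, STARTS_WITH }

    // Hypothetical stand-in for SkipField.
    record Rule(MatchType match, String text) {}

    static Map<String, Object> strip(Map<String, Object> doc, Set<Rule> rules) {
        // Partition the rule set by match type.
        Set<String> exactNames = rules.stream()
                .filter(r -> r.match() == MatchType.EQUAL).map(Rule::text).collect(Collectors.toSet());
        Set<String> prefixes = rules.stream()
                .filter(r -> r.match() == MatchType.STARTS_WITH).map(Rule::text).collect(Collectors.toSet());
        // Drop entries whose key matches any rule, keep the rest.
        return doc.entrySet().stream()
                .filter(e -> !exactNames.contains(e.getKey()))
                .filter(e -> prefixes.stream().noneMatch(p -> e.getKey().startsWith(p)))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }
}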
From source file:com.daimler.spm.storefront.controllers.pages.MyQuotesController.java
/**
 * Set allowed actions for a given quote on model.
 *
 * @param model
 *            the MVC model
 * @param quoteCode
 *            the quote to be checked.
 */
protected void setAllowedActions(final Model model, final String quoteCode) {
    final Set<QuoteAction> actionSet = getQuoteFacade().getAllowedActions(quoteCode);
    if (actionSet != null) {
        final Map<String, Boolean> actionsMap = actionSet.stream()
                .collect(Collectors.toMap((v) -> v.getCode(), (v) -> Boolean.TRUE));
        model.addAttribute(ALLOWED_ACTIONS, actionsMap);
    }
}
From source file:com.linecorp.armeria.server.docs.EndpointInfo.java
EndpointInfo(String hostnamePattern, String path, SerializationFormat defaultFormat,
        Set<SerializationFormat> availableFormats) {
    this.hostnamePattern = requireNonNull(hostnamePattern, "hostnamePattern");
    this.path = requireNonNull(path, "path");
    defaultMimeType = requireNonNull(defaultFormat, "defaultFormat").mimeType();
    final Set<String> sortedAvailableMimeTypes = availableFormats.stream().map(SerializationFormat::mimeType)
            .collect(Collectors.toCollection(TreeSet::new));
    availableMimeTypes = Collections.unmodifiableSet(sortedAvailableMimeTypes);
}
From source file:com.marand.thinkmed.medications.business.MedicationsFinderImpl.java
@Override
public List<MedicationDto> findMedicationProducts(final long medicationId, @Nonnull final List<Long> routeIds,
        @Nonnull final DateTime when) {
    Preconditions.checkNotNull(routeIds, "routeIds must not be null");
    Preconditions.checkNotNull(when, "when must not be null");
    final boolean productBasedMedication = medicationsDao.isProductBasedMedication(medicationId);
    if (productBasedMedication) {
        final Set<Long> similarMedicationsIds = medicationsDao.findSimilarMedicationsIds(medicationId,
                routeIds, when);
        return similarMedicationsIds.stream().map(i -> medicationsValueHolder.getValue().get(i))
                .map(m -> medicationHolderDtoMapper.mapToMedicationDto(m)).collect(Collectors.toList());
    }
    return medicationsDao.getMedicationChildProducts(medicationId, routeIds, when);
}
From source file:io.github.jeddict.jpa.modeler.specification.model.workspace.WorkSpaceDialog.java
private void save_ButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_save_ButtonActionPerformed
    if (!validateField()) {
        return;
    }
    //add dependantClasses
    Set<JavaClass<? extends IAttributes>> selectedClasses = entityMappingPanel.getSelectedJavaClass().stream()
            .collect(toSet());
    Set<JavaClass<? extends IAttributes>> dependantClasses = findDependents(selectedClasses);
    String dependantClassesText = dependantClasses.stream().map(JavaClass::getClazz).collect(joining(","));
    int MAX_CHAR = 100;
    dependantClassesText = dependantClassesText.substring(0, Math.min(MAX_CHAR, dependantClassesText.length()))
            + "...";
    if (dependantClasses.size() > 0) {
        int option = showConfirmDialog(WindowManager.getDefault().getMainWindow(),
                String.format(
                        "Workspace have dependecies on [%s] classes, Are you sure you want to proceed by adding \n [%s]?",
                        dependantClasses.size(), dependantClassesText),
                "Dependant class", YES_NO_OPTION);
        if (option == OK_OPTION) {
            selectedClasses.addAll(dependantClasses);
        } else {
            return;
        }
    }
    //save data
    workSpace.setItems(selectedClasses.stream().map(WorkSpaceItem::new).collect(toSet()));
    workSpace.setName(nameTextField.getText());
    scene.getModelerPanelTopComponent().changePersistenceState(false);
    saveActionPerformed(evt);
}
From source file:info.archinnov.achilles.internals.apt.processors.meta.AchillesProcessor.java
private void parseCodecRegistry(GlobalParsingContext parsingContext, Set<? extends TypeElement> annotations,
        RoundEnvironment roundEnv) {
    final boolean hasCompileTimeCodecRegistry = annotations.stream()
            .filter(annotation -> isAnnotationOfType(annotation, CodecRegistry.class)).findFirst().isPresent();
    if (hasCompileTimeCodecRegistry) {
        aptUtils.printNote("[Achilles] Parsing compile-time codec registry");
        new CodecRegistryParser(aptUtils).parseCodecs(roundEnv, parsingContext);
    }
}
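Aside: the filter(...).findFirst().isPresent() chain in this example expresses an existence check, which anyMatch(...) states more directly and still short-circuits. A small generic sketch of that check over a Set, not tied to the annotation-processing types above:

import java.util.Set;
import java.util.function.Predicate;

public class AnyMatchSketch {
    static <T> boolean containsMatching(Set<T> values, Predicate<T> predicate) {
        // Returns true as soon as one element satisfies the predicate.
        return values.stream().anyMatch(predicate);
    }
}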
From source file:pt.ist.fenixedu.delegates.ui.DelegateCrudController.java
@RequestMapping(value = "/search", method = RequestMethod.POST)
public String search(@ModelAttribute DelegateSearchBean searchBean, Model model) {
    searchBean.setExecutionYear(ExecutionYear.readCurrentExecutionYear());
    searchBean = delegateService.generateNewBean(searchBean);
    model.addAttribute("searchBean", searchBean);
    Set<DelegateBean> delegateBeanSet = delegateService.searchDelegates(searchBean, new DateTime()).stream()
            .collect(Collectors.toSet());
    delegateBeanSet.addAll(delegateService.getDegreePositions(searchBean.getDegree()));
    model.addAttribute("delegates", delegateBeanSet.stream().distinct().collect(Collectors.toList()));
    return search(model);
}
From source file:com.linkedin.gradle.python.tasks.ParallelWheelGenerationTask.java
@TaskAction
public void buildWheels() {
    ProgressLoggerFactory progressLoggerFactory = getServices().get(ProgressLoggerFactory.class);
    ProgressLogger progressLogger = progressLoggerFactory.newOperation(ParallelWheelGenerationTask.class);
    progressLogger.setDescription("Building wheels");
    progressLogger.started();

    TaskTimer taskTimer = new TaskTimer();

    // This way we don't try to over-alloc the system too much. We'll use slightly over 1/2 of the machine to
    // build the wheels in parallel, allowing other operations to continue.
    ForkJoinPool forkJoinPool = new ForkJoinPool(Runtime.getRuntime().availableProcessors() / 2 + 1);

    Set<File> files = getFilesToConvert().getFiles();
    int totalSize = files.size();

    try {
        forkJoinPool.submit(() -> {
            files.stream().parallel().forEach(file -> {
                PackageInfo packageInfo = PackageInfo.fromPath(file);
                currentPackages.add(packageInfo.getName());
                counter.incrementAndGet();
                updateStatusLine(progressLogger, totalSize, counter.get());
                TaskTimer.TickingClock clock = taskTimer
                        .start(packageInfo.getName() + "-" + packageInfo.getVersion());
                if (!packageSettings.requiresSourceBuild(packageInfo)) {
                    makeWheelFromSdist(packageInfo);
                }
                currentPackages.remove(packageInfo.getName());
                updateStatusLine(progressLogger, totalSize, counter.get());
                clock.stop();
            });
        }).get();
    } catch (InterruptedException | ExecutionException e) {
        logger.warn("Unable to pre-build some dependencies");
    } finally {
        forkJoinPool.shutdown();
    }

    try {
        FileUtils.write(getBuildReport(), taskTimer.buildReport());
    } catch (IOException ignore) {
        // Don't fail if there are issues writing the timing report.
    }
    progressLogger.completed();
}
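The technique in this last example, submitting a parallel stream as a task to a dedicated ForkJoinPool so the stream runs on that pool's workers instead of the common pool, is a widely used way to cap stream parallelism, though it relies on behavior that is not part of the documented Stream contract. A minimal standalone sketch (class name and work items are illustrative):

import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;

public class BoundedParallelismSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        Set<Integer> work = Set.of(1, 2, 3, 4, 5, 6, 7, 8);
        ForkJoinPool pool = new ForkJoinPool(2); // at most 2 worker threads
        try {
            pool.submit(() ->
                work.stream().parallel()
                        .forEach(i -> System.out.println(Thread.currentThread().getName() + " -> " + i))
            ).get(); // wait for all elements to be processed
        } finally {
            pool.shutdown();
        }
    }
}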