Example usage for java.util Set stream

List of usage examples for java.util Set stream

Introduction

On this page you can find example usages of java.util.Set#stream().

Prototype

default Stream<E> stream() 

Source Link

Document

Returns a sequential Stream with this collection as its source.

Usage

From source file:io.adeptj.runtime.core.RuntimeInitializer.java

/**
 * {@inheritDoc}/*from  w  ww. jav  a 2  s. c  om*/
 */
@Override
public void onStartup(Set<Class<?>> startupAwareClasses, ServletContext context) {
    Logger logger = LoggerFactory.getLogger(RuntimeInitializer.class);
    if (startupAwareClasses == null || startupAwareClasses.isEmpty()) {
        logger.error("No @HandlesTypes(StartupAware) on classpath!!");
        throw new IllegalStateException("No @HandlesTypes(StartupAware) on classpath!!");
    } else {
        ServletContextHolder.getInstance().setServletContext(context);
        startupAwareClasses.stream().sorted(new StartupAwareComparator()).forEach(clazz -> {
            logger.info("@HandlesTypes: [{}]", clazz);
            try {
                StartupAware.class.cast(ConstructorUtils.invokeConstructor(clazz)).onStartup(context);
            } catch (Exception ex) { // NOSONAR
                logger.error("Exception while executing StartupAware#onStartup!!", ex);
                throw new InitializationException(ex);
            }
        });
        context.addListener(FrameworkShutdownHandler.class);
    }
}

From source file:nu.yona.server.messaging.service.MessageService.java

/**
 * Deletes the given messages together with every message that must be cascadingly
 * deleted with them, persisting the affected destinations along the way.
 */
private void deleteMessages(Collection<Message> messages) {
    // Expand the deletion set with all cascade targets of the given messages.
    Set<Message> allMessagesToDelete = new HashSet<>(messages);
    for (Message message : messages) {
        allMessagesToDelete.addAll(message.getMessagesToBeCascadinglyDeleted());
    }

    // Every destination that currently holds one of the doomed messages.
    Set<MessageDestination> affectedDestinations = new HashSet<>();
    for (Message message : allMessagesToDelete) {
        affectedDestinations.add(message.getMessageDestination());
    }

    // First pass: let each message prepare itself, then flush the destinations so
    // the preparation is written before the actual removal below.
    for (Message message : allMessagesToDelete) {
        message.prepareForDelete();
    }
    for (MessageDestination destination : affectedDestinations) {
        MessageDestination.getRepository().saveAndFlush(destination);
    }

    // Second pass: detach each message from its destination and save (no flush).
    for (Message message : allMessagesToDelete) {
        message.getMessageDestination().remove(message);
    }
    for (MessageDestination destination : affectedDestinations) {
        MessageDestination.getRepository().save(destination);
    }
}

From source file:uk.ac.ebi.ep.ebeye.EbeyeRestService.java

/**
 *
 * @param query/* w ww .j  a  v  a2  s  .co  m*/
 * @param paginate
 * @param limit limit the number of results from Ebeye service. default is
 * 100 and only used when pagination is true.
 * @return list of accessions
 */
public List<String> queryEbeyeForAccessions(String query, boolean paginate, int limit) {

    try {
        EbeyeSearchResult searchResult = queryEbeye(query.trim());
        LOGGER.warn("Number of hits for search for " + query + " : " + searchResult.getHitCount());

        Set<String> accessions = new LinkedHashSet<>();

        if (!paginate) {
            for (Entry entry : searchResult.getEntries()) {
                accessions.add(entry.getUniprotAccession());
            }

            List<String> accessionList = accessions.stream().distinct().collect(Collectors.toList());
            LOGGER.warn("Number of Accessions to be processed (Pagination = false) :  " + accessionList.size());
            return accessionList;

        }

        if (paginate) {

            int hitcount = searchResult.getHitCount();

            //for now limit hitcount to 5k
            if (hitcount > HITCOUNT) {
                hitcount = HITCOUNT;
            }

            int resultLimit = 0;

            if (limit < 0) {
                resultLimit = DEFAULT_EBI_SEARCH_LIMIT;
            }

            //for now limit results
            if (resultLimit > 0 && hitcount > resultLimit) {
                hitcount = resultLimit;
            }

            int numIteration = hitcount / DEFAULT_EBI_SEARCH_LIMIT;

            List<String> accessionList = query(query, numIteration);
            LOGGER.warn("Number of Accessions to be processed (Pagination = true)  :  " + accessionList.size());
            return accessionList;

        }

    } catch (InterruptedException | NullPointerException | ExecutionException ex) {
        LOGGER.error(ex.getMessage(), ex);
    }
    return new ArrayList<>();
}

From source file:org.barmaley.vkr.controller.CoordinatorController.java

/**
 * Opens the coordinator check page for a ticket when the current user is allowed
 * to check it; otherwise routes to the "pnh" view.
 */
@GetMapping(value = "/ticket/{id}/check")
public String getCheckTicket(@PathVariable(value = "id") String ticketId, ModelMap model) {

    Users user = (Users) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
    Ticket ticket = ticketService.get(ticketId);

    // Coordinator rights matching the ticket's group, if the user holds any.
    CoordinatorRights matchingRights = user.getCoordinatorRights().stream()
            .filter(rights -> ticket.getGroupNum().equals(rights.getGroupNum()))
            .findAny()
            .orElse(null);

    // Allowed when the user coordinates this group and the ticket left status 1,
    // or when the ticket is in status 7 regardless of rights.
    boolean allowed = (matchingRights != null && ticket.getStatus().getId() != 1)
            || ticket.getStatus().getId() == 7;
    if (!allowed) {
        return "pnh";
    }

    if (ticket.getStatus().getId() == 2) {
        ticket.setStatus(statusService.get(3));
    }
    ticket.setDateCheckCoordinatorStart(new Date());
    ticketService.edit(ticket);

    model.addAttribute("ticketAttribute", ticket);
    model.addAttribute("typesOfUse", typeOfUseService.getAll());

    return "checkPage";
}

From source file:io.mandrel.worker.Loop.java

/**
 * Fetches the blob behind {@code uri}, records the fetch time, runs the spider's
 * data and outlink extractors over it, stores the blob and its metadata, and
 * returns the blob.
 *
 * @param uri   the resource to fetch
 * @param watch a started stop watch measuring the fetch; stopped here
 * @param r     the requester used to fetch the resource
 * @return the fetched blob
 * @throws Exception if fetching or processing fails
 */
protected Blob processBlob(Uri uri, StopWatch watch, Requester<? extends Strategy> r) throws Exception {
    Blob blob = r.get(uri);

    watch.stop();

    log.trace("> Start parsing data for {}", uri);

    blob.getMetadata().getFetchMetadata().setTimeToFetch(watch.getTotalTimeMillis());

    updateMetrics(watch, blob);

    // Selector instances are cached so both extraction passes reuse them.
    Map<String, Instance<?>> cachedSelectors = new HashMap<>();
    if (spider.getExtractors() != null && spider.getExtractors().getData() != null) {
        log.trace(">  - Extracting documents for {}...", uri);
        spider.getExtractors().getData().forEach(ex -> {
            List<Document> documents = extractorService.extractThenFormatThenStore(spider.getId(),
                    cachedSelectors, blob, ex);

            if (documents != null) {
                spiderAccumulator.incDocumentForExtractor(ex.getName(), documents.size());
            }
        });
        log.trace(">  - Extracting documents for {} done!", uri);
    }

    // FIX: guard against a null extractors container; the original dereferenced
    // spider.getExtractors() here without the null check used for the data pass,
    // which would throw an NPE when no extractors are configured.
    if (spider.getExtractors() != null && spider.getExtractors().getOutlinks() != null) {
        log.trace(">  - Extracting outlinks for {}...", uri);
        final Uri theUri = uri;
        spider.getExtractors().getOutlinks().forEach(ol -> {
            Set<Link> allFilteredOutlinks = extractorService
                    .extractAndFilterOutlinks(spider, theUri, cachedSelectors, blob, ol).getRight();
            blob.getMetadata().getFetchMetadata().setOutlinks(allFilteredOutlinks);
            add(spider.getId(), allFilteredOutlinks.stream().map(l -> l.getUri()).collect(Collectors.toSet()));
        });
        log.trace(">  - Extracting outlinks done for {}!", uri);
    }

    BlobStores.get(spider.getId()).ifPresent(b -> b.putBlob(blob.getMetadata().getUri(), blob));

    log.trace(">  - Storing metadata for {}...", uri);
    MetadataStores.get(spider.getId()).addMetadata(blob.getMetadata().getUri(), blob.getMetadata());
    log.trace(">  - Storing metadata for {} done!", uri);

    log.trace("> End parsing data for {}", uri);
    return blob;
}

From source file:com.devicehive.service.DeviceNotificationServiceTest.java

// Exercises notificationService.find(): three of five random guids are searched and
// the stubbed request handler answers each guid with one prepared notification.
@Test
@DirtiesContext(methodMode = DirtiesContext.MethodMode.BEFORE_METHOD)
public void testFindWithResponse() throws Exception {
    final List<String> guids = IntStream.range(0, 5).mapToObj(i -> UUID.randomUUID().toString())
            .collect(Collectors.toList());
    final Date timestampSt = new Date();
    final Date timestampEnd = new Date();
    final String parameters = "{\"param1\":\"value1\",\"param2\":\"value2\"}";

    // Search only a subset of the generated guids (indexes 0, 2 and 3).
    final Set<String> guidsForSearch = new HashSet<>(Arrays.asList(guids.get(0), guids.get(2), guids.get(3)));

    // return response for any request
    // One pre-built notification per searched guid, keyed by guid.
    Map<String, DeviceNotification> notificationMap = guidsForSearch.stream()
            .collect(Collectors.toMap(Function.identity(), guid -> {
                DeviceNotification notification = new DeviceNotification();
                notification.setId(System.nanoTime());
                notification.setDeviceGuid(guid);
                notification.setNotification(RandomStringUtils.randomAlphabetic(10));
                notification.setTimestamp(new Date());
                notification.setParameters(new JsonStringWrapper(parameters));
                return notification;
            }));

    // Stub: answer each search request with the notification prepared for its guid.
    when(requestHandler.handle(any(Request.class))).then(invocation -> {
        Request request = invocation.getArgumentAt(0, Request.class);
        String guid = request.getBody().cast(NotificationSearchRequest.class).getGuid();
        return Response.newBuilder()
                .withBody(new NotificationSearchResponse(Collections.singletonList(notificationMap.get(guid))))
                .buildSuccess();
    });

    // find() should aggregate exactly the three stubbed notifications; the
    // 30-second get() bounds the asynchronous completion.
    notificationService.find(guidsForSearch, Collections.emptySet(), timestampSt, timestampEnd)
            .thenAccept(notifications -> {
                assertEquals(3, notifications.size());
                assertEquals(new HashSet<>(notificationMap.values()), new HashSet<>(notifications)); // using HashSet to ignore order
            }).exceptionally(ex -> {
                fail(ex.toString());
                return null;
            }).get(30, TimeUnit.SECONDS);

    // Exactly one backend request per searched guid.
    verify(requestHandler, times(3)).handle(argument.capture());
}

From source file:com.haulmont.cuba.core.global.filter.QueryFilter.java

/**
 * Determines whether the given condition is "actual", i.e. whether enough of its
 * declared parameters are present in {@code params} for the condition to be applied.
 *
 * @param condition the condition to inspect
 * @param params    the names of parameters that currently have values
 * @return {@code true} if the condition should take part in the query
 */
protected boolean isActual(Condition condition, Set<String> params) {
    Set<ParameterInfo> declaredParams = condition.getParameters();

    // A condition without declared parameters is always applicable.
    if (declaredParams.isEmpty())
        return true;
    if (enableSessionParams) {
        Predicate<ParameterInfo> paramHasValue = paramInfo -> params.contains(paramInfo.getName());
        if (condition.getConditions().isEmpty()) {
            // for leaf condition all parameters must have values
            return declaredParams.stream().allMatch(paramHasValue);
        } else {
            // for branch conditions at least some parameters must have values
            return declaredParams.stream().anyMatch(paramHasValue);
        }
    } else {
        // Return true only if declared params have values and there is at least one non-session parameter among them.
        // This is necessary to exclude generic filter conditions that contain only session parameters. Otherwise
        // there is no way to handle exclusion. Unfortunately this imposes the restriction on custom filters design:
        // condition with session-only parameters must be avoided, they must be coded as part of main query body or as
        // part of another condition.
        // anyMatch replaces the former manual accumulator loop: same result, but it
        // short-circuits and is consistent with the stream style used above.
        return declaredParams.stream()
                .anyMatch(paramInfo -> params.contains(paramInfo.getName())
                        && !paramInfo.getType().equals(ParameterInfo.Type.SESSION));
    }
}

From source file:com.netflix.genie.web.data.repositories.jpa.specifications.JpaClusterSpecs.java

/**
 * Generate a specification given the parameters.
 *
 * @param name          The name of the cluster to find
 * @param statuses      The statuses of the clusters to find
 * @param tags          The tags of the clusters to find
 * @param minUpdateTime The minimum updated time of the clusters to find
 * @param maxUpdateTime The maximum updated time of the clusters to find
 * @return The specification
 */
public static Specification<ClusterEntity> find(@Nullable final String name,
        @Nullable final Set<ClusterStatus> statuses, @Nullable final Set<TagEntity> tags,
        @Nullable final Instant minUpdateTime, @Nullable final Instant maxUpdateTime) {
    return (final Root<ClusterEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();

        if (StringUtils.isNotBlank(name)) {
            // Supports both LIKE patterns and exact matches on the cluster name.
            predicates.add(
                    JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(ClusterEntity_.name), name));
        }
        if (minUpdateTime != null) {
            predicates.add(cb.greaterThanOrEqualTo(root.get(ClusterEntity_.updated), minUpdateTime));
        }
        if (maxUpdateTime != null) {
            // Upper bound is exclusive (half-open interval on the updated time).
            predicates.add(cb.lessThan(root.get(ClusterEntity_.updated), maxUpdateTime));
        }
        if (tags != null && !tags.isEmpty()) {
            // A cluster must carry ALL requested tags: join on tags, group by cluster
            // id and require the number of tag matches to equal the request size.
            final Join<ClusterEntity, TagEntity> tagJoin = root.join(ClusterEntity_.tags);
            predicates.add(tagJoin.in(tags));
            cq.groupBy(root.get(ClusterEntity_.id));
            cq.having(cb.equal(cb.count(root.get(ClusterEntity_.id)), tags.size()));
        }
        if (statuses != null && !statuses.isEmpty()) {
            // OR together one equality predicate per requested status.
            final Predicate[] statusPredicates = statuses.stream()
                    .map(status -> cb.equal(root.get(ClusterEntity_.status), status))
                    .toArray(Predicate[]::new);
            predicates.add(cb.or(statusPredicates));
        }

        return cb.and(predicates.toArray(new Predicate[0]));
    };
}

From source file:com.thinkbiganalytics.feedmgr.service.template.DefaultFeedManagerTemplateService.java

/**
 * Return all the processors that are connected to a given NiFi input port.
 *
 * @param inputPortIds the ports to inspect
 * @return all the processors that are connected to a given NiFi input port
 */
public List<RegisteredTemplate.Processor> getReusableTemplateProcessorsForInputPorts(
        List<String> inputPortIds) {
    Set<ProcessorDTO> matchedProcessors = new HashSet<>();
    if (inputPortIds != null && !inputPortIds.isEmpty()) {
        ProcessGroupDTO processGroup = nifiRestClient.getProcessGroupByName("root",
                TemplateCreationHelper.REUSABLE_TEMPLATES_PROCESS_GROUP_NAME);
        if (processGroup != null) {
            // Fetch the full contents of the reusable-templates process group.
            ProcessGroupDTO content = nifiRestClient.getProcessGroup(processGroup.getId(), true, true);
            processGroup.setContents(content.getContents());
            for (PortDTO port : getReusableFeedInputPorts()) {
                if (!inputPortIds.contains(port.getId())) {
                    continue;
                }
                // Connections whose source is this input port lead to the process
                // groups whose processors we want to collect.
                List<ConnectionDTO> connections = NifiConnectionUtil.findConnectionsMatchingSourceId(
                        processGroup.getContents().getConnections(), port.getId());
                if (connections == null) {
                    continue;
                }
                for (ConnectionDTO connection : connections) {
                    String destinationGroupId = connection.getDestination().getGroupId();
                    Set<ProcessorDTO> processors = nifiRestClient.getProcessorsForFlow(destinationGroupId);
                    if (processors != null) {
                        matchedProcessors.addAll(processors);
                    }
                }
            }
        }
    }
    return matchedProcessors.stream()
            .map(processorDTO -> registeredTemplateUtil.toRegisteredTemplateProcessor(processorDTO, true))
            .collect(Collectors.toList());
}