Example usage for java.util.stream StreamSupport stream

List of usage examples for java.util.stream StreamSupport stream

Introduction

On this page you can find example usages of java.util.stream.StreamSupport.stream.

Prototype

public static <T> Stream<T> stream(Spliterator<T> spliterator, boolean parallel) 

Document

Creates a new sequential or parallel Stream from a Spliterator. The boolean argument controls whether the returned stream is parallel (true) or sequential (false).
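As a quick orientation before the project examples below, here is a minimal, self-contained sketch of both modes; the collection and variable names are illustrative only:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class StreamSupportExample {
    public static void main(String[] args) {
        Iterable<String> names = List.of("alice", "bob", "carol");

        // Sequential stream: parallel flag is false
        List<String> upper = StreamSupport.stream(names.spliterator(), false)
                .map(String::toUpperCase)
                .collect(Collectors.toList());

        // Parallel stream: parallel flag is true
        long longNames = StreamSupport.stream(names.spliterator(), true)
                .filter(name -> name.length() > 3)
                .count();

        System.out.println(upper + ", long names: " + longNames);
    }
}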

Usage

From source file:edu.cmu.cs.lti.discoursedb.annotation.lightside.io.LightSideService.java

/**
 * Exports annotations on contributions associated with any of the provided DiscourseParts into
 * the provided output file.
 * 
 * @param discourseParts the discourse parts to extract contributions from
 * @param outputFile file to write the annotations to
 */
@Transactional(readOnly = true)
public void exportAnnotations(Iterable<DiscoursePart> discourseParts, File outputFile) {
    List<RawDataInstance> data = StreamSupport.stream(discourseParts.spliterator(), false)
            .flatMap(dp -> extractAnnotations(dp).stream()).collect(Collectors.toList());

    try {
        FileUtils.writeStringToFile(outputFile, generateLightSideOutput(data));
    } catch (IOException e) {
        log.error("Error writing LightSide file to disk", e);
    }
}

From source file:org.hawkular.metrics.dropwizard.HawkularReporterTest.java

@Test
public void shouldReportHistogram() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test").useHttpClient(uri -> client)
            .build();

    final Histogram histogram = registry.histogram("my.histogram");
    histogram.update(3);
    histogram.update(8);
    histogram.update(7);
    histogram.update(1);
    histogram.update(8);
    histogram.update(4);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("counters", "gauges");
    JSONArray countersJson = metrics.getJSONArray("counters");
    assertThat(countersJson).extracting(idFromRoot).containsExactly("my.histogram.count");
    assertThat(countersJson).extracting(valueFromRoot).containsExactly(6);

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Integer> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, valueFromRoot::extract));
    // Note: we extract int values here for simplicity, but actual values are double. The goal is not to test
    // Dropwizard algorithm for metrics generation, so we don't bother with accuracy.
    assertThat(values).containsOnly(entry("my.histogram.mean", 5), entry("my.histogram.median", 7),
            entry("my.histogram.stddev", 2), entry("my.histogram.75perc", 8), entry("my.histogram.95perc", 8),
            entry("my.histogram.98perc", 8), entry("my.histogram.99perc", 8), entry("my.histogram.999perc", 8));

    assertThat(client.getTagsRestCalls()).containsOnly(
            Pair.of("/counters/my.histogram.count/tags", "{\"histogram\":\"count\"}"),
            Pair.of("/gauges/my.histogram.mean/tags", "{\"histogram\":\"mean\"}"),
            Pair.of("/gauges/my.histogram.min/tags", "{\"histogram\":\"min\"}"),
            Pair.of("/gauges/my.histogram.max/tags", "{\"histogram\":\"max\"}"),
            Pair.of("/gauges/my.histogram.stddev/tags", "{\"histogram\":\"stddev\"}"),
            Pair.of("/gauges/my.histogram.median/tags", "{\"histogram\":\"median\"}"),
            Pair.of("/gauges/my.histogram.75perc/tags", "{\"histogram\":\"75perc\"}"),
            Pair.of("/gauges/my.histogram.95perc/tags", "{\"histogram\":\"95perc\"}"),
            Pair.of("/gauges/my.histogram.98perc/tags", "{\"histogram\":\"98perc\"}"),
            Pair.of("/gauges/my.histogram.99perc/tags", "{\"histogram\":\"99perc\"}"),
            Pair.of("/gauges/my.histogram.999perc/tags", "{\"histogram\":\"999perc\"}"));
}

From source file:com.bouncestorage.swiftproxy.v1.ObjectResource.java

private List<Pair<Long, Long>> parseRange(String range) {
    range = range.replaceAll(" ", "").toLowerCase();
    String bytesUnit = "bytes=";
    int idx = range.indexOf(bytesUnit);
    if (idx == 0) {
        String byteRangeSet = range.substring(bytesUnit.length());
        Iterator<Object> iter = Iterators.forEnumeration(new StringTokenizer(byteRangeSet, ","));
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED), false)
                .map(rangeSpec -> (String) rangeSpec).map(rangeSpec -> {
                    int dash = rangeSpec.indexOf("-");
                    if (dash == -1) {
                        throw new BadRequestException("Range");
                    }
                    String firstBytePos = rangeSpec.substring(0, dash);
                    String lastBytePos = rangeSpec.substring(dash + 1);
                    Long firstByte = firstBytePos.isEmpty() ? null : Long.parseLong(firstBytePos);
                    Long lastByte = lastBytePos.isEmpty() ? null : Long.parseLong(lastBytePos);
                    return new Pair<>(firstByte, lastByte);
                }).peek(r -> logger.debug("parsed range {} {}", r.getFirst(), r.getSecond()))
                .collect(Collectors.toList());
    } else {
        return null;
    }
}

From source file:com.adobe.acs.commons.workflow.process.impl.SyncSmartTagsToXmpMetadataNodeProcess.java

protected void syncSmartTagsToMetadata(final Asset asset, ProcessArgs processArgs) throws PersistenceException {
    final Resource assetResource = asset.adaptTo(Resource.class);
    final ResourceResolver resourceResolver = assetResource.getResourceResolver();

    final Resource metadataResource = assetResource
            .getChild(JcrConstants.JCR_CONTENT + "/" + DamConstants.METADATA_FOLDER);
    final Resource smartTagsResource = assetResource
            .getChild(JcrConstants.JCR_CONTENT + "/" + DamConstants.METADATA_FOLDER + "/" + NN_PREDICTED_TAGS);

    if (metadataResource.getChild(processArgs.getSequenceName()) != null) {
        // Remove existing, as they will be re-created
        resourceResolver.delete(metadataResource.getChild(processArgs.getSequenceName()));
    }

    final Resource parentResource = resourceResolver.create(metadataResource, processArgs.getSequenceName(),
            new ImmutableMap.Builder<String, Object>()
                    .put(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED)
                    .put("xmpArrayType", "rdf:Seq").put("xmpNodeType", "xmpArray").put("xmpArraySize", 0L)
                    .build());

    final AtomicInteger count = new AtomicInteger(0);
    if (smartTagsResource != null) {
        StreamSupport.stream(smartTagsResource.getChildren().spliterator(), false).map(Resource::getValueMap)
                .filter(properties -> properties.get(PN_SMART_TAG_CONFIDENCE, 0D) >= processArgs
                        .getMinimumConfidence())
                .filter(properties -> StringUtils.isNotBlank(properties.get(PN_SMART_TAG_NAME, String.class)))
                .forEach(properties -> {
                    createSequenceItemResource(asset, processArgs, resourceResolver, parentResource, count,
                            properties);
                });
    }

    parentResource.adaptTo(ModifiableValueMap.class).put("xmpArraySize", count.get());

    log.info("Synced [ {} ] Smart Tags to Asset XMP Metadata structure: [ {} ] ", count.get(),
            asset.getPath() + "/jcr:content/metadata/" + processArgs.getSequenceName());
}

From source file:io.syndesis.dao.DeploymentDescriptorTest.java

@Test
public void thereShouldBeNoDuplicateMavenCoordinates() {
    final Map<String, Long> coordinatesWithCount = StreamSupport.stream(deployment.spliterator(), true)
            .filter(data -> "connector".equals(data.get("kind").asText()))
            .flatMap(
                    connector -> StreamSupport.stream(connector.get("data").get("actions").spliterator(), true))
            .map(action -> action.get("camelConnectorGAV").asText())
            .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

    final Map<String, Long> multipleCoordinates = coordinatesWithCount.entrySet().stream()
            .filter(e -> e.getValue() > 1).collect(Collectors.toMap(Entry::getKey, Entry::getValue));

    assertThat(multipleCoordinates).as("Expected connector GAV coordinates to be unique").isEmpty();
}

From source file:com.thinkbiganalytics.metadata.modeshape.support.JcrUtil.java

/**
 * Gets the nodes in a same-name-sibling node set with the given name and returns them as a list.
 */
public static List<Node> getNodeList(Node parent, String name) {
    return StreamSupport.stream(getIterableChildren(parent, name).spliterator(), false)
            .collect(Collectors.toList());
}

From source file:cop.maven.plugins.AbstractRestToRamlMojo.java

protected void executeWithExceptionsHandled() throws Exception {
    evaluator = new PluginParameterExpressionEvaluator(session, mojoExecution);

    checkOutputDirectoryExists();
    checkFileName();

    FileUtils.write(new File(out, Config.YAML), readConfiguration(), encoding);

    StandardJavaFileManager fileManager = COMPILER.getStandardFileManager(null, null,
            Charset.forName(encoding));
    List<JavaFileObject> compilationUnits = StreamSupport
            .stream(fileManager.getJavaFileObjectsFromFiles(getSourceFiles()).spliterator(), true)
            .collect(Collectors.toList());

    if (compilationUnits.isEmpty())
        getLog().warn("no source file(s) detected! Processor task will be skipped");
    else {
        JavaCompiler.CompilationTask task = COMPILER.getTask(null, fileManager, null, getCompilerOptions(),
                null, compilationUnits);

        if (!task.call())
            throw new Exception("error during compilation");
    }
}

From source file:ai.grakn.migration.csv.CSVMigrator.java

/**
 * Wraps the given Iterator in a sequential Stream.
 * @param iterator Iterator to stream over
 * @param <T> Type of values of iterator
 * @return sequential Stream over the elements of the iterator
 */
protected <T> Stream<T> stream(Iterator<T> iterator) {
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}

From source file:com.thinkbiganalytics.metadata.modeshape.user.JcrUserGroup.java

@Nonnull
public Stream<UserGroup> streamAllGroups() {
    return StreamSupport.stream(getGroups().spliterator(), false).flatMap(g -> g.streamAllGroups());
}

From source file:de.appsolve.padelcampus.controller.events.EventsBookingController.java

@RequestMapping(method = POST, value = "/{eventId}/participate")
@Transactional
public ModelAndView postParticipate(@PathVariable("eventId") Long eventId, HttpServletRequest request,
        final @ModelAttribute("EventBookingRequest") EventBookingRequest eventBookingRequest,
        BindingResult bindingResult) {
    ModelAndView participateView = participateView(eventId, eventBookingRequest);
    try {
        Player user = sessionUtil.getUser(request);
        if (user == null) {
            return getLoginRequiredView(request, msg.get("Participate"));
        }

        Event event = eventDAO.findByIdFetchWithParticipantsAndCommunities(eventId);

        Booking booking = new Booking();
        booking.setPlayer(user);
        booking.setBookingDate(event.getStartDate());
        booking.setBookingTime(event.getStartTime());
        booking.setAmount(event.getPrice());
        booking.setCurrency(event.getCurrency());
        booking.setPaymentMethod(eventBookingRequest.getPaymentMethod());
        booking.setBookingType(BookingType.loggedIn);
        booking.setEvent(event);

        switch (event.getEventType()) {
        case PullRoundRobin:
            break;
        case CommunityRoundRobin:
            // remove players without any information
            List<Player> newPlayers = StreamSupport
                    .stream(eventBookingRequest.getNewPlayers().spliterator(), false)
                    .filter(player -> !(StringUtils.isEmpty(player.getFirstName())
                            && StringUtils.isEmpty(player.getLastName())
                            && StringUtils.isEmpty(player.getEmail())
                            && StringUtils.isEmpty(player.getPhone())))
                    .collect(Collectors.toList());

            // if at least one field is given, validate all data
            newPlayers.forEach(player -> validator.validate(player, bindingResult));
            if (bindingResult.hasErrors()) {
                return participateView;
            }

            // make sure at least one person participates
            if (newPlayers.isEmpty() && (eventBookingRequest.getPlayers() == null
                    || eventBookingRequest.getPlayers().isEmpty())) {
                throw new Exception(msg.get("PleaseAddParticipants"));
            }

            // check if email already exists
            for (Player player : newPlayers) {
                if (playerDAO.findByEmail(player.getEmail()) != null) {
                    throw new Exception(msg.get("EmailAlreadyRegistered"));
                }
            }

            // make sure community name exists
            Community community = eventBookingRequest.getCommunity();
            if (StringUtils.isEmpty(community.getName())) {
                throw new Exception(msg.get("PleaseAddCommunityName"));
            }
            booking.setCommunity(community);

            Set<Player> allPlayers = new HashSet<>(eventBookingRequest.getPlayers());
            newPlayers.forEach(newPlayer -> allPlayers.add(playerDAO.saveOrUpdate(newPlayer)));

            //do not add user as this would cause duplicate key (player and players go into the same table), instead use transient boolean value
            allPlayers.remove(user);
            booking.setPlayers(allPlayers);
            booking.setPlayerParticipates(eventBookingRequest.getPlayers() != null
                    && eventBookingRequest.getPlayers().contains(user));
            break;
        default:
            Player player = eventBookingRequest.getPlayer();
            Player partner;
            if (player.getUUID() == null) {
                validator.validate(player, bindingResult);
                if (bindingResult.hasErrors()) {
                    return participateView;
                }
                if (playerDAO.findByEmail(player.getEmail()) != null) {
                    throw new Exception(msg.get("EmailAlreadyRegistered"));
                }
                partner = playerDAO.saveOrUpdate(player);
            } else {
                partner = playerDAO.findByUUID(player.getUUID());
                if (partner == null) {
                    throw new Exception(msg.get("ChoosePartner"));
                }
                if (partner.equals(user)) {
                    throw new Exception(msg.get("ChooseDifferentPartner"));
                }
            }

            Set<Player> participants = new HashSet<>();
            //do not add user as this would cause duplicate key (player and players go into the same table)
            participants.add(partner);

            //extra fields
            booking.setPlayers(participants);
        }
        isEventBookingPossible(booking);

        sessionUtil.setBooking(request, booking);
        return new ModelAndView("redirect:/events/bookings/" + event.getId() + "/confirm");
    } catch (Exception e) {
        bindingResult.addError(new ObjectError("*", e.getMessage()));
        return participateView;
    }
}