Example usage for java.util.stream StreamSupport stream


Introduction

On this page you can find example usages of java.util.stream.StreamSupport.stream.

Prototype

public static <T> Stream<T> stream(Spliterator<T> spliterator, boolean parallel) 


Document

Creates a new sequential or parallel Stream from a Spliterator.
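
A minimal sketch of the call, assuming nothing beyond the JDK (the class name and sample data are invented for illustration). Any Iterable exposes a spliterator(), so this factory method is the standard bridge from Iterable-based APIs into the Stream API:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class StreamSupportSketch {
    public static void main(String[] args) {
        Iterable<String> iterable = List.of("a", "b", "c");

        // parallel = false: a sequential stream over the spliterator.
        Stream<String> sequential = StreamSupport.stream(iterable.spliterator(), false);
        System.out.println(sequential.map(String::toUpperCase)
                .collect(Collectors.joining(", "))); // prints: A, B, C

        // parallel = true: requests a parallel stream; worthwhile only when the
        // source is large and its spliterator splits well.
        long count = StreamSupport.stream(iterable.spliterator(), true).count();
        System.out.println(count); // prints: 3
    }
}

Most of the examples below use exactly this pattern to stream over Iterable-based results (OData entities, Jackson nodes, S3 object listings) that do not expose a stream() method of their own.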

Usage

From source file:nl.rivm.cib.episim.cbs.CBSConnectorTest.java
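
This JUnit test walks the OData metadata of a CBS open-data service, then wraps the entity iterator returned by ODataUtil.readEntities in Spliterators.spliteratorUnknownSize and streams it sequentially to collect each entity's Key and Title properties into a Map.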

@Test
public void testOlingo() throws IOException {
    final String serviceUrl = "http://opendata.cbs.nl/ODataApi/odata/83225ned"; // alternative: "http://opendata.cbs.nl/ODataApi/odata/81435ned"
    final Edm edm = ODataUtil.readEdm(serviceUrl);
    edm.getSchemas().forEach(s -> {
        s.getEntityTypes().forEach(t -> {
            t.getPropertyNames().forEach(p -> {
                if (p.equals("Key"))
                    System.err.println(ODataUtil.readEntities(edm, serviceUrl, t.getName() + "$select="));
                LOG.trace("{}.{} :: {} ({})", t.getNamespace(), t.getName(), p, t.getProperty(p).getType());
            });
        });
        //         final Map<Object, Object> dims = s.getEntityTypes().stream().filter( e->e.getPropertyNames().contains( "Key" ) )
        //               .collect( Collectors.toMap(
        //                     e -> e.getProperty( "Key" ),
        //                     e -> e.getProperty( "Title" ) ) );
        //         LOG.trace( "{} dims: {}", s.getNamespace(), dims );

        final String dim = "Geslacht";
        final Map<Object, Object> keys = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(ODataUtil.readEntities(edm, serviceUrl, dim),
                        Spliterator.CONCURRENT), false)
                .collect(Collectors.toMap(e -> e.getProperty("Key").getPrimitiveValue().toValue(),
                        e -> e.getProperty("Title").getPrimitiveValue().toValue()));
        LOG.trace("{} keys: {}", dim, keys);
    });

}

From source file:am.ik.categolj2.domain.service.entry.EntryServiceImpl.java
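
Converts an Iterable<Entry> into a sequential Stream to collect the entry ids, which are then used to load and attach the related categories and tags.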

void applyRelations(Iterable<Entry> entries) {
    List<Integer> entryIds = StreamSupport.stream(entries.spliterator(), false).map(Entry::getEntryId)
            .collect(Collectors.toList());
    if (!entryIds.isEmpty()) {
        // apply categories
        List<Category> categories = categoryRepository.findByEntryIds(entryIds);

        Multimap<Integer, Category> categoryMultimap = TreeMultimap.create();
        for (Category c : categories) {
            categoryMultimap.put(c.getEntry().getId(), c);
        }
        for (Entry entry : entries) {
            entry.setCategory(new ArrayList<>(categoryMultimap.get(entry.getId())));
        }
        // apply tags
        List<TagAndEntryId> tags = tagRepository.findByEntryIds(entryIds);
        Multimap<Integer, Tag> tagMultimap = HashMultimap.create();
        for (TagAndEntryId tag : tags) {
            tagMultimap.put(tag.getEntryId(), tag.getTag());
        }
        for (Entry entry : entries) {
            entry.setTags(new LinkedHashSet<>(tagMultimap.get(entry.getEntryId())));
        }
    }
}

From source file:com.netflix.spinnaker.orca.clouddriver.tasks.manifest.DeployManifestTask.java
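
Streams over the Iterable<Object> produced by lazily parsing multi-document YAML, normalizing every parsed manifest into a flat List<Map> before submitting the deploy operation.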

@Nonnull
@Override
public TaskResult execute(@Nonnull Stage stage) {
    String credentials = getCredentials(stage);
    String cloudProvider = getCloudProvider(stage);

    List<Artifact> artifacts = artifactResolver.getArtifacts(stage);
    Map task = new HashMap(stage.getContext());
    String artifactSource = (String) task.get("source");
    if (StringUtils.isNotEmpty(artifactSource) && artifactSource.equals("artifact")) {
        Artifact manifestArtifact = artifactResolver.getBoundArtifactForId(stage,
                task.get("manifestArtifactId").toString());

        if (manifestArtifact == null) {
            throw new IllegalArgumentException(
                    "No artifact could be bound to '" + task.get("manifestArtifactId") + "'");
        }

        log.info("Using {} as the manifest to be deployed", manifestArtifact);

        manifestArtifact.setArtifactAccount((String) task.get("manifestArtifactAccount"));
        Object parsedManifests = retrySupport.retry(() -> {
            try {
                Response manifestText = oort.fetchArtifact(manifestArtifact);

                Iterable<Object> rawManifests = yamlParser.get().loadAll(manifestText.getBody().in());
                List<Map> manifests = StreamSupport.stream(rawManifests.spliterator(), false).map(m -> {
                    try {
                        return Collections.singletonList(objectMapper.convertValue(m, Map.class));
                    } catch (Exception e) {
                        return (List<Map>) objectMapper.convertValue(m, List.class);
                    }
                }).flatMap(Collection::stream).collect(Collectors.toList());

                Map<String, Object> manifestWrapper = new HashMap<>();
                manifestWrapper.put("manifests", manifests);

                manifestWrapper = contextParameterProcessor.process(manifestWrapper,
                        contextParameterProcessor.buildExecutionContext(stage, true), true);

                if (manifestWrapper.containsKey("expressionEvaluationSummary")) {
                    throw new IllegalStateException("Failure evaluating manifest expressions: "
                            + manifestWrapper.get("expressionEvaluationSummary"));
                }

                return manifestWrapper.get("manifests");
            } catch (Exception e) {
                log.warn("Failure fetching/parsing manifests from {}", manifestArtifact, e);
                // forces a retry
                throw new IllegalStateException(e);
            }
        }, 10, 200, true); // retry 10x, starting at .2s intervals
        task.put("manifests", parsedManifests);
        task.put("source", "text");
    }

    List<String> requiredArtifactIds = (List<String>) task.get("requiredArtifactIds");
    requiredArtifactIds = requiredArtifactIds == null ? new ArrayList<>() : requiredArtifactIds;
    List<Artifact> requiredArtifacts = requiredArtifactIds.stream()
            .map(id -> artifactResolver.getBoundArtifactForId(stage, id)).collect(Collectors.toList());

    log.info("Deploying {} artifacts within the provided manifest", requiredArtifacts);

    task.put("requiredArtifacts", requiredArtifacts);
    task.put("optionalArtifacts", artifacts);
    Map<String, Map> operation = new ImmutableMap.Builder<String, Map>().put(TASK_NAME, task).build();

    TaskId taskId = kato.requestOperations(cloudProvider, Collections.singletonList(operation)).toBlocking()
            .first();

    Map<String, Object> outputs = new ImmutableMap.Builder<String, Object>().put("kato.result.expected", true)
            .put("kato.last.task.id", taskId).put("deploy.account.name", credentials).build();

    return new TaskResult(ExecutionStatus.SUCCEEDED, outputs);
}

From source file:com.wrmsr.wava.basic.BasicSet.java
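
A one-line adapter: streams the underlying entries and projects out their keys as a Stream<Name>.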

public Stream<Name> stream() {
    return StreamSupport.stream(basics.spliterator(), false).map(Map.Entry::getKey);
}

From source file:org.sonar.plugins.github.PullRequestIssuePostJob.java
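
Streams the reported issues to keep only new ones on files actually modified by the pull request, then sorts them and builds the per-file, per-line comments.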

private Map<InputFile, Map<Integer, StringBuilder>> processIssues(GlobalReport report,
        Iterable<PostJobIssue> issues) {
    Map<InputFile, Map<Integer, StringBuilder>> commentToBeAddedByFileAndByLine = new HashMap<>();

    StreamSupport.stream(issues.spliterator(), false).filter(PostJobIssue::isNew)
            // SONARGITUB-13 Ignore issues on files not modified by the P/R
            .filter(i -> {
                InputComponent inputComponent = i.inputComponent();
                return inputComponent == null || !inputComponent.isFile()
                        || pullRequestFacade.hasFile((InputFile) inputComponent);
            }).sorted(ISSUE_COMPARATOR).forEach(i -> processIssue(report, commentToBeAddedByFileAndByLine, i));
    return commentToBeAddedByFileAndByLine;

}

From source file:io.github.azige.moebooruviewer.MoebooruAPITest.java
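
A Jackson JsonNode implements Iterable<JsonNode>, so its spliterator can be streamed to extract the post ids and compare them with the converted result.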

@Test
public void testListPostsOrder() throws IOException {
    String resourceName = "/postV2_limit100.json";
    byte[] resourceBytes;
    {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        try (InputStream input = getClass().getResourceAsStream(resourceName)) {
            IOUtils.copy(input, output);
        }
        resourceBytes = output.toByteArray();
    }
    when(netIO.download("http://yande.re/post.json?api_version=2&include_pools=1&page=1&limit=100&tags="))
            .thenReturn(resourceBytes);

    List<Post> posts = mapi.listPosts(1, 100);
    ObjectMapper mapper = new ObjectMapper();
    JsonNode postNode = mapper.readTree(getClass().getResourceAsStream(resourceName)).get("posts");

    List<Integer> originalIdList = StreamSupport.stream(postNode.spliterator(), false)
            .map(node -> node.get("id").asInt()).collect(Collectors.toList());
    List<Integer> convertedIdList = posts.stream().map(post -> post.getId()).collect(Collectors.toList());
    assertThat(convertedIdList, is(equalTo(originalIdList)));
}

From source file:org.javersion.util.PersistentHashSetTest.java
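
Exercises both the sequential and parallel forms, including handing a partially consumed Spliterator to StreamSupport.stream with parallel = true.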

@Test
public void reduce() {
    PersistentHashSet<HashKey> set = new PersistentHashSet<>();
    int sum = 0;
    int count = 0;
    // ArrayNode
    for (int i = 0; i < 32; i++) {
        sum += i;
        count++;
        set = set.conj(new HashKey(i));
    }
    // HashNode
    for (int i = 1; i < 5; i++) {
        int num = i << (4 + i);
        sum += num;
        count++;
        set = set.conj(new HashKey(num));
    }
    // CollisionNodes
    set = set.conj(new HashKey(1));
    sum += 1;
    count++;
    set = set.conj(new HashKey(1));
    sum += 1;
    count++;

    assertThat(sumOf(set.stream())).isEqualTo(sum);
    assertThat(set.stream().count()).isEqualTo(count);

    assertThat(sumOf(set.parallelStream())).isEqualTo(sum);
    assertThat(set.parallelStream().count()).isEqualTo(count);

    // Reduce partially consumed in parallel
    for (int i = 1; i < set.size(); i++) {
        Spliterator<HashKey> spliterator = set.spliterator();
        final MutableInt partialSum = new MutableInt(0);
        for (int j = 0; j < i; j++) {
            spliterator.tryAdvance(k -> partialSum.add(k.hash));
        }
        Assertions.assertThat(sumOf(StreamSupport.stream(spliterator, true)) + partialSum.intValue())
                .isEqualTo(sum);
    }
}

From source file:com.thinkbiganalytics.metadata.modeshape.user.JcrUserGroup.java
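
Flattens the users of every nested group into a single Stream<User>.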

@Nonnull
public Stream<User> streamAllUsers() {
    return streamAllGroups().flatMap(g -> StreamSupport.stream(g.getUsers().spliterator(), false));
}

From source file:org.apache.nifi.minifi.c2.cache.s3.S3CacheFileInfoImpl.java
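
Adapts the Iterable of S3 object summaries to a Stream, then filters, sorts by version, and maps each summary to a writable configuration.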

@Override
public Stream<WriteableConfiguration> getCachedConfigurations() throws IOException {

    Iterable<S3ObjectSummary> objectSummaries = S3Objects.withPrefix(s3, bucket, prefix);
    Stream<S3ObjectSummary> objectStream = StreamSupport.stream(objectSummaries.spliterator(), false);

    return objectStream.map(p -> {
        Integer version = getVersionIfMatch(p.getKey());
        if (version == null) {
            return null;
        }
        return new Pair<>(version, p);
    }).filter(Objects::nonNull)
            .sorted(Comparator.comparing(pair -> ((Pair<Integer, S3ObjectSummary>) pair).getFirst()).reversed())
            .map(pair -> new S3WritableConfiguration(s3, pair.getSecond(), Integer.toString(pair.getFirst())));

}

From source file:com.wrmsr.kleist.util.Itertools.java
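
A generic helper that pairs each stream element with its index by wrapping the stream's spliterator in an enumerating one.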

public static <T> Stream<EnumeratedElement<T>> enumerate(Stream<T> stream) {
    return StreamSupport.stream(enumerate(stream.spliterator()), false);
}