Example usage for java.util Spliterators spliteratorUnknownSize

List of usage examples for java.util Spliterators spliteratorUnknownSize

Introduction

In this page you can find the example usage for java.util Spliterators spliteratorUnknownSize.

Prototype

public static Spliterator.OfDouble spliteratorUnknownSize(PrimitiveIterator.OfDouble iterator,
        int characteristics) 

Source Link

Document

Creates a Spliterator.OfDouble using a given PrimitiveIterator.OfDouble as the source of elements, with no initial size estimate.

Usage

From source file:org.hawkular.rest.json.RelationshipJacksonDeserializer.java

@Override
public Relationship deserialize(JsonParser jp, DeserializationContext deserializationContext)
        throws IOException {
    // Deserializes a Relationship from JSON. Only "id" is treated as the primary
    // field; name/source/target/properties are optional (e.g. a delete request may
    // send just {id: foo}).
    JsonNode node = jp.getCodec().readTree(jp);
    String id = node.get(FIELD_ID) != null ? node.get(FIELD_ID).asText() : null;

    // other fields are not compulsory, e.g. when deleting the relationship {id: foo} is just fine
    String name = "";
    if (node.get(FIELD_NAME) != null) {
        name = node.get(FIELD_NAME).asText();
    }
    CanonicalPath source = null, target = null;
    if (node.get(FIELD_SOURCE) != null && !node.get(FIELD_SOURCE).asText().isEmpty()) {
        String sourcePath = node.get(FIELD_SOURCE).asText();
        source = CanonicalPath.fromString(sourcePath);
    }
    if (node.get(FIELD_TARGET) != null && !node.get(FIELD_TARGET).asText().isEmpty()) {
        String targetPath = node.get(FIELD_TARGET).asText();
        target = CanonicalPath.fromString(targetPath);
    }

    JsonNode properties = node.get(FIELD_PROPERTIES);
    Map<String, Object> relProperties = null;
    if (properties != null) {
        try {
            // Iterate the property fields as an ordered sequential stream and
            // collect each entry's value as its text representation.
            Stream<Map.Entry<String, JsonNode>> stream = StreamSupport.stream(
                    Spliterators.spliteratorUnknownSize(properties.fields(), Spliterator.ORDERED), false);

            relProperties = stream.collect(Collectors.toMap(Map.Entry::getKey,
                    ((Function<Map.Entry<String, JsonNode>, JsonNode>) Map.Entry::getValue)
                            .andThen(x -> (Object) x.asText())));
        } catch (Exception e) {
            // BUGFIX: preserve the original exception as the cause instead of
            // discarding it, so the underlying parse failure remains diagnosable.
            throw new IllegalArgumentException("Error during relationship deserialization,"
                    + " unable to recognize properties: " + properties, e);
        }
    }

    return new Relationship(id, name, source, target, relProperties);
}

From source file:org.mskcc.shenkers.data.interval.GIntervalTree.java

public Stream<Node> streamOverlapNodes(String chr, int start, int end, Strand strand) {
    // Adapt the overlap-query iterator into a sequential stream with no
    // reported characteristics (0) and no size estimate.
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(queryNodes(chr, start, end, strand), 0), false);
}

From source file:org.mskcc.shenkers.data.interval.GIntervalTree.java

public Stream<IntervalFeature> streamOverlaps(String chr, int start, int end) {
    // Adapt the interval-query iterator into a sequential stream with no
    // reported characteristics (0) and no size estimate.
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(query(chr, start, end), 0), false);
}

From source file:nl.rivm.cib.episim.cbs.CBSConnectorTest.java

@Test
public void testOlingo() throws IOException {
    // Smoke test against a live CBS OData endpoint: dumps the entity model and
    // reads the key/title pairs of one dimension ("Geslacht").
    final String serviceUrl = "http://opendata.cbs.nl/ODataApi/odata/83225ned";//"http://opendata.cbs.nl/ODataApi/odata/81435ned";
    final Edm edm = ODataUtil.readEdm(serviceUrl);
    edm.getSchemas().forEach(s -> {
        s.getEntityTypes().forEach(t -> {
            t.getPropertyNames().forEach(p -> {
                // NOTE(review): the URL below concatenates "$select=" directly onto the
                // entity name with no '?' separator — looks malformed; confirm intent.
                if (p.equals("Key"))
                    System.err.println(ODataUtil.readEntities(edm, serviceUrl, t.getName() + "$select="));
                LOG.trace("{}.{} :: {} ({})", t.getNamespace(), t.getName(), p, t.getProperty(p).getType());
            });
        });
        //         final Map<Object, Object> dims = s.getEntityTypes().stream().filter( e->e.getPropertyNames().contains( "Key" ) )
        //               .collect( Collectors.toMap(
        //                     e -> e.getProperty( "Key" ),
        //                     e -> e.getProperty( "Title" ) ) );
        //         LOG.trace( "{} dims: {}", s.getNamespace(), dims );

        final String dim = "Geslacht";
        // Read all entities of the dimension and map Key -> Title.
        // NOTE(review): Spliterator.CONCURRENT is an odd characteristic for a
        // one-shot iterator consumed sequentially (parallel=false); ORDERED or 0
        // would more accurately describe this source — confirm before changing.
        final Map<Object, Object> keys = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(ODataUtil.readEntities(edm, serviceUrl, dim),
                        Spliterator.CONCURRENT), false)
                .collect(Collectors.toMap(e -> e.getProperty("Key").getPrimitiveValue().toValue(),
                        e -> e.getProperty("Title").getPrimitiveValue().toValue()));
        LOG.trace("{} keys: {}", dim, keys);
    });

}

From source file:org.xsystem.sql2.dml.DmlCommand.java

public Stream<Map<String, Object>> stream(Connection con, String stmt, List<DmlParams> paramsSpec, Long skip,
        Integer total, Map<String, Object> value) {
    // Expose the JDBC result set as a lazy sequential stream of row maps
    // (unknown size, no characteristics).
    ResultSetIterator rows = new ResultSetIterator(con, stmt, paramsSpec, skip, total, value);
    boolean parallel = false;
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(rows, 0), parallel);
}

From source file:io.mashin.rich.spark.TestJavaRichRDD.java

@Test
public void testHttpRDD() {
    // Builds an RDD with one partition per HTTP page, then verifies partition
    // count, element count, and per-element validity.
    String serverIP = HttpMockConfig.serverIP();
    int serverPort = HttpMockConfig.serverPort();

    JavaSparkContext sc = sc("testHttpRDD");
    HttpMock mock = new HttpMock();
    mock.start();

    int numPages = 4;

    JavaRDD<String> rdd = JavaRichRDD
            .httpRDD(sc, i -> new HttpGet("http://" + serverIP + ":" + serverPort + "/rdd?page=" + (i + 1)),
                    (i, httpResponse) -> {
                        // BUGFIX: use try-with-resources so the reader (and the
                        // underlying entity stream) is closed even if readLine throws.
                        try (BufferedReader is = new BufferedReader(
                                new InputStreamReader(httpResponse.getEntity().getContent()))) {
                            String s = is.readLine();
                            return Arrays.asList(s.split(",")).iterator();
                        }
                    }, numPages)
            .cache();

    assertEquals(numPages, rdd.getNumPartitions());
    assertEquals(numPages * HttpMockConfig.perPage(), rdd.count());

    // Each partition checks every element against its (partition, offset) position.
    boolean isValid = rdd.mapPartitionsWithIndex((i, iter) -> {
        List<String> list = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED), false)
                .collect(Collectors.toList());
        return IntStream.range(0, list.size()).mapToObj(j -> HttpMockConfig.isValidElement(list.get(j), i, j))
                .iterator();
    }, true).reduce(Boolean::logicalAnd);

    assertTrue(isValid);

    sc.stop();
    mock.stop();
}

From source file:org.mskcc.shenkers.data.interval.GIntervalTree.java

public Stream<IntervalFeature> stream() {
    // Flatten every (chromosome, strand) bucket of the tree into one sequential
    // stream of interval features.
    return intervals.entrySet().stream().flatMap(entry -> {
        String chromosome = entry.getKey().chr;
        Strand entryStrand = entry.getKey().strand;
        return StreamSupport
                .stream(Spliterators.spliteratorUnknownSize((Iterator<Node>) entry.getValue().iterator(), 0),
                        false)
                .map(node -> new IntervalFeatureImpl(chromosome, node.getStart(), node.getEnd(), entryStrand,
                        null));
    });
}

From source file:org.dice_research.topicmodeling.io.test.AbstractCorpusIOTest.java

public Corpus readCorpus() {
    // Reads a corpus either through the configured reader (from the test file)
    // or by draining the configured document supplier; fails the test otherwise.
    InputStream in = null;
    try {
        if (reader != null) {
            in = new BufferedInputStream(new FileInputStream(testFile));
            reader.readCorpus(in);
            return reader.getCorpus();
        } else if (supplier != null) {
            // BUGFIX: the characteristics were combined with '&' — DISTINCT & NONNULL
            // is the bitwise AND of two disjoint flag bits and evaluates to 0, so no
            // characteristics were actually reported. Flags must be OR-ed together.
            return new DocumentListCorpus<List<Document>>(StreamSupport
                    .stream(Spliterators.spliteratorUnknownSize(new DocumentSupplierAsIterator(supplier),
                            Spliterator.DISTINCT | Spliterator.NONNULL), false)
                    .collect(Collectors.toList()));
        } else {
            Assert.fail("Test is misconfigured since reader==null and supplier==null.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Got an Exception: " + e.getLocalizedMessage());
    } finally {
        // Quietly close whatever was opened (no-op when in == null).
        IOUtils.closeQuietly(in);
    }
    return null;
}

From source file:ddf.catalog.util.impl.ResultIterable.java

private static Stream<Result> stream(Iterator<Result> iterator) {
    // Adapt the result iterator into an ordered, sequential, unknown-size stream.
    boolean parallel = false;
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), parallel);
}

From source file:com.yevster.spdxtra.Read.java

public static Stream<SpdxPackage> getAllPackages(Dataset dataset) {
    // Runs a SPARQL query for all SPDX packages and returns them as a stream.
    try (DatasetAutoAbortTransaction transaction = DatasetAutoAbortTransaction.begin(dataset, ReadWrite.READ)) {

        String sparql = createSparqlQueryByType(SpdxUris.SPDX_PACKAGE);
        QueryExecution qe = QueryExecutionFactory.create(sparql, dataset);
        try {
            // BUGFIX: the original returned a LAZY stream over the ResultSet; the
            // try-with-resources closed the transaction before the caller consumed
            // the stream, so rows were read after the transaction ended. Materialize
            // the results while the transaction is still open.
            ResultSet results = qe.execSelect();
            java.util.List<SpdxPackage> packages = new java.util.ArrayList<>();
            while (results.hasNext()) {
                QuerySolution qs = results.next();
                RDFNode subject = qs.get("s");
                packages.add(new SpdxPackage(subject.asResource()));
            }
            return packages.stream();
        } finally {
            // BUGFIX: the QueryExecution was never closed, leaking its resources.
            qe.close();
        }
    }
}