List of usage examples for java.util Spliterator ORDERED
public static final int ORDERED
Characteristic value signifying that an encounter order is defined for the elements.
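Before the examples, here is a minimal, self-contained sketch of the pattern that most of them follow: wrapping an Iterator with Spliterators.spliteratorUnknownSize and the ORDERED characteristic, then turning it into a Stream with StreamSupport.stream. The class name and the sample list are illustrative only and are not taken from any of the projects below.

import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class OrderedSpliteratorSketch {
    public static void main(String[] args) {
        Iterator<String> iterator = List.of("a", "b", "c").iterator();

        // Wrap the iterator in a spliterator that reports an encounter order.
        Spliterator<String> spliterator =
                Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
        System.out.println(spliterator.hasCharacteristics(Spliterator.ORDERED)); // true

        // Build a sequential stream over the spliterator; encounter order is preserved.
        Stream<String> stream = StreamSupport.stream(spliterator, false);
        stream.forEach(System.out::println); // prints a, b, c in order
    }
}

Passing false to StreamSupport.stream requests a sequential stream; the ORDERED characteristic tells downstream operations that the iterator's encounter order is meaningful and should be preserved.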
From source file:ddf.catalog.util.impl.ResultIterable.java
private static Stream<Result> stream(Iterator<Result> iterator) {
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}
From source file:fi.hsl.parkandride.dev.DevController.java
@RequestMapping(method = PUT, value = DEV_UTILIZATION)
@TransactionalWrite
public ResponseEntity<Void> generateUtilizationData(@NotNull @PathVariable(FACILITY_ID) Long facilityId) {
    final Facility facility = facilityRepository.getFacility(facilityId);
    // Generate dummy usage for the last month
    final Random random = new Random();
    final List<Utilization> utilizations = StreamSupport
            .stream(spliteratorUnknownSize(
                    new DateTimeIterator(DateTime.now().minusMonths(1), DateTime.now(), Minutes.minutes(5)),
                    Spliterator.ORDERED), false)
            .flatMap(ts -> facility.builtCapacity.keySet().stream().flatMap(capacityType -> {
                if (facility.pricingMethod == PricingMethod.PARK_AND_RIDE_247_FREE) {
                    return Stream.of(new UtilizationKey(facilityId, capacityType, Usage.PARK_AND_RIDE));
                } else {
                    return facility.pricing.stream().filter(pr -> pr.capacityType == capacityType)
                            .map(pr -> new UtilizationKey(facilityId, capacityType, pr.usage));
                }
            }).map(utilizationKey -> newUtilization(utilizationKey,
                    facility.builtCapacity.get(utilizationKey.capacityType),
                    ts.minusSeconds(random.nextInt(180)) // Randomness to prevent timestamps for different capacity types being equal
            )))
            .collect(toList());
    utilizationRepository.insertUtilizations(utilizations);
    predictionService.signalUpdateNeeded(utilizations);
    return new ResponseEntity<>(CREATED);
}
From source file:com.simiacryptus.mindseye.test.data.MNIST.java
private static <T> Stream<T> toStream(@Nonnull final Iterator<T> iterator, final int size, final boolean parallel) {
    return StreamSupport.stream(Spliterators.spliterator(iterator, size, Spliterator.ORDERED), parallel);
}
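A hedged usage note for the sized variant above: Spliterators.spliterator(iterator, size, characteristics) supplies a size hint, so the resulting spliterator can report SIZED (and SUBSIZED) in addition to ORDERED. The toStream helper below mirrors the signature of the method above; the class name, list, and values are illustrative assumptions, not taken from the MNIST source.

import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class SizedOrderedSketch {
    // Illustrative helper mirroring the toStream method above.
    static <T> Stream<T> toStream(Iterator<T> iterator, int size, boolean parallel) {
        return StreamSupport.stream(Spliterators.spliterator(iterator, size, Spliterator.ORDERED), parallel);
    }

    public static void main(String[] args) {
        List<Integer> values = List.of(1, 2, 3, 4);

        // With a size hint, the spliterator reports SIZED in addition to ORDERED.
        Spliterator<Integer> spliterator =
                Spliterators.spliterator(values.iterator(), values.size(), Spliterator.ORDERED);
        System.out.println(spliterator.hasCharacteristics(Spliterator.ORDERED)); // true
        System.out.println(spliterator.hasCharacteristics(Spliterator.SIZED));   // true

        // A parallel stream can take advantage of the known size when splitting.
        long count = toStream(values.iterator(), values.size(), true).count();
        System.out.println(count); // 4
    }
}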
From source file:com.yevster.spdxtra.Read.java
public static Stream<SpdxPackage> getAllPackages(Dataset dataset) {
    try (DatasetAutoAbortTransaction transaction = DatasetAutoAbortTransaction.begin(dataset, ReadWrite.READ)) {
        String sparql = createSparqlQueryByType(SpdxUris.SPDX_PACKAGE);
        QueryExecution qe = QueryExecutionFactory.create(sparql, dataset);
        ResultSet results = qe.execSelect();
        Stream<QuerySolution> querySolutionStream = StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(results, Spliterator.ORDERED | Spliterator.NONNULL),
                false);
        return querySolutionStream.map((QuerySolution qs) -> {
            RDFNode subject = qs.get("s");
            return new SpdxPackage(subject.asResource());
        });
    }
}
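Spliterator characteristics are int bit flags, so several of them can be combined with bitwise OR, as the example above does with Spliterator.ORDERED | Spliterator.NONNULL. A small illustrative check of that behaviour, not taken from the spdxtra sources:

import java.util.Collections;
import java.util.Spliterator;
import java.util.Spliterators;

public class CombinedCharacteristicsSketch {
    public static void main(String[] args) {
        // ORDERED and NONNULL are combined into a single characteristics mask.
        Spliterator<String> spliterator = Spliterators.spliteratorUnknownSize(
                Collections.singletonList("pkg").iterator(),
                Spliterator.ORDERED | Spliterator.NONNULL);

        System.out.println(spliterator.hasCharacteristics(Spliterator.ORDERED));  // true
        System.out.println(spliterator.hasCharacteristics(Spliterator.NONNULL));  // true
        System.out.println(spliterator.hasCharacteristics(Spliterator.DISTINCT)); // false
    }
}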
From source file:com.bouncestorage.swiftproxy.v1.ObjectResource.java
private List<Pair<Long, Long>> parseRange(String range) {
    range = range.replaceAll(" ", "").toLowerCase();
    String bytesUnit = "bytes=";
    int idx = range.indexOf(bytesUnit);
    if (idx == 0) {
        String byteRangeSet = range.substring(bytesUnit.length());
        Iterator<Object> iter = Iterators.forEnumeration(new StringTokenizer(byteRangeSet, ","));
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED), false)
                .map(rangeSpec -> (String) rangeSpec).map(rangeSpec -> {
                    int dash = rangeSpec.indexOf("-");
                    if (dash == -1) {
                        throw new BadRequestException("Range");
                    }
                    String firstBytePos = rangeSpec.substring(0, dash);
                    String lastBytePos = rangeSpec.substring(dash + 1);
                    Long firstByte = firstBytePos.isEmpty() ? null : Long.parseLong(firstBytePos);
                    Long lastByte = lastBytePos.isEmpty() ? null : Long.parseLong(lastBytePos);
                    return new Pair<>(firstByte, lastByte);
                }).peek(r -> logger.debug("parsed range {} {}", r.getFirst(), r.getSecond()))
                .collect(Collectors.toList());
    } else {
        return null;
    }
}
From source file:ai.grakn.migration.csv.CSVMigrator.java
/**
 * Partition a stream into a stream of collections, each with batchSize elements.
 * @param iterator Iterator to partition
 * @param <T> Type of values of iterator
 * @return Stream over a collection that are each of batchSize
 */
protected <T> Stream<T> stream(Iterator<T> iterator) {
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}
From source file:notaql.performance.PerformanceTest.java
private static String composeEngine(JSONObject engine) {
    final StringBuilder builder = new StringBuilder();
    builder.append(engine.get("engine"));
    builder.append("(");
    final String params = StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(engine.keys(), Spliterator.ORDERED), false)
            .filter(k -> !k.equals("engine")).map(k -> k + " <- " + toArg(k, engine))
            .collect(Collectors.joining(", "));
    builder.append(params);
    builder.append(")");
    return builder.toString();
}
From source file:com.yevster.spdxtra.Read.java
private static Stream<Relationship> getRelationshipsWithSparql(Dataset dataset, String sparql) {
    try (DatasetAutoAbortTransaction transaction = DatasetAutoAbortTransaction.begin(dataset, ReadWrite.READ)) {
        QueryExecution qe = QueryExecutionFactory.create(sparql, dataset);
        ResultSet results = qe.execSelect();
        Stream<QuerySolution> solutionStream = StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(results, Spliterator.ORDERED | Spliterator.NONNULL),
                false);
        return solutionStream.map((QuerySolution qs) -> {
            RDFNode relationshipNode = qs.get("o");
            assert (relationshipNode.isResource());
            return new Relationship(relationshipNode.asResource());
        });
    }
}
From source file:fi.hsl.parkandride.dev.DevController.java
@RequestMapping(method = PUT, value = DEV_PREDICTION_HISTORY)
@TransactionalRead // each call to predictionService.updatePredictionsHistoryForFacility creates a separate write transaction to avoid too long transactions
public ResponseEntity<Void> generatePredictionHistory(@NotNull @PathVariable(FACILITY_ID) Long facilityId) {
    facilityRepository.getFacility(facilityId); // ensure facility exists
    final UtilizationSearch utilizationSearch = new UtilizationSearch();
    utilizationSearch.start = DateTime.now().minusWeeks(5);
    utilizationSearch.end = DateTime.now();
    utilizationSearch.facilityIds = Collections.singleton(facilityId);
    final List<Utilization> utilizations = Lists
            .newArrayList(utilizationRepository.findUtilizations(utilizationSearch));
    DateTime lastTimestamp = utilizations.stream().map(u -> u.timestamp).max(DateTime::compareTo)
            .orElse(utilizationSearch.end);
    StreamSupport
            .stream(spliteratorUnknownSize(new DateTimeIterator(utilizationSearch.start.plusWeeks(4),
                    lastTimestamp.minus(PredictionRepository.PREDICTION_RESOLUTION), // avoid collision with scheduled predictions
                    PredictionRepository.PREDICTION_RESOLUTION), Spliterator.ORDERED), false)
            .map(endTime -> utilizations.stream().filter(utilization -> utilization.timestamp.isBefore(endTime))
                    .collect(toList()))
            .forEach(utilizationList -> predictionService.updatePredictionsHistoryForFacility(utilizationList));
    return new ResponseEntity<>(CREATED);
}
From source file:org.jsonschema2pojo.rules.AdditionalPropertiesRule.java
private JMethod addInnerBuilder(JDefinedClass jclass, JType propertyType, JFieldVar field) {
    Optional<JDefinedClass> builderClass = StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(jclass.classes(), Spliterator.ORDERED), false)
            .filter(definedClass -> definedClass.name().equals(getBuilderClassName(jclass))).findFirst();

    JMethod builder = builderClass.get().method(JMod.PUBLIC, builderClass.get(), "withAdditionalProperty");
    JVar nameParam = builder.param(String.class, "name");
    JVar valueParam = builder.param(propertyType, "value");
    JBlock body = builder.body();
    JInvocation mapInvocation = body.invoke(JExpr.ref(JExpr.cast(jclass, JExpr._this().ref("instance")), field),
            "put");
    mapInvocation.arg(nameParam);
    mapInvocation.arg(valueParam);
    body._return(JExpr._this());
    return builder;
}