List of usage examples for java.util.stream.StreamSupport.stream
public static <T> Stream<T> stream(Spliterator<T> spliterator, boolean parallel)
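Before the project examples below, here is a minimal self-contained sketch (class name and sample data are illustrative, not taken from any of the projects) of the two patterns that recur throughout: streaming an Iterable via its spliterator(), and adapting a plain Iterator with Spliterators.spliteratorUnknownSize. The boolean argument selects sequential (false) or parallel (true) execution.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class StreamSupportExample {
    public static void main(String[] args) {
        // Pattern 1: any Iterable exposes a spliterator; false = sequential stream.
        Iterable<String> iterable = Arrays.asList("alpha", "beta", "gamma");
        List<String> upper = StreamSupport.stream(iterable.spliterator(), false)
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(upper); // [ALPHA, BETA, GAMMA]

        // Pattern 2: a bare Iterator has no spliterator; wrap it with
        // Spliterators.spliteratorUnknownSize, declaring ORDERED as a characteristic.
        Iterator<Integer> iterator = Arrays.asList(1, 2, 3, 4).iterator();
        long evens = StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
                .filter(n -> n % 2 == 0)
                .count();
        System.out.println(evens); // 2
    }
}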
From source file: org.hawkular.metrics.dropwizard.HawkularReporterTest.java

@Test
public void shouldReportPartialHistogram() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test")
            .setMetricComposition("my.histogram", Lists.newArrayList("mean", "median", "stddev"))
            .useHttpClient(uri -> client).build();
    final Histogram histogram = registry.histogram("my.histogram");
    histogram.update(3);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("gauges");

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Integer> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, valueFromRoot::extract));
    // Note: we extract int values here for simplicity, but actual values are double. The goal is not to test
    // Dropwizard algorithm for metrics generation, so we don't bother with accuracy.
    assertThat(values).containsOnly(entry("my.histogram.mean", 3), entry("my.histogram.median", 3),
            entry("my.histogram.stddev", 0));

    assertThat(client.getTagsRestCalls()).containsOnly(
            Pair.of("/gauges/my.histogram.mean/tags", "{\"histogram\":\"mean\"}"),
            Pair.of("/gauges/my.histogram.stddev/tags", "{\"histogram\":\"stddev\"}"),
            Pair.of("/gauges/my.histogram.median/tags", "{\"histogram\":\"median\"}"));
}
From source file: com.joyent.manta.client.multipart.TestMultipartManager.java

@Override
public void complete(TestMultipartUpload upload, Iterable<? extends MantaMultipartUploadTuple> parts)
        throws IOException {
    Validate.notNull(upload, "Upload state object must not be null");

    final Stream<? extends MantaMultipartUploadTuple> partsStream =
            StreamSupport.stream(parts.spliterator(), false);

    complete(upload, partsStream);
}
From source file: org.apache.nifi.minifi.c2.provider.nifi.rest.NiFiRestConfigurationProvider.java

private Pair<Stream<Pair<String, String>>, Closeable> getIdAndFilenameStream()
        throws ConfigurationProviderException, IOException {
    TemplatesIterator templatesIterator = new TemplatesIterator(httpConnector, jsonFactory);
    return new Pair<>(StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(templatesIterator, Spliterator.ORDERED), false),
            templatesIterator);
}
From source file: com.intuit.wasabi.repository.cassandra.impl.CassandraPagesRepository.java

Stream<AppPage> getAppPagesFromCassandra(Application.Name applicationName) {
    Optional<Iterator<AppPage>> optionalResult = Optional.empty();
    try {
        optionalResult = Optional
                .ofNullable(appPageIndexAccessor.selectBy(applicationName.toString()).iterator());
    } catch (ReadTimeoutException | UnavailableException | NoHostAvailableException e) {
        throw new RepositoryException(
                "Could not retrieve the pages and its associated experiments for application:\""
                        + applicationName + "\"", e);
    }
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(
            optionalResult.orElse(Collections.emptyIterator()), Spliterator.ORDERED), false);
}
From source file: com.bouncestorage.swiftproxy.v1.ContainerResource.java

@GET
public Response listContainer(@NotNull @PathParam("container") String container,
        @HeaderParam("X-Auth-Token") String authToken, @QueryParam("limit") Integer limit,
        @QueryParam("marker") String marker, @QueryParam("end_marker") String endMarker,
        @QueryParam("format") Optional<String> format, @QueryParam("prefix") String prefixParam,
        @QueryParam("delimiter") String delimiterParam, @QueryParam("path") String path,
        @HeaderParam("X-Newest") @DefaultValue("false") boolean newest,
        @HeaderParam("Accept") Optional<String> accept) {
    BlobStore store = getBlobStore(authToken).get(container);
    if (!store.containerExists(container)) {
        return Response.status(Response.Status.NOT_FOUND).build();
    }

    ListContainerOptions options = new ListContainerOptions();
    if (!Strings.isNullOrEmpty(marker)) {
        options.afterMarker(marker);
    }
    if (Strings.isNullOrEmpty(delimiterParam) && path == null) {
        options.recursive();
    }
    if (!Strings.isNullOrEmpty(delimiterParam)) {
        options.delimiter(delimiterParam);
    }
    if (!Strings.isNullOrEmpty(prefixParam)) {
        options.prefix(prefixParam);
    }
    if (path != null) {
        if (path.equals("/")) {
            options.prefix("/");
            options.delimiter("/");
        } else {
            options.inDirectory(path);
        }
    }
    logger.info("list: {} marker={} prefix={}", options, options.getMarker(), prefixParam);

    List<ObjectEntry> entries = StreamSupport
            .stream(Utils.crawlBlobStore(store, container, options).spliterator(), false)
            .peek(meta -> logger.debug("meta: {}", meta))
            //.filter(meta -> (prefix == null || meta.getName().startsWith(prefix)))
            //.filter(meta -> delimFilter(meta.getName(), delim_filter))
            .filter(meta -> endMarker == null || meta.getName().compareTo(endMarker) < 0)
            .limit(limit == null ? InfoResource.CONFIG.swift.container_listing_limit : limit)
            .map(meta -> new ObjectEntry(meta.getName(), meta.getETag(),
                    meta.getSize() == null ? 0 : meta.getSize(), contentType(meta), meta.getLastModified()))
            .collect(Collectors.toList());

    MediaType formatType;
    if (format.isPresent()) {
        formatType = BounceResourceConfig.getMediaType(format.get());
    } else if (accept.isPresent()) {
        formatType = MediaType.valueOf(accept.get());
    } else {
        formatType = MediaType.TEXT_PLAIN_TYPE;
    }

    if (store.getContext().unwrap().getId().equals("transient")) {
        entries.forEach(entry -> {
            try {
                entry.name = URLDecoder.decode(entry.name, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                throw propagate(e);
            }
        });
    }

    // XXX semi-bogus value
    long totalBytes = entries.stream().mapToLong(e -> e.bytes).sum();
    ContainerRoot root = new ContainerRoot();
    root.name = container;
    root.object = entries;

    return output(root, entries, formatType).header("X-Container-Object-Count", entries.size())
            .header("X-Container-Bytes-Used", totalBytes).header("X-Timestamp", -1).header("X-Trans-Id", -1)
            .header("Accept-Ranges", "bytes").build();
}
From source file: jp.classmethod.aws.brian.BrianClient.java

@Override
public List<String> listTriggers(String group) throws BrianClientException, BrianServerException {
    logger.debug("list triggers: {}", group);
    HttpResponse httpResponse = null;
    try {
        String path = String.format("/triggers/%s", group);
        URI uri = new URI(scheme, null, hostname, port, path, null, null);
        HttpUriRequest httpRequest = RequestBuilder.get().setUri(uri).build();
        httpResponse = httpClientExecute(httpRequest);
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        logger.debug("statusCode: {}", statusCode);
        if (statusCode == HttpStatus.SC_OK) {
            JsonNode tree = mapper.readTree(httpResponse.getEntity().getContent());
            return StreamSupport.stream(tree.spliterator(), false).map(item -> item.textValue())
                    .collect(Collectors.toList());
        } else if (statusCode >= 500) {
            throw new BrianServerException("status = " + statusCode);
        } else if (statusCode >= 400) {
            throw new BrianClientException("status = " + statusCode);
        } else {
            throw new Error("status = " + statusCode);
        }
    } catch (URISyntaxException e) {
        throw new IllegalStateException(e);
    } catch (IOException e) {
        throw new BrianServerException(e);
    } catch (IllegalStateException e) {
        throw new Error(e);
    } finally {
        if (httpResponse != null) {
            EntityUtils.consumeQuietly(httpResponse.getEntity());
        }
    }
}
From source file: org.cyclop.service.cassandra.intern.QueryServiceImpl.java

private <T extends Comparable<?>> ImmutableSortedSet<T> map(Optional<ResultSet> result, String columnName,
        Function<String, T> mapper) {
    ImmutableSortedSet<T> res = StreamSupport.stream(result.get().spliterator(), false)
            .map(r -> r.getString(columnName)).map(StringUtils::trimToNull).filter(Objects::nonNull)
            .map(mapper).collect(toNaturalImmutableSortedSet());
    return res;
}
From source file: com.intuit.wasabi.repository.cassandra.impl.CassandraAssignmentsRepository.java

Stream<ExperimentUserByUserIdContextAppNameExperimentId> getUserIndexStream(String userId, String appName,
        String context) {
    Stream<ExperimentUserByUserIdContextAppNameExperimentId> resultStream = Stream.empty();
    try {
        final Result<ExperimentUserByUserIdContextAppNameExperimentId> result = experimentUserIndexAccessor
                .selectBy(userId, appName, context);
        resultStream = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(result.iterator(), Spliterator.ORDERED), false);
    } catch (ReadTimeoutException | UnavailableException | NoHostAvailableException e) {
        throw new RepositoryException("Could not retrieve assignments for " + "experimentID = \"" + appName
                + "\" userID = \"" + userId + "\" and context " + context, e);
    }
    return resultStream;
}
From source file: org.apache.storm.daemon.logviewer.utils.LogCleaner.java

/**
 * Return the last modified time for all log files in a worker's log dir.
 * A stream is used rather than File.listFiles to avoid large memory usage
 * when a directory has too many files.
 */
private long lastModifiedTimeWorkerLogdir(File logDir) {
    long dirModified = logDir.lastModified();

    DirectoryStream<Path> dirStream = null;
    try {
        dirStream = directoryCleaner.getStreamForDirectory(logDir);
    } catch (IOException e) {
        return dirModified;
    }
    if (dirStream == null) {
        return dirModified;
    }

    try {
        return StreamSupport.stream(dirStream.spliterator(), false).reduce(dirModified, (maximum, path) -> {
            long curr = path.toFile().lastModified();
            return curr > maximum ? curr : maximum;
        }, BinaryOperator.maxBy(Long::compareTo));
    } catch (Exception ex) {
        LOG.error(ex.getMessage(), ex);
        return dirModified;
    } finally {
        if (DirectoryStream.class.isInstance(dirStream)) {
            IOUtils.closeQuietly(dirStream);
        }
    }
}
From source file: io.mandrel.metrics.impl.MongoMetricsRepository.java

@Override
public Timeserie serie(String name) {
    Set<Data> results = StreamSupport.stream(timeseries.find(Filters.eq("type", name))
            .sort(Sorts.ascending("timestamp_hour")).limit(3).map(doc -> {
                LocalDateTime hour = LocalDateTime
                        .ofEpochSecond(((Date) doc.get("timestamp_hour")).getTime() / 1000, 0, ZoneOffset.UTC);
                Map<String, Long> values = (Map<String, Long>) doc.get("values");
                List<Data> mapped = values.entrySet().stream()
                        .map(elt -> Data.of(hour.plusMinutes(Long.valueOf(elt.getKey())), elt.getValue()))
                        .collect(Collectors.toList());
                return mapped;
            }).spliterator(), true).flatMap(elts -> elts.stream())
            .collect(TreeSet::new, Set::add, (left, right) -> {
                left.addAll(right);
            });

    Timeserie timeserie = new Timeserie();
    timeserie.addAll(results);
    return timeserie;
}