Usage examples for java.util.stream.StreamSupport.stream
public static <T> Stream<T> stream(Spliterator<T> spliterator, boolean parallel)
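The spliterator argument supplies the elements and the boolean flag selects sequential (false) or parallel (true) execution; the most common use is adapting an Iterable, which has no stream() method of its own, into a Stream. A minimal self-contained sketch (class name and sample data are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class StreamSupportExample {
    public static void main(String[] args) {
        Iterable<String> names = Arrays.asList("alpha", "beta", "gamma");

        // Sequential stream over an arbitrary Iterable (parallel = false)
        Stream<String> sequential = StreamSupport.stream(names.spliterator(), false);
        List<String> upper = sequential.map(String::toUpperCase).collect(Collectors.toList());
        System.out.println(upper); // [ALPHA, BETA, GAMMA]

        // Parallel stream over the same source (parallel = true)
        long count = StreamSupport.stream(names.spliterator(), true)
                .filter(s -> s.length() > 4)
                .count();
        System.out.println(count); // 2
    }
}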
From source file:org.obiba.mica.dataset.service.HarmonizedDatasetService.java
public Iterable<DatasetVariable> getDatasetVariables(HarmonizationDataset dataset, OpalTable opalTable)
        throws NoSuchStudyException, NoSuchValueTableException {
    return StreamSupport.stream(getVariables(opalTable).spliterator(), false)
            .map(input -> new DatasetVariable(dataset, input, opalTable))
            .collect(toList());
}
From source file:org.mascherl.example.service.ComposeMailService.java
public CompletableFuture<List<MailAddressUsage>> getLastSendToAddressesAsync2(User currentUser, int limit) {
    CompletableFuture<List<MailAddressUsage>> completableFuture = new CompletableFuture<>();
    db.query(
            "select distinct mto.address, m.datetime " +
            "from mail m " +
            "join mail_to mto on mto.mail_uuid = m.uuid " +
            "where m.user_uuid = $1 " +
            "and m.mail_type = $2 " +
            "and not exists (" +
            " select 1 from mail m2 " +
            " join mail_to mto2 on mto2.mail_uuid = m2.uuid " +
            " where m2.user_uuid = $1 " +
            " and m2.mail_type = $2 " +
            " and mto2.address = mto.address " +
            " and m2.datetime > m.datetime " +
            ") " +
            "order by m.datetime desc " +
            "limit $3",
            Arrays.asList(currentUser.getUuid(), MailType.SENT.name(), limit),
            result -> {
                try {
                    TimestampColumnZonedDateTimeMapper dateTimeColumnMapper =
                            new PersistentZonedDateTime().getColumnMapper();
                    List<MailAddressUsage> usages = StreamSupport.stream(result.spliterator(), false)
                            .map(row -> new MailAddressUsage(new MailAddress(row.getString(0)),
                                    dateTimeColumnMapper.fromNonNullValue(row.getTimestamp(1))))
                            .collect(Collectors.toList());
                    completableFuture.complete(usages);
                } catch (Exception e) {
                    completableFuture.completeExceptionally(e);
                }
            },
            completableFuture::completeExceptionally);
    return completableFuture;
}
From source file:org.hawkular.metrics.dropwizard.HawkularReporterTest.java
@Test
public void shouldReportDoubleGauge() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test")
            .useHttpClient(uri -> client)
            .build();

    final Gauge<Double> gauge = () -> 1.5d;
    registry.register("gauge.double", gauge);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("gauges");

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Double> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, dValueFromRoot::extract));
    assertThat(values).containsOnly(entry("gauge.double", 1.5d));
}
From source file:com.github.larsq.spring.embeddedamqp.SimpleAmqpMessageContainer.java
/**
 * Finds the set of exchange routers by traversing the inner classes of the
 * enclosing {@code Routers} class. This method is invoked to discover
 * available router types.
 *
 * @return the instantiated, non-abstract router implementations
 */
Set<AbstractExchangeRouter> findRouters() {
    ClassStructureWalker walker = new ClassStructureWalker(Routers.class, false, true);
    Iterable<Class<?>> innerClasses = walker
            .traverseClassStructure(clz -> Sets.newHashSet(clz.getDeclaredClasses()));
    return StreamSupport.stream(innerClasses.spliterator(), false)
            .filter(AbstractExchangeRouter.class::isAssignableFrom)
            .filter(clz -> !Modifier.isAbstract(clz.getModifiers()))
            .map(clz -> (AbstractExchangeRouter) invokeInnerClassConstructor(clz))
            .collect(Collectors.toSet());
}
From source file:com.epam.dlab.backendapi.dao.BaseDAO.java
/** Wraps an Iterable of documents in a sequential Stream. */
protected Stream<Document> stream(Iterable<Document> iterable) {
    return StreamSupport.stream(iterable.spliterator(), false);
}
From source file:com.intuit.wasabi.repository.cassandra.impl.CassandraPagesRepository.java
@Override
public Map<Pair<Application.Name, Page.Name>, List<PageExperiment>> getExperimentsWithoutLabels(
        Collection<Pair<Application.Name, Page.Name>> appAndPagePairs) {
    logger.debug("getExperimentsWithoutLabels {}", appAndPagePairs);
    Map<Pair<Application.Name, Page.Name>, List<PageExperiment>> resultMap = new HashMap<>();
    try {
        Map<Pair<Application.Name, Page.Name>, ListenableFuture<Result<PageExperimentByAppNamePage>>> expFutureMap =
                new HashMap<>();
        appAndPagePairs.forEach(pair -> {
            expFutureMap.put(pair, pageExperimentIndexAccessor.asyncSelectBy(pair.getLeft().toString(),
                    pair.getRight().toString()));
        });

        for (Pair<Application.Name, Page.Name> pair : expFutureMap.keySet()) {
            ListenableFuture<Result<PageExperimentByAppNamePage>> expFuture = expFutureMap.get(pair);
            Stream<PageExperimentByAppNamePage> resultList = StreamSupport.stream(
                    Spliterators.spliteratorUnknownSize(
                            UninterruptibleUtil.getUninterruptibly(expFuture).iterator(),
                            Spliterator.ORDERED),
                    false);
            List<PageExperiment> pageExperimentsList = resultList.map(t -> {
                PageExperiment.Builder builder = new PageExperiment.Builder(
                        Experiment.ID.valueOf(t.getExperimentId()), null, t.isAssign());
                return builder.build();
            }).collect(Collectors.toList());
            resultMap.put(pair, pageExperimentsList);
        }
    } catch (Exception e) {
        logger.error("getExperimentsWithoutLabels for {} failed", appAndPagePairs, e);
        throw new RepositoryException("Could not getExperimentsWithoutLabels", e);
    }
    logger.debug("Returning PageExperimentList map {}", resultMap);
    return resultMap;
}
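Several of the examples above and below start from a plain Iterator rather than an Iterable, wrapping it with Spliterators.spliteratorUnknownSize before handing it to StreamSupport.stream. A minimal sketch of that idiom, using only the JDK (class name and sample data are illustrative):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class IteratorToStreamExample {
    public static void main(String[] args) {
        Iterator<Integer> iterator = Arrays.asList(1, 2, 3, 4).iterator();

        // When only an Iterator is available (no Iterable), wrap it in an
        // unsized Spliterator; ORDERED preserves encounter order.
        List<Integer> doubled = StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
                .map(i -> i * 2)
                .collect(Collectors.toList());

        System.out.println(doubled); // [2, 4, 6, 8]
    }
}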
From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java
/**
 * Reads the given output file and sends each line to the logger at the specified log level.
 *
 * @param logger the bucket logger to write to
 * @param level the fallback log level for lines that cannot be parsed
 * @param output_file the logstash output file to read
 * @param max_lines the maximum number of lines to read (defaults to 10000)
 * @throws IOException
 */
public static void sendOutputToLogger(final IBucketLogger logger, final Level level, final File output_file,
        final Optional<Long> max_lines) throws IOException {
    // _logger.error("Reading output file: " + output_file + " to send to logger at level: " + level);
    Files.lines(output_file.toPath()).limit(max_lines.orElse(10000L)).forEach(line -> {
        try {
            // Convert the line to valid JSON, parse it, then build a BasicMessageBean from it
            final String fixed_line = line.replaceAll(logstash_colon_search, logstash_colon_replace)
                    .replaceAll(logstash_arrow_search, logstash_arrow_replace)
                    .replaceAll(logstash_newline_search, logstash_newline_replace);
            final String plugin_fixed = fixPlugin(fixed_line);
            final ObjectNode line_object = (ObjectNode) _mapper.readTree(plugin_fixed);

            // Move the specific fields we want into the BasicMessageBean
            final Date date = parseLogstashDate(line_object.remove("timestamp").asText());
            final Level logstash_level = Level.valueOf(line_object.remove("level").asText());
            final String message = line_object.remove("message").asText();

            // Move everything else into the details map
            logger.inefficientLog(logstash_level,
                    new BasicMessageBean(date, true, LogstashHarvestService.class.getSimpleName(),
                            "test_output", null, message,
                            StreamSupport
                                    .stream(Spliterators.spliteratorUnknownSize(line_object.fields(),
                                            Spliterator.ORDERED), true)
                                    .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().asText()))));
        } catch (Exception ex) {
            // Fall back to logging the raw line on conversion failure
            logger.inefficientLog(level, ErrorUtils
                    .buildSuccessMessage(LogstashHarvestService.class.getSimpleName(), "test_output", line));
        }
    });
    // TODO should we delete log file after we've read it?
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
/**
 * Recursive function that returns all fields in an insert (eg "geoip", "geoip.location").
 *
 * @param index
 * @return
 */
protected static Stream<String> getAllFixedFields_internal(final JsonNode index) {
    return Optional.ofNullable(index.get("properties"))
            .filter(p -> !p.isNull())
            .map(p -> {
                if (!p.isObject())
                    throw new RuntimeException("properties must be object");
                return p;
            })
            .map(p -> {
                return StreamSupport
                        .stream(Spliterators.spliteratorUnknownSize(p.fields(), Spliterator.ORDERED), false)
                        .<String>flatMap(kv -> {
                            final Stream<String> parent_element = Stream.of(kv.getKey());
                            return Stream.concat(parent_element,
                                    getAllFixedFields_internal(kv.getValue()).map(s -> kv.getKey() + "." + s));
                        });
            })
            .orElse(Stream.<String>empty());
}
From source file:org.hawkular.metrics.dropwizard.HawkularReporterTest.java
@Test
public void shouldReportBigDecimalGauge() {
    HawkularReporter reporter = HawkularReporter.builder(registry, "unit-test")
            .useHttpClient(uri -> client)
            .build();

    final Gauge<BigDecimal> gauge = () -> new BigDecimal("1.5");
    registry.register("gauge.bigd", gauge);
    reporter.report();

    assertThat(client.getMetricsRestCalls()).hasSize(1);
    JSONObject metrics = new JSONObject(client.getMetricsRestCalls().get(0));
    assertThat(metrics.keySet()).containsOnly("gauges");

    JSONArray gaugesJson = metrics.getJSONArray("gauges");
    Map<String, Double> values = StreamSupport.stream(gaugesJson.spliterator(), false)
            .collect(toMap(idFromRoot::extract, dValueFromRoot::extract));
    assertThat(values).containsOnly(entry("gauge.bigd", 1.5d));
}
From source file:org.hawkular.inventory.impl.tinkerpop.test.BasicTest.java
@Test
public void testTenants() throws Exception {
    Function<String, Void> test = (id) -> {
        GraphQuery q = graph.query().has("__type", "tenant").has("__eid", id);
        Iterator<Vertex> evs = q.vertices().iterator();
        assert evs.hasNext();
        Vertex ev = evs.next();
        assert !evs.hasNext();

        Tenant t = inventory.tenants().get(id).entity();

        assert ev.getProperty("__eid").equals(id);
        assert t.getId().equals(id);
        return null;
    };

    test.apply("com.acme.tenant");
    test.apply("com.example.tenant");

    GraphQuery query = graph.query().has("__type", "tenant");
    assert StreamSupport.stream(query.vertices().spliterator(), false).count() == 2;
}