Example usage for java.util.List stream()

Introduction

On this page you can find usage examples for java.util.List.stream().

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
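
For orientation before the project examples below, here is a minimal, self-contained sketch of the typical pattern; the class, variable names, and data are hypothetical:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ListStreamExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alice", "bob", "carol");

        // stream() returns a sequential Stream backed by the list; the
        // intermediate filter/map operations are lazy and only run when
        // the terminal collect() is invoked.
        List<String> longNamesUpperCased = names.stream()
                .filter(name -> name.length() > 3)   // keep names longer than 3 characters
                .map(String::toUpperCase)            // transform each remaining element
                .collect(Collectors.toList());       // materialize the result as a List

        System.out.println(longNamesUpperCased); // prints [ALICE, CAROL]
    }
}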

Usage

From source file:de.metas.ui.web.view.json.JSONDocumentViewOrderBy.java

public static final List<DocumentQueryOrderBy> unwrapList(final List<JSONDocumentViewOrderBy> jsonOrderBys) {
    if (jsonOrderBys == null || jsonOrderBys.isEmpty()) {
        return ImmutableList.of();
    }

    return jsonOrderBys.stream().map(jsonOrderBy -> unwrap(jsonOrderBy)).filter(orderBy -> orderBy != null)
            .collect(GuavaCollectors.toImmutableList());
}

From source file:uk.ac.cam.cl.dtg.picky.client.analytics.Analytics.java

private static void fillProperties(Properties properties, Dataset dataset) {
    if (dataset == null)
        return;

    List<FileEntry> files = dataset.getFiles();

    long numberOfChunks = files.stream().flatMap(f -> f.getBlocks().stream())
            .flatMap(b -> b.getChunks().stream()).count();

    // Note: getAsDouble() throws NoSuchElementException if the file list is empty.
    double avgNumberofBlocks = files.stream().mapToLong(f -> f.getBlocks().size()).average().getAsDouble();

    properties.put(KEY_DATASET_DESCRIPTION, Strings.nullToEmpty(dataset.getDescription()));
    properties.put(KEY_DATASET_ID, dataset.getId());
    properties.put(KEY_DATASET_NUMBER_OF_FILES, "" + files.size());
    properties.put(KEY_DATASET_NUMBER_OF_CHUNKS, "" + numberOfChunks);
    properties.put(KEY_DATASET_AVG_NUMBER_OF_BLOCKS, "" + avgNumberofBlocks);
}

From source file:com.devicehive.dao.riak.model.RiakDeviceEquipment.java

public static List<RiakDeviceEquipment> convertToEntity(List<DeviceEquipmentVO> equipment) {
    if (equipment == null) {
        return Collections.emptyList();
    }
    return equipment.stream().map(RiakDeviceEquipment::convertToEntity).collect(Collectors.toList());
}

From source file:com.netflix.spinnaker.clouddriver.google.controllers.GoogleNamedImageLookupController.java

/**
 * Apply tag-based filtering to the list of named images.
 *
 * For example: /gce/images/find?q=PackageName&tag:stage=released&tag:somekey=someval
 */
private static List<NamedImage> filter(List<NamedImage> namedImages, Map<String, String> tagFilters) {
    return namedImages.stream().filter(namedImage -> matchesTagFilters(namedImage, tagFilters))
            .collect(Collectors.toList());
}

From source file:com.pinterest.rocksplicator.controller.config.ConfigParser.java

/**
 * Convert cluster config data into a {@link ClusterBean}.
 *
 * @param clusterName name of the cluster
 * @param content binary config data
 * @return ClusterBean or null if parsing failed
 */
@SuppressWarnings("unchecked")
public static ClusterBean parseClusterConfig(String clusterName, byte[] content) {
    try {
        Map<String, Object> segmentMap = OBJECT_MAPPER.readValue(new String(content, UTF_8), HashMap.class);

        final List<SegmentBean> segments = new ArrayList<>();
        for (Map.Entry<String, Object> entry : segmentMap.entrySet()) {
            Map<String, Object> segmentInfo = (Map<String, Object>) entry.getValue();
            // num_leaf_segments must exist so that we can proceed
            if (!segmentInfo.containsKey(NUM_SHARDS)) {
                return null;
            }

            final SegmentBean segment = new SegmentBean().setName(entry.getKey())
                    .setNumShards((Integer) segmentInfo.get(NUM_SHARDS));

            final List<HostBean> hosts = new ArrayList<>();
            for (Map.Entry<String, Object> entry2 : segmentInfo.entrySet()) {
                // skip num_leaf_segments in shard map
                if (entry2.getKey().equals(NUM_SHARDS)) {
                    continue;
                }

                HostBean host = parseHost(entry2.getKey());
                List<String> shardList = (List<String>) entry2.getValue();
                List<ShardBean> shards = shardList.stream().map(ConfigParser::parseShard)
                        .collect(Collectors.toList());
                host.setShards(shards);
                hosts.add(host);
            }
            segment.setHosts(hosts);
            segments.add(segment);
        }
        return new ClusterBean().setName(clusterName).setSegments(segments);
    } catch (IOException | IllegalArgumentException e) {
        LOG.error("Failed to parse cluster config.", e);
        return null;
    }
}

From source file:com.devicehive.dao.riak.model.RiakDeviceEquipment.java

public static List<DeviceEquipmentVO> convertToVo(List<RiakDeviceEquipment> equipment) {
    if (equipment == null) {
        return Collections.emptyList();
    }
    return equipment.stream().map(RiakDeviceEquipment::convertToVo).collect(Collectors.toList());
}

From source file:kafka.benchmark.AdvertisingTopology.java

public static String listOfStringToString(List<String> list, String port) {
    return list.stream().map(item -> item + ":" + port).collect(Collectors.joining(","));
}

From source file:de.tynne.benchmarksuite.Main.java

private static void runBenchmarks(Args args, BenchmarkProducer benchmarkProducer) throws IOException {
    BackupHelper.backupIfNeeded(args.getOutput());

    // semicolon-delimited, so not strictly comma-separated values, but Excel and LibreOffice load this format automatically
    final CSVFormat format = CSVFormat.EXCEL.withDelimiter(';').withHeader("#", "ID", "Name", "Min [ns]",
            "Avg [ns]", "Max [ns]", "Chart Pos", "Best Increase [%]", "Iterations");
    try (CSVPrinter printer = new CSVPrinter(
            new OutputStreamWriter(new FileOutputStream(args.getOutput()), Charset.forName(args.getCharset())),
            format)) {
        List<Benchmark> benchmarks = benchmarkProducer.get();
        List<Benchmark> matching = benchmarks.stream()
                .filter(b -> args.getExecute().matcher(b.getId()).matches()).collect(Collectors.toList());
        BenchmarkRunner benchmarkRunner = new BenchmarkRunner(matching,
                BenchmarkRunner.SEC_IN_NANOS * args.getWarumUpTime(),
                BenchmarkRunner.SEC_IN_NANOS * args.getRunTime());
        benchmarkRunner.run();
        Chart chart = Chart.of(matching);

        for (Benchmark b : matching) {
            try {
                StatRecord statRecord = chart.getStats().get(b);
                printer.print(matching.indexOf(b));
                printer.print(b.getId());
                printer.print(b.getName());
                printer.print(format(args, statRecord.getMin()));
                printer.print(format(args, statRecord.getAverage()));
                printer.print(format(args, statRecord.getMax()));
                printer.print(chart.getChart().get(b).chartPosition);
                double bestAvg = chart.getStats().get(chart.getPerformanceChart().get(0)).getAverage();
                double thisAvg = statRecord.getAverage();

                printer.print(format(args, 100. * (thisAvg - bestAvg) / bestAvg));
                printer.print(statRecord.getCount());
                printer.println();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }
}

From source file:edu.umd.umiacs.clip.tools.classifier.LibSVMUtils.java

public static List<Map<Integer, Double>> asMap(List<String> features) {
    return features.stream().map(LibSVMUtils::asMap).collect(toList());
}

From source file:info.archinnov.achilles.internals.parser.FunctionParser.java

public static void validateNoDuplicateDeclaration(AptUtils aptUtils, List<FunctionSignature> signatures) {
    // Validate that no function is declared more than once, using full equality
    for (FunctionSignature signature : signatures) {
        signatures.stream().filter(signature::equals) //Equality by comparing name, keyspace, return types and param types
                .filter(x -> x != signature) //Identity comparison, exclude self
                .forEach(x -> aptUtils.printError(
                        "Functions '%s' and '%s' have same signature. Duplicate function declaration is not allowed",
                        signature, x));
    }
}