Example usage for java.util.List stream()

A list of usage examples for the java.util.List stream() method

Introduction

On this page you can find example usage for the java.util.List stream() method.

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
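
As a quick, self-contained illustration of the method itself (the class and variable names here are ours, for demonstration only):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ListStreamExample {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("ant", "bee", "cat");
        // stream() yields a sequential Stream backed by the list
        List<String> upper = words.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(upper); // prints [ANT, BEE, CAT]
    }
}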

Usage

From source file:oct.util.Util.java

public static List<LinePoint> findMaxAndMins(List<LinePoint> line) {
    //create list of all positive Y values to get peaks
    ArrayList<LinePoint> convList = new ArrayList<>(line.size());
    line.forEach(p -> {
        convList.add(new LinePoint(p.getX(), Math.abs(p.getY())));
    });
    //find X values of peaks
    List<LinePoint> peaks = getMaximums(convList);
    //collect peak points
    List<LinePoint> ret = line.parallelStream()
            .filter(p -> peaks.stream().anyMatch(pk -> pk.getX() == p.getX())).collect(Collectors.toList());
    //sort by X position
    ret.sort(Comparator.comparingInt(peak -> peak.getX()));
    return ret;
}
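
Note that the anyMatch call above rescans the peaks list for every point. A common variant (a sketch, not part of the original source; it assumes getX() returns int, as the comparingInt call suggests) collects the peak X values into a Set first:

// Sketch: a one-time Set of peak X values gives O(1) membership tests
Set<Integer> peakXs = peaks.stream().map(LinePoint::getX).collect(Collectors.toSet());
List<LinePoint> ret = line.stream()
        .filter(p -> peakXs.contains(p.getX()))
        .sorted(Comparator.comparingInt(LinePoint::getX))
        .collect(Collectors.toList());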

From source file:com.timeinc.seleniumite.junit.SimpleSeleniumBuilderTest.java

@Parameterized.Parameters
public static Collection<Object[]> getEnvironments() throws Exception {
    List<RawSourceTestingEnvironment> rval = DefaultRawGlobalTestConfiguration.getDefault()
            .createRawSourcedEnvironments();

    String testFilter = EnvironmentUtils.findEnvOrProperty("testFilter");
    if (testFilter != null) {
        int preCount = rval.size();
        Pattern pattern = Pattern.compile(testFilter);
        if (LOG.isTraceEnabled()) {
            rval.stream().forEach((p) -> LOG.trace(" Source : {} , {}", p.getSource().getName(),
                    p.getSource().getOptionalSourceFile()));
        }

        rval = rval.stream().filter((p) -> p.getSource().matchesFilter(pattern)).collect(Collectors.toList());
        LOG.info("After applying filter {} to {} source files, {} remain", testFilter, preCount, rval.size());
    }

    // Since @Parameters really wants an array of objects
    List<Object[]> castToExpectedValue = rval.stream().map(p -> new Object[] { p })
            .collect(Collectors.toList());
    return castToExpectedValue;
}
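
For context, a minimal sketch of how JUnit 4 consumes the returned Object[] rows; the class and method names here are illustrative, not from the original project:

@RunWith(Parameterized.class)
public class SimpleSeleniumTestSketch {
    private final RawSourceTestingEnvironment environment;

    // JUnit instantiates the class once per Object[] row, passing the
    // array elements as constructor arguments
    public SimpleSeleniumTestSketch(RawSourceTestingEnvironment environment) {
        this.environment = environment;
    }

    @Test
    public void runAgainstEnvironment() {
        // ... exercise the environment here ...
    }
}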

From source file:oct.util.Util.java

public static List<LinePoint> findPeaksAndVallies(List<LinePoint> line) {
    //first find peaks
    List<LinePoint> peaks = getMaximums(line);
    //create inverse of line to find valleys
    ArrayList<LinePoint> convList = new ArrayList<>(line.size());
    line.forEach(p -> {
        convList.add(new LinePoint(p.getX(), 0D - p.getY()));
    });
    //find X values of valleys
    List<LinePoint> vallies = getMaximums(convList);
    //collect valley points
    List<LinePoint> ret = line.parallelStream()
            .filter(p -> vallies.stream().anyMatch(pk -> pk.getX() == p.getX())).collect(Collectors.toList());
    //merge in the peaks, then sort by X position
    ret.addAll(peaks);
    ret.sort(Comparator.comparingInt(peak -> peak.getX()));
    return ret;
}

From source file:io.spring.initializr.generator.ProjectGenerator.java

private static List<Dependency> filterDependencies(List<Dependency> dependencies, String scope) {
    return dependencies.stream().filter(dep -> scope.equals(dep.getScope()))
            .sorted(Comparator.comparing(MetadataElement::getId)).collect(Collectors.toList());
}
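
A hypothetical call site (the scope literal is illustrative):

// Sketch: keep only compile-scoped dependencies, already sorted by id
List<Dependency> compileScope = filterDependencies(resolvedDependencies, "compile");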

From source file:com.wrmsr.neurosis.aws.ec2.Ec2InstanceTypeDetails.java

public static Map<String, Ec2InstanceTypeDetails> read() throws IOException {
    List<Ec2InstanceTypeDetails> lst;
    try (InputStream in = Ec2InstanceTypeDetails.class.getClassLoader().getResourceAsStream(RESOURCE)) {
        lst = Serialization.JSON_OBJECT_MAPPER.get().readValue(in,
                new TypeReference<List<Ec2InstanceTypeDetails>>() {
                });
    }
    return lst.stream().map(i -> new ImmutablePair<>(i.instanceType, i)).collect(toImmutableMap());
}
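
Here toImmutableMap() is a project-local collector. With only the JDK, the same keying by instance type could be written as (a sketch):

// Sketch: plain JDK equivalent using Collectors.toMap
Map<String, Ec2InstanceTypeDetails> byType = lst.stream()
        .collect(Collectors.toMap(i -> i.instanceType, i -> i));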

From source file:be.ordina.msdashboard.MicroservicesDashboardServerApplicationTest.java

private static void assertLinkBetweenIds(Map<String, List> r, String source, String target) throws IOException {
    List<Object> nodes = (List<Object>) r.get(NODES);
    List<Map<String, Integer>> links = (List<Map<String, Integer>>) r.get(LINKS);
    int sourceId = -1;
    int targetId = -1;
    for (int i = 0; i < nodes.size(); i++) {
        if (((Map) nodes.get(i)).get(ID).equals(source)) {
            sourceId = i;
        } else if (((Map) nodes.get(i)).get(ID).equals(target)) {
            targetId = i;
        }
    }
    final int s = sourceId;
    final int t = targetId;
    assertThat(links.stream().anyMatch(link -> link.get("source") == s && link.get("target") == t)).isTrue();
}

From source file:com.ikanow.aleph2.v1.document_db.utils.V1DocumentDbHadoopUtils.java

/**
 * @param job_input - the input settings
 * @return the access context for the input format
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IAnalyticsAccessContext<InputFormat> getInputFormat(final String user_id,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input,
        final Optional<ISecurityService> maybe_security, final V1DocDbConfigBean config) {
    //TODO (ALEPH-20): need to perform security in here

    return new IAnalyticsAccessContext<InputFormat>() {
        private LinkedHashMap<String, Object> _mutable_output = null;

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(),
                    this.getAccessConfig().get().entrySet().stream()
                            .filter(kv -> !DESCRIBE_FILTER.contains(kv.getKey()))
                            .collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue())));
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @Override
        public Either<InputFormat, Class<InputFormat>> getAccessService() {
            return Either.right((Class<InputFormat>) (Class<?>) Aleph2V1InputFormat.class);
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessConfig()
         */
        @Override
        public Optional<Map<String, Object>> getAccessConfig() {
            if (null != _mutable_output) {
                return Optional.of(_mutable_output);
            }
            _mutable_output = new LinkedHashMap<>();

            // Parse various inputs:

            final List<String> communities = Arrays
                    .stream(job_input.resource_name_or_id()
                            .substring(BucketUtils.EXTERNAL_BUCKET_PREFIX.length()).split("_"))
                    .collect(Collectors.toList());

            // Validate communities:
            maybe_security.ifPresent(sec -> {
                communities.stream().filter(cid -> !sec.isUserPermitted(user_id, Tuples._2T("community", cid),
                        Optional.of(ISecurityService.ACTION_READ))).findAny().ifPresent(cid -> {
                            throw new RuntimeException(ErrorUtils
                                    .get(V1DocumentDbErrorUtils.V1_DOCUMENT_USER_PERMISSIONS, user_id, cid));
                        });
            });

            final String query = _mapper
                    .convertValue(Optional.ofNullable(job_input.filter()).orElse(Collections.emptyMap()),
                            JsonNode.class)
                    .toString();

            final Tuple4<String, Tuple2<Integer, Integer>, BasicDBObject, DBObject> horrible_object = LegacyV1HadoopUtils
                    .parseQueryObject(query, communities);

            final String db_server = config.mongodb_connection();

            // Here's all the fields to fill in

            // 1) Generic MongoDB fields:
            //   mongo.job.name         - name of the job shown in the jobtracker (value: title)
            //   mongo.job.verbose      - run the job verbosely? (value: true)
            //   mongo.job.background   - run the job in the foreground and wait for response, or background it? (value: false)
            //   mongo.input.uri        - if reading from mongo, the URI (value: "mongodb://"+dbserver+"/"+input)
            //   mongo.input.limit      - the number of documents to limit the read to [OPTIONAL] (value: nLimit)
            //   mongo.input.query      - the query, in JSON, to execute [OPTIONAL] (value: StringEscapeUtils.escapeXml(query))
            //   mongo.input.fields     - the fields, in JSON, to read [OPTIONAL] (value: fields, or "" if null)
            //   mongo.job.input.format - the InputFormat class (value: com.ikanow.infinit.e.data_model.custom.InfiniteMongoInputFormat)

            _mutable_output.put("mongo.job.name",
                    Optional.ofNullable(job_input.data_service()).orElse("unknown") + ":"
                            + Optional.ofNullable(job_input.resource_name_or_id()).orElse("unknown")); // (I think this is ignored in fact)
            _mutable_output.put("mongo.job.verbose", "true");
            _mutable_output.put("mongo.job.background", "false");
            _mutable_output.put("mongo.input.uri", "mongodb://" + db_server + "/doc_metadata.metadata");
            _mutable_output.put("mongo.input.query", horrible_object._1());
            _mutable_output.put("mongo.input.fields",
                    Optional.ofNullable(horrible_object._4()).map(o -> o.toString()).orElse(""));
            _mutable_output.put("mongo.input.limit", Optional.ofNullable(job_input.config())
                    .map(cfg -> cfg.test_record_limit_request()).map(o -> o.toString()).orElse("0"));

            // 2) Basic Infinit.e/MongoDB fields:
            //   max.splits         - maximum number of splits [optional] (value: nSplits)
            //   max.docs.per.split - maximum number of docs per split [optional] (value: nDocsPerSplit)
            _mutable_output.put("max.splits", horrible_object._2()._1().toString());
            _mutable_output.put("max.docs.per.split", horrible_object._2()._2().toString());

            // 3) Advanced Infinit.e/MongoDB fields:
            //   infinit.e.source.tags.filter - Infinit.e source tags filter [optional] (value: srcTags.toString())
            if (null != horrible_object._3()) {
                _mutable_output.put("infinit.e.source.tags.filter", horrible_object._3().toString());
            }
            return Optional.of(Collections.unmodifiableMap(_mutable_output));
        }
    };
}

From source file:de.bund.bfr.math.LeastSquaresOptimization.java

public static LeastSquaresOptimization createMultiVectorDiffOptimizer(List<String> formulas,
        List<String> dependentVariables, List<Double> initValues, List<List<String>> initParameters,
        List<String> parameters, List<List<Double>> timeValues, List<List<Double>> targetValues,
        String dependentVariable, String timeVariable, List<Map<String, List<Double>>> variableValues,
        IntegratorFactory integrator, InterpolationFactory interpolator) throws ParseException {
    return new LeastSquaresOptimization(parameters,
            targetValues.stream().flatMap(List::stream).collect(Collectors.toList()),
            new MultiVectorDiffFunction(formulas, dependentVariables, initValues, initParameters, parameters,
                    variableValues, timeValues, dependentVariable, timeVariable, integrator, interpolator));
}
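
The flatMap(List::stream) call is what flattens the nested target values into a single list; isolated as a minimal helper (a sketch, requiring java.util.List and java.util.stream.Collectors):

// Sketch: flatten List<List<Double>> into List<Double> with flatMap
public static List<Double> flatten(List<List<Double>> nested) {
    return nested.stream()
            .flatMap(List::stream)          // each inner list becomes a run of elements
            .collect(Collectors.toList());
}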

From source file:com.joyent.manta.benchmark.Benchmark.java

/**
 * Method used to run a multi-threaded benchmark.
 *
 * @param method to measure
 * @param path path to store benchmarking test data
 * @param iterations number of iterations to run
 * @param concurrency number of threads to run
 * @throws IOException thrown when we can't communicate with the server
 */
private static void multithreadedBenchmark(final String method, final String path, final int iterations,
        final int concurrency) throws IOException {
    final AtomicLong fullAggregation = new AtomicLong(0L);
    final AtomicLong serverAggregation = new AtomicLong(0L);
    final AtomicLong count = new AtomicLong(0L);
    final long perThreadCount = perThreadCount(iterations, concurrency);

    System.out.printf("Running %d iterations per thread\n", perThreadCount);

    final long testStart = System.nanoTime();

    Runtime.getRuntime().addShutdownHook(new Thread(Benchmark::cleanUp));

    final Callable<Void> worker = () -> {
        for (int i = 0; i < perThreadCount; i++) {
            Duration[] durations;

            if (method.equals("put")) {
                durations = measurePut(sizeInBytesOrNoOfDirs);
            } else if (method.equals("putDir")) {
                durations = measurePutDir(sizeInBytesOrNoOfDirs);
            } else {
                durations = measureGet(path);
            }

            long fullLatency = durations[0].toMillis();
            long serverLatency = durations[1].toMillis();
            fullAggregation.addAndGet(fullLatency);
            serverAggregation.addAndGet(serverLatency);

            System.out.printf("%s %d full=%dms, server=%dms, thread=%s\n", method, count.getAndIncrement(),
                    fullLatency, serverLatency, Thread.currentThread().getName());
        }

        return null;
    };

    final Thread.UncaughtExceptionHandler handler = (t, e) -> LOG.error("Error when executing benchmark", e);

    final AtomicInteger threadCounter = new AtomicInteger(0);
    ThreadFactory threadFactory = r -> {
        Thread t = new Thread(r);
        t.setDaemon(true);
        t.setUncaughtExceptionHandler(handler);
        t.setName(String.format("benchmark-%d", threadCounter.incrementAndGet()));

        return t;
    };

    ExecutorService executor = Executors.newFixedThreadPool(concurrency, threadFactory);

    List<Callable<Void>> workers = new ArrayList<>(concurrency);
    for (int i = 0; i < concurrency; i++) {
        workers.add(worker);
    }

    try {
        List<Future<Void>> futures = executor.invokeAll(workers);

        boolean completed = false;
        while (!completed) {
            try (Stream<Future<Void>> stream = futures.stream()) {
                completed = stream.allMatch((f) -> f.isDone() || f.isCancelled());

                if (!completed) {
                    Thread.sleep(CHECK_INTERVAL);
                }
            }
        }

    } catch (InterruptedException e) {
        return;
    } finally {
        System.err.println("Shutting down the thread pool");
        executor.shutdown();
    }

    final long testEnd = System.nanoTime();

    final long fullAverage = Math.round((double) fullAggregation.get() / iterations);
    final long serverAverage = Math.round((double) serverAggregation.get() / iterations);
    final long totalTime = Duration.ofNanos(testEnd - testStart).toMillis();

    System.out.printf("Average full latency: %d ms\n", fullAverage);
    System.out.printf("Average server latency: %d ms\n", serverAverage);
    System.out.printf("Total test time: %d ms\n", totalTime);
    System.out.printf("Total invocations: %d\n", count.get());
}
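
One detail worth noting: Stream implements AutoCloseable, but close() only matters for IO-backed streams such as Files.lines(); for a stream over an in-memory list, the try-with-resources around futures.stream() is harmless but unnecessary. The polling loop could be written more plainly (a sketch; InterruptedException is handled by the enclosing try, as in the original):

// Sketch: the same completion check without try-with-resources
boolean completed = false;
while (!completed) {
    completed = futures.stream().allMatch(f -> f.isDone() || f.isCancelled());
    if (!completed) {
        Thread.sleep(CHECK_INTERVAL);
    }
}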

From source file:com.thinkbiganalytics.nifi.rest.support.NifiProcessUtil.java

public static ProcessorDTO findFirstProcessorsByTypeAndName(Collection<ProcessorDTO> processors, String type,
        String name) {
    ProcessorDTO processorDTO = null;
    if (type != null) {
        List<ProcessorDTO> list = findProcessorsByType(processors, type);
        if (list != null && !list.isEmpty()) {
            if (StringUtils.isNotBlank(name)) {
                processorDTO = list.stream().filter(p -> p.getName().equalsIgnoreCase(name)).findFirst()
                        .orElse(list.get(0));
            }
            if (processorDTO == null) {
                processorDTO = list.get(0);
            }
        }
    }
    return processorDTO;
}
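
A hypothetical call site (the processor type string names a standard NiFi processor and is used here only for illustration):

// Sketch: prefer the GetFile processor named "Fetch input", else fall back to the first GetFile found
ProcessorDTO match = findFirstProcessorsByTypeAndName(processors,
        "org.apache.nifi.processors.standard.GetFile", "Fetch input");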