List of usage examples for java.util.List stream()

default Stream<E> stream()

Returns a sequential Stream with this collection as its source.
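Before the project examples, a minimal self-contained sketch of the method itself: List inherits stream() from Collection, so any list can feed a map/filter/collect pipeline.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ListStreamDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        // stream() turns the list into a sequential Stream<String>
        List<String> upper = names.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(upper); // [ALPHA, BETA, GAMMA]
    }
}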
From source file:de.se_rwth.langeditor.util.Misc.java
public static <T> Stream<T> preorder(T start, Function<T, Collection<? extends T>> generatingFunction) {
    List<T> elements = new ArrayList<>();
    Stack<T> stack = new Stack<>();
    stack.push(start);
    T currentElement;
    while (!stack.isEmpty()) {
        currentElement = stack.pop();
        elements.add(currentElement);
        stack.addAll(generatingFunction.apply(currentElement));
    }
    return elements.stream();
}
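A hedged usage sketch for the utility above, assuming the preorder method is on the classpath; the Node type here is hypothetical and exists only to supply a child-generating function.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class PreorderDemo {
    // Hypothetical node type; the utility only needs a function that
    // yields a node's children.
    static class Node {
        final String name;
        final List<Node> children;
        Node(String name, Node... children) {
            this.name = name;
            this.children = Arrays.asList(children);
        }
    }

    public static void main(String[] args) {
        Node root = new Node("a", new Node("b", new Node("d")), new Node("c"));
        List<String> names = Misc.preorder(root, n -> n.children)
                .map(n -> n.name)
                .collect(Collectors.toList());
        // Root first; since a Stack is used, sibling subtrees are visited
        // in reverse insertion order, e.g. [a, c, b, d].
        System.out.println(names);
    }
}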
From source file:com.github.gavlyukovskiy.boot.jdbc.decorator.flexypool.FlexyPoolConfiguration.java
static <T extends DataSource> List<ConnectionAcquiringStrategyFactory<?, T>> mergeFactories(
        List<ConnectionAcquiringStrategyFactory<?, T>> factories, FlexyPoolProperties flexyPool) {
    List<ConnectionAcquiringStrategyFactory<?, T>> newFactories = new ArrayList<>();
    List<? extends Class<?>> factoryClasses;
    if (factories != null) {
        factoryClasses = factories.stream().map(Object::getClass).collect(Collectors.toList());
        newFactories.addAll(factories);
    } else {
        factoryClasses = Collections.emptyList();
    }
    if (!factoryClasses.contains(IncrementPoolOnTimeoutConnectionAcquiringStrategy.Factory.class)) {
        IncrementPool incrementPool = flexyPool.getAcquiringStrategy().getIncrementPool();
        if (incrementPool.getMaxOverflowPoolSize() > 0) {
            newFactories.add(new IncrementPoolOnTimeoutConnectionAcquiringStrategy.Factory<>(
                    incrementPool.getMaxOverflowPoolSize(), incrementPool.getTimeoutMillis()));
        }
    }
    if (!factoryClasses.contains(RetryConnectionAcquiringStrategy.Factory.class)) {
        Retry retry = flexyPool.getAcquiringStrategy().getRetry();
        if (retry.getAttempts() > 0) {
            newFactories.add(new RetryConnectionAcquiringStrategy.Factory<>(retry.getAttempts()));
        }
    }
    return newFactories;
}
From source file:com.yahoo.bullet.parsing.RuleUtils.java
@SafeVarargs
public static String makeGroupAggregation(Integer size, AggregationType operation,
        List<GroupOperation> operations, Pair<String, String>... fields) {
    return "{" + "'type' : '" + getOperationFor(operation) + "', "
            + "'fields' : " + makeGroupFields(fields) + ", "
            + "'attributes' : {" + "'operations' : ["
            + operations.stream().map(RuleUtils::makeGroupOperation).reduce((a, b) -> a + " , " + b).orElse("")
            + "]" + "}, "
            + "'size' : " + size + "}";
}
From source file:com.hengyi.japp.tools.PYUtil.java
public static List<String> getFirstSpell(String cs) {
    if (isBlank(cs)) {
        return null;
    }
    List<String> result = null;
    List<Set<Character>> cs_fpys = cs.chars().mapToObj(i -> toHanyuPinyinStringArray((char) i))
            .map(a -> Arrays.stream(a).map(s -> s.charAt(0)).collect(Collectors.toSet()))
            .collect(Collectors.toList());
    for (Set<Character> fpys : cs_fpys) {
        if (result == null) {
            result = fpys.stream().map(String::valueOf).collect(Collectors.toList());
        } else {
            Stream<String> tmps = result.stream().flatMap(s -> fpys.stream().map(fpy -> s + fpy));
            result = tmps.collect(Collectors.toList());
        }
    }
    return result;
}
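A hedged usage sketch for the pinyin utility above: it builds every combination of first letters across the possible readings of each character. The example input and the element order are illustrative only.

// Assumes the PYUtil class above (and the pinyin4j dependency it wraps)
// is on the classpath.
List<String> initials = PYUtil.getFirstSpell("重庆");
// "重" has readings chóng and zhòng, "庆" reads qìng, so the result
// contains both combinations, e.g. [cq, zq] (order not guaranteed).
System.out.println(initials);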
From source file:com.uber.hoodie.common.util.CompactionUtils.java
/**
 * Generate compaction plan from file-slices
 *
 * @param partitionFileSlicePairs list of partition file-slice pairs
 * @param extraMetadata           Extra Metadata
 * @param metricsCaptureFunction  Metrics Capture function
 */
public static HoodieCompactionPlan buildFromFileSlices(List<Pair<String, FileSlice>> partitionFileSlicePairs,
        Optional<Map<String, String>> extraMetadata,
        Optional<Function<Pair<String, FileSlice>, Map<String, Double>>> metricsCaptureFunction) {
    HoodieCompactionPlan.Builder builder = HoodieCompactionPlan.newBuilder();
    extraMetadata.ifPresent(m -> builder.setExtraMetadata(m));
    builder.setOperations(partitionFileSlicePairs.stream()
            .map(pfPair -> buildFromFileSlice(pfPair.getKey(), pfPair.getValue(), metricsCaptureFunction))
            .collect(Collectors.toList()));
    return builder.build();
}
From source file:keywhiz.cli.ClientUtils.java
/**
 * Serialize the cookies to JSON from the given CookieManager to a file at the specified path.
 * Output file will have 600 permissions (owner-read, owner-write).
 *
 * @param cookieManager CookieManager that contains cookies to be serialized.
 * @param path          Location to serialize cookies to file.
 */
public static void saveCookies(CookieManager cookieManager, Path path) {
    List<HttpCookie> cookies = cookieManager.getCookieStore().getCookies();
    try (BufferedWriter writer = Files.newBufferedWriter(path, CREATE)) {
        Files.setPosixFilePermissions(path, ImmutableSet.of(OWNER_READ, OWNER_WRITE));
        writer.write(Jackson.newObjectMapper().writeValueAsString(
                cookies.stream().map(c -> JsonCookie.fromHttpCookie(c)).collect(Collectors.toList())));
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
From source file:com.github.aptd.simulation.CMain.java
/**
 * returns the experiment data models
 *
 * @param p_options commandline options
 * @return stream of experiments
 */
private static Stream<Pair<EDataModel, String>> datamodel(final CommandLine p_options) {
    final List<String> l_instances = Arrays.stream(p_options.getOptionValue("scenario").split(","))
            .map(String::trim).filter(i -> !i.isEmpty()).collect(Collectors.toList());
    final List<String> l_types = Arrays.stream(p_options.getOptionValue("scenariotype", "").split(","))
            .map(String::trim).filter(i -> !i.isEmpty()).collect(Collectors.toList());
    return StreamUtils.zip(l_instances.stream(),
            Stream.concat(l_types.stream(),
                    IntStream.range(0, l_instances.size() - l_types.size()).mapToObj(
                            i -> CConfiguration.INSTANCE.getOrDefault("xml", "default", "datamodel"))),
            (i, j) -> new ImmutablePair<>(EDataModel.from(j), i));
}
From source file:org.eclipse.sw360.datahandler.common.SW360Utils.java
public static List<String> getReleaseNames(List<Release> releases) {
    if (releases == null) {
        return Collections.emptyList();
    }
    return releases.stream().map(SW360Utils::printName).collect(Collectors.toList());
}
From source file:io.github.benas.randombeans.util.ReflectionUtils.java
/**
 * Filters a list of types to keep only elements having the same parameterized types as the given type.
 *
 * @param type  the type to use for the search
 * @param types a list of types to filter
 * @return a list of types having the same parameterized types as the given type
 */
public static List<Class<?>> filterSameParameterizedTypes(final List<Class<?>> types, final Type type) {
    if (type instanceof ParameterizedType) {
        Type[] fieldArgumentTypes = ((ParameterizedType) type).getActualTypeArguments();
        List<Class<?>> typesWithSameParameterizedTypes = new ArrayList<>();
        for (Class<?> currentConcreteType : types) {
            List<Type[]> actualTypeArguments = getActualTypeArgumentsOfGenericInterfaces(currentConcreteType);
            typesWithSameParameterizedTypes.addAll(actualTypeArguments.stream()
                    .filter(currentTypeArguments -> Arrays.equals(fieldArgumentTypes, currentTypeArguments))
                    .map(currentTypeArguments -> currentConcreteType).collect(toList()));
        }
        return typesWithSameParameterizedTypes;
    }
    return types;
}
From source file:de.bund.bfr.math.MathUtils.java
public static Double getR2(double sse, List<Double> targetValues) {
    if (targetValues.size() < 2) {
        return null;
    }
    double targetMean = DoubleMath.mean(targetValues);
    double targetTotalSumOfSquares = targetValues.stream()
            .mapToDouble(v -> Math.pow(v - targetMean, 2.0))
            .sum();
    double rSquared = 1 - sse / targetTotalSumOfSquares;
    return Math.max(rSquared, 0.0);
}
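A hedged usage sketch for the helper above: it computes R² = 1 - SSE/SST, where SST is the total sum of squares around the mean of the observed values, clamps negative results to 0, and returns null for fewer than two observations. The SSE value here is an assumed placeholder from some fitted model.

// Assumes the MathUtils class above is on the classpath.
List<Double> observed = Arrays.asList(1.0, 2.0, 3.0, 4.0);
// mean = 2.5, SST = 2.25 + 0.25 + 0.25 + 2.25 = 5.0
double sse = 0.5; // residual sum of squares from some fitted model
Double r2 = MathUtils.getR2(sse, observed);
System.out.println(r2); // 1 - 0.5 / 5.0 = 0.9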