Example usage for java.util.Arrays.stream

A list of usage examples for java.util.Arrays.stream

Introduction

This page collects real-world usage examples for java.util.Arrays.stream. The prototype documented below is the double[] overload; several of the examples also call the closely related generic overload Arrays.stream(T[]).

Prototype

public static DoubleStream stream(double[] array) 

Document

Returns a sequential DoubleStream with the specified array as its source.
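
For illustration, a minimal, self-contained sketch of this overload (the array values are arbitrary):

import java.util.Arrays;

public class ArraysStreamDemo {
    public static void main(String[] args) {
        double[] values = { 1.5, 2.5, 3.0 };
        // Arrays.stream(double[]) returns a sequential DoubleStream
        double sum = Arrays.stream(values).sum();
        double average = Arrays.stream(values).average().orElse(0.0);
        System.out.println("sum=" + sum + " average=" + average);
    }
}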

Usage

From source file:org.shredzone.commons.gravatar.impl.GravatarServiceImpl.java

/**
 * Cleans up the gravatar cache. Oldest entries are deleted until cache size is
 * valid again.
 */
@Scheduled(fixedDelay = CACHE_CLEANUP)
public void cacheCleanup() {
    Arrays.stream(new File(cachePath).listFiles()).filter(file -> file.isFile() && !file.isHidden())
            .sorted(comparing(File::lastModified).reversed()) // newest files first, so the oldest entries beyond MAX_CACHE_ENTRIES are deleted
            .skip(MAX_CACHE_ENTRIES).forEach(this::delete);
}
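
Note that File.listFiles() returns null when the path does not name a readable directory, which would make Arrays.stream throw a NullPointerException here. A minimal null-safe variant of the listing step (cachePath as above, everything else assumed):

import java.io.File;
import java.util.Arrays;
import java.util.Comparator;
import java.util.stream.Stream;

class NullSafeListing {
    // Fall back to an empty stream when listFiles() returns null
    static Stream<File> cacheFiles(String cachePath) {
        File[] files = new File(cachePath).listFiles();
        return files == null ? Stream.empty()
                : Arrays.stream(files)
                        .filter(f -> f.isFile() && !f.isHidden())
                        .sorted(Comparator.comparing(File::lastModified).reversed());
    }
}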

From source file:me.rkfg.xmpp.bot.plugins.CoolStoryPlugin.java

@Override
public String processCommand(Message message, Matcher matcher) throws LogicException, ClientAuthException {
    try {
        final String command = matcher.group(1);

        final Optional<Website> website = Arrays.stream(WEBSITES)
                .filter(w -> ArrayUtils.contains(w.getCommands(), command.toLowerCase())).findFirst();
        if (!website.isPresent()) {
            return ERROR_WEBSITE_NOT_SUPPORTED;
        }

        return fetchStory(website.get());

    } catch (Exception e) {
        logger.error("Failed to fetch story", e);
        return null;
    }
}
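
In isolation, the array-to-Optional pattern used above looks like this; the command names here are made up for the sketch:

import java.util.Arrays;
import java.util.Optional;

public class FindFirstDemo {
    public static void main(String[] args) {
        String[] commands = { "story", "joke", "quote" };
        // Arrays.stream(T[]) + filter + findFirst yields Optional<T>
        Optional<String> match = Arrays.stream(commands)
                .filter(c -> c.equalsIgnoreCase("JOKE"))
                .findFirst();
        System.out.println(match.orElse("not supported"));
    }
}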

From source file:com.marklogic.entityservices.examples.ExamplesBase.java

public void importRDF(Path referenceDataDir, String collection) {

    logger.info("RDF Load Job started");

    WriteHostBatcher batcher = moveMgr.newWriteHostBatcher().withBatchSize(10).withThreadCount(1)
            .withTransform(new ServerTransform("turtle-to-xml"))
            .onBatchSuccess((client, batch) -> logger.info("Loaded rdf data batch"))
            .onBatchFailure((client, batch, throwable) -> {
                logger.error("FAILURE on batch:" + batch.toString() + "\n", throwable);
                System.err.println(throwable.getMessage());
                System.err.println(Arrays.stream(batch.getItems()).map(item -> item.getTargetUri())
                        .collect(Collectors.joining("\n")));
                // throwable.printStackTrace();
            });
    ticket = moveMgr.startJob(batcher);

    importOrDescend(referenceDataDir, batcher, collection, Format.TEXT);

    batcher.flush();

}
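
The failure handler's reporting idiom, joining mapped array elements into one string, reduced to a minimal sketch with placeholder URIs:

import java.util.Arrays;
import java.util.stream.Collectors;

public class JoiningDemo {
    public static void main(String[] args) {
        String[] uris = { "/doc/1.xml", "/doc/2.xml" };
        // Join array elements into a newline-separated report string
        String report = Arrays.stream(uris).collect(Collectors.joining("\n"));
        System.out.println(report);
    }
}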

From source file:delfos.rs.collaborativefiltering.als.ALSRecommender.java

@Override
public MatrixFactorizationModel buildRecommendationModel(DatasetLoader<? extends Rating> datasetLoader)
        throws CannotLoadRatingsDataset, CannotLoadContentDataset, CannotLoadUsersDataset {

    int numIter = 1;
    int dimension = 5;
    long seed = getSeedValue();

    final double lambda = 0.1;

    Bias bias = new Bias(datasetLoader);

    Map<User, List<Double>> randomUserVectors = datasetLoader.getUsersDataset().parallelStream()
            .collect(Collectors.toMap(user -> user, user -> {
                Random random = new Random(seed + user.getId());
                List<Double> vector = random.doubles(-10, 10).limit(dimension).boxed()
                        .collect(Collectors.toList());
                return vector;
            }));

    Map<Item, List<Double>> randomItemVectors = datasetLoader.getContentDataset().parallelStream()
            .collect(Collectors.toMap(item -> item, item -> {
                Random random = new Random(seed + item.getId());
                List<Double> vector = random.doubles(-10, 10).limit(dimension).boxed()
                        .collect(Collectors.toList());
                return vector;
            }));

    MatrixFactorizationModel model = new MatrixFactorizationModel(randomUserVectors, randomItemVectors, bias);

    for (int iterationIndex = 0; iterationIndex < numIter; iterationIndex++) {

        final int iteration = iterationIndex;
        final MatrixFactorizationModel initialModel = model;

        double error = getModelError(bias, datasetLoader, initialModel);

        System.out.println("Error in iteration " + iterationIndex + " is " + error);

        ProgressChangedController userProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " userOptimization iteration "
                        + iteration,
                datasetLoader.getUsersDataset().size(), this::fireBuildingProgressChangedEvent);

        Map<User, List<Double>> trainedUserVectors = datasetLoader.getUsersDataset().parallelStream()
                .collect(Collectors.toMap(user -> user, (User user) -> {
                    Map<Integer, ? extends Rating> userRatings = datasetLoader.getRatingsDataset()
                            .getUserRatingsRated(user.getId());

                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> userVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = userRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> itemVector = initialModel.getItemFeatures(rating.getItem());
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();

                                    double value = rating.getRatingValue().doubleValue();

                                    double errorThisRating = prediction - value;

                                    return errorThisRating;
                                }).map(value -> Math.pow(value, 2)).mapToDouble(value -> value).sum();

                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        double objectiveFunctionValue = predictionError + lambda * penalty;
                        return objectiveFunctionValue;
                    });

                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);

                    double[] initialGuess = new Random(seed + user.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();

                    List<Double> initialGuessList = Arrays.stream(initialGuess).boxed()
                            .collect(Collectors.toList());

                    double initialGuessPenalty = objectiveFunction.getObjectiveFunction().value(initialGuess);

                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(
                                new MultiDirectionalSimplex(dimension), new InitialGuess(initialGuess),
                                objectiveFunction, GoalType.MINIMIZE, MAX_EVAL, MAX_ITER);
                        double optimizedPenalty = optimize.getValue();
                        userProgress.setTaskFinished();

                        List<Double> optimizedUserVector = Arrays.stream(optimize.getPoint()).boxed()
                                .collect(Collectors.toList());
                        return optimizedUserVector;
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized for user " + user + " (numRatings="
                                + userRatings.size() + ")");
                        return initialModel.getUserFeatures(user);
                    }
                }));

        ProgressChangedController itemProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " item optimization iteration "
                        + iteration,
                datasetLoader.getContentDataset().size(), this::fireBuildingProgressChangedEvent);

        Map<Item, List<Double>> trainedItemVectors = datasetLoader.getContentDataset().parallelStream()
                .collect(Collectors.toMap(item -> item, item -> {
                    Map<Integer, ? extends Rating> itemRatings = datasetLoader.getRatingsDataset()
                            .getItemRatingsRated(item.getId());

                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> itemVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = itemRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> userVector = initialModel.getUserFeatures(rating.getUser());
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();

                                    double value = rating.getRatingValue().doubleValue();

                                    double errorThisRating = prediction - value;

                                    return errorThisRating;
                                }).map(value -> Math.pow(value, 2)).mapToDouble(value -> value).sum();

                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        double objectiveFunctionValue = predictionError + lambda * penalty;
                        return objectiveFunctionValue;
                    });

                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);

                    double[] initialGuess = new Random(seed + item.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();

                    List<Double> initialGuessList = Arrays.stream(initialGuess).boxed()
                            .collect(Collectors.toList());

                    double initialGuessPenalty = objectiveFunction.getObjectiveFunction().value(initialGuess);

                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(
                                new MultiDirectionalSimplex(dimension), new InitialGuess(initialGuess),
                                objectiveFunction, GoalType.MINIMIZE, MAX_EVAL, MAX_ITER);
                        double optimizedPenalty = optimize.getValue();
                        itemProgress.setTaskFinished();

                        List<Double> optimizedVector = Arrays.stream(optimize.getPoint()).boxed()
                                .collect(Collectors.toList());

                        return optimizedVector;
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized " + item
                                + " cannot be optimized (numRatings=" + itemRatings.size() + ")");
                        return initialModel.getItemFeatures(item);
                    }
                }));

        model = new MatrixFactorizationModel(trainedUserVectors, trainedItemVectors, bias);

    }
    return model;

}
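
This example leans on two Arrays.stream(double[]) idioms: boxing an optimizer's primitive vector into a List<Double>, and summing squared components for the L2 penalty. A minimal sketch with arbitrary values:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class BoxingDemo {
    public static void main(String[] args) {
        double[] pu = { 0.5, -1.0, 2.0 };
        // Box a primitive array into a List<Double>
        List<Double> vector = Arrays.stream(pu).boxed().collect(Collectors.toList());
        // L2 regularization penalty: sum of squared components
        double penalty = Arrays.stream(pu).map(v -> v * v).sum();
        System.out.println(vector + " penalty=" + penalty);
    }
}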

From source file:io.promagent.internal.HookMetadataParser.java

/**
 * See {@link #parse()}.
 *
 * The classNameFilter is used to parse only specific classes from the JAR files.
 */
public SortedSet<HookMetadata> parse(Predicate<String> classNameFilter)
        throws IOException, ClassNotFoundException {
    SortedSet<HookMetadata> result = new TreeSet<>();
    for (String className : listAllJavaClasses(hookJars, classNameFilter)) {
        byte[] binaryRepresentation = readBinaryRepresentation(className);
        ClassReader classReader = new ClassReader(binaryRepresentation);
        HookMetadataBuilder hookMetadata = new HookMetadataBuilder(className);
        classReader.accept(new ClassVisitor(Opcodes.ASM5) {
            @Override
            public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
                if (visible && typeEquals(desc, io.promagent.annotations.Hook.class)) {
                    return new AnnotationValueCollector("instruments", hookMetadata::addInstruments,
                            Opcodes.ASM5, super.visitAnnotation(desc, visible));
                } else {
                    return super.visitAnnotation(desc, visible);
                }
            }

            @Override
            public MethodVisitor visitMethod(int i, String method, String desc, String signature,
                    String[] strings) {
                List<String> parameterTypes = Arrays.stream(Type.getArgumentTypes(desc)).map(Type::getClassName)
                        .collect(Collectors.toList());
                MethodSignatureBuilder builder = hookMetadata.newMethodSignature(parameterTypes);
                return new MethodVisitor(Opcodes.ASM5, super.visitMethod(i, method, desc, signature, strings)) {
                    @Override
                    public AnnotationVisitor visitParameterAnnotation(int parameter, String desc,
                            boolean visible) {
                        if (visible && typeEquals(desc, Returned.class, Thrown.class)) {
                            builder.markReturnedOrThrown(parameter);
                        }
                        return super.visitParameterAnnotation(parameter, desc, visible);
                    }

                    @Override
                    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
                        if (visible && typeEquals(desc, Before.class, After.class)) {
                            return new AnnotationValueCollector("method", builder::addMethodName, Opcodes.ASM5,
                                    super.visitAnnotation(desc, visible));
                        } else {
                            return super.visitAnnotation(desc, visible);
                        }
                    }
                };
            }

        }, ClassReader.SKIP_CODE);
        if (hookMetadata.isComplete()) {
            result.add(hookMetadata.build());
        }
    }
    return result;
}
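
The visitMethod step maps an array of ASM Type objects to their class names. The same Arrays.stream(T[]) idiom, sketched with a plain Class<?>[] standing in for Type.getArgumentTypes(desc):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class ArgumentNamesDemo {
    public static void main(String[] args) {
        // Stand-in for Type.getArgumentTypes(desc): any object array streams the same way
        Class<?>[] argumentTypes = { String.class, int[].class };
        List<String> names = Arrays.stream(argumentTypes)
                .map(Class::getName)
                .collect(Collectors.toList());
        System.out.println(names); // [java.lang.String, [I]
    }
}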

From source file:com.github.erchu.beancp.MapperBuilder.java

/**
 * If two data types have no mapping defined by
 * {@link #addMap(java.lang.Class, java.lang.Class, com.github.erchu.beancp.DeclarativeMapSetup)}
 * or any of {@code addConverter} methods then this convention will be used.
 *
 * @param conventions conventions to add.
 *
 * @return this (for method chaining)
 */
public MapperBuilder addMapAnyByConvention(final MapConvention... conventions)
        throws MapperConfigurationException {
    List<MapConventionExecutor> conventionExecutors = Arrays.stream(conventions)
            .map(i -> new MapConventionExecutor(i)).collect(Collectors.toList());

    this._mapAnyConventions.addAll(conventionExecutors);

    return this;
}
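
A varargs parameter arrives as an array, so Arrays.stream applies directly; the lambda i -> new MapConventionExecutor(i) could equally be written as the constructor reference MapConventionExecutor::new. The wrapping idiom, sketched with a hypothetical Executor wrapper:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class WrapDemo {
    // Hypothetical wrapper standing in for MapConventionExecutor
    static class Executor {
        final String name;
        Executor(String name) { this.name = name; }
        @Override public String toString() { return "Executor(" + name + ")"; }
    }

    public static void main(String[] args) {
        String[] conventions = { "byName", "byGetter" };
        // Wrap each array element in a new object via a constructor reference
        List<Executor> executors = Arrays.stream(conventions)
                .map(Executor::new)
                .collect(Collectors.toList());
        System.out.println(executors);
    }
}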

From source file:com.thinkbiganalytics.metadata.modeshape.security.role.JcrAbstractRoleMembership.java

@Override
public void setMemebers(GroupPrincipal... principals) {
    Set<GroupPrincipal> newMembers = Arrays.stream(principals).collect(Collectors.toSet());
    Set<GroupPrincipal> oldMembers = streamGroups().collect(Collectors.toSet());

    newMembers.stream().filter(u -> !oldMembers.contains(u)).forEach(this::addMember);

    oldMembers.stream().filter(u -> !newMembers.contains(u)).forEach(this::removeMember);
}
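
The reconcile-by-set-difference pattern above, sketched with plain strings instead of GroupPrincipal (Set.of requires Java 9+):

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

public class SetDiffDemo {
    public static void main(String[] args) {
        String[] requested = { "admins", "users" };
        Set<String> newMembers = Arrays.stream(requested).collect(Collectors.toSet());
        Set<String> oldMembers = Set.of("users", "guests");

        // Members to add: in the new set but not the old
        newMembers.stream().filter(m -> !oldMembers.contains(m))
                .forEach(m -> System.out.println("add " + m));
        // Members to remove: in the old set but not the new
        oldMembers.stream().filter(m -> !newMembers.contains(m))
                .forEach(m -> System.out.println("remove " + m));
    }
}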

From source file:com.synopsys.integration.blackduck.service.model.PolicyStatusDescription.java

private void getPolicySeverityMessage(final StringBuilder stringBuilder) {
    stringBuilder.append("Policy Severity counts: ");
    // let's loop over the actual enum values for a consistently ordered output
    final String policySeverityItems = Arrays.stream(PolicySeverityType.values())
            .filter(policySeverityCount::containsKey)
            .map(policySeverityType -> fixMatchPlural("%d %s a severity level of %s",
                    policySeverityCount.get(policySeverityType).value, policySeverityType))
            .collect(Collectors.joining(", "));
    stringBuilder.append(policySeverityItems);
}
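
Streaming values() of an enum and filtering by map membership keeps the output in declaration order. A minimal sketch with a made-up Severity enum standing in for PolicySeverityType (Map.of requires Java 9+):

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

public class EnumCountDemo {
    enum Severity { BLOCKER, CRITICAL, MINOR }

    public static void main(String[] args) {
        Map<Severity, Integer> counts = Map.of(Severity.CRITICAL, 2, Severity.BLOCKER, 1);
        // Iterating values() keeps declaration order regardless of map iteration order
        String message = Arrays.stream(Severity.values())
                .filter(counts::containsKey)
                .map(s -> counts.get(s) + " " + s)
                .collect(Collectors.joining(", "));
        System.out.println(message); // 1 BLOCKER, 2 CRITICAL
    }
}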

From source file:com.thinkbiganalytics.schema.QueryRunner.java

/**
 * Tests that the specified query is a SHOW, SELECT, DESC, or DESCRIBE query.
 *
 * @param query the query to test
 * @return {@code true} if the query is valid, or {@code false} otherwise
 */
private boolean validateQuery(@Nonnull final String query) {
    final String testQuery = StringUtils.trimToEmpty(query);
    return Arrays.stream(new String[] { "show", "select", "desc", "describe" })
            .anyMatch(prefix -> StringUtils.startsWithIgnoreCase(testQuery, prefix));
}
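
The same prefix check can be sketched without the Commons Lang dependency, using trim plus toLowerCase in place of trimToEmpty and startsWithIgnoreCase:

import java.util.Arrays;

public class PrefixCheckDemo {
    static boolean isReadOnly(String query) {
        String q = query == null ? "" : query.trim().toLowerCase();
        // anyMatch short-circuits on the first matching prefix
        return Arrays.stream(new String[] { "show", "select", "desc", "describe" })
                .anyMatch(q::startsWith);
    }

    public static void main(String[] args) {
        System.out.println(isReadOnly("SELECT * FROM t")); // true
        System.out.println(isReadOnly("DROP TABLE t"));    // false
    }
}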

From source file:com.netflix.spinnaker.halyard.deploy.spinnaker.v1.service.distributed.kubernetes.v2.KubernetesV2Utils.java

public static boolean isReady(KubernetesAccount account, String namespace, String service) {
    log.info("Checking readiness for " + service);
    List<String> command = kubectlPrefix(account);

    if (StringUtils.isNotEmpty(namespace)) {
        command.add("-n=" + namespace);
    }

    command.add("get");
    command.add("po");

    command.add("-l=cluster=" + service);
    command.add("-o=jsonpath='{.items[*].status.containerStatuses[*].ready}'");
    // This command returns a space-separated string of true/false values indicating whether each of
    // the pod's containers are READY.
    // e.g., if we are querying two spin-orca pods and both pods' monitoring-daemon containers are
    // READY but the orca containers are not READY, the output may be 'true false true false'.

    JobRequest request = new JobRequest().setTokenizedCommand(command);

    String jobId = DaemonTaskHandler.getJobExecutor().startJob(request);

    JobStatus status;
    try {
        status = DaemonTaskHandler.getJobExecutor().backoffWait(jobId);
    } catch (InterruptedException e) {
        throw new DaemonTaskInterrupted(e);
    }

    if (status.getState() != JobStatus.State.COMPLETED) {
        throw new HalException(Problem.Severity.FATAL,
                String.join("\n", "Unterminated readiness check for " + service + " in " + namespace,
                        status.getStdErr(), status.getStdOut()));
    }

    if (status.getResult() == JobStatus.Result.SUCCESS) {
        String readyStatuses = status.getStdOut();
        if (readyStatuses.isEmpty()) {
            return false;
        }
        readyStatuses = readyStatuses.substring(1, readyStatuses.length() - 1); // Strip leading and trailing single quote
        if (readyStatuses.isEmpty()) {
            return false;
        }
        return Arrays.stream(readyStatuses.split(" ")).allMatch(s -> s.equals("true"));
    } else {
        throw new HalException(Problem.Severity.FATAL,
                String.join("\n", "Failed readiness check for " + service + " in " + namespace,
                        status.getStdErr(), status.getStdOut()));
    }
}
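
The final readiness test, reduced to a minimal sketch using the sample output described in the comment above:

import java.util.Arrays;

public class ReadinessDemo {
    public static void main(String[] args) {
        // Sample kubectl jsonpath output after stripping the surrounding quotes
        String readyStatuses = "true false true false";
        boolean allReady = Arrays.stream(readyStatuses.split(" "))
                .allMatch("true"::equals);
        System.out.println(allReady); // false
    }
}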