Example usage for java.util Arrays stream

List of usage examples for java.util Arrays stream

Introduction

On this page you can find example usages of java.util.Arrays.stream.

Prototype

public static DoubleStream stream(double[] array) 

Source Link

Document

Returns a sequential DoubleStream with the specified array as its source. Note that Arrays.stream is overloaded: the examples below also use the generic T[] and int[] overloads, which return Stream&lt;T&gt; and IntStream respectively.

Usage

From source file:com.ejisto.core.classloading.javassist.ObjectEditor.java

/**
 * Checks whether the accessed field has already been processed.
 *
 * @param f         the field access whose enclosing class is inspected
 * @param fieldName the name of the accessed field
 * @return {@code true} when no marker is registered for the field, or when the
 *         enclosing class already declares a field named after the marker
 */
private static boolean hasAlreadyBeenProcessed(FieldAccess f, String fieldName) {
    Optional<String> marker = getFieldMarker(fieldName);
    if (!marker.isPresent()) {
        // No marker registered: nothing left to do for this field.
        return true;
    }
    String markerName = marker.get();
    return Arrays.stream(f.getEnclosingClass().getDeclaredFields())
            .map(declared -> declared.getName())
            .anyMatch(markerName::equals);
}

From source file:org.createnet.raptor.auth.service.services.AclDeviceService.java

/**
 * Registers a device in the ACL store. When no permissions exist yet for the
 * owner, the configured default permissions (plus ADMINISTRATION) are stored;
 * otherwise only the device's parent link in the ACL tree is refreshed.
 *
 * @param device the device to register; its owner provides the ACL identity
 */
@Retryable(maxAttempts = 3, value = AclManagerService.AclManagerException.class, backoff = @Backoff(delay = 500, multiplier = 3))
public void register(Device device) {

    // Resolve the owner and the ACL security identity representing it.
    User owner = device.getOwner();
    List<Permission> permissions = list(device, owner);
    Sid sid = new UserSid(owner);

    logger.debug("Found {} permissions for {}", permissions.size(), owner.getUuid());

    if (!permissions.isEmpty()) {
        // Permissions already present: just (re)link the device to its parent.
        aclManagerService.setParent(device.getClass(), device.getId(), device.getParentId());
    } else {
        logger.debug("Set default permission");
        List<Permission> newPerms = Arrays.stream(defaultPermissions).collect(Collectors.toList());

        // NOTE(review): owner was read from device.getOwner() above, so this
        // comparison looks always-true — confirm the intended condition.
        if (owner.getId().equals(device.getOwner().getId())) {
            newPerms.add(RaptorPermission.ADMINISTRATION);
        }

        try {
            aclManagerService.addPermissions(Device.class, device.getId(), sid, newPerms, device.getParentId());
        } catch (AclManagerService.AclManagerException ex) {
            logger.warn("Failed to store default permission for {} ({}): {}", device.getId(), sid,
                    ex.getMessage());
            // Rethrow so @Retryable can re-attempt the registration.
            throw ex;
        }

        permissions.addAll(newPerms);
    }

    String perms = String.join(", ", RaptorPermission.toLabel(permissions));
    logger.debug("Permission set for device {} to {} - {}", device.getUuid(), device.getOwner().getUuid(),
            perms);

}

From source file:cool.pandora.modeller.ui.handlers.iiif.CreateDefaultContainersHandler.java

/**
 * Creates the default set of containers for the bag: the collection and object
 * containers, the IIIF containers, and — when the bag carries text — the text
 * containers as well. Each successful creation is reported on the console; a
 * failed PUT is logged to the console and the remaining containers are still
 * attempted.
 */
@Override
public void execute() {
    final String message = ApplicationContextUtil.getMessage("bag.message.containercreated");
    final DefaultBag bag = bagView.getBag();
    final Map<String, BagInfoField> map = bag.getInfo().getFieldMap();

    final URI collectionIDURI = IIIFObjectURI.getCollectionIdURI(map);
    final URI objektURI = IIIFObjectURI.getObjektURI(map);

    try {
        ModellerClient.doPut(collectionIDURI);
    } catch (final ModellerClientFailedException e) {
        ApplicationContextUtil.addConsoleMessage(getMessage(e));
    }
    try {
        ModellerClient.doPut(objektURI);
    } catch (final ModellerClientFailedException e) {
        ApplicationContextUtil.addConsoleMessage(getMessage(e));
    }

    // Locals renamed to lowerCamelCase (were "IIIFContainers"/"TextContainers"/"Containers").
    final String[] iiifContainers = new String[] { ProfileOptions.RESOURCE_CONTAINER_KEY,
            ProfileOptions.MANIFEST_RESOURCE_LABEL, ProfileOptions.SEQUENCE_CONTAINER_KEY,
            ProfileOptions.RANGE_CONTAINER_KEY, ProfileOptions.CANVAS_CONTAINER_KEY,
            ProfileOptions.LIST_CONTAINER_KEY, ProfileOptions.LAYER_CONTAINER_KEY };

    final String[] textContainers = new String[] { ProfileOptions.TEXT_PAGE_CONTAINER_KEY,
            ProfileOptions.TEXT_AREA_CONTAINER_KEY, ProfileOptions.TEXT_LINE_CONTAINER_KEY,
            ProfileOptions.TEXT_WORD_CONTAINER_KEY };

    // Text-bearing bags get the IIIF containers plus the text containers.
    final String[] containers;
    if (bag.hasText()) {
        containers = Stream.concat(Arrays.stream(iiifContainers), Arrays.stream(textContainers))
                .toArray(String[]::new);
    } else {
        containers = iiifContainers;
    }

    for (final String containerKey : containers) {
        final URI containerURI = IIIFObjectURI.buildContainerURI(map, containerKey);
        try {
            ModellerClient.doPut(containerURI);
            ApplicationContextUtil.addConsoleMessage(message + " " + containerURI);
        } catch (final ModellerClientFailedException e) {
            ApplicationContextUtil.addConsoleMessage(getMessage(e));
        }
    }
    // Refresh the UI once all containers have been attempted.
    bagView.getControl().invalidate();
}

From source file:eu.itesla_project.merge.MergeByDateTool.java

/**
 * Merges the snapshot cases of the requested countries for the given date and
 * exports the merged network in the requested format.
 *
 * @param line the parsed command line options
 * @throws Exception if loading, merging or exporting fails
 */
@Override
public void run(CommandLine line) throws Exception {
    // Load the component implementations from the platform configuration.
    ComponentDefaultConfig config = ComponentDefaultConfig.load();
    CaseRepository caseRepository = config.newFactoryImpl(CaseRepositoryFactory.class)
            .create(LocalComputationManager.getDefault());
    LoadFlowFactory loadFlowFactory = config.newFactoryImpl(LoadFlowFactory.class);
    MergeOptimizerFactory mergeOptimizerFactory = config.newFactoryImpl(MergeOptimizerFactory.class);

    // "countries" is a comma-separated list of Country enum names.
    Set<Country> countries = Arrays.stream(line.getOptionValue("countries").split(","))
            .map(code -> Country.valueOf(code))
            .collect(Collectors.toSet());
    DateTime date = DateTime.parse(line.getOptionValue("date"));
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    String outputFormat = line.getOptionValue("output-format");
    Exporter exporter = Exporters.getExporter(outputFormat);
    if (exporter == null) {
        throw new RuntimeException("Format " + outputFormat + " not supported");
    }
    boolean optimize = line.hasOption("optimize");

    System.out.println("merging...");

    Network merge = MergeUtil.merge(caseRepository, date, CaseType.SN, countries, loadFlowFactory, 0,
            mergeOptimizerFactory, LocalComputationManager.getDefault(), optimize);

    System.out.println("exporting...");

    // Use the merged network id, with spaces replaced, as the export base name.
    String baseName = merge.getId().replace(" ", "_");
    exporter.export(merge, null, new FileDataSource(outputDir, baseName));
}

From source file:io.wcm.caravan.commons.cors.impl.CorsServletFilter.java

/**
 * Reads the CORS filter configuration: the enabled/allow-all flags and the
 * host whitelist/blacklist (blank entries are dropped).
 *
 * @param config the OSGi component configuration properties
 */
@Activate
void activate(Map<String, Object> config) {
    enabled = PropertiesUtil.toBoolean(config.get(PROPERTY_ENABLED), DEFAULT_ENABLED);
    allowAllHosts = PropertiesUtil.toBoolean(config.get(PROPERTY_ALLOW_ALL_HOSTS), DEFAULT_ALLOW_ALL_HOSTS);
    hostWhitelist = toHostSet(config.get(PROPERTY_HOST_WHITELIST));
    hostBlacklist = toHostSet(config.get(PROPERTY_HOST_BLACKLIST));
}

/** Converts a multi-value configuration property to the set of its non-blank entries. */
private static Set<String> toHostSet(Object configValue) {
    String[] entries = PropertiesUtil.toStringArray(configValue, new String[0]);
    return Arrays.stream(entries).filter(StringUtils::isNotBlank).collect(Collectors.toSet());
}

From source file:com.simiacryptus.mindseye.applications.ObjectLocationBase.java

/**
 * Runs the object-location demo: classifies a batch of images, renders per-category
 * alpha masks from the locator network, and logs eigen-decomposition statistics of
 * the detection regions to the notebook output.
 *
 * @param log the notebook output to write images, tables and text to
 */
public void run(@Nonnull final NotebookOutput log) {
    //    @Nonnull String logName = "cuda_" + log.getName() + ".log";
    //    log.p(log.file((String) null, logName, "GPU Log"));
    //    CudaSystem.addLog(new PrintStream(log.file(logName)));

    // Two networks: one classifies the image, one locates objects within it.
    ImageClassifierBase classifier = getClassifierNetwork();
    Layer classifyNetwork = classifier.getNetwork();

    ImageClassifierBase locator = getLocatorNetwork();
    Layer locatorNetwork = locator.getNetwork();
    // Run both networks in single precision.
    ArtistryUtil.setPrecision((DAGNetwork) classifyNetwork, Precision.Float);
    ArtistryUtil.setPrecision((DAGNetwork) locatorNetwork, Precision.Float);

    Tensor[][] inputData = loadImages_library();
    //    Tensor[][] inputData = loadImage_Caltech101(log);
    // Exponent passed to renderAlpha when building the alpha masks.
    double alphaPower = 0.8;

    // Process at most the first 10 input rows; row[0] is assumed to be the image
    // tensor — TODO confirm against loadImages_library.
    final AtomicInteger index = new AtomicInteger(0);
    Arrays.stream(inputData).limit(10).forEach(row -> {
        log.h3("Image " + index.getAndIncrement());
        final Tensor img = row[0];
        log.p(log.image(img.toImage(), ""));
        Result classifyResult = classifyNetwork.eval(new MutableResult(row));
        Result locationResult = locatorNetwork.eval(new MutableResult(row));
        Tensor classification = classifyResult.getData().get(0);
        List<CharSequence> categories = classifier.getCategories();
        // Indices of the 10 highest-scoring categories, best first (sorted by
        // negated score).
        int[] sortedIndices = IntStream.range(0, categories.size()).mapToObj(x -> x)
                .sorted(Comparator.comparing(i -> -classification.get(i))).mapToInt(x -> x).limit(10).toArray();
        logger.info(Arrays.stream(sortedIndices)
                .mapToObj(
                        i -> String.format("%s: %s = %s%%", i, categories.get(i), classification.get(i) * 100))
                .reduce((a, b) -> a + "\n" + b).orElse(""));
        // Unit-normalized alpha mask per rendered category, keyed by category name.
        LinkedHashMap<CharSequence, Tensor> vectors = new LinkedHashMap<>();
        List<CharSequence> predictionList = Arrays.stream(sortedIndices).mapToObj(categories::get)
                .collect(Collectors.toList());
        // Render and log an alpha mask for each of the top 6 categories.
        Arrays.stream(sortedIndices).limit(6).forEach(category -> {
            CharSequence name = categories.get(category);
            log.h3(name);
            Tensor alphaTensor = renderAlpha(alphaPower, img, locationResult, classification, category);
            log.p(log.image(img.toRgbImageAlphaMask(0, 1, 2, alphaTensor), ""));
            vectors.put(name, alphaTensor.unit());
        });

        // Mean detection mask over all rendered categories.
        Tensor avgDetection = vectors.values().stream().reduce((a, b) -> a.add(b)).get()
                .scale(1.0 / vectors.size());
        // Covariance-style matrix of the mean-centered masks across the predictions.
        Array2DRowRealMatrix covarianceMatrix = new Array2DRowRealMatrix(predictionList.size(),
                predictionList.size());
        for (int x = 0; x < predictionList.size(); x++) {
            for (int y = 0; y < predictionList.size(); y++) {
                Tensor l = vectors.get(predictionList.get(x));
                Tensor r = vectors.get(predictionList.get(y));

                // Categories without a rendered mask (only top 6 were rendered)
                // contribute 0.
                covarianceMatrix.setEntry(x, y,
                        null == l || null == r ? 0 : (l.minus(avgDetection)).dot(r.minus(avgDetection)));
            }
        }
        @Nonnull
        final EigenDecomposition decomposition = new EigenDecomposition(covarianceMatrix);

        // Report the first 10 eigenvectors as composite "eigenobjects".
        for (int objectVector = 0; objectVector < 10; objectVector++) {
            log.h3("Eigenobject " + objectVector);
            double eigenvalue = decomposition.getRealEigenvalue(objectVector);
            RealVector eigenvector = decomposition.getEigenvector(objectVector);
            // Weighted sum of the category masks, weighted by eigenvector components.
            Tensor detectionRegion = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                Tensor tensor = vectors.get(predictionList.get(i));
                return null == tensor ? null : tensor.scale(eigenvector.getEntry(i));
            }).filter(x -> null != x).reduce((a, b) -> a.add(b)).get();
            // Rescale so the region's RMS magnitude maps to 255.
            detectionRegion = detectionRegion.scale(255.0 / detectionRegion.rms());
            CharSequence categorization = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                CharSequence category = predictionList.get(i);
                double component = eigenvector.getEntry(i);
                return String.format("<li>%s = %.4f</li>", category, component);
            }).reduce((a, b) -> a + "" + b).get();
            log.p(String.format("Object Detected: <ol>%s</ol>", categorization));
            log.p("Object Eigenvalue: " + eigenvalue);
            log.p("Object Region: " + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion), ""));
            log.p("Object Region Compliment: "
                    + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion.scale(-1)), ""));
        }

        //      final int[] orderedVectors = IntStream.range(0, 10).mapToObj(x -> x)
        //        .sorted(Comparator.comparing(x -> -decomposition.getRealEigenvalue(x))).mapToInt(x -> x).toArray();
        //      IntStream.range(0, orderedVectors.length)
        //        .mapToObj(i -> {
        //            //double realEigenvalue = decomposition.getRealEigenvalue(orderedVectors[i]);
        //            return decomposition.getEigenvector(orderedVectors[i]).toArray();
        //          }
        //        ).toArray(i -> new double[i][]);

        // Pairwise angle table: masks are unit vectors, so l.dot(r2) is the cosine
        // and acos gives the angle between the detection regions.
        log.p(String.format(
                "<table><tr><th>Cosine Distance</th>%s</tr>%s</table>", Arrays.stream(sortedIndices).limit(10)
                        .mapToObj(col -> "<th>" + categories.get(col) + "</th>").reduce((a, b) -> a + b).get(),
                Arrays.stream(sortedIndices).limit(10).mapToObj(r -> {
                    return String.format("<tr><td>%s</td>%s</tr>", categories.get(r),
                            Arrays.stream(sortedIndices).limit(10).mapToObj(col -> {
                                Tensor l = vectors.get(categories.get(r));
                                Tensor r2 = vectors.get(categories.get(col));
                                return String.format("<td>%.4f</td>",
                                        (null == l || null == r2) ? 0 : Math.acos(l.dot(r2)));
                            }).reduce((a, b) -> a + b).get());
                }).reduce((a, b) -> a + b).orElse("")));
    });

    log.setFrontMatterProperty("status", "OK");
}

From source file:net.dempsy.distconfig.apahcevfs.ApacheVfsPropertiesStore.java

/**
 * Removes the given property keys by writing a new versioned properties file
 * that contains the current properties minus the cleared keys.
 *
 * @param props the property keys to remove
 * @return the version number of the newly written file, or -1 when no
 *         properties file exists yet
 * @throws IOException if the store cannot be read or written
 */
@Override
public int clear(final String... props) throws IOException {
    return mapChecked(() -> {
        final FileObject latest = getLatest(parentDirObj);
        if (latest == null)
            return -1;

        // Copy the current properties, then drop every requested key.
        final Properties oldProps = read(latest);
        final Properties newProps = new Properties();
        newProps.putAll(oldProps);
        Arrays.stream(props).forEach(newProps::remove);

        // Persist the result as the next version of the file.
        final FileObject next = nextFile(latest, parentDirObj);
        try (OutputStream os = next.getContent().getOutputStream()) {
            newProps.store(os, COMMENT);
        }
        // Integer.valueOf replaces the deprecated new Integer(int) constructor.
        return Integer.valueOf(getVersion(next));
    }, em).intValue();
}

From source file:com.intellij.plugins.haxe.model.HaxePackageModel.java

/**
 * Lists the immediate sub-packages of this package.
 *
 * @return one {@link HaxePackageModel} per subdirectory of this package's
 *         directory, or an empty list when the path does not resolve
 */
@NotNull
public List<HaxePackageModel> getChildren() {
    PsiDirectory directory = root.access(path);
    if (directory == null) {
        // Path does not resolve to a directory: no child packages.
        return Collections.emptyList();
    }
    return Arrays.stream(directory.getSubdirectories())
            .map(sub -> new HaxePackageModel(root, sub.getName(), this))
            .collect(Collectors.toList());
}

From source file:ddf.lib.OwaspDiffRunner.java

/**
 * Determines which Maven modules have a pom.xml that differs between the
 * current branch and master, printing each changed pom as it is found.
 *
 * @return the changed modules' paths, comma-joined (the root pom maps to the
 *         path separator)
 * @throws OwaspDiffRunnerException if the git commands cannot be executed
 */
private static String getModulesOfChangedPoms() throws OwaspDiffRunnerException {
    String changedFiles;
    String currentBranchName;

    // Hoisted: the platform line separator is used three times below.
    final String lineSeparator = System.getProperty("line.separator");
    try {
        // Decode git output as UTF-8 explicitly; the charset-less
        // IOUtils.toString overload is deprecated and platform-dependent.
        currentBranchName = IOUtils.toString(runTime.exec(GIT_SHORT_BRANCH_NAME_COMMAND).getInputStream(),
                java.nio.charset.StandardCharsets.UTF_8)
                .replace(File.separator, "").replace(lineSeparator, "");

        changedFiles = IOUtils.toString(
                runTime.exec(GIT_DIFF_NAME_COMMAND + currentBranchName + "..master").getInputStream(),
                java.nio.charset.StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw new OwaspDiffRunnerException(OwaspDiffRunnerException.UNABLE_TO_RETRIEVE_GIT_INFO, e);
    }

    System.out.println("Comparing commits of branch " + currentBranchName + " to master. Changed poms: ");

    return Arrays.stream(changedFiles.split(lineSeparator))
            .filter(path -> path.endsWith("pom.xml")).peek(System.out::println)
            .map(path -> path.endsWith(File.separator + "pom.xml")
                    ? path.replace(File.separator + "pom.xml", "")
                    : path.replace("pom.xml", File.separator)) //Special case for the root pom, change path pom.xml -> /
            .collect(Collectors.joining(","));
}

From source file:eu.itesla_project.online.tools.PrintOnlineWorkflowViolationsTool.java

/**
 * Prints the limit violations recorded for an online workflow, optionally
 * filtered by violation type and restricted to a given state and/or step,
 * either as a console table or to a CSV file (when the "csv" option is set).
 *
 * @param line the parsed command line options
 * @throws Exception if the online database cannot be opened or queried
 */
@Override
public void run(CommandLine line) throws Exception {
    OnlineConfig config = OnlineConfig.load();
    String workflowId = line.getOptionValue("workflow");
    // Optional filter built from a comma-separated list of violation type names;
    // null means "no filtering".
    final LimitViolationFilter violationsFilter = (line.hasOption("type"))
            ? new LimitViolationFilter(Arrays.stream(line.getOptionValue("type").split(","))
                    .map(LimitViolationType::valueOf).collect(Collectors.toSet()), 0)
            : null;
    TableFormatterConfig tableFormatterConfig = TableFormatterConfig.load();
    Column[] tableColumns = { new Column("State"), new Column("Step"), new Column("Equipment"),
            new Column("Type"), new Column("Value"), new Column("Limit"), new Column("Limit reduction"),
            new Column("Voltage Level") };
    // NOTE(review): name reads "cvs" but this holds the CSV output path; null -> console output.
    Path cvsOutFile = (line.hasOption("csv")) ? Paths.get(line.getOptionValue("csv")) : null;
    try (OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create()) {
        if (line.hasOption("state") && line.hasOption("step")) {
            // Both state and step given: print violations for that single combination.
            Integer stateId = Integer.parseInt(line.getOptionValue("state"));
            OnlineStep step = OnlineStep.valueOf(line.getOptionValue("step"));
            List<LimitViolation> violationsByStateAndStep = onlinedb.getViolations(workflowId, stateId, step);
            if (violationsByStateAndStep != null && !violationsByStateAndStep.isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    printStateStepViolations(formatter, stateId, step, violationsByStateAndStep,
                            violationsFilter);
                }
            } else {
                System.out.println("\nNo violations for workflow " + workflowId + ", step " + step.name()
                        + " and state " + stateId);
            }
        } else if (line.hasOption("state")) {
            // Only state given: print that state's violations for every step,
            // in step order (TreeMap sorts the keys).
            Integer stateId = Integer.parseInt(line.getOptionValue("state"));
            Map<OnlineStep, List<LimitViolation>> stateViolations = onlinedb.getViolations(workflowId, stateId);
            if (stateViolations != null && !stateViolations.keySet().isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    new TreeMap<>(stateViolations)
                            .forEach((onlineStep, violations) -> printStateStepViolations(formatter, stateId,
                                    onlineStep, violations, violationsFilter));
                }
            } else {
                System.out.println("\nNo violations for workflow " + workflowId + " and state " + stateId);
            }
        } else if (line.hasOption("step")) {
            // Only step given: print that step's violations for every state, in state order.
            OnlineStep step = OnlineStep.valueOf(line.getOptionValue("step"));
            Map<Integer, List<LimitViolation>> stepViolations = onlinedb.getViolations(workflowId, step);
            if (stepViolations != null && !stepViolations.keySet().isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    new TreeMap<>(stepViolations)
                            .forEach((stateId, violations) -> printStateStepViolations(formatter, stateId, step,
                                    violations, violationsFilter));
                }
            } else {
                System.out.println("\nNo violations for workflow " + workflowId + " and step " + step);
            }
        } else {
            // Neither state nor step given: print every violation of the workflow,
            // ordered by state then step.
            Map<Integer, Map<OnlineStep, List<LimitViolation>>> workflowViolations = onlinedb
                    .getViolations(workflowId);
            if (workflowViolations != null && !workflowViolations.keySet().isEmpty()) {
                try (TableFormatter formatter = PrintOnlineWorkflowUtils.createFormatter(tableFormatterConfig,
                        cvsOutFile, TABLE_TITLE, tableColumns)) {
                    new TreeMap<>(workflowViolations).forEach((stateId, stateViolations) -> {
                        if (stateViolations != null) {
                            new TreeMap<>(stateViolations)
                                    .forEach((step, violations) -> printStateStepViolations(formatter, stateId,
                                            step, violations, violationsFilter));
                        }
                    });
                }
            } else {
                System.out.println("\nNo violations for workflow " + workflowId);
            }
        }
    }
}