Example usage for java.util Arrays stream

List of usage examples for java.util Arrays stream

Introduction

This page lists usage examples for java.util.Arrays.stream.

Prototype

public static DoubleStream stream(double[] array) 

Source Link

Document

Returns a sequential DoubleStream with the specified array as its source.

Usage

From source file:com.github.springfox.loader.SpringfoxLoaderConfig.java

/**
 * Converts the loader's configured {@code @Extension} annotations into Springfox
 * {@link VendorExtension} instances.
 *
 * @return one {@link ObjectVendorExtension} per configured extension, or an empty
 *         list when only the annotation's default (nameless) placeholder is present.
 */
private List<VendorExtension> getVendorExtensions() {
    Extension[] extensions = springfoxLoader.extensions();
    // A single extension with an empty name is the annotation's default value,
    // i.e. nothing was actually configured.
    if (extensions.length == 1 && StringUtils.isEmpty(extensions[0].name())) {
        return Collections.emptyList();
    }

    return Arrays.stream(extensions).map(this::toVendorExtension).collect(Collectors.toList());
}

/** Builds an {@link ObjectVendorExtension} carrying one property per {@code @ExtensionProperty}. */
private ObjectVendorExtension toVendorExtension(Extension extension) {
    ObjectVendorExtension vendorExtension = new ObjectVendorExtension(extension.name());
    Arrays.stream(extension.properties())
            .map(property -> new StringVendorExtension(property.name(), property.value()))
            .forEach(vendorExtension::addProperty);
    return vendorExtension;
}

From source file:com.github.tmyroadctfig.icloud4j.PhotosService.java

/**
 * Gets a list of albums.
 *
 * @return the list of albums.
 */
public List<PhotosFolder> getAlbums() {
    try {
        // Query the photos "/folders" endpoint with the standard auth/query parameters.
        URIBuilder builder = new URIBuilder(endPoint + "/folders");
        populateUriParameters(builder);

        HttpGet request = new HttpGet(builder.build());
        iCloudService.populateRequestHeadersParameters(request);

        String json = iCloudService.getHttpClient().execute(request, new StringResponseHandler());
        PhotosAlbumsResponse response = ICloudUtils.fromJson(json, PhotosAlbumsResponse.class);

        // The endpoint returns folders of several types; only albums are of interest here.
        return Arrays.stream(response.folders)
                .filter(folder -> "album".equals(folder.type))
                .collect(Collectors.toList());
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}

From source file:com.simiacryptus.mindseye.test.unit.SerializationTest.java

/**
 * Verifies that the layer survives a JSON serialization round trip and, when the JSON
 * stage fails or produces large output, a zip round trip at every {@link SerialPrecision}.
 *
 * @param log            notebook used to render the serialization report
 * @param layer          the layer under test
 * @param inputPrototype not used by this test
 * @return always {@code null}; round-trip violations surface as {@link AssertionError}
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer,
        final Tensor... inputPrototype) {
    log.h1("Serialization");
    log.p("This apply will demonstrate the key's JSON serialization, and verify deserialization integrity.");

    String prettyPrint = "";
    log.h2("Raw Json");
    try {
        prettyPrint = log.eval(() -> {
            final JsonObject json = layer.getJson();
            @Nonnull
            final Layer echo = Layer.fromJson(json);
            if (echo == null)
                throw new AssertionError("Failed to deserialize");
            if (layer == echo)
                throw new AssertionError("Serialization did not copy");
            if (!layer.equals(echo))
                throw new AssertionError("Serialization not equal");
            echo.freeRef();
            return new GsonBuilder().setPrettyPrinting().create().toJson(json);
        });
        @Nonnull
        String filename = layer.getClass().getSimpleName() + "_" + log.getName() + ".json";
        log.p(log.file(prettyPrint, filename,
                String.format("Wrote Model to %s; %s characters", filename, prettyPrint.length())));
    } catch (RuntimeException | OutOfMemoryError e) {
        // JSON round-trip problems are reported but do not abort the remaining checks.
        e.printStackTrace();
        Util.sleep(1000);
    }
    log.p("");
    @Nonnull
    Object outSync = new Object();
    // Exercise the zip round trip when the JSON stage failed (empty result) or the model is large.
    if (prettyPrint.isEmpty() || prettyPrint.length() > 1024 * 64)
        Arrays.stream(SerialPrecision.values()).parallel().forEach(precision -> {
            try {
                @Nonnull
                File file = new File(log.getResourceDir(), log.getName() + "_" + precision.name() + ".zip");
                layer.writeZip(file, precision);
                @Nonnull
                final Layer echo = Layer.fromZip(new ZipFile(file));
                getModels().put(precision, echo);
                // Notebook output is shared across the parallel stream; serialize report sections.
                synchronized (outSync) {
                    log.h2(String.format("Zipfile %s", precision.name()));
                    log.p(log.link(file, String.format("Wrote Model apply %s precision to %s; %.3fMiB bytes",
                            precision, file.getName(), file.length() * 1.0 / (0x100000))));
                }
                if (!isPersist())
                    file.delete();
                if (echo == null)
                    throw new AssertionError("Failed to deserialize");
                if (layer == echo)
                    throw new AssertionError("Serialization did not copy");
                if (!layer.equals(echo))
                    throw new AssertionError("Serialization not equal");
            } catch (RuntimeException | OutOfMemoryError | IOException e) {
                // ZipException is an IOException, so the four former catch blocks collapse here.
                e.printStackTrace();
            }
        });

    return null;
}

From source file:it.unibo.alchemist.boundary.monitors.SAPERENearestNodeSampler.java

@Override
protected double[] getProperties(final Environment<List<? extends ILsaMolecule>> env, final Position pos,
        final Reaction<List<? extends ILsaMolecule>> r, final Time time, final long step) {
    // Samples the configured LSA properties from the node nearest to pos,
    // caching the position -> nearest-node mapping across calls.
    // Invalidate the cache whenever nodes may move or the environment instance changed.
    if (mobility || !HashUtils.pointerEquals(env, envCache)) {
        pnCache.clear();
        envCache = env;
    }
    // Re-tokenize the property list only when the configured property string changed.
    if (!HashUtils.pointerEquals(propertyCache, property)) {
        propertyCache = property;
        properties.clear();
        final StringTokenizer tk = new StringTokenizer(propertyCache, propertySeparators);
        while (tk.hasMoreElements()) {
            properties.add(tk.nextToken());
        }
    }
    // Rebuild the molecule template only when the LSA definition changed.
    if (!HashUtils.pointerEquals(lsaCache, lsa)) {
        lsaCache = lsa;
        mol = sapere.createMolecule(lsaCache);
    }
    if (env.getNodesNumber() > 0 && mol != null) {
        final double[] res = new double[properties.size()];
        int i = 0;
        Node<List<? extends ILsaMolecule>> node = pnCache.get(pos);
        if (node == null) {
            // Initial search radius derived from the environment size.
            // NOTE(review): reduce(1, (x, y) -> max(x, y) / sqrt(n)) scales the running
            // maximum dimension by 1/sqrt(nodes) at each step — confirm this is the
            // intended density heuristic rather than max(dims) / sqrt(n) applied once.
            double range = Arrays.stream(env.getSize()).reduce(1,
                    (x, y) -> FastMath.max(x, y) / FastMath.sqrt(env.getNodesNumber()));
            Collection<Node<List<? extends ILsaMolecule>>> neighs = env.getNodesWithinRange(pos, range);
            // Double the radius until at least one candidate node is found.
            while (neighs.isEmpty()) {
                range *= 2;
                neighs = env.getNodesWithinRange(pos, range);
            }
            // Pick the candidate closest to pos and cache it for subsequent samples.
            node = neighs.stream().reduce(
                    (n1, n2) -> env.getPosition(n1).getDistanceTo(pos) < env.getPosition(n2).getDistanceTo(pos)
                            ? n1
                            : n2)
                    .get();
            pnCache.put(pos, node);
        }
        for (final String prop : properties) {
            /*
             * Take the nearest node
             */
            res[i++] = sapere.getProperty(node, mol, prop);
        }
        return res;
    }
    // No nodes in the environment or no molecule template: nothing to sample.
    return new double[0];
}

From source file:it.publisys.ims.discovery.job.EntityTasks.java

/**
 * Registers the Service Provider entities from the given descriptors with the entity
 * manager, then evicts any previously registered entity that is no longer present.
 */
private void loadEntities(EntityDescriptorType[] entityDescriptorTypes) {
    Resource guardResource = new ClassPathResource(METADATA_DIR + "/" + PROTECTEDAPP_GUARD_XML);

    try {
        final EntityManager entityManager = loadEntityManager(Guanxi.CONTEXT_ATTR_IDP_ENTITY_FARM,
                guardResource.getFile().getAbsolutePath());

        // Entity IDs registered in this batch; anything absent from this list is stale.
        List<String> freshEntityIDs = new ArrayList<>();

        for (EntityDescriptorType entityDescriptor : entityDescriptorTypes) {
            // Only descriptors that expose an SP SSO role are of interest here.
            if (entityDescriptor.getSPSSODescriptorArray().length == 0) {
                continue;
            }
            log.info("Loading SP metadata for : " + entityDescriptor.getEntityID());
            try {
                Metadata metadataHandler = entityManager.createNewEntityHandler();
                metadataHandler.setPrivateData(entityDescriptor);

                entityManager.addMetadata(metadataHandler);

                freshEntityIDs.add(entityDescriptor.getEntityID());
            } catch (GuanxiException ge) {
                log.warn(
                        String.format("Non sono riuscito a caricari i metadati del Service Provider %s",
                                entityDescriptor.getEntityID()),
                        ge);
            }
        }

        // Remove entities that disappeared from the incoming metadata.
        for (String oldEntityID : entityManager.getEntityIDs()) {
            if (!freshEntityIDs.contains(oldEntityID)) {
                entityManager.removeMetadata(oldEntityID);
            }
        }
    } catch (Exception ge) {
        log.error("Could not get an entity handler from the metadata manager", ge);
    }
}

From source file:fr.zcraft.MultipleInventories.importers.ImportProcess.java

/**
 * Starts the import process: kicks all online players, flips the process into the
 * running state, queues every known offline player, and schedules the import task.
 */
public void begin() {
    // Abort early when the importer's external dependency is unavailable.
    if (!importer.canImport()) {
        log(I.t("{ce}The {0} importer cannot run, probably due to a dependency missing. Aborting.",
                importerName));
        return;
    }

    importListener = new ImportListener();
    ZLib.registerEvents(importListener);

    // Empty the server before touching player data; the ETA is computed per kick.
    Bukkit.getOnlinePlayers().forEach(player -> {
        String kickMessage = I.t("{ce}Maintenance started, please come back later.") + "\n\n"
                + I.t("{gray}ETA: {0}", getHumanFriendlyETA());
        player.kickPlayer(kickMessage);
    });

    started = true;
    running = true;
    importer.onBegin();

    // Every player the server has ever seen is a candidate for import.
    Arrays.stream(Bukkit.getOfflinePlayers()).forEach(offlinePlayer -> importQueue.offer(offlinePlayer));

    playersCountToProcess = importQueue.size();

    worldGroups.clear();
    worldGroups.putAll(importer.getWorldGroups());

    log(I.tn("{cs}Starting import, processing {0} player every {1} ticks, {2} players total.",
            "{cs}Starting import, processing {0} players every {1} ticks, {2} players total.", PLAYERS_PER_TICK,
            PLAYERS_PER_TICK, RUN_EVERY_N_TICKS, playersCountToProcess));

    PluginLogger.info(I.t("Groups found by the importer:"));
    worldGroups.forEach((group, worlds) -> PluginLogger.info(I.tn("- {0}, with world: {1}",
            "- {0}, with worlds: {1}", worlds.size(), group, StringUtils.join(worlds, ", "))));

    RunTask.timer(new ImportRunnable(), 2L, RUN_EVERY_N_TICKS);
}

From source file:org.pdfsam.ui.dashboard.preference.PreferenceConfig.java

/**
 * Builds the combo box backing the "news display policy" preference, with one entry
 * per {@link NewsPolicy} value, preselected from the user's current setting.
 */
@Bean(name = "newsDisplayPolicy")
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public PreferenceComboBox<KeyStringValueItem<String>> newsDisplayPolicy() {
    PreferenceComboBox<KeyStringValueItem<String>> combo = new PreferenceComboBox<>(
            StringUserPreference.NEWS_POLICY, userContext);
    combo.setId("newsPolicy");

    // One selectable entry per policy, labelled with its human-friendly name.
    combo.getItems().addAll(Arrays.stream(NewsPolicy.values())
            .map(policy -> keyValue(policy.toString(), policy.friendlyName()))
            .collect(Collectors.toList()));

    combo.setValue(keyEmptyValue(userContext.getNewsPolicy().toString()));
    return combo;
}

From source file:org.keycloak.testsuite.util.Matchers.java

/**
 * Matches when the SAML status of a {@link StatusResponseType} instance is equal to the given code.
 * @param expectedStatusCode/*from   www  .j  a v  a2s  . co m*/
 * @return
 */
public static <T> Matcher<SAML2Object> isSamlStatusResponse(JBossSAMLURIConstants... expectedStatus) {
    return allOf(instanceOf(StatusResponseType.class), new SamlStatusResponseTypeMatcher(
            Arrays.stream(expectedStatus).map(JBossSAMLURIConstants::getUri).toArray(i -> new URI[i])));
}

From source file:dk.dma.dmiweather.service.FTPLoader.java

/**
 * Check for files every 10 minutes. New files are given to the gridWeatherService
 */
@Scheduled(initialDelay = 1000, fixedDelay = 10 * 60 * 1000)
public void checkFiles() {
    log.info("Checking FTP files at DMI.");
    FTPClient client = new FTPClient();
    try {
        client.setDataTimeout(20 * 1000);
        client.setBufferSize(1024 * 1024);
        client.connect(hostname);
        // Anonymous login with an empty password.
        if (client.login("anonymous", "")) {
            try {
                client.enterLocalPassiveMode();
                client.setFileType(FTP.BINARY_FILE_TYPE);
                for (ForecastConfiguration configuration : configurations) {
                    // DMI creates a Newest link once all files have been created
                    if (client.changeWorkingDirectory(configuration.getFolder() + "/Newest")) {
                        if (client.getReplyCode() != 250) {
                            log.error("Did not get reply 250 as expected, got {} ", client.getReplyCode());
                        }
                        // Resolve the link's target directory name to detect a new data set.
                        String workingDirectory = new File(client.printWorkingDirectory()).getName();
                        String previousNewest = newestDirectories.get(configuration);
                        if (!workingDirectory.equals(previousNewest)) {
                            // a new directory for this configuration is available on the server
                            FTPFile[] listFiles = client.listFiles();
                            // Keep only the files matching this configuration's name pattern.
                            List<FTPFile> files = Arrays.stream(listFiles)
                                    .filter(f -> configuration.getFilePattern().matcher(f.getName()).matches())
                                    .collect(Collectors.toList());

                            try {
                                Map<File, Instant> localFiles = transferFilesIfNeeded(client, workingDirectory,
                                        files);
                                gridWeatherService.newFiles(localFiles, configuration);
                            } catch (IOException e) {
                                // Best-effort: the next scheduled run retries the transfer.
                                log.warn("Unable to get new weather files from DMI", e);
                            }

                            // Delete the previous local copy before recording the new directory.
                            if (previousNewest != null) {
                                File previous = new File(tempDirLocation, previousNewest);
                                deleteRecursively(previous);
                            }
                            newestDirectories.put(configuration, workingDirectory);
                        }

                    } else {
                        gridWeatherService.setErrorMessage(ErrorMessage.FTP_PROBLEM);
                        log.error("Unable to change ftp directory to {}", configuration.getFolder());
                    }
                }
            } finally {
                // Always attempt a clean logout, even when processing above failed.
                try {
                    client.logout();
                } catch (IOException e) {
                    log.info("Failed to logout", e);
                }
            }
        } else {
            gridWeatherService.setErrorMessage(ErrorMessage.FTP_PROBLEM);
            log.error("Unable to login to {}", hostname);
        }

    } catch (IOException e) {
        gridWeatherService.setErrorMessage(ErrorMessage.FTP_PROBLEM);
        log.error("Unable to update weather files from DMI", e);
    } finally {
        // Best-effort disconnect regardless of outcome.
        try {
            client.disconnect();
        } catch (IOException e) {
            log.info("Failed to disconnect", e);
        }
    }
    log.info("Check completed.");
}