Example usage for java.util Collection stream

Introduction

On this page you can find example usage for java.util Collection stream.

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
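
As a quick, self-contained illustration (the class, variable, and value names below are made up for this sketch and do not come from the examples that follow), a typical call chains stream() with an intermediate operation and a terminal collector:

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class CollectionStreamExample {
    public static void main(String[] args) {
        // Any Collection (List, Set, Queue, ...) exposes stream()
        List<String> names = Arrays.asList("alice", "bob", "carol", "bob");

        // Transform the elements sequentially and gather them into a Set
        Set<String> upperCase = names.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toSet());

        System.out.println(upperCase); // e.g. [ALICE, BOB, CAROL] (order not guaranteed)
    }
}

Most of the examples below follow this same pattern: stream(), one or more intermediate operations such as map or filter, and a terminal operation such as collect, forEach, anyMatch, or findFirst.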

Usage

From source file: com.romeikat.datamessie.core.base.dao.impl.DocumentDao.java

public Map<RawContent, Document> getForRawContents(final SharedSessionContract ssc,
        final Collection<RawContent> rawContents) {
    // Query for documents
    final Set<Long> documentIds = rawContents.stream().map(c -> c.getDocumentId()).collect(Collectors.toSet());
    final Map<Long, Document> documentsById = getIdsWithEntities(ssc, documentIds);

    // Map rawContents -> documents
    final Map<RawContent, Document> result = Maps.newHashMapWithExpectedSize(rawContents.size());
    for (final RawContent rawContent : rawContents) {
        final Document document = documentsById.get(rawContent.getDocumentId());
        result.put(rawContent, document);
    }
    return result;
}

From source file: io.klerch.alexa.state.handler.AlexaSessionStateHandler.java

/**
 * {@inheritDoc}
 */
@Override
public void writeValues(final Collection<AlexaStateObject> stateObjects) throws AlexaStateException {
    Validate.notNull(stateObjects, "List of state objects to write to persistence store must not be null.");
    stateObjects.stream().filter(o -> AlexaScope.SESSION.includes(o.getScope()))
            .forEach(stateObject -> session.setAttribute(stateObject.getId(), stateObject.getValue()));
}

From source file: com.ikanow.aleph2.analytics.storm.utils.StormControllerUtil.java

/**
 * Starts up a storm job.
 * 
 * 1. gets the storm instance from the yarn config
 * 2. Makes a mega jar consisting of:
 *    A. Underlying artefacts (system libs)
 *    B. User supplied libraries
 * 3. Submit megajar to storm with jobname of the bucket id
 * 
 * @param bucket
 * @param underlying_artefacts
 * @param yarn_config_dir
 * @param user_lib_paths
 * @param topology
 * @return
 */
public static CompletableFuture<BasicMessageBean> startJob(final IStormController storm_controller,
        final DataBucketBean bucket, final Optional<String> sub_job,
        final Collection<Object> underlying_artefacts, final Collection<String> user_lib_paths,
        final StormTopology topology, final Map<String, String> config, final String cached_jar_dir) {
    if (null == topology) {
        return CompletableFuture.completedFuture(ErrorUtils.buildErrorMessage(StormControllerUtil.class,
                "startJob", ErrorUtils.TOPOLOGY_NULL_ERROR, bucket.full_name()));
    }

    _logger.info("Retrieved user Storm config topology: spouts=" + topology.get_spouts_size() + " bolts="
            + topology.get_bolts_size() + " configs=" + config.toString());

    final Set<String> jars_to_merge = new TreeSet<String>();

    final CompletableFuture<String> jar_future = Lambdas.get(() -> {
        if (RemoteStormController.class.isAssignableFrom(storm_controller.getClass())) {
            // (This is only necessary in the remote case)

            jars_to_merge.addAll(underlying_artefacts.stream()
                    .map(artefact -> LiveInjector.findPathJar(artefact.getClass(), ""))
                    .filter(f -> !f.equals("")).collect(Collectors.toSet()));

            if (jars_to_merge.isEmpty()) { // special case: no aleph2 libs found, this is almost certainly because this is being run from eclipse...
                final GlobalPropertiesBean globals = ModuleUtils.getGlobalProperties();
                _logger.warn(
                        "WARNING: no library files found, probably because this is running from an IDE - instead taking all JARs from: "
                                + (globals.local_root_dir() + "/lib/"));
                try {
                    //... and LiveInjector doesn't work on classes ... as a backup just copy everything from "<LOCAL_ALEPH2_HOME>/lib" into there
                    jars_to_merge
                            .addAll(FileUtils
                                    .listFiles(new File(globals.local_root_dir() + "/lib/"),
                                            new String[] { "jar" }, false)
                                    .stream().map(File::toString).collect(Collectors.toList()));
                } catch (Exception e) {
                    throw new RuntimeException("In eclipse/IDE mode, directory not found: "
                            + (globals.local_root_dir() + "/lib/"));
                }
            }
            //add in the user libs
            jars_to_merge.addAll(user_lib_paths);

            //create jar
            return buildOrReturnCachedStormTopologyJar(jars_to_merge, cached_jar_dir);
        } else {
            return CompletableFuture.completedFuture("/unused/dummy.jar");
        }
    });

    //submit to storm
    @SuppressWarnings("unchecked")
    final CompletableFuture<BasicMessageBean> submit_future = Lambdas.get(() -> {
        long retries = 0;
        while (retries < MAX_RETRIES) {
            try {
                _logger.debug("Trying to submit job, try: " + retries + " of " + MAX_RETRIES);
                final String jar_file_location = jar_future.get();
                return storm_controller.submitJob(bucketPathToTopologyName(bucket, sub_job), jar_file_location,
                        topology, (Map<String, Object>) (Map<String, ?>) config);
            } catch (Exception ex) {
                if (ex instanceof AlreadyAliveException) {
                    retries++;
                    //sleep 1s, was seeing about 2s of sleep required before job successfully submitted on restart
                    try {
                        Thread.sleep(1000);
                    } catch (Exception e) {
                        final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
                        error_future.completeExceptionally(e);
                        return error_future;
                    }
                } else {
                    retries = MAX_RETRIES; //we threw some other exception, bail out
                    final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
                    error_future.completeExceptionally(ex);
                    return error_future;
                }
            }
        }
        //we maxed out our retries, throw failure
        final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
        error_future.completeExceptionally(new Exception(
                "Error submitting job, ran out of retries (previous (same name) job is probably still alive)"));
        return error_future;
    });
    return submit_future;
}

From source file: com.romeikat.datamessie.core.base.dao.impl.DocumentDao.java

public Map<CleanedContent, Document> getForCleanedContents(final SharedSessionContract ssc,
        final Collection<CleanedContent> cleanedContents) {
    // Query for documents
    final Set<Long> documentIds = cleanedContents.stream().map(c -> c.getDocumentId())
            .collect(Collectors.toSet());
    final Map<Long, Document> documentsById = getIdsWithEntities(ssc, documentIds);

    // Map cleanedContents -> documents
    final Map<CleanedContent, Document> result = Maps.newHashMapWithExpectedSize(cleanedContents.size());
    for (final CleanedContent cleanedContent : cleanedContents) {
        final Document document = documentsById.get(cleanedContent.getDocumentId());
        result.put(cleanedContent, document);
    }
    return result;
}

From source file: com.romeikat.datamessie.core.base.dao.impl.DocumentDao.java

public Map<StemmedContent, Document> getForStemmedContents(final SharedSessionContract ssc,
        final Collection<StemmedContent> stemmedContents) {
    // Query for documents
    final Set<Long> documentIds = stemmedContents.stream().map(c -> c.getDocumentId())
            .collect(Collectors.toSet());
    final Map<Long, Document> documentsById = getIdsWithEntities(ssc, documentIds);

    // Map stemmedContents -> documents
    final Map<StemmedContent, Document> result = Maps.newHashMapWithExpectedSize(stemmedContents.size());
    for (final StemmedContent stemmedContent : stemmedContents) {
        final Document document = documentsById.get(stemmedContent.getDocumentId());
        result.put(stemmedContent, document);
    }
    return result;
}

From source file: net.dv8tion.jda.core.entities.impl.TextChannelImpl.java

@Override
public RestAction<Void> deleteMessages(Collection<Message> messages) {
    Checks.notEmpty(messages, "Messages collection");

    return deleteMessagesByIds(messages.stream().map(ISnowflake::getId).collect(Collectors.toList()));
}

From source file: com.ikanow.aleph2.analytics.services.DeduplicationEnrichmentContext.java

/** A stream of object ids that should be deleted
 * @return a stream of object ids that should be deleted
 */
public Stream<JsonNode> getObjectIdsToDelete() {

    // Get a list of _ids that haven't been emitted
    final Collection<JsonNode> auto_delete = _delete_unhandled_duplicates ? _mutable_state._id_set
            : Collections.emptyList();

    return Stream.concat(_mutable_state._manual_ids_to_delete.stream(), auto_delete.stream());
}

From source file: com.haulmont.cuba.core.sys.dbupdate.DbUpdaterEngine.java

protected boolean containsIgnoringPrefix(Collection<String> strings, String s) {
    return strings.stream().anyMatch(it -> distinguishingSubstring(it).equals(distinguishingSubstring(s)));
}

From source file: ijfx.ui.explorer.DefaultFolder.java

private List<Explorable> fetchFiles(ProgressHandler progress, Void v) {

    if (progress == null)
        progress = new SilentProgressHandler();

    Timer timer = timerService.getTimer(this.getClass());
    timer.start();
    Collection<? extends ImageRecord> records = imageRecordService.getRecordsFromDirectory(file);
    timer.elapsed("record fetching");
    progress.setStatus("Reading folder...");
    List<Explorable> explorables = records.stream().map(record -> {
        File overlayJsonFile = overlayIOService.getOverlayFileFromImageFile(record.getFile());

        if (overlayJsonFile.exists())
            getObjectList().addAll(loadOverlay(record.getFile(), overlayJsonFile));
        return record;
    }).map(record -> new ImageRecordIconizer(context, record))
            .collect(Collectors.toList());

    System.out.println(String.format("%d records fetched", records.size()));
    imageRecordService.forceSave();
    return explorables;
}

From source file: at.ac.tuwien.qse.sepm.gui.controller.impl.GridViewImpl.java

private void handleUpdatePhotos(List<Photo> photos) {
    LOGGER.debug("updating {} photos in grid", photos.size());
    Platform.runLater(() -> {
        grid.updatePhotos(photos);

        // update the inspector selection
        Collection<Photo> entities = inspector.getEntities();

        boolean updated = false;
        for (Photo photo : photos) {
            Optional<Photo> entity = entities.stream().filter(p -> p.getId().equals(photo.getId())).findFirst();
            if (entity.isPresent()) {
                entities.remove(entity.get());
                entities.add(photo);
                updated = true;
            }
        }

        if (updated) {
            inspector.setEntities(entities);
        }
    });
}