Example usage for java.util Collection stream

List of usage examples for java.util Collection stream

Introduction

On this page you can find example usages for java.util.Collection.stream().

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
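Before the project examples below, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any of the projects) showing stream() used to filter, transform, and collect a collection:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

public class CollectionStreamExample {
    public static void main(String[] args) {
        // Any Collection implementation can act as the stream source
        Collection<String> names = Arrays.asList("alpha", "beta", "gamma");

        // stream() returns a sequential Stream with this collection as its source
        List<String> longerNames = names.stream()
                .filter(name -> name.length() > 4)   // keep names longer than four characters
                .map(String::toUpperCase)            // transform each element
                .collect(Collectors.toList());       // materialize the result

        System.out.println(longerNames); // prints [ALPHA, GAMMA]
    }
}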

Usage

From source file:com.facebook.presto.accumulo.index.ColumnCardinalityCache.java

/**
 * Gets the column cardinality for all of the given range values. May reach out to the
 * metrics table in Accumulo to retrieve new cache elements.
 *
 * @param schema Table schema
 * @param table Table name
 * @param auths Scan authorizations
 * @param family Accumulo column family
 * @param qualifier Accumulo column qualifier
 * @param colValues All range values to summarize for the cardinality
 * @return The cardinality of the column
 */
public long getColumnCardinality(String schema, String table, Authorizations auths, String family,
        String qualifier, Collection<Range> colValues) throws ExecutionException {
    LOG.debug("Getting cardinality for %s:%s", family, qualifier);

    // Collect all exact Accumulo Ranges, i.e. single value entries vs. a full scan
    Collection<CacheKey> exactRanges = colValues.stream().filter(ColumnCardinalityCache::isExact)
            .map(range -> new CacheKey(schema, table, family, qualifier, range, auths))
            .collect(Collectors.toList());

    LOG.debug("Column values contain %s exact ranges of %s", exactRanges.size(), colValues.size());

    // Sum the cardinalities for the exact-value Ranges
    // This is where the reach-out to Accumulo occurs for all Ranges that have not
    // previously been fetched
    long sum = cache.getAll(exactRanges).values().stream().mapToLong(Long::longValue).sum();

    // If these collection sizes are not equal,
    // then there is at least one non-exact range
    if (exactRanges.size() != colValues.size()) {
        // for each range in the column value
        for (Range range : colValues) {
            // if this range is not exact
            if (!isExact(range)) {
                // Then get the value for this range using the single-value cache lookup
                sum += cache.get(new CacheKey(schema, table, family, qualifier, range, auths));
            }
        }
    }

    return sum;
}

From source file:io.neba.core.resourcemodels.registration.ModelRegistryConsolePlugin.java

private Collection<OsgiModelSource<?>> resolveModelTypesFor(String resourcePath) {
    Collection<OsgiModelSource<?>> types = new ArrayList<>(64);

    Optional<ResourceResolver> resolver = getResourceResolver();

    if (resolver.isPresent() && !isEmpty(resourcePath)) {
        try {
            Resource resource = resolver.get().getResource(resourcePath);
            if (resource == null) {
                return types;
            }
            Collection<LookupResult> lookupResults = this.registry.lookupAllModels(resource);
            if (lookupResults == null) {
                return types;
            }
            types.addAll(lookupResults.stream().map(LookupResult::getSource).collect(Collectors.toList()));
        } finally {
            resolver.get().close();
        }
    }
    return types;
}

From source file:com.thinkbiganalytics.feedmgr.service.feed.FeedModelTransform.java

/**
 * Transforms the specified Metadata feeds to Feed Manager feeds.
 *
 * @param domain the Metadata feeds
 * @return the Feed Manager feeds
 */
@Nonnull
public List<FeedMetadata> domainToFeedMetadata(@Nonnull final Collection<? extends Feed> domain) {
    final Map<Category, Set<UserFieldDescriptor>> userFieldMap = Maps.newHashMap();
    return domain.stream().map(f -> domainToFeedMetadata(f, userFieldMap)).collect(Collectors.toList());
}

From source file:eu.ggnet.dwoss.misc.op.listings.SalesListingProducerOperation.java

/**
 * Create a filejacket from a collection of lines that are filtered by configuration parameters.
 * Lines are filtered by brand and group.
 * <p>
 * @param config configuration for filtering and file creation
 * @param all    lines to be considered
 * @return a filejacket from a collection of lines that are filtered by configuration parameters.
 */
private FileJacket createListing(ListingConfiguration config, Collection<StackedLine> all) {
    try {
        SortedSet<StackedLine> filtered = all.stream()
                .filter(line -> (config.getAllBrands().contains(line.getBrand())
                        && config.getGroups().contains(line.getGroup())))
                .collect(Collectors.toCollection(TreeSet::new));
        if (filtered.isEmpty())
            return null;
        L.info("Creating listing {} with {} lines", config.getName(), filtered.size());
        JRDataSource datasource = new JRBeanCollectionDataSource(filtered);
        JasperPrint jasperPrint = JasperFillManager.fillReport(config.getJasperTemplateFile(),
                config.toReportParamters(), datasource);
        byte[] pdfContend = JasperExportManager.exportReportToPdf(jasperPrint);
        return new FileJacket(config.getFilePrefix() + config.getName(), ".pdf", pdfContend);
    } catch (JRException ex) {
        throw new RuntimeException(ex);
    }
}

From source file:org.ligoj.app.plugin.vm.azure.VmAzurePluginResource.java

/**
 * Fill the given VM with its network details.
 */
private void getNetworkDetails(final String name, final Map<String, String> parameters,
        final AzureCurlProcessor processor, final Collection<AzureVmNicRef> nicRefs,
        final Collection<VmNetwork> networks) {
    nicRefs.stream().map(
            nicRef -> getVmResource(name, parameters, processor, nicRef.getId() + "?api-version=2017-09-01"))
            // Parse the NIC JSON data and get the details
            .forEach(nicJson -> getNicDetails(name, parameters, processor, readValue(nicJson, AzureNic.class),
                    networks));
}

From source file:io.gravitee.management.service.impl.ApplicationServiceImpl.java

@Override
public Set<MemberEntity> getMembers(String applicationId,
        io.gravitee.management.model.MembershipType membershipType) {
    try {
        LOGGER.debug("Get members for application {}", applicationId);

        Collection<Membership> membersRepo = applicationRepository.getMembers(applicationId,
                (membershipType == null) ? null : MembershipType.valueOf(membershipType.toString()));

        final Set<MemberEntity> members = new HashSet<>(membersRepo.size());

        members.addAll(membersRepo.stream().map(member -> convert(member)).collect(Collectors.toSet()));

        return members;
    } catch (TechnicalException ex) {
        LOGGER.error("An error occurs while trying to get members for application {}", applicationId, ex);
        throw new TechnicalManagementException(
                "An error occurs while trying to get members for application " + applicationId, ex);
    }
}

From source file:io.neba.core.resourcemodels.registration.ModelRegistry.java

/**
 * Finds all {@link OsgiModelSource model sources} representing models for the given
 * {@link Resource}.
 *
 * @param resource            must not be <code>null</code>.
 * @param compatibleType      can be <code>null</code>. If provided, only models
 *                            compatible to the given type are returned.
 * @param resolveMostSpecific whether to resolve only the most specific models.
 * @return never <code>null</code> but rather an empty collection.
 */
private Collection<LookupResult> resolveModelSources(Resource resource, Class<?> compatibleType,
        boolean resolveMostSpecific) {
    Collection<LookupResult> sources = new ArrayList<>(64);
    for (final String resourceType : mappableTypeHierarchyOf(resource)) {
        Collection<OsgiModelSource<?>> allSourcesForType = this.typeNameToModelSourcesMap.get(resourceType);
        Collection<OsgiModelSource<?>> sourcesForCompatibleType = filter(allSourcesForType, compatibleType);
        if (sourcesForCompatibleType != null && !sourcesForCompatibleType.isEmpty()) {
            sources.addAll(sourcesForCompatibleType.stream()
                    .map(source -> new LookupResult(source, resourceType)).collect(Collectors.toList()));
            if (resolveMostSpecific) {
                break;
            }
        }
    }
    return unmodifiableCollection(sources);
}

From source file:org.sglover.entities.dao.cassandra.CassandraEntitiesDAO.java

@Override
public Stream<Entity<String>> getOrgs(Node node, int skip, int maxItems) {
    Collection<Entity<String>> ret = new HashSet<>();

    ResultSet rs = cassandraSession.getCassandraSession()
            .execute(getOrgsByNodeStatement.bind(node.getNodeId(), node.getNodeVersion()));
    for (Row row : rs) {
        String name = row.getString("org");
        Entity<String> entity = new Entity<String>(EntityType.orgs, name);
        ret.add(entity);
    }

    return ret.stream();
}

From source file:com.intellij.lang.jsgraphql.endpoint.ide.annotator.JSGraphQLEndpointErrorAnnotator.java

private void annotateRedeclarations(@NotNull JSGraphQLEndpointNamedTypeDef element, PsiFile importingFile,
        Key<Multimap<String, JSGraphQLEndpointNamedTypeDefinition>> key, @NotNull AnnotationHolder holder) {
    final Key<Boolean> annotationKey = Key
            .create(element.getContainingFile().getName() + ":" + element.getTextOffset());
    if (holder.getCurrentAnnotationSession().getUserData(annotationKey) == Boolean.TRUE) {
        // already annotated about redeclaration
        return;
    }
    Multimap<String, JSGraphQLEndpointNamedTypeDefinition> knownDefinitionsByName = holder
            .getCurrentAnnotationSession().getUserData(key);
    if (knownDefinitionsByName == null) {
        knownDefinitionsByName = HashMultimap.create();
        for (JSGraphQLEndpointNamedTypeDefinition definition : JSGraphQLEndpointPsiUtil
                .getKnownDefinitions(importingFile, JSGraphQLEndpointNamedTypeDefinition.class, true, null)) {
            if (definition.getNamedTypeDef() != null) {
                knownDefinitionsByName.put(definition.getNamedTypeDef().getText(), definition);
            }
        }
    }
    final String typeName = element.getText();
    final Collection<JSGraphQLEndpointNamedTypeDefinition> typesWithSameName = knownDefinitionsByName
            .get(typeName);
    if (typesWithSameName != null && typesWithSameName.size() > 1) {
        final Set<String> files = typesWithSameName.stream()
                .map(t -> "'" + t.getContainingFile().getName() + "'").collect(Collectors.toSet());
        holder.createErrorAnnotation(element,
                "'" + typeName + "' is redeclared in " + StringUtils.join(files, ", "));
        holder.getCurrentAnnotationSession().putUserData(annotationKey, Boolean.TRUE);
    }
}

From source file:org.sglover.entities.dao.cassandra.CassandraEntitiesDAO.java

@Override
public Stream<Entity<String>> getNames(Node node, int skip, int maxItems) {
    Collection<Entity<String>> ret = new HashSet<>();

    ResultSet rs = cassandraSession.getCassandraSession()
            .execute(getNamesByNodeStatement.bind(node.getNodeId(), node.getNodeVersion()));
    for (Row row : rs) {
        String name = row.getString("name");
        Entity<String> entity = new Entity<String>(EntityType.names, name);
        ret.add(entity);
    }

    return ret.stream();
}