Example usage for java.util List forEach

List of usage examples for java.util List forEach

Introduction

On this page you can find example usages of java.util.List.forEach.

Prototype

default void forEach(Consumer<? super T> action) 

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception.
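
Before the project-level examples below, here is a minimal, self-contained sketch of forEach in isolation. The ForEachExample class, the sample list contents, and the printing/throwing actions are illustrative assumptions, not taken from any of the source files referenced on this page.

import java.util.Arrays;
import java.util.List;

public class ForEachExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // The action runs once per element, in iteration order for a List.
        names.forEach(name -> System.out.println("Hello, " + name));

        // If the action throws, processing stops and the exception propagates to the caller.
        try {
            names.forEach(name -> {
                if (name.startsWith("b")) {
                    throw new IllegalStateException("rejected: " + name);
                }
                System.out.println("accepted: " + name);
            });
        } catch (IllegalStateException e) {
            System.out.println("forEach stopped early: " + e.getMessage());
        }
    }
}

Running this sketch prints one greeting per element, then shows that the second loop stops at "beta" because the exception thrown by the action propagates out of forEach.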

Usage

From source file:natalia.dymnikova.cluster.scheduler.impl.find.optimal.FindOptimalAddressesStrategy.java

private List<RouteWithValues> findAllWays(final Tree<List<Address>> variants,
        final List<RouteWithValues> routs) {
    if (variants.getChildren().isEmpty()) {
        if (variants.getRoot().isEmpty()) {
            return routs;
        } else {
            return variants.getRoot().stream()
                    .flatMap(address -> routs.stream().map(rout -> new RouteWithValues(rout, address)))
                    .collect(toList());
        }
    }

    final List<Tree<List<Address>>> children = variants.getChildren();
    final List<List<RouteWithValues>> bestWay = new ArrayList<>();
    children.forEach(child -> bestWay.add(findAllWays(child, routs)));
    final List<List<RouteWithValues>> allChildrenCombines = allCombines(bestWay);

    if (variants.getRoot().isEmpty()) {
        return allChildrenCombines.stream()
                .flatMap(rout -> rout.stream().map(r -> new RouteWithValues(r, null))).collect(toList());
    } else {
        return variants.getRoot().stream().flatMap(address -> allChildrenCombines.stream().map(rout -> {
            final RouteWithValues routeWithValues = new RouteWithValues(rout.get(0).getClusterMap());
            routeWithValues.setNextPoint(address);
            rout.stream().forEach(routeWithValues::setNextRoute);
            return routeWithValues;
        })).collect(toList());
    }
}

From source file:com.thoughtworks.go.server.service.RulesService.java

public boolean validateSecretConfigReferences(ScmMaterial scmMaterial) {
    List<CaseInsensitiveString> pipelines = goConfigService.pipelinesWithMaterial(scmMaterial.getFingerprint());

    HashMap<CaseInsensitiveString, StringBuilder> pipelinesWithErrors = new HashMap<>();
    pipelines.forEach(pipelineName -> {
        MaterialConfig materialConfig = goConfigService.findPipelineByName(pipelineName).materialConfigs()
                .getByMaterialFingerPrint(scmMaterial.getFingerprint());
        PipelineConfigs group = goConfigService.findGroupByPipeline(pipelineName);
        ScmMaterialConfig scmMaterialConfig = (ScmMaterialConfig) materialConfig;
        SecretParams secretParams = SecretParams.parse(scmMaterialConfig.getPassword());
        secretParams.forEach(secretParam -> {
            String secretConfigId = secretParam.getSecretConfigId();
            SecretConfig secretConfig = goConfigService.getSecretConfigById(secretConfigId);
            if (secretConfig == null) {
                addError(pipelinesWithErrors, pipelineName,
                        format("Pipeline '%s' is referring to none-existent secret config '%s'.", pipelineName,
                                secretConfigId));
            } else if (!secretConfig.canRefer(group.getClass(), group.getGroup())) {
                addError(pipelinesWithErrors, pipelineName, format(
                        "Pipeline '%s' does not have permission to refer to secrets using secret config '%s'",
                        pipelineName, secretConfigId));
            }
        });
    });
    StringBuilder errorMessage = new StringBuilder();
    if (!pipelinesWithErrors.isEmpty()) {
        errorMessage.append(StringUtils.join(pipelinesWithErrors.values(), '\n').trim());
        LOGGER.error("[Material Update] Failure: {}", errorMessage.toString());
    }
    if (pipelines.size() == pipelinesWithErrors.size()) {
        throw new RulesViolationException(errorMessage.toString());
    }
    return true;
}

From source file:com.bdb.weather.display.day.ItemRenderer.java

public void loadData(List<HistoricalRecord> list) {
    windDirSeries.clear();

    List<HistoricalRecord> windy = list.stream()
            .filter((rec) -> rec.getAvgWind() != null && rec.getAvgWind().getSpeed().get() != 0.0)
            .collect(Collectors.toList());

    windy.forEach((rec) -> windDirSeries.add(rec.getAvgWind().getDirection().get(),
            rec.getTime().get(ChronoField.MINUTE_OF_DAY)));

    table.setItems(FXCollections.observableList(windy));
}

From source file:com.uber.hoodie.hadoop.realtime.HoodieRealtimeInputFormat.java

@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {

    Stream<FileSplit> fileSplits = Arrays.stream(super.getSplits(job, numSplits)).map(is -> (FileSplit) is);

    // obtain all unique parent folders for splits
    Map<Path, List<FileSplit>> partitionsToParquetSplits = fileSplits
            .collect(Collectors.groupingBy(split -> split.getPath().getParent()));
    // TODO(vc): Should we handle also non-hoodie splits here?
    Map<String, HoodieTableMetaClient> metaClientMap = new HashMap<>();
    Map<Path, HoodieTableMetaClient> partitionsToMetaClient = partitionsToParquetSplits.keySet().stream()
            .collect(Collectors.toMap(Function.identity(), p -> {
                // find if we have a metaclient already for this partition.
                Optional<String> matchingBasePath = metaClientMap.keySet().stream()
                        .filter(basePath -> p.toString().startsWith(basePath)).findFirst();
                if (matchingBasePath.isPresent()) {
                    return metaClientMap.get(matchingBasePath.get());
                }

                try {
                    HoodieTableMetaClient metaClient = getTableMetaClient(p.getFileSystem(conf), p);
                    metaClientMap.put(metaClient.getBasePath(), metaClient);
                    return metaClient;
                } catch (IOException e) {
                    throw new HoodieIOException("Error creating hoodie meta client against : " + p, e);
                }
            }));

    // for all unique split parents, obtain all delta files based on delta commit timeline, grouped on file id
    List<HoodieRealtimeFileSplit> rtSplits = new ArrayList<>();
    partitionsToParquetSplits.keySet().stream().forEach(partitionPath -> {
        // for each partition path obtain the data & log file groupings, then map back to inputsplits
        HoodieTableMetaClient metaClient = partitionsToMetaClient.get(partitionPath);
        HoodieTableFileSystemView fsView = new HoodieTableFileSystemView(metaClient,
                metaClient.getActiveTimeline());
        String relPartitionPath = FSUtils.getRelativePartitionPath(new Path(metaClient.getBasePath()),
                partitionPath);

        try {
            Stream<FileSlice> latestFileSlices = fsView.getLatestFileSlices(relPartitionPath);

            // subgroup splits again by file id & match with log files.
            Map<String, List<FileSplit>> groupedInputSplits = partitionsToParquetSplits.get(partitionPath)
                    .stream()
                    .collect(Collectors.groupingBy(split -> FSUtils.getFileId(split.getPath().getName())));
            latestFileSlices.forEach(fileSlice -> {
                List<FileSplit> dataFileSplits = groupedInputSplits.get(fileSlice.getFileId());
                dataFileSplits.forEach(split -> {
                    try {
                        List<String> logFilePaths = fileSlice.getLogFiles()
                                .map(logFile -> logFile.getPath().toString()).collect(Collectors.toList());
                        // Get the maxCommit from the last delta or compaction or commit - when bootstrapped from COW table
                        String maxCommitTime = metaClient.getActiveTimeline()
                                .getTimelineOfActions(Sets.newHashSet(HoodieTimeline.COMMIT_ACTION,
                                        HoodieTimeline.COMPACTION_ACTION, HoodieTimeline.DELTA_COMMIT_ACTION))
                                .filterCompletedInstants().lastInstant().get().getTimestamp();
                        rtSplits.add(new HoodieRealtimeFileSplit(split, logFilePaths, maxCommitTime));
                    } catch (IOException e) {
                        throw new HoodieIOException("Error creating hoodie real time split ", e);
                    }
                });
            });
        } catch (Exception e) {
            throw new HoodieException("Error obtaining data file/log file grouping: " + partitionPath, e);
        }
    });
    LOG.info("Returning a total splits of " + rtSplits.size());
    return rtSplits.toArray(new InputSplit[rtSplits.size()]);
}

From source file:uk.co.flax.biosolr.ontology.search.elasticsearch.ElasticDocumentSearch.java

@Override
public ResultsList<Document> searchDocuments(String term, int start, int rows, List<String> additionalFields,
        List<String> filters) throws SearchEngineException {
    // Build the query
    MultiMatchQueryBuilder qb = QueryBuilders.multiMatchQuery(term, DEFAULT_FIELDS).minimumShouldMatch("2<25%");
    if (additionalFields != null && additionalFields.size() > 0) {
        List<String> parsedAdditional = parseAdditionalFields(additionalFields);
        parsedAdditional.forEach(qb::field);
    }

    TopHitsBuilder topHitsBuilder = AggregationBuilders.topHits(HITS_AGGREGATION).setFrom(0).setSize(1);

    /* Build the terms aggregation, since we need a result set grouped by study ID.
     * The "top_score" sub-agg allows us to sort by the top score of the results;
     * the topHits sub-agg actually pulls back the record data, returning just the first
     * hit in the aggregation.
     * Note that we have to get _all_ rows up to and including the last required, annoyingly. */
    AggregationBuilder termsAgg = AggregationBuilders.terms(HITS_AGGREGATION).field(GROUP_FIELD)
            .order(Terms.Order.aggregation(SCORE_AGGREGATION, false)).size(start + rows)
            .subAggregation(AggregationBuilders.max(SCORE_AGGREGATION)
                    .script(new Script("_score", ScriptService.ScriptType.INLINE, "expression", null)))
            .subAggregation(topHitsBuilder);

    // Build the actual search request, including another aggregation to get
    // the number of unique study IDs returned.
    SearchRequestBuilder srb = getClient().prepareSearch(getIndexName()).setTypes(getDocumentType())
            .setQuery(qb).setSize(0).addAggregation(termsAgg)
            .addAggregation(AggregationBuilders.cardinality(COUNT_AGGREGATION).field(GROUP_FIELD));
    LOGGER.debug("ES Query: {}", srb.toString());

    SearchResponse response = srb.execute().actionGet();

    // Handle the response
    long total = ((Cardinality) (response.getAggregations().get(COUNT_AGGREGATION))).getValue();
    List<Document> docs;
    if (total == 0) {
        docs = new ArrayList<>();
    } else {
        // Build a map - need to look up annotation data separately.
        // This is because it's not in _source, and the fields() method
        // is not visible for a TopHitsBuilder.
        Map<String, Document> documentMap = new LinkedHashMap<>(rows);
        ObjectMapper mapper = buildObjectMapper();

        int lastIdx = (int) (start + rows <= total ? start + rows : total);
        StringTerms terms = response.getAggregations().get(HITS_AGGREGATION);
        List<Terms.Bucket> termBuckets = terms.getBuckets().subList(start, lastIdx);
        for (Terms.Bucket bucket : termBuckets) {
            TopHits hits = bucket.getAggregations().get(HITS_AGGREGATION);
            SearchHit hit = hits.getHits().getAt(0);
            documentMap.put(hit.getId(), extractDocument(mapper, hit));
        }

        // Populate annotation data for the document
        lookupAnnotationFields(documentMap);

        docs = new ArrayList<>(documentMap.values());
    }

    return new ResultsList<>(docs, start, (start / rows), total);
}

From source file:com.hortonworks.streamline.streams.cluster.register.impl.AbstractServiceRegistrar.java

@Override
public Service register(Cluster cluster, Config config, List<ConfigFileInfo> configFileInfos)
        throws IOException {
    Service service = environmentService.initializeService(cluster, getServiceName());

    List<ServiceConfiguration> configurations = new ArrayList<>();
    Map<String, String> flattenConfigMap = new HashMap<>();

    List<ServiceConfiguration> serviceConfigurations = createServiceConfigurations(config);
    if (serviceConfigurations != null && !serviceConfigurations.isEmpty()) {
        serviceConfigurations.forEach(sc -> {
            configurations.add(sc);
            try {
                flattenConfigMap.putAll(sc.getConfigurationMap());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
    }

    for (ConfigFileInfo configFileInfo : configFileInfos) {
        Map<String, String> configMap = readConfigFile(configFileInfo);

        String fileName = FilenameUtils.getName(configFileInfo.getFileName());
        String confType = getConfType(fileName);
        String actualFileName = ConfigFilePattern.getOriginFileName(confType);

        ServiceConfiguration configuration = environmentService.initializeServiceConfiguration(service.getId(),
                confType, actualFileName, new HashMap<>(configMap));
        configurations.add(configuration);
        flattenConfigMap.putAll(configMap);
    }

    List<Component> components = createComponents(config, flattenConfigMap);

    if (!validateComponents(components)) {
        throw new IllegalArgumentException("Validation failed for components.");
    }

    if (!validateServiceConfigurations(configurations)) {
        throw new IllegalArgumentException("Validation failed for service configurations.");
    }

    if (!validateServiceConfiguationsAsFlattenedMap(flattenConfigMap)) {
        throw new IllegalArgumentException("Validation failed for service configurations.");
    }

    // here we are storing actual catalogs
    // before that we need to replace dummy service id to the actual one
    service = environmentService.addService(service);

    for (Component component : components) {
        component.setServiceId(service.getId());
        environmentService.addComponent(component);
    }

    for (ServiceConfiguration configuration : configurations) {
        configuration.setServiceId(service.getId());
        environmentService.addServiceConfiguration(configuration);
    }

    return service;
}

From source file:fi.vm.kapa.identification.metadata.background.MetadataFilesGenerator.java

private void createSPMetadata(List<Metadata> serviceProviders) throws Exception {
    EntitiesDescriptor entities = createSAMLObject(EntitiesDescriptor.class);

    serviceProviders.forEach(serviceProvider -> {
        try {
            EntityDescriptor entityDescriptor = createSAMLObject(EntityDescriptor.class);
            entityDescriptor.setEntityID(serviceProvider.getEntityid());

            SPSSODescriptor spDescriptor = createSAMLObject(SPSSODescriptor.class);
            spDescriptor.addSupportedProtocol(SAMLConstants.SAML20P_NS);
            spDescriptor.setWantAssertionsSigned(true);

            KeyInfo keyInfo = createSAMLObject(KeyInfo.class);
            X509Data x509Data = createSAMLObject(X509Data.class);
            X509Certificate x509Cert = createSAMLObject(X509Certificate.class);
            x509Cert.setValue("asdf");
            x509Data.getX509Certificates().add(x509Cert);
            keyInfo.getX509Datas().add(x509Data);

            KeyDescriptor keyDescriptor = createSAMLObject(KeyDescriptor.class);
            keyDescriptor.setUse(UsageType.SIGNING);
            keyDescriptor.setKeyInfo(keyInfo);
            spDescriptor.getKeyDescriptors().add(keyDescriptor);

            NameIDFormat nameFormat = createSAMLObject(NameIDFormat.class);
            nameFormat.setFormat(NAME_ID_FORMAT);
            spDescriptor.getNameIDFormats().add(nameFormat);

            AssertionConsumerService consumer = createSAMLObject(AssertionConsumerService.class);
            consumer.setIndex(1);
            consumer.setBinding(SAMLConstants.SAML2_POST_BINDING_URI);
            consumer.setLocation("http://saml.post.url.com/SAML/Post");
            spDescriptor.getAssertionConsumerServices().add(consumer);

            entityDescriptor.getRoleDescriptors().add(spDescriptor);
            entities.getEntityDescriptors().add(entityDescriptor);
        } catch (Exception e) {
            logger.error("Error parsing service provider with entity ID: {}", serviceProvider.getEntityid(), e);
        }
    });

    FileOutputStream fos = new FileOutputStream(new File("/tmp/sp_metadata.xml"));
    fos.write(createXmlString(entities).getBytes());
}

From source file:com.oneops.cms.transmitter.CIEventReader.java

private CMSEvent getCi(CMSEventRecord record) {
    CMSEvent event = new CMSEvent();
    event.setEventId(record.getEventId());
    event.addHeaders("action", record.getEventType());
    try (SqlSession session = sqlsf.openSession()) {
        if (EVENT_TYPE_DELETE.equals(record.getEventType())) {
            //TODO add method to read ci from the log table
            event.addHeaders("source", "cm_ci");
            event.addHeaders("clazzName", "");
            event.setPayload(null);
            event.addHeaders("sourceId", String.valueOf(record.getSourcePk()));
        } else {
            event.addHeaders("source", "cm_ci_new");
            CIMapper ciMapper = session.getMapper(CIMapper.class);
            CmsCI ci = ciMapper.getCIById(record.getSourcePk());
            if (ci != null) {
                List<CmsCIAttribute> attrs = ciMapper.getCIAttrs(ci.getCiId());
                attrs.forEach(ci::addAttribute);
                CmsCISimpleWithTags simpleCI = cmsUtil.cmsCISimpleWithTags(ci, "df");
                addTags(simpleCI, ciMapper);
                event.addHeaders("clazzName", ci.getCiClassName());
                event.setPayload(simpleCI);
            } else {
                logger.warn("Can not get ci object for id=" + record.getSourcePk());
            }
        }
    }
    return event;
}

From source file:fi.vm.kapa.identification.metadata.background.MetadataFilesGenerator.java

private void createIdPMetadata(List<Metadata> identityProviders) throws Exception {
    EntitiesDescriptor entities = createSAMLObject(EntitiesDescriptor.class);

    identityProviders.forEach(identityProvider -> {
        try {
            EntityDescriptor entityDescriptor = createSAMLObject(EntityDescriptor.class);
            entityDescriptor.setEntityID(identityProvider.getEntityid());

            IDPSSODescriptor idpDescriptor = createSAMLObject(IDPSSODescriptor.class);
            idpDescriptor.addSupportedProtocol(SAMLConstants.SAML20P_NS);
            idpDescriptor.setWantAuthnRequestsSigned(true);

            KeyInfo keyInfo = createSAMLObject(KeyInfo.class);
            X509Data x509Data = createSAMLObject(X509Data.class);
            X509Certificate x509Cert = createSAMLObject(X509Certificate.class);
            x509Cert.setValue("asdf");
            x509Data.getX509Certificates().add(x509Cert);
            keyInfo.getX509Datas().add(x509Data);

            KeyDescriptor keyDescriptor = createSAMLObject(KeyDescriptor.class);
            keyDescriptor.setUse(UsageType.SIGNING);
            keyDescriptor.setKeyInfo(keyInfo);
            idpDescriptor.getKeyDescriptors().add(keyDescriptor);

            NameIDFormat nameFormat = createSAMLObject(NameIDFormat.class);
            nameFormat.setFormat(NAME_ID_FORMAT);
            idpDescriptor.getNameIDFormats().add(nameFormat);

            SingleSignOnService signOnSrv = createSAMLObject(SingleSignOnService.class);
            signOnSrv.setBinding(SAMLConstants.SAML2_POST_BINDING_URI);
            signOnSrv.setLocation("http://saml.post.url.com/SAML/Post");
            idpDescriptor.getSingleSignOnServices().add(signOnSrv);

            entityDescriptor.getRoleDescriptors().add(idpDescriptor);
            entities.getEntityDescriptors().add(entityDescriptor);
        } catch (Exception e) {
            logger.error("Error parsing service provider with entity ID: {}", identityProvider.getEntityid(),
                    e);
        }
    });

    FileOutputStream fos = new FileOutputStream(new File("/tmp/idp_metadata.xml"));
    fos.write(createXmlString(entities).getBytes());
}

From source file:org.fatal1t.forexapp.spring.api.adapters.APIStreamingAdapter.java

private List<String> loadSymbols() {

    List<String> newList = new ArrayList<>();
    symbRepository.findByTicks(true).forEach((Symbol symbol) -> {
        newList.add(symbol.getSymbol());
    });
    newList.forEach((String s) -> {
        System.out.print(s + " ");
    });
    return newList;
}