Example usage for java.util List forEach

List of usage examples for java.util List forEach

Introduction

On this page you can find usage examples for java.util List forEach.

Prototype

default void forEach(Consumer<? super T> action) 

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception.
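
A minimal, self-contained sketch of the call (example names only): the Consumer passed to forEach is applied to every element in the list's encounter order.

import java.util.Arrays;
import java.util.List;

public class ForEachBasicSketch {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        // The action runs once per element, in encounter order.
        names.forEach(name -> System.out.println("Hello, " + name));
    }
}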

Usage

From source file:net.nifheim.beelzebu.coins.common.utils.dependencies.DependencyManager.java

public static void loadDependencies(Set<Dependency> dependencies) throws RuntimeException {
    core.getMethods().log("Identified the following dependencies: " + dependencies.toString());

    File libDir = new File(core.getDataFolder(), "lib");
    if (!(libDir.exists() || libDir.mkdirs())) {
        throw new RuntimeException("Unable to create lib dir - " + libDir.getPath());
    }

    // Download files.
    List<File> filesToLoad = new ArrayList<>();
    dependencies.forEach(dependency -> {
        try {
            filesToLoad.add(downloadDependency(libDir, dependency));
        } catch (Exception e) {
            core.getMethods().log("Exception whilst downloading dependency " + dependency.name());
        }
    });

    // Load classes.
    filesToLoad.forEach(file -> {
        try {
            loadJar(file);
        } catch (Throwable t) {
            core.getMethods().log("Failed to load dependency jar " + file.getName());
        }
    });
}
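
Note the try/catch inside each lambda above: Consumer.accept does not declare any checked exceptions, so a forEach body that calls a throwing method has to handle the exception itself, either logging and continuing (as this example does) or rethrowing it unchecked. A minimal sketch of that pattern, using hypothetical names:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.List;

public class ForEachCheckedExceptionSketch {
    // Hypothetical helper that declares a checked exception.
    static void process(String item) throws IOException {
        if (item.isEmpty()) {
            throw new IOException("empty item");
        }
    }

    static void processAll(List<String> items) {
        items.forEach(item -> {
            try {
                process(item);
            } catch (IOException e) {
                // Log and continue, or rethrow unchecked as shown here.
                throw new UncheckedIOException(e);
            }
        });
    }
}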

From source file:com.netflix.spinnaker.halyard.deploy.spinnaker.v1.service.distributed.kubernetes.v1.ResourceBuilder.java

static Container buildContainer(String name, ServiceSettings settings, List<ConfigSource> configSources,
        DeploymentEnvironment deploymentEnvironment) {
    int port = settings.getPort();
    List<EnvVar> envVars = settings.getEnv().entrySet().stream().map(e -> {
        EnvVarBuilder envVarBuilder = new EnvVarBuilder();
        return envVarBuilder.withName(e.getKey()).withValue(e.getValue()).build();
    }).collect(Collectors.toList());

    configSources.forEach(c -> {
        c.getEnv().entrySet().forEach(envEntry -> {
            EnvVarBuilder envVarBuilder = new EnvVarBuilder();
            envVars.add(envVarBuilder.withName(envEntry.getKey()).withValue(envEntry.getValue()).build());
        });
    });

    ProbeBuilder probeBuilder = new ProbeBuilder();

    String scheme = settings.getScheme();
    if (StringUtils.isNotEmpty(scheme)) {
        scheme = scheme.toUpperCase();
    } else {
        scheme = null;
    }

    if (settings.getHealthEndpoint() != null) {
        probeBuilder = probeBuilder.withNewHttpGet().withNewPort(port).withPath(settings.getHealthEndpoint())
                .withScheme(scheme).endHttpGet();
    } else {
        probeBuilder = probeBuilder.withNewTcpSocket().withNewPort().withIntVal(port).endPort().endTcpSocket();
    }

    List<VolumeMount> volumeMounts = configSources.stream().map(c -> {
        return new VolumeMountBuilder().withMountPath(c.getMountPath()).withName(c.getId()).build();
    }).collect(Collectors.toList());

    ContainerBuilder containerBuilder = new ContainerBuilder();
    containerBuilder = containerBuilder.withName(name).withImage(settings.getArtifactId())
            .withPorts(new ContainerPortBuilder().withContainerPort(port).build())
            .withVolumeMounts(volumeMounts).withEnv(envVars).withReadinessProbe(probeBuilder.build())
            .withResources(buildResourceRequirements(name, deploymentEnvironment));

    return containerBuilder.build();
}

From source file:com.ikanow.aleph2.analytics.storm.utils.StormControllerUtil.java

/** Stops all jobs corresponding to a given bucket
 * @param storm_controller
 * @param bucket
 */
public static void stopAllJobsForBucket(IStormController storm_controller, DataBucketBean bucket) {
    final List<String> jobs = storm_controller.getJobNamesForBucket(bucket.full_name());
    jobs.forEach(job -> {
        storm_controller.stopJob(job);
    });
}
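
The lambda here only delegates to storm_controller.stopJob, so an equivalent and slightly more compact form is a bound method reference, jobs.forEach(storm_controller::stopJob). A standalone sketch of the same idiom with made-up types:

import java.util.Arrays;
import java.util.List;

public class MethodReferenceSketch {
    static class Controller {
        void stopJob(String job) {
            System.out.println("stopping " + job);
        }
    }

    public static void main(String[] args) {
        Controller controller = new Controller();
        List<String> jobs = Arrays.asList("job-a", "job-b");

        // Lambda form, as in the example above:
        jobs.forEach(job -> controller.stopJob(job));

        // Equivalent bound method reference:
        jobs.forEach(controller::stopJob);
    }
}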

From source file:com.formkiq.core.form.FormTransformer.java

/**
 * Update {@link FormJSON} by the selected 'optionsgroup'.
 * @param form {@link FormJSON}
 */
public static void updateOptionsGroup(final FormJSON form) {

    List<FormJSONField> list = stream(form).collect(Collectors.toList());

    List<String> optionsGroup = list.stream()
            .filter(f -> !CollectionUtils.isEmpty(f.getOptionsgroup())
                    && f.getOptionsgroup().size() == f.getOptions().size()
                    && f.getOptions().indexOf(f.getValue()) != -1)
            .map(f -> f.getOptionsgroup().get(f.getOptions().indexOf(f.getValue())))
            .collect(Collectors.toList());

    list.forEach(f -> {
        f.setHide(ListUtils.intersection(f.getGroups(), optionsGroup).isEmpty());

        if (CollectionUtils.isEmpty(f.getGroups())) {
            f.setHide(false);
        }
    });
}

From source file:ai.grakn.graql.internal.gremlin.GraqlTraversal.java

/**
 * Create a semi-optimal plan using a greedy approach to execute a single conjunction
 * @param query the conjunction query to find a traversal plan
 * @return a semi-optimal traversal plan to execute the given conjunction
 */
private static List<Fragment> semiOptimalConjunction(ConjunctionQuery query) {

    Set<EquivalentFragmentSet> fragmentSets = Sets.newHashSet(query.getEquivalentFragmentSets());
    Set<String> names = new HashSet<>();

    // This list is constructed over the course of the algorithm
    List<Fragment> fragments = new ArrayList<>();

    long numFragments = fragments(fragmentSets).count();
    long depth = 1;
    long numTraversalAttempts = numFragments;

    // Calculate the depth to descend in the tree, based on how many plans we want to evaluate
    while (numFragments > 0 && numTraversalAttempts < MAX_TRAVERSAL_ATTEMPTS) {
        depth += 1;
        numTraversalAttempts *= numFragments;
        numFragments -= 1;
    }

    double cost = 1;

    while (!fragmentSets.isEmpty()) {
        Pair<Double, List<Fragment>> pair = findPlan(fragmentSets, names, cost, depth);
        cost = pair.getValue0();
        List<Fragment> newFragments = Lists.reverse(pair.getValue1());

        if (newFragments.isEmpty()) {
            throw new RuntimeException(ErrorMessage.FAILED_TO_BUILD_TRAVERSAL.getMessage());
        }

        newFragments.forEach(fragment -> {
            fragmentSets.remove(fragment.getEquivalentFragmentSet());
            fragment.getVariableNames().forEach(names::add);
        });
        fragments.addAll(newFragments);
    }

    return fragments;
}
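
The line fragment.getVariableNames().forEach(names::add) uses a method reference as the Consumer to collect values into a set. A standalone sketch of the same idiom (example names only; addAll is equivalent when the source is a Collection):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ForEachCollectSketch {
    public static void main(String[] args) {
        List<String> variableNames = Arrays.asList("x", "y", "x");
        Set<String> names = new HashSet<>();

        // Method reference used as the Consumer; duplicates are dropped by the Set.
        variableNames.forEach(names::add);
        // names.addAll(variableNames) would do the same for a Collection source.

        System.out.println(names); // prints the two distinct names
    }
}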

From source file:com.flurry.proguard.UploadMapping.java

/**
 * Tar a ProGuard/Native mapping file and send it to Flurry's crash service
 *
 * @param apiKey the API key for the project being built
 * @param uuid the uuid for this build
 * @param paths the paths to the ProGuard/Native mapping.txt files
 * @param token the auth token for API calls
 * @param timeout the amount of time to wait for the upload to be processed (in ms)
 */
public static void uploadFiles(String apiKey, String uuid, List<String> paths, String token, int timeout,
        AndroidUploadType androidUploadType) throws IOException {
    ArrayList<File> files = new ArrayList<>();
    paths.forEach(path -> {
        File file = new File(path);
        if (file.isDirectory()) {
            failWithError("{} is a directory. Please provide the path to " + androidUploadType.getDisplayName()
                    + " mapping file " + path);
        }
        files.add(file);
    });

    if (apiKey == null) {
        failWithError("No API key provided");
    }
    if (androidUploadType == AndroidUploadType.ANDROID_JAVA && uuid == null) {
        failWithError("No UUID provided");
    }
    if (token == null) {
        failWithError("No token provided");
    }

    try {
        httpClient = HttpClientBuilder.create().setDefaultRequestConfig(REQUEST_CONFIG).build();

        File zippedFile = createArchive(files, uuid);
        String projectId = lookUpProjectId(apiKey, token);
        LOGGER.info("Found project {} for api key {}", projectId, apiKey);

        String payload = getUploadJson(zippedFile, projectId, androidUploadType.getUploadType());
        String uploadId = createUpload(projectId, payload, token);
        LOGGER.info("Created upload with ID: {}", uploadId);

        sendToUploadService(zippedFile, projectId, uploadId, token);
        LOGGER.info(androidUploadType.getDisplayName() + " mapping uploaded to Flurry");

        waitForUploadToBeProcessed(projectId, uploadId, token, timeout);
        LOGGER.info("Upload completed successfully!");
    } finally {
        httpClient.close();
        httpClient = null;
    }
}

From source file:ddf.catalog.transformer.csv.common.CsvTransformer.java

public static Appendable writeMetacardsToCsv(final List<Metacard> metacards,
        final List<AttributeDescriptor> orderedAttributeDescriptors, final Map<String, String> aliasMap)
        throws CatalogTransformerException {
    StringBuilder stringBuilder = new StringBuilder();

    try {
        CSVPrinter csvPrinter = new CSVPrinter(stringBuilder, CSVFormat.RFC4180);
        printColumnHeaders(csvPrinter, orderedAttributeDescriptors, aliasMap);

        metacards.forEach(metacard -> printMetacard(csvPrinter, metacard, orderedAttributeDescriptors));

        return csvPrinter.getOut();
    } catch (IOException ioe) {
        throw new CatalogTransformerException(ioe);
    }
}

From source file:oct.util.Util.java

public static void graphPoints(List<LinePoint>... pointsList) {
    XYSeriesCollection dataset = new XYSeriesCollection();
    int seriesCntr = 1;
    for (List<LinePoint> points : pointsList) {
        XYSeries data = new XYSeries("Series " + seriesCntr);
        points.forEach((point) -> {
            data.add(point.getX(), point.getY());
        });
        dataset.addSeries(data);
        seriesCntr++;
    }

    JFrame graphFrame = new JFrame("Points graph");

    JPanel chartPanel = createChartPanel("Points graph", dataset);
    graphFrame.add(chartPanel, BorderLayout.SOUTH);
    SwingUtilities.invokeLater(() -> {
        graphFrame.pack();
        graphFrame.setVisible(true);
    });
}

From source file:io.apicurio.studio.tools.release.ReleaseTool.java

/**
 * Generates the release notes for a release.
 * @param releaseName
 * @param releaseTag
 * @param issues
 */
private static String generateReleaseNotes(String releaseName, String releaseTag, List<JSONObject> issues) {
    StringBuilder builder = new StringBuilder();

    builder.append("This represents the official release of Apicurio Studio, version ");
    builder.append(releaseTag);
    builder.append(".\n\n");
    builder.append("The following issues have been resolved in this release:\n\n");

    issues.forEach(issue -> {
        builder.append(String.format("* [#%d](%s) %s", issue.getInt("number"), issue.getString("html_url"),
                issue.getString("title")));
        builder.append("\n");
    });

    builder.append("\n\n");

    builder.append("For more information, please see the Apicurio Studio's official project site:\n\n");
    builder.append("* [General Information](http://www.apicur.io/)\n");
    builder.append("* [Download/Quickstart](http://www.apicur.io/download)\n");
    builder.append("* [Blog](http://www.apicur.io/blog)\n");

    return builder.toString();
}
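
Since each iteration only appends to a StringBuilder, the same list of issue lines could also be produced without an explicit forEach, for example with a stream and Collectors.joining. A sketch under the assumption that issues is a List<JSONObject> with the same fields; formatIssues is a hypothetical helper, not part of the original ReleaseTool:

import java.util.List;
import java.util.stream.Collectors;
import org.json.JSONObject;

public class ReleaseNotesJoinSketch {
    static String formatIssues(List<JSONObject> issues) {
        return issues.stream()
                .map(issue -> String.format("* [#%d](%s) %s", issue.getInt("number"),
                        issue.getString("html_url"), issue.getString("title")))
                .collect(Collectors.joining("\n"));
    }
}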

From source file:com.vmware.photon.controller.api.frontend.utils.SecurityGroupUtils.java

/**
 * Merge the security inherited from parent to the current security groups.
 *
 * @param existingSecurityGroups Existing security groups including both inherited and self ones.
 * @param parentSecurityGroups   Security groups inherited from parent.
 * @return The merged security groups and the ones removed from 'self' ones due to duplication with parent.
 */
public static Pair<List<SecurityGroup>, List<String>> mergeParentSecurityGroups(
        List<SecurityGroup> existingSecurityGroups, List<String> parentSecurityGroups) {

    checkNotNull(existingSecurityGroups, "Provided value for existingSecurityGroups is unacceptably null");
    checkNotNull(parentSecurityGroups, "Provided value for parentSecurityGroups is unacceptably null");

    List<SecurityGroup> mergedSecurityGroups = new ArrayList<>();
    List<String> selfSecurityGroupsRemoved = new ArrayList<>();
    Set<String> inheritedSecurityGroupsNames = new HashSet<>();

    parentSecurityGroups.forEach(g -> {
        mergedSecurityGroups.add(new SecurityGroup(g, true));
        inheritedSecurityGroupsNames.add(g);
    });

    existingSecurityGroups.stream().filter(g -> !g.isInherited()).forEach(g -> {
        if (inheritedSecurityGroupsNames.contains(g.getName())) {
            selfSecurityGroupsRemoved.add(g.getName());
        } else {
            mergedSecurityGroups.add(g);
        }
    });

    return Pair.of(mergedSecurityGroups, selfSecurityGroupsRemoved);
}