List of usage examples for java.util Set forEach
default void forEach(Consumer<? super T> action) — inherited by java.util.Set from java.lang.Iterable&lt;T&gt;
From source file:specminers.evaluation.GetMethods.java
private static void extractRegexFromJavaFiles(Map<String, String> options) throws IOException { File javaFilesFolder = new File(options.get(PATH_OPTION)); String[] extensions = new String[] { "java" }; File outputDir = null;//w w w . j av a 2 s . c o m if (options.containsKey(OUTPUT_OPTION)) { outputDir = new File(options.get(OUTPUT_OPTION)); } List<File> sourceFiles = FileUtils.listFiles(javaFilesFolder, extensions, true).stream() .collect(Collectors.toList()); for (File sourceFile : sourceFiles) { GetMethodsViaRegexExtractor extractor = new GetMethodsViaRegexExtractor(sourceFile); // Set<String> result = new HashSet<>(extractor.getReadOnlyMethods()); Set<String> result = new HashSet<>(extractor.getAllMethods()); if (outputDir != null && outputDir.exists()) { File regexesFile; regexesFile = java.nio.file.Paths.get(outputDir.getAbsolutePath(), sourceFile.getName().replace(".java", "") + "_read_operations.txt").toFile(); FileUtils.writeLines(regexesFile, result); } else { System.out.println("Read operations found on file " + sourceFile.getName()); result.forEach(l -> System.out.println(l)); } } }
From source file:ai.grakn.kb.internal.computer.GraknSparkComputer.java
private static void updateConfigKeys(org.apache.commons.configuration.Configuration sparkConfiguration) { Set<String> wrongKeys = new HashSet<>(); sparkConfiguration.getKeys().forEachRemaining(wrongKeys::add); wrongKeys.forEach(key -> { if (key.startsWith("janusmr")) { String newKey = "janusgraphmr" + key.substring(7); sparkConfiguration.setProperty(newKey, sparkConfiguration.getString(key)); }//from ww w . j av a 2 s . c o m }); }
From source file:org.apache.pulsar.broker.loadbalance.impl.LoadManagerShared.java
/** * Using the given bundles, populate the namespace to bundle range map. * * @param bundles/* w ww. j ava2 s. com*/ * Bundles with which to populate. * @param target * Map to fill. */ public static void fillNamespaceToBundlesMap(final Set<String> bundles, final Map<String, Set<String>> target) { bundles.forEach(bundleName -> { final String namespaceName = getNamespaceNameFromBundleName(bundleName); final String bundleRange = getBundleRangeFromBundleName(bundleName); target.computeIfAbsent(namespaceName, k -> new HashSet<>()).add(bundleRange); }); }
From source file:rgu.jclos.foldbuilder.FoldBuilder.java
/**
 * Generates K folds and writes them to disk.
 *
 * @param inputFile The CSV file from which the data comes from.
 * @param outputDirectory The directory in which the folds will be written.
 * @param separator The separating character in the CSV file.
 * @param indexLabel The index of the labels in the CSV file. Used for stratification of the folds.
 * @param k The number of folds to generate.
 * @param speak Whether to print some status messages along the way.
 * @throws IOException If something stops the program from reading or writing the files.
 */
public static void computeAndWriteFolds(String inputFile, String outputDirectory, String separator,
        int indexLabel, int k, boolean speak) throws IOException {
    // getFolds returns (left) the k folds as sets of instance ids and (right) a
    // dictionary mapping each id to its Instance.
    Pair<List<Set<String>>, Map<String, Instance>> tmp = getFolds(inputFile, outputDirectory, separator,
            indexLabel, k, speak);
    Map<String, Instance> dictionary = tmp.getRight();
    List<Set<String>> folds = tmp.getLeft();
    if (speak)
        System.out.println("Writing folds on disk");
    for (int i = 0; i < folds.size(); i++) {
        Set<String> trainingSet = new HashSet<>();
        Set<String> testSet = new HashSet<>();
        // NOTE(review): fold i becomes the TRAINING set (1/k of the data) and the
        // remaining k-1 folds become the TEST set. Conventional k-fold CV is the
        // opposite (train on k-1, test on 1) — confirm this inversion is intended.
        Set<String> trainingSetIds = new HashSet<>();
        trainingSetIds.addAll(folds.get(i));
        Set<String> testSetIds = new HashSet<>();
        for (int j = 0; j < folds.size(); j++) {
            if (i != j) {
                testSetIds.addAll(folds.get(j));
            }
        }
        // Resolve ids to raw CSV lines. NOTE(review): HashSet drops duplicate lines
        // and loses row order in the written fold files — verify that is acceptable.
        trainingSetIds.forEach(tid -> {
            trainingSet.add(dictionary.get(tid).content);
        });
        testSetIds.forEach(tid -> {
            testSet.add(dictionary.get(tid).content);
        });
        // One training/testing file pair per fold, named Fold_<n>_*.csv (1-based).
        String filenameTraining = "Fold_" + (i + 1) + "_TrainingSet.csv";
        String filenameTesting = "Fold_" + (i + 1) + "_TestingSet.csv";
        File outputTraining = new File(outputDirectory + File.separator + filenameTraining);
        File outputTesting = new File(outputDirectory + File.separator + filenameTesting);
        Files.write(outputTraining.toPath(), trainingSet);
        Files.write(outputTesting.toPath(), testSet);
    }
}
From source file:rgu.jclos.foldbuilder.FoldBuilder.java
/** * Generates K folds and writes them to disk * @param inputFile The CSV file from which the data comes from. * @param outputDirectory The directory in which the folds will be written. * @param separator The separating character in the CSV file. * @param indexLabel The index of the labels in the CSV file. Used for stratification of the folds. * @param k The number of folds to generates. * @param speak Whether to print some status messages along the way. * @throws IOException If something stops the program from reading or writing the files. */// ww w . ja va 2 s. co m private static void computeAndWriteFolds(String inputFile, String outputDirectory, String separator, String indexLabel, int k, boolean speak) throws IOException { Pair<List<Set<String>>, Map<String, Instance>> tmp = getFolds(inputFile, outputDirectory, separator, indexLabel, k, speak); Map<String, Instance> dictionary = tmp.getRight(); List<Set<String>> folds = tmp.getLeft(); if (speak) System.out.println("Writing folds on disk"); for (int i = 0; i < folds.size(); i++) { Set<String> trainingSet = new HashSet<>(); Set<String> testSet = new HashSet<>(); Set<String> trainingSetIds = new HashSet<>(); trainingSetIds.addAll(folds.get(i)); Set<String> testSetIds = new HashSet<>(); for (int j = 0; j < folds.size(); j++) { if (i != j) { testSetIds.addAll(folds.get(j)); } } trainingSetIds.forEach(tid -> { trainingSet.add(dictionary.get(tid).content); }); testSetIds.forEach(tid -> { testSet.add(dictionary.get(tid).content); }); String filenameTraining = "Fold_" + (i + 1) + "_TrainingSet.csv"; String filenameTesting = "Fold_" + (i + 1) + "_TestingSet.csv"; File outputTraining = new File(outputDirectory + File.separator + filenameTraining); File outputTesting = new File(outputDirectory + File.separator + filenameTesting); Files.write(outputTraining.toPath(), trainingSet); Files.write(outputTesting.toPath(), testSet); } }
From source file:net.nifheim.beelzebu.coins.common.utils.dependencies.DependencyManager.java
public static void loadDependencies(Set<Dependency> dependencies) throws RuntimeException { core.getMethods().log("Identified the following dependencies: " + dependencies.toString()); File libDir = new File(core.getDataFolder(), "lib"); if (!(libDir.exists() || libDir.mkdirs())) { throw new RuntimeException("Unable to create lib dir - " + libDir.getPath()); }//from w ww. j a v a2s .c om // Download files. List<File> filesToLoad = new ArrayList<>(); dependencies.forEach(dependency -> { try { filesToLoad.add(downloadDependency(libDir, dependency)); } catch (Exception e) { core.getMethods().log("Exception whilst downloading dependency " + dependency.name()); } }); // Load classes. filesToLoad.forEach(file -> { try { loadJar(file); } catch (Throwable t) { core.getMethods().log("Failed to load dependency jar " + file.getName()); } }); }
From source file:cz.muni.fi.editor.database.test.helpers.AbstractDAOTest.java
/**
 * Asserts that the given test class declares a {@code @Test} method for every method the
 * DAO under test must cover: the common CRUD operations plus everything declared on the
 * DAO interface itself.
 *
 * @param testClass        test class whose declared {@code @Test} methods are inspected
 * @param testingInterface DAO interface whose declared methods must all be tested
 */
protected static void checkMethods(Class<?> testClass, Class<?> testingInterface) {
    // Collect the names of all @Test-annotated methods on the test class.
    Set<String> testMethods = new HashSet<>();
    for (Method m : testClass.getDeclaredMethods()) {
        if (m.isAnnotationPresent(Test.class)) {
            testMethods.add(m.getName());
        }
    }

    // Required coverage: baseline CRUD names + every method declared on the interface.
    List<String> targetMethods = new ArrayList<>(
            Arrays.asList("create", "update", "getById", "delete", "getAll", "getClassType"));
    Arrays.stream(testingInterface.getDeclaredMethods()).map(Method::getName).forEach(targetMethods::add);

    // Remove each covered name once; whatever survives has no corresponding test.
    testMethods.forEach(targetMethods::remove);
    Assert.assertEquals("Following method(s) are missing in DAO test :" + targetMethods.toString(), 0,
            targetMethods.size());
}
From source file:com.netflix.spinnaker.halyard.deploy.spinnaker.v1.service.distributed.kubernetes.KubernetesProviderUtils.java
/**
 * Creates (or replaces) a Kubernetes secret containing the given files, keyed by the
 * name paired with each file and base64-encoded as Kubernetes requires.
 *
 * @param details    deployment details used to obtain the Kubernetes client
 * @param files      (file, entry-name) pairs to store in the secret
 * @param secretName name of the secret to upsert
 * @param namespace  namespace to create the secret in
 */
static void upsertSecret(AccountDeploymentDetails<KubernetesAccount> details, Set<Pair<File, String>> files,
        String secretName, String namespace) {
    KubernetesClient client = getClient(details);

    // "Upsert" = delete any existing secret of the same name, then create fresh.
    if (client.secrets().inNamespace(namespace).withName(secretName).get() != null) {
        client.secrets().inNamespace(namespace).withName(secretName).delete();
    }

    Map<String, String> secretContents = new HashMap<>();
    files.forEach(pair -> {
        try {
            File file = pair.getLeft();
            String name = pair.getRight();
            // Read via NIO: the previous implementation opened a FileInputStream that
            // was never closed, leaking a file handle per secret entry.
            String data = Base64.getEncoder()
                    .encodeToString(java.nio.file.Files.readAllBytes(file.toPath()));
            // First entry wins if two files map to the same name.
            secretContents.putIfAbsent(name, data);
        } catch (IOException e) {
            throw new HalException(Severity.ERROR,
                    "Unable to read contents of \"" + pair.getLeft() + "\": " + e);
        }
    });

    SecretBuilder secretBuilder = new SecretBuilder();
    secretBuilder = secretBuilder.withNewMetadata().withName(secretName).withNamespace(namespace).endMetadata()
            .withData(secretContents);
    client.secrets().inNamespace(namespace).create(secretBuilder.build());
}
From source file:org.openecomp.sdc.validation.impl.util.ResourceValidationHeatValidator.java
/**
 * Removes from the candidate list every security-group name that the given security
 * group references via the named HEAT function (one occurrence removed per name, so
 * duplicate entries in the list are preserved beyond the first).
 *
 * @param filename                      HEAT file being validated (for error reporting)
 * @param functionName                  HEAT function whose referenced values are resolved
 * @param securityGroup                 security-group value to inspect
 * @param securityGroupResourceNameList candidate names; mutated in place
 * @param globalContext                 validation context
 */
private static void removeSecurityGroupNamesFromListByGivenFunction(String filename, String functionName,
        Object securityGroup, Collection<String> securityGroupResourceNameList,
        GlobalValidationContext globalContext) {
    Set<String> referencedNames = HeatStructureUtil.getReferencedValuesByFunctionName(filename,
            functionName, securityGroup, globalContext);
    for (String referencedName : referencedNames) {
        securityGroupResourceNameList.remove(referencedName);
    }
}
From source file:org.apache.syncope.common.lib.AnyOperations.java
/**
 * Applies a set of attribute patches to a copy of the given attribute map and returns the
 * resulting attribute values. Patches with a null attribute are logged and skipped; for
 * valid patches the existing attribute is removed (its schema info carried over to the
 * replacement) and re-added only for {@code ADD_REPLACE} operations — i.e. other
 * operations effectively delete the attribute.
 *
 * @param attrs       current attributes, keyed by schema; not modified
 * @param attrPatches patches to apply
 * @return the patched attribute values
 */
private static Collection<AttrTO> patch(final Map<String, AttrTO> attrs, final Set<AttrPatch> attrPatches) {
    // Work on a mutable copy so the caller's map is never touched.
    Map<String, AttrTO> patched = new HashMap<>(attrs);
    for (AttrPatch attrPatch : attrPatches) {
        AttrTO attrTO = attrPatch.getAttrTO();
        if (attrTO == null) {
            LOG.warn("Invalid {} specified: {}", AttrPatch.class.getName(), attrPatch);
            continue;
        }
        // Drop the current value for this schema; preserve its schema info on the
        // incoming attribute if the replacement does not carry its own.
        AttrTO previous = patched.remove(attrTO.getSchema());
        if (previous != null && previous.getSchemaInfo() != null) {
            attrTO.setSchemaInfo(previous.getSchemaInfo());
        }
        if (attrPatch.getOperation() == PatchOperation.ADD_REPLACE) {
            patched.put(attrTO.getSchema(), attrTO);
        }
    }
    return patched.values();
}