Usage examples for java.util.Arrays.stream
public static DoubleStream stream(double[] array)
Note: several of the snippets below use the related generic overload, public static <T> Stream<T> stream(T[] array), rather than the double[] overload itself.
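Before the project snippets, here is a minimal self-contained sketch of the double[] overload on its own; the class name and sample values are illustrative only and do not come from any of the projects listed below.

import java.util.Arrays;

public class ArraysStreamDemo {
    public static void main(String[] args) {
        double[] values = {1.5, 2.5, 3.0, 4.0}; // illustrative sample data

        // Arrays.stream(double[]) returns a DoubleStream backed by the array,
        // so the usual primitive-stream terminal operations are available:
        double sum = Arrays.stream(values).sum();                              // 11.0
        double average = Arrays.stream(values).average().orElse(Double.NaN);   // 2.75
        long aboveTwo = Arrays.stream(values).filter(v -> v > 2.0).count();    // 3

        System.out.println("sum = " + sum + ", average = " + average + ", values > 2.0: " + aboveTwo);
    }
}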
From source file:io.tourniquet.junit.http.rules.ResponseStubbing.java
/**
 * Extracts the query parameters from the path. If no path is set or no query parameters are set,
 * the map is empty.
 *
 * @return a map of key-value pairs resembling the query parameters
 */
private Map<String, List<String>> getQueryParams() {
    return this.path.map(p -> {
        int idx = p.indexOf('?');
        if (idx != -1) {
            // split the query string into key=value pairs and group the values by key
            return (Map<String, List<String>>) Arrays.stream(p.substring(idx + 1).split("&"))
                    .map(kv -> kv.split("="))
                    .collect(groupingBy(s -> s[0], Collectors.mapping(s -> s[1], Collectors.toList())));
        }
        return null;
    }).orElse(Collections.emptyMap());
}
From source file:de.fosd.jdime.config.CommandLineConfigSource.java
/**
 * Builds the <code>Options</code> instance describing the JDime command line configuration options.
 *
 * @return the <code>Options</code> instance
 */
private Options buildCliOptions() {
    Options options = new Options();
    Option o;

    o = Option.builder(CLI_LOG_LEVEL).longOpt("log-level")
            .desc("Set the logging level to one of (OFF, SEVERE, WARNING, INFO, CONFIG, FINE, FINER, FINEST, ALL).")
            .hasArg().argName("level").build();
    options.addOption(o);

    o = Option.builder(CLI_CONSECUTIVE).longOpt("consecutive")
            .desc("Requires diffonly mode. Treats versions as consecutive versions.").hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_DIFFONLY).longOpt("diffonly").desc("Only perform the diff stage.").hasArg(false)
            .build();
    options.addOption(o);

    o = Option.builder(CLI_FORCE_OVERWRITE).longOpt("force-overwrite")
            .desc("Force overwriting of output files.").hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_HELP).longOpt("help").desc("Print this message.").hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_KEEPGOING).longOpt("keep-going")
            .desc("Whether to skip a set of files if there is an exception merging them.").hasArg(false)
            .build();
    options.addOption(o);

    o = Option.builder(CLI_LOOKAHEAD).longOpt("lookahead")
            .desc("Use heuristics for matching. Supply 'off', 'full', or a non-negative integer as the argument.")
            .hasArg().argName("level").build();
    options.addOption(o);

    o = Option.builder(CLI_INSPECT_ELEMENT).longOpt("inspect-element")
            .desc("Inspect an AST element. Supply number of element.").hasArg().argName("element").build();
    options.addOption(o);

    o = Option.builder(CLI_INSPECT_METHOD).longOpt("inspect-method")
            .desc("Inspect the method of an AST element. Supply number of element.").hasArg().argName("element")
            .build();
    options.addOption(o);

    {
        String strategies = String.join(", ", MergeStrategy.listStrategies());

        o = Option.builder(CLI_MODE).longOpt("mode")
                .desc("Set the mode to one of (" + strategies + ") or a comma separated combination "
                        + "thereof. In the latter case the strategies will be executed in order until one "
                        + "does not produce conflicts.")
                .hasArg().argName("mode").build();
        options.addOption(o);
    }

    {
        String formats = Arrays.stream(DumpMode.values()).map(DumpMode::name).reduce("", (s, s2) -> s + " " + s2);

        o = Option.builder(CLI_DUMP).longOpt("dump")
                .desc("Dumps the inputs using one of the formats: " + formats).hasArg().argName("format")
                .build();
        options.addOption(o);
    }

    o = Option.builder(CLI_OUTPUT).longOpt("output").desc("Set the output directory/file.").hasArg()
            .argName("file").build();
    options.addOption(o);

    o = Option.builder(CLI_OPTIMIZE_MULTI_CONFLICTS).longOpt("optimize-multi-conflicts")
            .desc("Merge successive conflicts after running structured strategy.").hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_RECURSIVE).longOpt("recursive").desc("Merge directories recursively.").hasArg(false)
            .build();
    options.addOption(o);

    o = Option.builder(CLI_STATS).longOpt("stats").desc("Collect statistical data about the merge.")
            .hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_PRETEND).longOpt("pretend")
            .desc("Prints the merge result to stdout instead of an output file.").hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_QUIET).longOpt("quiet").desc("Do not print the merge result to stdout.")
            .hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_VERSION).longOpt("version").desc("Print the version information and exit.")
            .hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_PROP_FILE).longOpt("properties-file")
            .desc("Set the path to the properties file to use for additional configuration options.").hasArg()
            .argName("path").build();
    options.addOption(o);

    o = Option.builder(CLI_EXIT_ON_ERROR).longOpt("exit-on-error")
            .desc("Whether to end the merge if there is an exception merging a set of files. If neither this "
                    + "option nor keep-going is set the fallback line based strategy will be tried.")
            .hasArg(false).build();
    options.addOption(o);

    {
        String modes = Arrays.stream(CMMode.values()).map(CMMode::name).reduce("", (s, s2) -> s + " " + s2);

        o = Option.builder(CLI_CM).longOpt("cost-model-matcher")
                .desc("Sets the cost model matcher operation mode to one of " + modes).hasArg(true).build();
        options.addOption(o);
    }

    o = Option.builder(CLI_CM_REMATCH_BOUND).longOpt("cost-model-rematch-bound")
            .desc("If the cost model matcher operation mode is " + CMMode.INTEGRATED + " the cost model matcher will "
                    + "be used to try and improve subtree matches with a percentage lower than this bound. "
                    + "Should be from (0, 1]. The default is 30%.")
            .hasArg(true).build();
    options.addOption(o);

    o = Option.builder(CLI_CM_OPTIONS).longOpt("cost-model-options")
            .desc("Accepts a comma separated list of parameters for the cost model matcher. The list must have "
                    + "the form: <int iterations>,<float pAssign>,<float wr>,<float wn>,<float wa>,<float ws>,<float wo>")
            .hasArg(true).build();
    options.addOption(o);

    o = Option.builder(CLI_CM_PARALLEL).longOpt("cost-model-parallel")
            .desc("Whether to speed up the cost model matcher by calculating the edge costs in parallel.")
            .hasArg(false).build();
    options.addOption(o);

    o = Option.builder(CLI_CM_FIX_PERCENTAGE).longOpt("cost-model-fix-percentage")
            .desc("Accepts a comma separated list of two percentages. <float fixLower>,<float fixUpper> both "
                    + "from the range [0, 1]. If these percentages are given, a random number (from the given range) "
                    + "of matchings from the previous iteration will be fixed for the next.")
            .hasArg(true).build();
    options.addOption(o);

    o = Option.builder(CLI_CM_SEED).longOpt("cost-model-seed")
            .desc("The seed for the PRNG used by the cost model matcher. If set to \"none\" a random seed will "
                    + "be used. Otherwise the default is 42.")
            .hasArg(true).build();
    options.addOption(o);

    return options;
}
From source file:de.perdian.apps.tagtiger.fx.handlers.batchupdate.UpdateFileNamesFromTagsActionEventHandler.java
private void computeNewFileNames(List<UpdateFileNamesFromTagsItem> items, String fileNamePattern) {
    for (UpdateFileNamesFromTagsItem item : items) {
        Map<String, String> replacementValues = Arrays.stream(UpdateFileNamesPlaceholder.values())
                .collect(Collectors.toMap(p -> p.getPlaceholder(), p -> p.resolveValue(item.getFile())));
        StrSubstitutor substitutor = new StrSubstitutor(replacementValues);
        String substitutionEvaluationResult = substitutor.replace(fileNamePattern);
        String substitutionSanitizedResult = this.sanitizeFileName(substitutionEvaluationResult).trim();
        if (!Objects.equals(substitutionSanitizedResult, item.getNewFileName().getValue())) {
            item.getNewFileName().setValue(substitutionSanitizedResult);
        }
    }
}
From source file:com.simiacryptus.mindseye.lang.Layer.java
/**
 * Eval nn result.
 *
 * @param array the array
 * @return the nn result
 */
@Nullable
default Result eval(@Nonnull final Tensor[][] array) {
    Result[] input = ConstantResult.singleResultArray(array);
    Result eval = eval(input);
    Arrays.stream(input).forEach(ReferenceCounting::freeRef);
    Arrays.stream(input).map(Result::getData).forEach(ReferenceCounting::freeRef);
    return eval;
}
From source file:com.blackducksoftware.integration.hub.detect.configuration.DetectConfigurationManager.java
private void resolveBomToolSearchProperties() {
    bomToolSearchDirectoryExclusions = new ArrayList<>();
    for (final String exclusion : detectConfiguration
            .getStringArrayProperty(DetectProperty.DETECT_BOM_TOOL_SEARCH_EXCLUSION, PropertyAuthority.None)) {
        bomToolSearchDirectoryExclusions.add(exclusion);
    }
    if (detectConfiguration.getBooleanProperty(DetectProperty.DETECT_BOM_TOOL_SEARCH_EXCLUSION_DEFAULTS,
            PropertyAuthority.None)) {
        final List<String> defaultExcludedNames = Arrays.stream(DetectorSearchExcludedDirectories.values())
                .map(DetectorSearchExcludedDirectories::getDirectoryName).collect(Collectors.toList());
        bomToolSearchDirectoryExclusions.addAll(defaultExcludedNames);
    }
}
From source file:com.hortonworks.streamline.streams.cluster.register.impl.StormServiceRegistrar.java
private Pair<Component, List<ComponentProcess>> createNimbusComponent(Config config,
        Map<String, String> flatConfigMap) {
    if (!config.contains(PARAM_NIMBUS_SEEDS)) {
        throw new IllegalArgumentException("Required parameter " + PARAM_NIMBUS_SEEDS + " not present.");
    }
    if (!config.contains(PARAM_NIMBUS_THRIFT_PORT)) {
        throw new IllegalArgumentException("Required parameter " + PARAM_NIMBUS_THRIFT_PORT + " not present.");
    }

    String nimbusSeeds;
    try {
        nimbusSeeds = config.getString(PARAM_NIMBUS_SEEDS);
    } catch (ClassCastException e) {
        throw new IllegalArgumentException("Required parameter " + PARAM_NIMBUS_SEEDS + " should be a string.");
    }

    Number nimbusThriftPort = readNumberFromConfig(config, PARAM_NIMBUS_THRIFT_PORT);

    Component nimbus = new Component();
    nimbus.setName(COMPONENT_NIMBUS);

    List<ComponentProcess> componentProcesses = Arrays.stream(nimbusSeeds.split(",")).map(nimbusHost -> {
        ComponentProcess cp = new ComponentProcess();
        cp.setHost(nimbusHost);
        cp.setPort(nimbusThriftPort.intValue());
        return cp;
    }).collect(toList());

    return new Pair<>(nimbus, componentProcesses);
}
From source file:com.epam.ta.reportportal.database.dao.ReportPortalRepositoryImpl.java
@Override
public Page<T> findByFilterExcluding(Filter filter, Pageable pageable, String... exclude) {
    Query query = QueryBuilder.newBuilder().with(filter).with(pageable).build();
    org.springframework.data.mongodb.core.query.Field fields = query.fields();
    if (null != exclude) {
        Arrays.stream(exclude).forEach(fields::exclude);
    }
    return findPage(query, pageable);
}
From source file:eu.amidst.core.inference.MPEInferenceExperiments_Deliv1.java
/**
 * Entry point for the MPE inference experiments.
 *
 * @param args Array of options: seedNetwork numberGaussians numberDiscrete seedAlgorithms parallelSamples sampleSize repetitions numberOfIterations
 */
public static void main(String[] args) throws Exception {

    // args: seedNetwork numberGaussians numberDiscrete seedAlgorithms
    int seedNetwork = 23423523;
    int numberOfGaussians = 20;
    int numberOfMultinomials = 20;

    int seed = 634634534;

    int parallelSamples = 100;
    int samplingMethodSize = 10000;

    int repetitions = 10;

    int numberOfIterations = 200;

    if (args.length != 8) {
        if (Main.VERBOSE)
            System.out.println("Invalid number of parameters. Using default values");
    } else {
        try {
            seedNetwork = Integer.parseInt(args[0]);
            numberOfGaussians = Integer.parseInt(args[1]);
            numberOfMultinomials = Integer.parseInt(args[2]);

            seed = Integer.parseInt(args[3]);

            parallelSamples = Integer.parseInt(args[4]);
            samplingMethodSize = Integer.parseInt(args[5]);

            repetitions = Integer.parseInt(args[6]);

            numberOfIterations = Integer.parseInt(args[7]);
        } catch (NumberFormatException ex) {
            if (Main.VERBOSE)
                System.out.println(
                        "Invalid parameters. Provide integers: seedNetwork numberGaussians numberDiscrete seedAlgorithms parallelSamples sampleSize repetitions");
            if (Main.VERBOSE)
                System.out.println("Using default parameters");
            if (Main.VERBOSE)
                System.out.println(ex.toString());
            System.exit(20);
        }
    }

    int numberOfLinks = (int) (1.3 * (numberOfGaussians + numberOfMultinomials)); // about 1.3 links per variable

    BayesianNetworkGenerator.setSeed(seedNetwork);
    BayesianNetworkGenerator.setNumberOfGaussianVars(numberOfGaussians);
    BayesianNetworkGenerator.setNumberOfMultinomialVars(numberOfMultinomials, 2);
    BayesianNetworkGenerator.setNumberOfLinks(numberOfLinks);

    String filename = "./networks/simulated/RandomBN_" + Integer.toString(numberOfMultinomials) + "D_"
            + Integer.toString(numberOfGaussians) + "C_" + Integer.toString(seedNetwork) + "_Seed.bn";
    //BayesianNetworkGenerator.generateBNtoFile(numberOfMultinomials,2,numberOfGaussians,numberOfLinks,seedNetwork,filename);
    BayesianNetwork bn = BayesianNetworkGenerator.generateBayesianNetwork();

    //if (Main.VERBOSE) System.out.println(bn.getDAG());
    //if (Main.VERBOSE) System.out.println(bn.toString());

    MPEInference mpeInference = new MPEInference();
    mpeInference.setModel(bn);
    mpeInference.setParallelMode(true);

    //if (Main.VERBOSE) System.out.println("CausalOrder: " + Arrays.toString(Utils.getCausalOrder(mpeInference.getOriginalModel().getDAG()).stream().map(Variable::getName).toArray()));
    List<Variable> modelVariables = Utils.getTopologicalOrder(bn.getDAG());
    if (Main.VERBOSE)
        System.out.println();

    // Including evidence:
    //double observedVariablesRate = 0.00;
    //Assignment evidence = randomEvidence(seed, observedVariablesRate, bn);
    //mpeInference.setEvidence(evidence);

    mpeInference.setSampleSize(parallelSamples);
    mpeInference.setSeed(seed);
    mpeInference.setNumberOfIterations(numberOfIterations);

    double[] SA_All_prob = new double[repetitions];
    double[] SA_Some_prob = new double[repetitions];
    double[] HC_All_prob = new double[repetitions];
    double[] HC_Some_prob = new double[repetitions];
    double[] sampling_prob = new double[repetitions];

    double[] SA_All_time = new double[repetitions];
    double[] SA_Some_time = new double[repetitions];
    double[] HC_All_time = new double[repetitions];
    double[] HC_Some_time = new double[repetitions];
    double[] sampling_time = new double[repetitions];

    long timeStart;
    long timeStop;
    double execTime;
    Assignment mpeEstimate;

    mpeInference.setParallelMode(true);

    for (int k = 0; k < repetitions; k++) {

        mpeInference.setSampleSize(parallelSamples);

        /***********************************************
         *        SIMULATED ANNEALING
         ***********************************************/

        // MPE INFERENCE WITH SIMULATED ANNEALING, ALL VARIABLES
        //if (Main.VERBOSE) System.out.println();
        timeStart = System.nanoTime();
        mpeInference.runInference(MPEInference.SearchAlgorithm.SA_GLOBAL);

        //mpeEstimate = mpeInference.getEstimate();
        //if (Main.VERBOSE) System.out.println("MPE estimate (SA.All): " + mpeEstimate.outputString(modelVariables));   //toString(modelVariables)
        //if (Main.VERBOSE) System.out.println("with probability: " + Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println(.toString(mapInference.getOriginalModel().getStaticVariables().iterator().));
        //if (Main.VERBOSE) System.out.println();
        SA_All_prob[k] = mpeInference.getLogProbabilityOfEstimate();
        SA_All_time[k] = execTime;

        // MPE INFERENCE WITH SIMULATED ANNEALING, SOME VARIABLES AT EACH TIME
        timeStart = System.nanoTime();
        mpeInference.runInference(MPEInference.SearchAlgorithm.SA_LOCAL);

        //mpeEstimate = mpeInference.getEstimate();
        //if (Main.VERBOSE) System.out.println("MPE estimate (SA.Some): " + mpeEstimate.outputString(modelVariables));   //toString(modelVariables)
        //if (Main.VERBOSE) System.out.println("with probability: "+ Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println(.toString(mapInference.getOriginalModel().getStaticVariables().iterator().));
        //if (Main.VERBOSE) System.out.println();
        SA_Some_prob[k] = mpeInference.getLogProbabilityOfEstimate();
        SA_Some_time[k] = execTime;

        /***********************************************
         *        HILL CLIMBING
         ***********************************************/

        // MPE INFERENCE WITH HILL CLIMBING, ALL VARIABLES
        timeStart = System.nanoTime();
        mpeInference.runInference(MPEInference.SearchAlgorithm.HC_GLOBAL);

        //mpeEstimate = mpeInference.getEstimate();
        //modelVariables = mpeInference.getOriginalModel().getVariables().getListOfVariables();
        //if (Main.VERBOSE) System.out.println("MPE estimate (HC.All): " + mpeEstimate.outputString(modelVariables));
        //if (Main.VERBOSE) System.out.println("with probability: " + Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println();
        HC_All_prob[k] = mpeInference.getLogProbabilityOfEstimate();
        HC_All_time[k] = execTime;

        // MPE INFERENCE WITH HILL CLIMBING, ONE VARIABLE AT EACH TIME
        timeStart = System.nanoTime();
        mpeInference.runInference(MPEInference.SearchAlgorithm.HC_LOCAL);

        //mpeEstimate = mpeInference.getEstimate();
        //if (Main.VERBOSE) System.out.println("MPE estimate (HC.Some): " + mpeEstimate.outputString(modelVariables));   //toString(modelVariables)
        //if (Main.VERBOSE) System.out.println("with probability: " + Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println();
        HC_Some_prob[k] = mpeInference.getLogProbabilityOfEstimate();
        HC_Some_time[k] = execTime;

        /***********************************************
         *        SAMPLING AND DETERMINISTIC
         ***********************************************/

        // MPE INFERENCE WITH SIMULATION AND PICKING MAX
        mpeInference.setSampleSize(samplingMethodSize);
        timeStart = System.nanoTime();
        mpeInference.runInference(MPEInference.SearchAlgorithm.SAMPLING);

        //mpeEstimate = mpeInference.getEstimate();
        //modelVariables = mpeInference.getOriginalModel().getVariables().getListOfVariables();
        //if (Main.VERBOSE) System.out.println("MPE estimate (SAMPLING): " + mpeEstimate.outputString(modelVariables));
        //if (Main.VERBOSE) System.out.println("with probability: " + Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println();
        sampling_prob[k] = mpeInference.getLogProbabilityOfEstimate();
        sampling_time[k] = execTime;
    }

    double determ_prob = 0;
    double determ_time = 0;

    if (bn.getNumberOfVars() <= 50) {

        // MPE INFERENCE, DETERMINISTIC
        timeStart = System.nanoTime();
        //mpeInference.runInference(-2);

        //mpeEstimate = mpeInference.getEstimate();
        //modelVariables = mpeInference.getOriginalModel().getVariables().getListOfVariables();
        //if (Main.VERBOSE) System.out.println("MPE estimate (DETERM.): " + mpeEstimate.outputString(modelVariables));
        //if (Main.VERBOSE) System.out.println("with probability: " + Math.exp(mpeInference.getLogProbabilityOfEstimate()) + ", logProb: " + mpeInference.getLogProbabilityOfEstimate());
        timeStop = System.nanoTime();
        execTime = (double) (timeStop - timeStart) / 1000000000.0;
        //if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
        //if (Main.VERBOSE) System.out.println();
        determ_prob = mpeInference.getLogProbabilityOfEstimate();
        determ_time = execTime;
        determ_prob = -34.64743236365595;
        determ_time = 0;
    } else {
        if (Main.VERBOSE)
            System.out.println("Too many variables for deterministic method");
    }

    /***********************************************
     *        DISPLAY OF RESULTS
     ***********************************************/

    if (Main.VERBOSE)
        System.out.println("*** RESULTS ***");

    //    if (Main.VERBOSE) System.out.println("SA_All log-probabilities");
    //    if (Main.VERBOSE) System.out.println(Arrays.toString(SA_All_prob));
    //    if (Main.VERBOSE) System.out.println("SA_Some log-probabilities");
    //    if (Main.VERBOSE) System.out.println(Arrays.toString(SA_Some_prob));
    //    if (Main.VERBOSE) System.out.println("HC_All log-probabilities");
    //    if (Main.VERBOSE) System.out.println(Arrays.toString(HC_All_prob));
    //    if (Main.VERBOSE) System.out.println("HC_Some log-probabilities");
    //    if (Main.VERBOSE) System.out.println(Arrays.toString(HC_Some_prob));
    //    if (Main.VERBOSE) System.out.println("Sampling log-probabilities");
    //    if (Main.VERBOSE) System.out.println(Arrays.toString(sampling_prob));
    //
    //    if (bn.getNumberOfVars() <= 50) {
    //        if (Main.VERBOSE) System.out.println("Deterministic log-probability");
    //        if (Main.VERBOSE) System.out.println(Double.toString(determ_prob));
    //    }
    //    if (Main.VERBOSE) System.out.println();

    final double determ_prob_FINAL = determ_prob;
    //    int SA_All_success = (int) Arrays.stream(SA_All_prob).filter(db -> (db <= determ_prob_FINAL+0.001 && db >=determ_prob_FINAL-0.001)).count();
    //    int SA_Some_success = (int) Arrays.stream(SA_Some_prob).filter(db -> (db <= determ_prob_FINAL+0.001 && db >=determ_prob_FINAL-0.001)).count();
    //    int HC_All_success = (int) Arrays.stream(HC_All_prob).filter(db -> (db <= determ_prob_FINAL+0.001 && db >=determ_prob_FINAL-0.001)).count();
    //    int HC_Some_success = (int) Arrays.stream(HC_Some_prob).filter(db -> (db <= determ_prob_FINAL+0.001 && db >=determ_prob_FINAL-0.001)).count();
    //    int sampling_success = (int) Arrays.stream(sampling_prob).filter(db -> (db <= determ_prob_FINAL+0.001 && db >=determ_prob_FINAL-0.001)).count();
    //
    //    if (Main.VERBOSE) System.out.println("SA_All % success");
    //    if (Main.VERBOSE) System.out.println(Double.toString( 100 * SA_All_success/repetitions ));
    //    if (Main.VERBOSE) System.out.println("SA_Some % success");
    //    if (Main.VERBOSE) System.out.println(Double.toString( 100 * SA_Some_success/repetitions ));
    //    if (Main.VERBOSE) System.out.println("HC_All % success");
    //    if (Main.VERBOSE) System.out.println(Double.toString( 100 * HC_All_success/repetitions ));
    //    if (Main.VERBOSE) System.out.println("HC_Some % success");
    //    if (Main.VERBOSE) System.out.println(Double.toString( 100 * HC_Some_success/repetitions ));
    //    if (Main.VERBOSE) System.out.println("Sampling % success");
    //    if (Main.VERBOSE) System.out.println(Double.toString( 100 * sampling_success/repetitions ));
    //    if (Main.VERBOSE) System.out.println();

    if (Main.VERBOSE)
        System.out.println("SA_All RMS probabilities");
    if (Main.VERBOSE)
        System.out.println(Double.toString(Arrays.stream(SA_All_prob)
                .map(value -> Math.pow(value - determ_prob_FINAL, 2)).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("SA_Some RMS probabilities");
    if (Main.VERBOSE)
        System.out.println(Double.toString(Arrays.stream(SA_Some_prob)
                .map(value -> Math.pow(value - determ_prob_FINAL, 2)).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("HC_All RMS probabilities");
    if (Main.VERBOSE)
        System.out.println(Double.toString(Arrays.stream(HC_All_prob)
                .map(value -> Math.pow(value - determ_prob_FINAL, 2)).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("HC_Some RMS probabilities");
    if (Main.VERBOSE)
        System.out.println(Double.toString(Arrays.stream(HC_Some_prob)
                .map(value -> Math.pow(value - determ_prob_FINAL, 2)).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("Sampling RMS probabilities");
    if (Main.VERBOSE)
        System.out.println(Double.toString(Arrays.stream(sampling_prob)
                .map(value -> Math.pow(value - determ_prob_FINAL, 2)).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println();

    if (Main.VERBOSE)
        System.out.println("SA_All times");
    //if (Main.VERBOSE) System.out.println(Arrays.toString(SA_All_time));
    if (Main.VERBOSE)
        System.out.println("Mean time: " + Double.toString(Arrays.stream(SA_All_time).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("SA_Some times");
    //if (Main.VERBOSE) System.out.println(Arrays.toString(SA_Some_time));
    if (Main.VERBOSE)
        System.out.println("Mean time: " + Double.toString(Arrays.stream(SA_Some_time).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("HC_All times");
    //if (Main.VERBOSE) System.out.println(Arrays.toString(HC_All_time));
    if (Main.VERBOSE)
        System.out.println("Mean time: " + Double.toString(Arrays.stream(HC_All_time).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("HC_Some times");
    //if (Main.VERBOSE) System.out.println(Arrays.toString(HC_Some_time));
    if (Main.VERBOSE)
        System.out.println("Mean time: " + Double.toString(Arrays.stream(HC_Some_time).average().getAsDouble()));
    if (Main.VERBOSE)
        System.out.println("Sampling times");
    //if (Main.VERBOSE) System.out.println(Arrays.toString(sampling_time));
    if (Main.VERBOSE)
        System.out.println("Mean time: " + Double.toString(Arrays.stream(sampling_time).average().getAsDouble()));

    if (bn.getNumberOfVars() <= 50) {
        if (Main.VERBOSE)
            System.out.println("Deterministic time");
        if (Main.VERBOSE)
            System.out.println(Double.toString(determ_time));
        if (Main.VERBOSE)
            System.out.println("and probability");
        if (Main.VERBOSE)
            System.out.println(determ_prob);
    }
}
From source file:core.Annotator.java
private void injectVEPHeaders() { Arrays.stream(HEADERS).forEach(vcfFile.getHeader()::addHeader); }
From source file:com.github.anba.es6draft.util.TestGlobals.java
private static void optionsFromStage(EnumSet<CompatibilityOption> options, String stage) {
    options.addAll(Arrays.stream(CompatibilityOption.Stage.values()).filter(s -> {
        if (stage.length() == 1 && Character.isDigit(stage.charAt(0))) {
            return s.getLevel() == Character.digit(stage.charAt(0), 10);
        } else {
            return s.name().equalsIgnoreCase(stage);
        }
    }).findAny().map(CompatibilityOption::Stage).orElseThrow(IllegalArgumentException::new));
}