List of usage examples for java.util.Collections.shuffle
public static void shuffle(List<?> list)
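Randomly permutes the specified list using a default source of randomness. Before the project examples below, here is a minimal, self-contained sketch of the call itself (class and variable names are illustrative, not from any project). Note that shuffle mutates the list in place, so the list must be modifiable, and the two-argument overload shuffle(List, Random) takes an explicit random source for reproducible orderings.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

public class ShuffleDemo {
    public static void main(String[] args) {
        // shuffle mutates the list in place, so copy fixed-size or immutable sources into an ArrayList
        List<String> cards = new ArrayList<>(Arrays.asList("A", "K", "Q", "J", "10"));

        Collections.shuffle(cards);                  // default RNG: a different order on most runs
        System.out.println(cards);

        Collections.shuffle(cards, new Random(42L)); // seeded RNG: the same order on every run
        System.out.println(cards);
    }
}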
From source file:com.amazonaws.services.kinesis.leases.impl.LeaseTaker.java
/**
 * Compute the number of leases I should try to take based on the state of the system. Uses the
 * allLeases field, a map of shardId to lease containing all leases.
 *
 * @param expiredLeases list of leases we determined to be expired
 * @return set of leases to take.
 */
private Set<T> computeLeasesToTake(List<T> expiredLeases) {
    Map<String, Integer> leaseCounts = computeLeaseCounts(expiredLeases);
    Set<T> leasesToTake = new HashSet<T>();
    IMetricsScope metrics = MetricsHelper.getMetricsScope();

    int numLeases = allLeases.size();
    int numWorkers = leaseCounts.size();

    if (numLeases == 0) {
        // If there are no leases, I shouldn't try to take any.
        return leasesToTake;
    }

    int target;
    if (numWorkers >= numLeases) {
        // If we have n leases and n or more workers, each worker can have up to 1 lease, including myself.
        target = 1;
    } else {
        /*
         * numWorkers must be < numLeases.
         *
         * Our target for each worker is numLeases / numWorkers (+1 if numWorkers doesn't evenly divide numLeases)
         */
        target = numLeases / numWorkers + (numLeases % numWorkers == 0 ? 0 : 1);

        // Spill over is the number of leases this worker should have claimed, but did not because it would
        // exceed the max allowed for this worker.
        int leaseSpillover = Math.max(0, target - maxLeasesForWorker);
        if (target > maxLeasesForWorker) {
            LOG.warn(String.format("Worker %s target is %d leases and maxLeasesForWorker is %d."
                    + " Resetting target to %d, lease spillover is %d. "
                    + " Note that some shards may not be processed if no other workers are able to pick them up.",
                    workerIdentifier, target, maxLeasesForWorker, maxLeasesForWorker, leaseSpillover));
            target = maxLeasesForWorker;
        }
        metrics.addData("LeaseSpillover", leaseSpillover, StandardUnit.Count, MetricsLevel.SUMMARY);
    }

    int myCount = leaseCounts.get(workerIdentifier);
    int numLeasesToReachTarget = target - myCount;

    if (numLeasesToReachTarget <= 0) {
        // If we don't need anything, return the empty set.
        return leasesToTake;
    }

    // Shuffle expiredLeases so workers don't all try to contend for the same leases.
    Collections.shuffle(expiredLeases);

    int originalExpiredLeasesSize = expiredLeases.size();
    if (expiredLeases.size() > 0) {
        // If we have expired leases, get up to <needed> leases from expiredLeases
        for (; numLeasesToReachTarget > 0 && expiredLeases.size() > 0; numLeasesToReachTarget--) {
            leasesToTake.add(expiredLeases.remove(0));
        }
    } else {
        // If there are no expired leases and we need a lease, consider stealing.
        List<T> leasesToSteal = chooseLeasesToSteal(leaseCounts, numLeasesToReachTarget, target);
        for (T leaseToSteal : leasesToSteal) {
            LOG.info(String.format(
                    "Worker %s needed %d leases but none were expired, so it will steal lease %s from %s",
                    workerIdentifier, numLeasesToReachTarget, leaseToSteal.getLeaseKey(),
                    leaseToSteal.getLeaseOwner()));
            leasesToTake.add(leaseToSteal);
        }
    }

    if (!leasesToTake.isEmpty()) {
        LOG.info(String.format(
                "Worker %s saw %d total leases, %d available leases, %d workers."
                        + " Target is %d leases, I have %d leases, I will take %d leases",
                workerIdentifier, numLeases, originalExpiredLeasesSize, numWorkers, target, myCount,
                leasesToTake.size()));
    }

    metrics.addData("TotalLeases", numLeases, StandardUnit.Count, MetricsLevel.DETAILED);
    metrics.addData("ExpiredLeases", originalExpiredLeasesSize, StandardUnit.Count, MetricsLevel.SUMMARY);
    metrics.addData("NumWorkers", numWorkers, StandardUnit.Count, MetricsLevel.SUMMARY);
    metrics.addData("NeededLeases", numLeasesToReachTarget, StandardUnit.Count, MetricsLevel.DETAILED);
    metrics.addData("LeasesToTake", leasesToTake.size(), StandardUnit.Count, MetricsLevel.DETAILED);

    return leasesToTake;
}
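The shuffle above exists for contention avoidance: every worker sees the same expired-lease list, so randomizing the order before taking from the head makes it unlikely that all workers race for the same leases. A stripped-down sketch of that pattern follows; the worker and lease names are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class LeaseContentionSketch {
    public static void main(String[] args) {
        List<String> expired = Arrays.asList("lease-1", "lease-2", "lease-3", "lease-4");
        // Each worker shuffles its own copy, then claims from the front;
        // without the shuffle, every worker would try "lease-1" first.
        for (String worker : new String[] { "worker-A", "worker-B" }) {
            List<String> candidates = new ArrayList<>(expired);
            Collections.shuffle(candidates);
            System.out.println(worker + " tries " + candidates.get(0) + " first");
        }
    }
}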
From source file:MSUmpire.DIA.TargetMatchScoring.java
private void EM_LDATraining() throws IOException {
    Logger.getRootLogger()
            .info("Linear discriminant analysis using identified peptides and decoys as training set.");
    Regression regression = new Regression();
    XYPointCollection points = new XYPointCollection();
    matchSubscore.InitializeLDACoeff();
    int NoLDAComp = matchSubscore.NoEnableSubscores;
    CalcUmpireScore();

    float LDASimialrity = 0f;
    float StopThreshold = 0.95f;
    int MaxIterations = 50;
    int iteration = 0;
    float samplingratio = 0.5f;

    ArrayList<PeakGroupScore> IDList = new ArrayList<>();
    ArrayList<PeakGroupScore> decoyList = new ArrayList<>();

    for (UmpireSpecLibMatch match : libTargetMatches) {
        if (match.BestDecoyHit != null) {
            decoyList.add(match.BestDecoyHit);
        }
        if (match.BestMS2DecoyHit != null) {
            decoyList.add(match.BestMS2DecoyHit);
        }
    }
    for (UmpireSpecLibMatch match : libIDMatches) {
        if (match.BestHit != null) {
            IDList.add(match.BestHit);
        }
    }

    // Randomly split the decoys: first half for modeling, second half for LDA training
    Collections.shuffle(decoyList);
    ArrayList<PeakGroupScore> decoyTList = new ArrayList<>();
    for (int i = 0; i < decoyList.size() / 2; i++) {
        decoyModelingList.add(decoyList.get(i));
    }
    for (int i = decoyList.size() / 2; i < decoyList.size(); i++) {
        decoyTList.add(decoyList.get(i));
    }
    decoyList = decoyTList;

    int targetNo = (int) (IDList.size() * samplingratio);
    int TrainNo = Math.min(targetNo, decoyList.size());

    Logger.getRootLogger().info("No. of identified peptide ions:" + IDList.size());
    Logger.getRootLogger().info("No. of decoys:" + decoyList.size());

    if (TrainNo < 5) {
        Terminate = true;
        Logger.getRootLogger().warn("No. of training data is less than 5, the training process will exit.");
        return;
    }

    while (LDASimialrity < StopThreshold && iteration < MaxIterations) {
        // Re-sample the training set on every EM iteration
        Collections.shuffle(decoyList);
        Collections.shuffle(IDList);

        double[][] testdata = new double[NoLDAComp][2 * TrainNo];
        double[] testgroup = new double[2 * TrainNo];
        int idx = 0;
        for (int i = 0; i < TrainNo; i++) {
            PeakGroupScore peakgroup = IDList.get(i);
            double[] enablesubscore = matchSubscore.GetEnableSubScoreArray(peakgroup);
            for (int j = 0; j < enablesubscore.length; j++) {
                testdata[j][idx] = enablesubscore[j];
            }
            testgroup[idx] = 1;
            idx++;
        }
        for (int i = 0; i < TrainNo; i++) {
            PeakGroupScore peakgroup = decoyList.get(i);
            double[] enablesubscore = matchSubscore.GetEnableSubScoreArray(peakgroup);
            for (int j = 0; j < enablesubscore.length; j++) {
                testdata[j][idx] = enablesubscore[j];
            }
            testgroup[idx] = 0;
            idx++;
        }

        DiscriminantAnalysis LDA = new DiscriminantAnalysis();
        int[] group = LDA.predictedGroup(testgroup, testdata, testdata);
        boolean modelvalid = true;
        for (int i = 0; i < NoLDAComp; i++) {
            if (Float.isNaN((float) LDA.linearDiscriminants[0][i])) {
                modelvalid = false;
                break;
            }
            points.AddPoint((float) matchSubscore.SubSCoeff[i], (float) -LDA.linearDiscriminants[0][i]);
            matchSubscore.SubSCoeff[i] = -LDA.linearDiscriminants[0][i];
        }
        if (!modelvalid) {
            Logger.getRootLogger().debug("LDA failed at iteration:" + iteration);
            break;
        }

        regression.SetData(points);
        LDASimialrity = regression.GetR2();
        if (Float.isNaN(LDASimialrity)) {
            LDASimialrity = -1f;
        }

        DecimalFormat df = new DecimalFormat("#.####");
        Logger.getRootLogger().debug(
                "----------------------------------------------------------------------------------------");
        Logger.getRootLogger().debug("Iteration:" + (iteration++));
        Logger.getRootLogger().debug("No of target hits:" + targetNo);
        Logger.getRootLogger().debug("No of decoy hits:" + decoyList.size());
        Logger.getRootLogger().debug("Training set size:" + TrainNo * 2);
        Logger.getRootLogger().debug("Trained weights:");
        for (int i = 0; i < NoLDAComp; i++) {
            Logger.getRootLogger()
                    .debug(matchSubscore.SubSName[i] + ":" + df.format(matchSubscore.SubSCoeff[i]));
        }
        Logger.getRootLogger().debug("LDA weight similarity to previous iteration:" + df.format(LDASimialrity));
        CalcUmpireScore();
    }
}
From source file:annis.AnnisRunner.java
public void doBenchmark(String benchmarkCount) {
    int count = Integer.parseInt(benchmarkCount);
    out.println("---> executing " + benchmarks.size() + " queries " + count + " times");

    AnnisRunner.OS currentOS = AnnisRunner.OS.other;
    try {
        currentOS = AnnisRunner.OS.valueOf(System.getProperty("os.name").toLowerCase());
    } catch (IllegalArgumentException ex) {
    }

    List<AnnisRunner.Benchmark> session = new ArrayList<AnnisRunner.Benchmark>();

    // create sql + plan for each query and create count copies for each benchmark
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        if (clearCaches) {
            resetCaches(currentOS);
        }

        SqlGenerator<QueryData, ?> generator = getGeneratorForQueryFunction(benchmark.functionCall);
        benchmark.sql = getGeneratorForQueryFunction(benchmark.functionCall).toSql(benchmark.queryData);
        out.println("---> SQL query for: " + benchmark.functionCall);
        out.println(benchmark.sql);
        try {
            benchmark.plan = annisDao.explain(generator, benchmark.queryData, false);
            out.println("---> query plan for: " + benchmark.functionCall);
            out.println(benchmark.plan);
        } catch (RuntimeException e) {
            // nested DataAccessException would be better
            out.println("---> query plan failed for " + benchmark.functionCall);
        }

        benchmark.bestTimeInMilliseconds = Long.MAX_VALUE;
        benchmark.worstTimeInMilliseconds = Long.MIN_VALUE;
        out.println("---> running query sequentially " + SEQUENTIAL_RUNS + " times");
        String options = benchmarkOptions(benchmark.queryData);
        for (int i = 0; i < SEQUENTIAL_RUNS; ++i) {
            if (i > 0) {
                out.print(", ");
            }
            boolean error = false;
            long start = new Date().getTime();
            try {
                annisDao.executeQueryFunction(benchmark.queryData, generator);
            } catch (RuntimeException e) {
                error = true;
            }
            long end = new Date().getTime();
            long runtime = end - start;
            benchmark.values.add(runtime);
            benchmark.bestTimeInMilliseconds = Math.min(benchmark.bestTimeInMilliseconds, runtime);
            benchmark.worstTimeInMilliseconds = Math.max(benchmark.worstTimeInMilliseconds, runtime);
            ++benchmark.runs;
            if (error) {
                ++benchmark.errors;
            }
            out.print(runtime + " ms");
        }
        out.println();
        out.println(benchmark.bestTimeInMilliseconds + " ms best time for '" + benchmark.functionCall
                + ("".equals(options) ? "'" : "' with " + options));
        session.addAll(Collections.nCopies(count, benchmark));
    }

    // clear cache again in order to treat the last query in the list equal to the others
    if (clearCaches) {
        resetCaches(currentOS);
    }

    // shuffle the benchmark queries
    Collections.shuffle(session);
    out.println();
    out.println("---> running queries in random order");

    // execute the queries, record test times
    for (AnnisRunner.Benchmark benchmark : session) {
        if (benchmark.errors >= 3) {
            continue;
        }
        boolean error = false;
        SqlGenerator<QueryData, ?> generator = getGeneratorForQueryFunction(benchmark.functionCall);
        long start = new Date().getTime();
        try {
            annisDao.executeQueryFunction(benchmark.queryData, generator);
        } catch (RuntimeException e) {
            error = true;
        }
        long end = new Date().getTime();
        long runtime = end - start;
        benchmark.avgTimeInMilliseconds += runtime;
        benchmark.values.add(runtime);
        benchmark.bestTimeInMilliseconds = Math.min(benchmark.bestTimeInMilliseconds, runtime);
        benchmark.worstTimeInMilliseconds = Math.max(benchmark.worstTimeInMilliseconds, runtime);
        ++benchmark.runs;
        if (error) {
            ++benchmark.errors;
        }
        String options = benchmarkOptions(benchmark.queryData);
        out.println(runtime + " ms for '" + benchmark.functionCall
                + ("".equals(options) ? "'" : "' with " + options) + (error ? " ERROR" : ""));
    }

    // compute average runtime for each query
    out.println();
    out.println("---> benchmark complete");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        benchmark.avgTimeInMilliseconds = Math
                .round((double) benchmark.avgTimeInMilliseconds / (double) benchmark.runs);
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.getMedian() + " ms (median for " + benchmark.runs + " runs"
                + (benchmark.errors > 0 ? ", " + benchmark.errors + " errors)" : ")") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }

    // show worst runtime for each query
    out.println();
    out.println("---> worst times");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.worstTimeInMilliseconds + " ms "
                + (benchmark.errors > 0 ? "(" + benchmark.errors + " errors)" : "") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }

    // show best runtime for each query
    out.println();
    out.println("---> best times");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.bestTimeInMilliseconds + " ms "
                + (benchmark.errors > 0 ? "(" + benchmark.errors + " errors)" : "") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }
    out.println();

    // CSV output
    try {
        CSVWriter csv = new CSVWriter(
                new FileWriterWithEncoding(new File("annis_benchmark_result.csv"), "UTF-8"));
        String[] header = new String[] { "corpora", "query", "median", "diff-best", "diff-worst" };
        csv.writeNext(header);
        for (AnnisRunner.Benchmark benchmark : benchmarks) {
            long median = benchmark.getMedian();
            String[] line = new String[5];
            line[0] = StringUtils.join(benchmark.queryData.getCorpusList(), ",");
            line[1] = benchmark.functionCall;
            line[2] = "" + median;
            line[3] = "" + Math.abs(benchmark.bestTimeInMilliseconds - median);
            line[4] = "" + Math.abs(median - benchmark.worstTimeInMilliseconds);
            csv.writeNext(line);
        }
        csv.close();
    } catch (IOException ex) {
        log.error(null, ex);
    }
}
From source file:com.intuit.tank.harness.functions.StringFunctions.java
/**
 * @param minId lowest candidate id (inclusive)
 * @param maxId upper bound on candidate ids (exclusive)
 * @param exclusions patterns used to filter the generated ids
 * @param include whether the patterns are treated as inclusions or exclusions
 * @return a shared, shuffled stack of ids for this agent's block
 */
public synchronized static Stack<Integer> getStack(Integer minId, Integer maxId, String exclusions,
        boolean include) {
    String key = getStackKey(minId, maxId, exclusions, include);
    Stack<Integer> stack = stackMap.get(key);
    if (stack == null) {
        int blockSize = (maxId - minId) / APITestHarness.getInstance().getAgentRunData().getTotalAgents();
        int offset = APITestHarness.getInstance().getAgentRunData().getAgentInstanceNum() * blockSize;
        LOG.info(LogUtil.getLogMessage("Creating userId Block starting at " + (offset + minId)
                + " and containing " + blockSize + " entries with "
                + (include ? "inclusion" : "exclusion") + " pattern(s) of " + exclusions,
                LogEventType.System));
        List<Integer> list = new ArrayList<Integer>();
        List<String> exclusionList = parseExclusions(exclusions);
        for (int i = 0; i < blockSize; i++) {
            int nextNum = i + minId + offset;
            if (nextNum < maxId) {
                if (shouldInclude(Integer.toString(nextNum), exclusionList, include)) {
                    list.add(nextNum);
                }
            }
        }
        Collections.shuffle(list);
        // Collections.reverse(list);
        stack = new Stack<Integer>();
        stack.addAll(list);
        stackMap.put(key, stack);
    }
    return stack;
}
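The pattern here is a "grab bag": build the block of candidate IDs, shuffle once, and load the result into a Stack so that each pop() hands out a unique ID in random order. A toy version of the idea, with an invented ID range:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Stack;

public class IdGrabBagSketch {
    public static void main(String[] args) {
        List<Integer> ids = new ArrayList<>();
        for (int i = 100; i < 110; i++) ids.add(i);

        Collections.shuffle(ids); // randomize once up front
        Stack<Integer> bag = new Stack<>();
        bag.addAll(ids);

        while (!bag.isEmpty()) {
            System.out.println("handing out userId " + bag.pop()); // unique, random order
        }
    }
}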
From source file:com.archsystemsinc.ipms.sec.webapp.controller.MeetingController.java
@RequestMapping(value = "/new-agenda", method = RequestMethod.GET)
public String newAgendaMeeting(final Model model) {
    final List<MeetingAgendaItem> meetingAgendaItem = meetingAgendaItemService.findAll();
    if (meetingAgendaItem != null) {
        Collections.shuffle(meetingAgendaItem);
    }
    model.addAttribute("meetingAgendaItem", meetingAgendaItem);
    return "agendaadd";
}
From source file:voldemort.ServerTestUtils.java
/**
 * Returns a cluster with <b>numberOfNodes</b> nodes in <b>numberOfZones</b> zones. It is important
 * that <b>numberOfNodes</b> be divisible by <b>numberOfZones</b>.
 *
 * @param numberOfNodes Number of nodes in the cluster
 * @param partitionsPerNode Number of partitions in one node
 * @param numberOfZones Number of zones
 * @return Cluster
 */
public static Cluster getLocalCluster(int numberOfNodes, int partitionsPerNode, int numberOfZones) {
    if (numberOfZones > 0 && numberOfNodes > 0 && numberOfNodes % numberOfZones != 0) {
        throw new VoldemortException("The number of nodes (" + numberOfNodes
                + ") is not divisible by number of zones (" + numberOfZones + ")");
    }

    int[] ports = findFreePorts(3 * numberOfNodes);
    List<Integer> partitions = Lists.newArrayList();
    for (int i = 0; i < partitionsPerNode * numberOfNodes; i++)
        partitions.add(i);
    Collections.shuffle(partitions);

    // Generate nodes
    int numberOfNodesPerZone = numberOfNodes / numberOfZones;
    List<Node> nodes = new ArrayList<Node>();
    for (int i = 0; i < numberOfNodes; i++) {
        nodes.add(new Node(i, "localhost", ports[3 * i], ports[3 * i + 1], ports[3 * i + 2],
                i / numberOfNodesPerZone,
                partitions.subList(partitionsPerNode * i, partitionsPerNode * i + partitionsPerNode)));
    }

    // Generate zones
    if (numberOfZones > 1) {
        List<Zone> zones = getZones(numberOfZones);
        return new Cluster("cluster", nodes, zones);
    } else {
        return new Cluster("cluster", nodes);
    }
}
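Shuffle-then-subList, as used above, is a compact way to deal items out randomly but evenly: randomize the whole pool once, then give each consumer a contiguous slice. A small sketch of just that idiom, with invented node and partition counts:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class PartitionDealSketch {
    public static void main(String[] args) {
        int nodes = 3, perNode = 4;
        List<Integer> partitions = new ArrayList<>();
        for (int i = 0; i < nodes * perNode; i++) partitions.add(i);

        Collections.shuffle(partitions); // random assignment, but still exactly perNode each
        for (int n = 0; n < nodes; n++) {
            List<Integer> slice = partitions.subList(n * perNode, (n + 1) * perNode);
            System.out.println("node " + n + " owns partitions " + slice);
        }
    }
}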
From source file:edu.cmu.tetrad.search.IndTestMultiFisherZ2.java
private List<Double> getCutoff1(List<Node> aa, List<Node> bb, List<Node> cc) {
    Node x = new ContinuousVariable("X");
    Node y = new ContinuousVariable("Y");
    Node z = new ContinuousVariable("Z");

    int numPermutations = 5;
    double[] k_ = new double[numPermutations];

    for (int c = 0; c < numPermutations; c++) {
        List<Integer> indices = new ArrayList<Integer>();
        for (int j = 0; j < cov.getDimension(); j++) {
            indices.add(j);
        }
        Collections.shuffle(indices);

        Map<Node, List<Node>> nodeMap = new HashMap<Node, List<Node>>();
        List<Node> _nodes = cov.getVariables();
        int _count = 0;

        List<Node> nx = new ArrayList<Node>();
        for (int k = 0; k < aa.size(); k++) {
            nx.add(_nodes.get(indices.get(_count++)));
        }
        nodeMap.put(x, nx);

        List<Node> ny = new ArrayList<Node>();
        for (int k = 0; k < bb.size(); k++) {
            ny.add(_nodes.get(indices.get(_count++)));
        }
        nodeMap.put(y, ny);

        List<Node> nz = new ArrayList<Node>();
        for (int k = 0; k < cc.size(); k++) {
            nz.add(_nodes.get(indices.get(_count++)));
        }
        nodeMap.put(z, nz);

        IndTestMultiFisherZ2 test = new IndTestMultiFisherZ2(nodeMap, cov, alpha);
        test.setVerbose(true);

        TetradMatrix submatrix = subMatrix(cov, nx, ny, nz);
        TetradMatrix inverse;
        int rank;
        try {
            inverse = submatrix.inverse();
            rank = inverse.columns();
        } catch (Exception e) {
            System.out.println("Couldn't invert " + submatrix.columns());
            throw new IllegalArgumentException();
        }

        List<Double> pValues = new ArrayList<Double>();
        for (int i = 0; i < nx.size(); i++) {
            for (int m = 0; m < ny.size(); m++) {
                int j = nx.size() + m;
                double a = -1.0 * inverse.get(i, j);
                double v0 = inverse.get(i, i);
                double v1 = inverse.get(j, j);
                double b = Math.sqrt(v0 * v1);

                double r = a / b;

                int dof = cov.getSampleSize() - 1 - rank;
                if (dof < 0) {
                    System.out.println("Negative dof: " + dof + " n = " + cov.getSampleSize()
                            + " cols = " + inverse.columns());
                    dof = 0;
                }

                double _z = Math.sqrt(dof) * 0.5 * (Math.log(1.0 + r) - Math.log(1.0 - r));
                double p = 2.0 * (1.0 - RandomUtil.getInstance().normalCdf(0, 1, abs(_z)));
                pValues.add(p);
            }
        }

        int k = 0;
        for (double p : pValues) {
            if (p < alpha) k++;
        }
        k_[c] = k;
    }

    double mean = StatUtils.mean(k_);
    double sd = StatUtils.sd(k_);

    List<Double> ret = new ArrayList<Double>();
    ret.add(mean);
    ret.add(sd);
    return ret;
}
From source file:ezbake.thrift.ThriftClientPool.java
private TProtocol getProtocol(String applicationName, String serviceName, int attempt) throws Exception {
    int endpointCount;
    StringBuilder exceptionList = new StringBuilder();
    synchronized (serviceMap) {
        Collection<HostAndPort> endPoints = serviceMap.get(serviceName);
        List<HostAndPort> list = Lists.newArrayList(endPoints);
        if (endPoints.size() > 1) {
            Collections.shuffle(list); // distributes load on endpoints
        }
        endpointCount = endPoints.size();
        for (HostAndPort hostAndPort : list) {
            try {
                final String securityId;
                if (applicationName != null) {
                    // Getting another app's security id
                    securityId = getSecurityId(applicationName);
                } else if (commonServices.contains(serviceName)) {
                    // isCommonService reconnects to zookeeper, don't need that here
                    // Getting a common service's security id
                    securityId = getSecurityId(serviceName);
                } else {
                    // Use your own app's security id
                    logger.debug("could not find security id for {}. using {}", serviceName,
                            applicationSecurityId);
                    securityId = applicationSecurityId;
                }
                return ThriftUtils.getProtocol(hostAndPort, securityId, configuration);
            } catch (Exception ex) {
                exceptionList.append("\nHost: ");
                exceptionList.append(hostAndPort.toString());
                exceptionList.append(" Exception: ");
                exceptionList.append(ex.getMessage());
                logger.warn("Failed to connect to host(" + hostAndPort.toString() + ") Trying next...", ex);
            }
        }
    }
    if (attempt == 1) {
        ServiceDiscovery serviceDiscoveryClient = getServiceDiscoveryClient();
        RefreshEndpoints(serviceDiscoveryClient);
        RefreshCommonEndpoints(serviceDiscoveryClient);
        closeClient(serviceDiscoveryClient);
        return getProtocol(applicationName, serviceName, 2);
    }
    throw new RuntimeException("Could not connect to service " + serviceName + " (found " + endpointCount
            + " endpoints)" + exceptionList.toString());
}
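Shuffling the endpoint list before iterating, as this example does, is a cheap form of client-side load balancing: each caller walks the hosts in a different random order, so the first (and usually only) connection attempt is spread across the fleet, and the remaining hosts serve as fallbacks. A minimal sketch with made-up host names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class EndpointPickSketch {
    public static void main(String[] args) {
        List<String> endpoints = new ArrayList<>(
                Arrays.asList("host-a:9090", "host-b:9090", "host-c:9090"));

        Collections.shuffle(endpoints); // each client starts at a random host
        System.out.println("first attempt: " + endpoints.get(0));
        System.out.println("fallbacks, in order: " + endpoints.subList(1, endpoints.size()));
    }
}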
From source file:ec.coevolve.MultiPopCoevolutionaryEvaluatorExtra.java
@Override
void loadElites(final EvolutionState state, int whichSubpop) {
    Subpopulation subpop = state.population.subpops[whichSubpop];

    // Update hall of fame
    if (hallOfFame != null) {
        int best = 0;
        Individual[] oldinds = subpop.individuals;
        for (int x = 1; x < oldinds.length; x++) {
            if (betterThan(oldinds[x], oldinds[best])) {
                best = x;
            }
        }
        hallOfFame[whichSubpop].add((Individual) subpop.individuals[best].clone());
    }

    int index = 0;

    // Last champions
    if (lastChampions > 0) {
        for (int i = 1; i <= lastChampions && i <= hallOfFame[whichSubpop].size(); i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) hallOfFame[whichSubpop]
                    .get(hallOfFame[whichSubpop].size() - i).clone();
        }
    }

    double randChamps = randomChampions;

    // Novel champions
    if (novelChampions > 0) {
        Individual[] behaviourElite = behaviourElite(state, whichSubpop);
        for (int i = 0; i < behaviourElite.length; i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) behaviourElite[i].clone();
            //System.out.println(whichSubpop + "\t" + ((ExpandedFitness) behaviourElite[i].fitness).getFitnessScore());
        }
        randChamps = randomChampions + (novelChampions - behaviourElite.length);
    }

    // Random champions
    if (randChamps > 0) {
        // Choose random positions
        ArrayList<Integer> pos = new ArrayList<Integer>(hallOfFame[whichSubpop].size());
        for (int i = 0; i < hallOfFame[whichSubpop].size(); i++) {
            pos.add(i);
        }
        Collections.shuffle(pos);
        for (int i = 0; i < pos.size() && i < randChamps; i++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) hallOfFame[whichSubpop].get(pos.get(i))
                    .clone();
        }
    }

    // NEAT Elite
    if (neatElite > 0) {
        NEATGeneticAlgorithm neat = ((NEATSubpop) subpop).getNEAT();
        if (!neat.getSpecies().specieList().isEmpty()) {
            HashMap<Integer, Individual> specieBests = new HashMap<Integer, Individual>(
                    neat.getSpecies().specieList().size() * 2);
            Chromosome[] genoTypes = neat.population().genoTypes();
            for (int i = 0; i < genoTypes.length; i++) {
                int specie = ((NEATChromosome) genoTypes[i]).getSpecieId();
                if (!specieBests.containsKey(specie)
                        || betterThan(subpop.individuals[i], specieBests.get(specie))) {
                    specieBests.put(specie, subpop.individuals[i]);
                }
            }
            Individual[] specBests = new Individual[specieBests.size()];
            specieBests.values().toArray(specBests);
            QuickSort.qsort(specBests, new EliteComparator2());
            for (int i = 0; i < specBests.length && i < neatElite; i++) {
                eliteIndividuals[whichSubpop][index++] = (Individual) specBests[i].clone();
            }
        }
    }

    // Fill remaining with the elite of the current pop
    int toFill = numElite - index;
    if (toFill == 1) {
        // Just one to place
        Individual best = subpop.individuals[0];
        for (int x = 1; x < subpop.individuals.length; x++) {
            if (betterThan(subpop.individuals[x], best)) {
                best = subpop.individuals[x];
            }
        }
        eliteIndividuals[whichSubpop][index++] = (Individual) best.clone();
    } else if (toFill > 1) {
        Individual[] orderedPop = Arrays.copyOf(subpop.individuals, subpop.individuals.length);
        QuickSort.qsort(orderedPop, new EliteComparator2());
        // load the top N individuals
        for (int j = 0; j < toFill; j++) {
            eliteIndividuals[whichSubpop][index++] = (Individual) orderedPop[j].clone();
        }
    }
}
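The "random champions" block above uses a standard trick for sampling without replacement: shuffle a list of index positions and take the first k. In sketch form, with an invented hall of fame:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class RandomSampleSketch {
    public static void main(String[] args) {
        List<String> hallOfFame = Arrays.asList("champ-0", "champ-1", "champ-2", "champ-3", "champ-4");
        int k = 2;

        List<Integer> pos = new ArrayList<>();
        for (int i = 0; i < hallOfFame.size(); i++) pos.add(i);

        Collections.shuffle(pos); // the first k positions are now a uniform random sample
        for (int i = 0; i < k; i++) {
            System.out.println("picked " + hallOfFame.get(pos.get(i)));
        }
    }
}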
From source file:com.gs.collections.impl.parallel.SerialParallelLazyPerformanceTest.java
private void shuffleAndRun(MutableList<Runnable> runnables) {
    Collections.shuffle(runnables);
    runnables.forEach(Procedures.cast(Runnable::run));
}