List of usage examples for java.util Collections shuffle
public static void shuffle(List<?> list)
From source file:fr.univnantes.lina.UIMAProfiler.java
/**
 * Pretty-prints every statistic collected by this profiler — task timings,
 * hit counters (plain and optionally LaTeX), sampled examples and point
 * statuses — to the given stream, then flushes it.
 *
 * <p>Nothing is printed when the profiler is empty. Hit counters are listed
 * in decreasing hit order. Example keys are shuffled so each run shows a
 * different sample, and at most {@code exampleLimit} examples are printed
 * per key.
 *
 * @param stream destination stream; flushed but not closed
 */
@Override
public void display(PrintStream stream) {
    // Nothing collected -> no report at all.
    if (isEmpty())
        return;
    // Banner with the profiler name.
    stream.println(
            "###########################################################################################");
    stream.println(
            "#################################### " + this.name + " ####################################");
    stream.println(
            "###########################################################################################");
    String formatString = "%30s %10sms\n";
    if (!tasks.isEmpty()) {
        stream.println("--------------- Tasks --------------");
        // One line per task name: the summed duration of all its recorded runs.
        for (String taskName : tasks.keySet()) {
            long t = 0;
            for (ProfilingTask task : tasks.get(taskName))
                t += task.getTotal();
            stream.format(formatString, taskName, t);
        }
        stream.format(formatString, "TOTAL", getTotal());
    }
    if (!counters.isEmpty()) {
        stream.println("--------------- Hits ------------------");
        formatString = "%30s %10s\n";
        long total = 0;
        // Orders counter keys by decreasing hit count (compares o2 before o1).
        Comparator<String> comp = new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return Integer.compare(counters.get(o2).intValue(), counters.get(o1).intValue());
            }
        };
        List<String> sortedkeys = new LinkedList<String>();
        sortedkeys.addAll(counters.keySet());
        Collections.sort(sortedkeys, comp);
        for (String hitName : sortedkeys) {
            int h = counters.get(hitName).intValue();
            total += h;
            stream.format(formatString, hitName, h);
        }
        stream.format(formatString, "TOTAL", total);
        if (latexFormat) {
            // Same counters again, emitted as LaTeX table rows ("key & count \\").
            stream.println("--------------- Hits (Latex) ----------");
            total = 0;
            sortedkeys = new LinkedList<String>();
            sortedkeys.addAll(counters.keySet());
            Collections.sort(sortedkeys, comp);
            for (String hitName : sortedkeys) {
                int h = counters.get(hitName).intValue();
                total += h;
                stream.println(hitName + " & " + counters.get(hitName).intValue() + " \\\\");
            }
            stream.println("TOTAL & " + total + " \\\\");
        }
    }
    if (!examples.isEmpty()) {
        stream.println("-------------- Examples ---------------");
        List<String> keySet = Lists.newArrayList(examples.keySet());
        // Shuffle so successive runs expose a different sample of keys first.
        Collections.shuffle(keySet);
        for (String hitName : keySet) {
            int i = 0;
            stream.println("* " + hitName);
            for (Object o : examples.get(hitName)) {
                i++;
                // Cap the number of examples printed for each key.
                if (i > exampleLimit)
                    break;
                // Flatten newlines and lowercase for one-line display.
                String str = o == null ? "null"
                        : o.toString().replaceAll("\n", " ").replaceAll("\r", " ").toLowerCase();
                stream.println("\t" + str);
            }
        }
    }
    if (!pointStatus.isEmpty()) {
        stream.println("-------------- Point status -----------");
        for (String point : pointStatus.keySet()) {
            stream.println(point + ": " + pointStatus.get(point));
        }
    }
    stream.flush();
}
From source file:eu.amidst.core.utils.Utils.java
/**
 * Loads the whole data set from {@code inputPath} into memory, shuffles its
 * instances in place, and writes the shuffled data to {@code outputPath}.
 *
 * @param inputPath  path of the data file to read
 * @param outputPath path of the shuffled data file to write
 * @throws java.io.UncheckedIOException if writing the output file fails
 */
public static void shuffleData(String inputPath, String outputPath) {
    DataOnMemory<DataInstance> dataOnMemory = DataStreamLoader.loadDataOnMemoryFromFile(inputPath);
    Collections.shuffle(dataOnMemory.getList());
    try {
        DataStreamWriter.writeDataToFile(dataOnMemory, outputPath);
    } catch (IOException e) {
        // Bug fix: the original empty catch silently swallowed the failure,
        // leaving the caller unaware that no output file was produced.
        // Fully qualified to avoid touching the file's import section.
        throw new java.io.UncheckedIOException("Failed to write shuffled data to " + outputPath, e);
    }
}
From source file:com.homeadvisor.kafdrop.service.CuratorKafkaMonitor.java
/**
 * Picks one broker id uniformly at random from the broker cache.
 *
 * @return a random broker id, or {@code null} when no brokers are cached
 */
private Integer randomBroker() {
    List<Integer> brokerIds = brokerCache.keySet().stream().collect(Collectors.toList());
    if (brokerIds.isEmpty()) {
        return null;
    }
    // Performance fix: the original shuffled the entire list (O(n)) just to
    // read element 0; picking a random index gives the same uniform choice.
    // Fully qualified to avoid touching the file's import section.
    int index = java.util.concurrent.ThreadLocalRandom.current().nextInt(brokerIds.size());
    return brokerIds.get(index);
}
From source file:com.linkedin.pinot.segments.v1.creator.DictionariesTest.java
/**
 * Verifies that heap- and mmap-loaded segments resolve dictionary ids
 * identically for every column, probing each column's unique entries in a
 * random order. For every column except "pageKey", each entry must also be
 * present (non-negative index) in both dictionaries.
 */
@Test
public void test2() throws Exception {
    final IndexSegmentImpl heapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.heap);
    final IndexSegmentImpl mmapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.mmap);
    final Map<String, ColumnMetadata> metadataMap = ((SegmentMetadataImpl) mmapSegment.getSegmentMetadata())
            .getColumnMetadataMap();
    for (final String columnName : metadataMap.keySet()) {
        final ImmutableDictionaryReader dictFromHeap = heapSegment.getDictionaryFor(columnName);
        final ImmutableDictionaryReader dictFromMmap = mmapSegment.getDictionaryFor(columnName);
        // Probe in random order so the test does not depend on insertion order.
        final List<Object> entries = Arrays.asList(uniqueEntries.get(columnName).toArray());
        Collections.shuffle(entries);
        final boolean mustBePresent = !columnName.equals("pageKey");
        for (final Object entry : entries) {
            final int heapIndex = dictFromHeap.indexOf(entry);
            final int mmapIndex = dictFromMmap.indexOf(entry);
            Assert.assertEquals(mmapIndex, heapIndex);
            if (mustBePresent) {
                Assert.assertFalse(heapIndex < 0);
                Assert.assertFalse(mmapIndex < 0);
            }
        }
    }
}
From source file:com.streamsets.pipeline.stage.origin.jdbc.AbstractTableJdbcSource.java
private Map<Integer, Integer> decideMaxTableSlotsForThreads() { Map<Integer, Integer> threadNumberToMaxQueueSize = new HashMap<>(); if (tableJdbcConfigBean.batchTableStrategy == BatchTableStrategy.SWITCH_TABLES) { //If it is switch table strategy, we equal divide the work between all threads //(and if it cannot be equal distribute the remaining table slots to subset of threads) int totalNumberOfTables = allTableContexts.size(); int balancedQueueSize = totalNumberOfTables / numberOfThreads; //first divide total tables / number of threads to get //an exact balanced number of table slots to be assigned to all threads IntStream.range(0, numberOfThreads) .forEach(threadNumber -> threadNumberToMaxQueueSize.put(threadNumber, balancedQueueSize)); //Remaining table slots which are not assigned, can be assigned to a subset of threads int toBeAssignedTableSlots = totalNumberOfTables % numberOfThreads; //Randomize threads and pick a set of threads for processing extra slots List<Integer> threadNumbers = IntStream.range(0, numberOfThreads).boxed().collect(Collectors.toList()); Collections.shuffle(threadNumbers); threadNumbers = threadNumbers.subList(0, toBeAssignedTableSlots); //Assign the remaining table slots to thread by incrementing the max table slot for each of the randomly selected //thread by 1 for (int threadNumber : threadNumbers) { threadNumberToMaxQueueSize.put(threadNumber, threadNumberToMaxQueueSize.get(threadNumber) + 1); }/* w w w . ja v a2s . c o m*/ } else { //Assign one table slot to each thread if the strategy is process all available rows //So each table will pick up one table process it completely then return it back to pool //then pick up a new table and work on it. IntStream.range(0, numberOfThreads) .forEach(threadNumber -> threadNumberToMaxQueueSize.put(threadNumber, 1)); } return threadNumberToMaxQueueSize; }
From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkLikelihoodFromGTT.java
/**
 * Optimizes all branch lengths and hybridization probabilities of the given
 * species network so as to maximize the (cached) gene-tree probability.
 *
 * <p>All branch lengths are reset to 1.0 and all hybrid probabilities to 0.5,
 * then rounds of Brent single-parameter optimization are run over every edge
 * (and every hybrid node's probability pair) in a shuffled order. A change is
 * kept only when it improves the score. Rounds stop after {@code _maxRounds},
 * or when a round yields no improvement, or when the relative improvement
 * falls below {@code _improvementThreshold}.
 *
 * <p>NOTE: the network passed in is mutated in place to the best assignment
 * found.
 *
 * @param speciesNetwork      network whose parameters are optimized (mutated)
 * @param species2alleles     mapping from species name to its allele names
 * @param distinctTrees       distinct gene trees (raw list, as declared)
 * @param gtCorrespondence    correspondence data between gene trees and the
 *                            distinct-tree list (raw list, as declared)
 * @param singleAlleleSpecies species with a single allele; edges that need no
 *                            branch length because of them are skipped
 * @return the best (log) probability found
 * @throws IllegalStateException if the score ever decreases across a round,
 *                               which the accept-only-improvements scheme
 *                               should make impossible
 */
protected double findOptimalBranchLength(final Network<Object> speciesNetwork,
        final Map<String, List<String>> species2alleles, final List distinctTrees, final List gtCorrespondence,
        final Set<String> singleAlleleSpecies) {
    boolean continueRounds = true; // keep trying to improve network
    // Reset every parameter to a neutral starting point.
    for (NetNode<Object> node : speciesNetwork.dfs()) {
        for (NetNode<Object> parent : node.getParents()) {
            node.setParentDistance(parent, 1.0);
            if (node.isNetworkNode()) {
                node.setParentProbability(parent, 0.5);
            }
        }
    }
    Set<NetNode> node2ignoreForBL = findEdgeHavingNoBL(speciesNetwork, singleAlleleSpecies);
    double initalProb = computeProbabilityForCached(speciesNetwork, distinctTrees, species2alleles,
            gtCorrespondence);
    if (_printDetails)
        System.out.println(speciesNetwork.toString() + " : " + initalProb);
    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initalProb); // records the GTProb of the network at all times
    int roundIndex = 0;
    for (; roundIndex < _maxRounds && continueRounds; roundIndex++) {
        /*
         * Prepare a random ordering of network edge examinations each of which attempts to change a branch
         * length or hybrid prob to improve the GTProb score.
         */
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here. Will execute them one by one later.
        // One deferred action per (parent, child) edge: optimize its branch length.
        for (final NetNode<Object> parent : edu.rice.cs.bioinfo.programs.phylonet.structs.network.util.Networks
                .postTraversal(speciesNetwork)) {
            for (final NetNode<Object> child : parent.getChildren()) {
                // Edges involving single-allele species need no branch length.
                if (node2ignoreForBL.contains(child)) {
                    continue;
                }
                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedBranchLength) {
                                double incumbentBranchLength = child.getParentDistance(parent);
                                child.setParentDistance(parent, suggestedBranchLength);
                                double lnProb = updateProbabilityForCached(speciesNetwork, distinctTrees,
                                        gtCorrespondence, child, parent);
                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                } else // didn't improve, roll back change
                                {
                                    child.setParentDistance(parent, incumbentBranchLength);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.
                        try {
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE,
                                    Double.MIN_VALUE, _maxBranchLength);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                        // Re-sync the cache with whatever value was finally kept.
                        updateProbabilityForCached(speciesNetwork, distinctTrees, gtCorrespondence, child, parent);
                        if (_printDetails)
                            System.out.println(
                                    speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());
                    }
                });
            }
        }
        // One deferred action per hybrid node: optimize the probability split
        // between its two parents (they always sum to 1).
        for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node
        {
            Iterator<NetNode<Object>> hybridParents = child.getParents().iterator();
            final NetNode hybridParent1 = hybridParents.next();
            final NetNode hybridParent2 = hybridParents.next();
            assigmentActions.add(new Proc() {
                public void execute() {
                    UnivariateFunction functionToOptimize = new UnivariateFunction() {
                        public double value(double suggestedProb) {
                            double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);
                            child.setParentProbability(hybridParent1, suggestedProb);
                            child.setParentProbability(hybridParent2, 1.0 - suggestedProb);
                            double lnProb = updateProbabilityForCached(speciesNetwork, distinctTrees,
                                    gtCorrespondence, child, null);
                            if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                            {
                                lnGtProbOfSpeciesNetwork.setContents(lnProb);
                            } else // change did not improve, roll back
                            {
                                child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                            }
                            return lnProb;
                        }
                    };
                    BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.
                    try {
                        // NOTE(review): hard-coded special case — a hybrid node literally
                        // named "Y" is only searched in [0.6, 0.8] instead of [0, 1].
                        // Looks like a leftover experiment hack; confirm it is intended.
                        if (child.getName().equals("Y"))
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0.6, 0.8);
                        else
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0);
                    } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                    {
                    }
                    // Re-sync the cache with whatever value was finally kept.
                    updateProbabilityForCached(speciesNetwork, distinctTrees, gtCorrespondence, child, null);
                    if (_printDetails)
                        System.out.println(
                                speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());
                }
            });
        } // add hybrid probs to hybrid edges
        // Random order avoids a fixed bias in which parameter is adjusted first.
        Collections.shuffle(assigmentActions);
        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }
        if (_printDetails) {
            System.out.println("Round end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
            System.out
                    .println(speciesNetwork.toString() + "\n" + lnGtProbOfSpeciesNetwork.getContents() + "\n");
        }
        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt to last around, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt to improvement threshold to continue searching
        {
            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }
    return lnGtProbOfSpeciesNetwork.getContents();
}
From source file:ml.shifu.shifu.core.dvarsel.wrapper.CandidateGenerator.java
/**
 * Produces a mutated copy of the given seed: each gene in the seed is
 * dropped with probability 0.05, and every dropped gene is replaced by a
 * random gene that was not in the seed.
 *
 * @param seed the candidate seed to mutate (not modified)
 * @return a new seed, with a fresh id, carrying the mutated gene list
 */
@SuppressWarnings("unchecked")
private CandidateSeed doMutation(CandidateSeed seed) {
    List<Integer> geneList = new ArrayList<Integer>();
    // Pool of genes not present in the seed, in random order so that the
    // replacements taken from its head are a random sample.
    List<Integer> unselectedGeneList = ListUtils.subtract(variables, seed.getColumnIdList());
    Collections.shuffle(unselectedGeneList);
    int replaceCnt = 0;
    for (int i = 0; i < seed.getColumnIdList().size(); i++) {
        if (rd.nextDouble() < 0.05) {
            // Drop this gene; it will be replaced from the unselected pool.
            replaceCnt++;
        } else {
            geneList.add(seed.getColumnIdList().get(i));
        }
    }
    if (replaceCnt > 0) {
        // Bug fix: cap at the pool size — the original subList(0, replaceCnt)
        // threw IndexOutOfBoundsException when fewer unselected genes existed
        // than genes were dropped.
        geneList.addAll(unselectedGeneList.subList(0, Math.min(replaceCnt, unselectedGeneList.size())));
    }
    return new CandidateSeed(this.genSeedId(), geneList);
}
From source file:com.archsystemsinc.ipms.sec.webapp.controller.MeetingController.java
@RequestMapping(value = "/agenda") public String program(final Model model) { final List<MeetingAgendaItem> meetingAgendaItem = meetingAgendaItemService.findAll(); if (meetingAgendaItem != null) { Collections.shuffle((List<MeetingAgendaItem>) meetingAgendaItem); }//from ww w. j ava2 s.com model.addAttribute("meetingAgendaItem", meetingAgendaItem); return "agenda"; }
From source file:com.cong.chenchong.wifi.manager.ProxyConnector.java
private String[] getProxyList() { SharedPreferences prefs = Globals.getContext().getSharedPreferences(PREFERRED_SERVER, 0); String preferred = prefs.getString(PREFERRED_SERVER, null); String[] allProxies;/*from w ww. j ava 2 s . c o m*/ if (Defaults.release) { allProxies = new String[] { "c1.swiftp.org", "c2.swiftp.org", "c3.swiftp.org", "c4.swiftp.org", "c5.swiftp.org", "c6.swiftp.org", "c7.swiftp.org", "c8.swiftp.org", "c9.swiftp.org" }; } else { //allProxies = new String[] { // "cdev.swiftp.org" //}; allProxies = new String[] { "c1.swiftp.org", "c2.swiftp.org", "c3.swiftp.org", "c4.swiftp.org", "c5.swiftp.org", "c6.swiftp.org", "c7.swiftp.org", "c8.swiftp.org", "c9.swiftp.org" }; } // We should randomly permute the server list in order to spread // load between servers. Collections offers a shuffle() function // that does this, so we'll convert to List and back to String[]. List<String> proxyList = Arrays.asList(allProxies); Collections.shuffle(proxyList); allProxies = proxyList.toArray(new String[] {}); // arg used for type // Return preferred server first, followed by all others if (preferred == null) { return allProxies; } else { return RemoteUtil.concatStrArrays(new String[] { preferred }, allProxies); } }
From source file:com.ddschool.wifi.foregin.ProxyConnector.java
private String[] getProxyList() { SharedPreferences prefs = Globals.getContext().getSharedPreferences(PREFERRED_SERVER, 0); String preferred = prefs.getString(PREFERRED_SERVER, null); String[] allProxies;/*www. ja v a 2s . c om*/ if (Defaults.release) { allProxies = new String[] { "c1.swiftp.org", "c2.swiftp.org", "c3.swiftp.org", "c4.swiftp.org", "c5.swiftp.org", "c6.swiftp.org", "c7.swiftp.org", "c8.swiftp.org", "c9.swiftp.org" }; } else { //allProxies = new String[] { // "cdev.swiftp.org" //}; allProxies = new String[] { "c1.swiftp.org", "c2.swiftp.org", "c3.swiftp.org", "c4.swiftp.org", "c5.swiftp.org", "c6.swiftp.org", "c7.swiftp.org", "c8.swiftp.org", "c9.swiftp.org" }; } // We should randomly permute the server list in order to spread // load between servers. Collections offers a shuffle() function // that does this, so we'll convert to List and back to String[]. List<String> proxyList = Arrays.asList(allProxies); Collections.shuffle(proxyList); allProxies = proxyList.toArray(new String[] {}); // arg used for type // Return preferred server first, followed by all others if (preferred == null) { return allProxies; } else { return RemoteUtil.concatStrArrays(new String[] { preferred }, allProxies); } }