Usage examples for java.util.Collections.shuffle(List, Random)
@SuppressWarnings({ "rawtypes", "unchecked" }) public static void shuffle(List<?> list, Random rnd)
From source file:MainClass.java
public static void main(String args[]) {
    String s[] = { "A", "B", "C", "D", "E", "H", "I" };
    // both lists are fixed-size views backed by the same array s,
    // so a shuffle through either view is visible in both
    List list1 = Arrays.asList(s);
    List list2 = Arrays.asList(s);
    Random rand = new Random(100);
    Collections.shuffle(list1, rand);
    Collections.shuffle(list2, rand);
    System.out.println(list1);
    System.out.println(list2);
}
From source file:ShuffleTest.java
public static void main(String args[]) {
    String simpsons[] = { "Bart", "Hugo", "Lisa", "Marge", "Homer", "Maggie", "Roy" };
    List list1 = Arrays.asList(simpsons);
    List list2 = Arrays.asList(simpsons);
    Random rand = new Random(100);
    Collections.shuffle(list1, rand);
    Collections.shuffle(list2, rand);
    System.out.println(list1);
    System.out.println(list2);
}
From source file:Main.java
public static void main(String args[]) {
    // create linked list
    List<Integer> list = new LinkedList<Integer>();
    // populate the list
    list.add(5);
    list.add(2);
    list.add(1);
    list.add(-3);
    System.out.println("List before shuffle: " + list);
    // shuffle the list
    Collections.shuffle(list, new Random());
    System.out.println("List after shuffle: " + list);
}
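As the Collections.shuffle Javadoc notes, the method runs in linear time; when the list does not implement RandomAccess (as with the LinkedList above) and is large, the implementation first dumps the list into an array, shuffles the array, and then writes the result back, avoiding the quadratic cost of repeated positional access on a linked list.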
From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step2ArgumentPairsSampling.java
public static void main(String[] args) throws Exception {
    String inputDir = args[0];
    // /tmp
    File outputDir = new File(args[1]);
    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    // pseudo-random
    final Random random = new Random(1);

    int totalPairsCount = 0;

    // read all debates
    for (File file : IOHelper.listXmlFiles(new File(inputDir))) {
        Debate debate = DebateSerializer.deserializeFromXML(FileUtils.readFileToString(file, "utf-8"));

        // get two stances
        SortedSet<String> originalStances = debate.getStances();

        // cleaning: some debates have three or more stances (data are inconsistent)
        // remove those with only one argument
        SortedSet<String> stances = new TreeSet<>();
        for (String stance : originalStances) {
            if (debate.getArgumentsForStance(stance).size() > 1) {
                stances.add(stance);
            }
        }

        if (stances.size() != 2) {
            throw new IllegalStateException(
                    "2 stances per debate expected, was " + stances.size() + ", " + stances);
        }

        // for each stance, get pseudo-random N arguments
        for (String stance : stances) {
            List<Argument> argumentsForStance = debate.getArgumentsForStance(stance);

            // shuffle
            Collections.shuffle(argumentsForStance, random);

            // and get max first N arguments
            List<Argument> selectedArguments = argumentsForStance.subList(0,
                    argumentsForStance.size() < MAX_SELECTED_ARGUMENTS_PRO_SIDE ? argumentsForStance.size()
                            : MAX_SELECTED_ARGUMENTS_PRO_SIDE);

            List<ArgumentPair> argumentPairs = new ArrayList<>();

            // now create pairs
            for (int i = 0; i < selectedArguments.size(); i++) {
                for (int j = (i + 1); j < selectedArguments.size(); j++) {
                    Argument arg1 = selectedArguments.get(i);
                    Argument arg2 = selectedArguments.get(j);

                    ArgumentPair argumentPair = new ArgumentPair();
                    argumentPair.setDebateMetaData(debate.getDebateMetaData());

                    // assign arg1 and arg2 pseudo-randomly
                    // (not to have the same argument as arg1 all the time)
                    if (random.nextBoolean()) {
                        argumentPair.setArg1(arg1);
                        argumentPair.setArg2(arg2);
                    } else {
                        argumentPair.setArg1(arg2);
                        argumentPair.setArg2(arg1);
                    }

                    // set unique id
                    argumentPair.setId(argumentPair.getArg1().getId() + "_" + argumentPair.getArg2().getId());

                    argumentPairs.add(argumentPair);
                }
            }

            String fileName = IOHelper.createFileName(debate.getDebateMetaData(), stance);
            File outputFile = new File(outputDir, fileName);

            // and save all sampled pairs into an XML file
            XStreamTools.toXML(argumentPairs, outputFile);
            System.out.println("Saved " + argumentPairs.size() + " pairs to " + outputFile);

            totalPairsCount += argumentPairs.size();
        }
    }

    System.out.println("Total pairs generated: " + totalPairsCount);
}
From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step3HITCreator.java
@SuppressWarnings("unchecked") public static void main(String[] args) throws IOException { String inputDir = args[0];//from ww w. j a v a2s.com File outputDir = new File(args[1]); // sandbox or real MTurk? final boolean useSandbox = false; // required only for pilot // final int randomArgumentPairsCount = 50; // pseudo-random generator final Random random = new Random(1); for (Map.Entry<String, SortedSet<String>> entry : BATCHES.entrySet()) { Step3HITCreator hitCreator = new Step3HITCreator(useSandbox); hitCreator.outputPath = new File(outputDir, entry.getKey()); hitCreator.initialize(); // we will process only a subset first List<ArgumentPair> allArgumentPairs = new ArrayList<>(); Collection<File> files = IOHelper.listXmlFiles(new File(inputDir)); System.out.println(files); // read all files for the given batch for (File file : files) { if (entry.getValue().contains(file.getName())) { allArgumentPairs.addAll((List<ArgumentPair>) XStreamTools.getXStream().fromXML(file)); } } // we have to shuffle them Collections.shuffle(allArgumentPairs, random); // only for pilot // List<ArgumentPair> selectedArgumentPairs = allArgumentPairs // .subList(0, randomArgumentPairsCount); // for (ArgumentPair argumentPair : selectedArgumentPairs) { for (ArgumentPair argumentPair : allArgumentPairs) { hitCreator.process(argumentPair); } hitCreator.collectionProcessComplete(); } }
From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java
public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances) / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex, sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances, invalidIndex,
                        meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //compute locality
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}
From source file:Main.java
public static <T> List<T> shuffle(Collection<T> items, Random random) {
    // copy the input so the original collection is left untouched
    List<T> result = new ArrayList<T>(items);
    Collections.shuffle(result, random);
    return result;
}
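A possible usage of the helper above (element values and seed are illustrative); because the helper copies its input into a new ArrayList, the source collection keeps its original order:

List<String> letters = Arrays.asList("a", "b", "c", "d");
List<String> mixed = shuffle(letters, new Random(42));
System.out.println(letters); // still [a, b, c, d]
System.out.println(mixed);   // a permutation of the same elements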
From source file:Main.java
private static Bundle createNamePortBundle(String name, int port, TreeMap<String, ArrayList<String>> ips) {
    Bundle namePort = new Bundle();
    namePort.putString("name", name);
    namePort.putInt("port", port);
    if (ips != null) {
        ArrayList<String> ip = ips.get(name);
        // pick a random IP for this host name
        Collections.shuffle(ip, new Random());
        namePort.putString("ip", ip.get(0));
    }
    return namePort;
}
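A note on this pattern: shuffling the whole list only to read index 0 is a linear-time way to pick one random element, and it also reorders the shared list in place. If that side effect is unwanted, reading a single element with ip.get(new Random().nextInt(ip.size())) would select a random entry without mutating the list.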
From source file:com.google.android.apps.santatracker.games.cityquiz.CityQuizUtil.java
/**
 * Retrieve a random list of cities.
 *
 * @param amt Max number of cities to retrieve.
 *
 * @return Random list of cities. If amt is more than the amount of cities available, all cities are returned.
 */
public static List<City> getCities(Context context, int amt) {
    List<City> allCities = getCities(context);
    Collections.shuffle(allCities, new Random());
    if (amt > allCities.size()) {
        amt = allCities.size();
    }
    // Only return the cities that will be used in the game.
    List<City> cities = new ArrayList<>();
    cities.addAll(0, allCities.subList(0, amt));
    return cities;
}
From source file:edu.byu.nlp.util.Counters.java
/**
 * Get the n entries with the largest value based on some comparator.
 * Used by Counter's argMaxList method.
 */
public static <E, V extends Comparable<V>> List<E> argMaxList(Set<Entry<E, V>> entrySet, int topn,
        RandomGenerator rnd) {
    topn = (topn > 0) ? topn : entrySet.size();

    List<Entry<E, V>> entries = Lists.newArrayList(entrySet);

    // shuffle to ensure that ties are broken randomly
    // (Collections.sort is stable, so the random order survives among equal values)
    if (rnd != null) {
        Collections.shuffle(entries, new RandomAdaptor(rnd));
    }

    // sort to ensure most voted-for options are at the beginning
    Collections.sort(entries, new Comparator<Entry<E, V>>() {
        @Override
        public int compare(Entry<E, V> o1, Entry<E, V> o2) {
            return (o2.getValue()).compareTo(o1.getValue()); // descending order
        }
    });

    // pull out the top n values
    List<E> vals = Lists.newArrayList();
    for (int i = 0; i < Math.min(topn, entries.size()); i++) {
        vals.add(entries.get(i).getKey());
    }
    return vals;
}