List of usage examples for java.util.Collections.shuffle(List)
public static void shuffle(List<?> list)
From source file:com.ichi2.libanki.Sched.java
public void sortCards(long[] cids, int start, int step, boolean shuffle, boolean shift) { String scids = Utils.ids2str(cids); long now = Utils.intNow(); ArrayList<Long> nids = new ArrayList<Long>(); for (long id : cids) { long nid = mCol.getDb().queryLongScalar("SELECT nid FROM cards WHERE id = " + id); if (!nids.contains(nid)) { nids.add(nid);/*from ww w.ja v a 2s.c o m*/ } } if (nids.size() == 0) { // no new cards return; } // determine nid ordering HashMap<Long, Long> due = new HashMap<Long, Long>(); if (shuffle) { Collections.shuffle(nids); } for (int c = 0; c < nids.size(); c++) { due.put(nids.get(c), (long) (start + c * step)); } int high = start + step * (nids.size() - 1); // shift? if (shift) { int low = mCol.getDb().queryScalar( "SELECT min(due) FROM cards WHERE due >= " + start + " AND type = 0 AND id NOT IN " + scids); if (low != 0) { int shiftby = high - low + 1; mCol.getDb().execute("UPDATE cards SET mod = " + now + ", usn = " + mCol.usn() + ", due = due + " + shiftby + " WHERE id NOT IN " + scids + " AND due >= " + low + " AND queue = 0"); } } // reorder cards ArrayList<Object[]> d = new ArrayList<Object[]>(); Cursor cur = null; try { cur = mCol.getDb().getDatabase().rawQuery("SELECT id, nid FROM cards WHERE type = 0 AND id IN " + scids, null); while (cur.moveToNext()) { long nid = cur.getLong(1); d.add(new Object[] { due.get(nid), now, mCol.usn(), cur.getLong(0) }); } } finally { if (cur != null && !cur.isClosed()) { cur.close(); } } mCol.getDb().executeMany("UPDATE cards SET due = ?, mod = ?, usn = ? WHERE id = ?", d); }
From source file:com.hichinaschool.flashcards.libanki.Sched.java
public void sortCards(long[] cids, int start, int step, boolean shuffle, boolean shift) { String scids = Utils.ids2str(cids); long now = Utils.intNow(); ArrayList<Long> nids = new ArrayList<Long>(); for (long id : cids) { long nid = mCol.getDb().queryLongScalar("SELECT nid FROM cards WHERE id = " + id); if (!nids.contains(nid)) { nids.add(nid);//from w w w . j av a2 s . c om } } if (nids.size() == 0) { // no new cards return; } // determine nid ordering HashMap<Long, Long> due = new HashMap<Long, Long>(); if (shuffle) { Collections.shuffle(nids); } for (int c = 0; c < nids.size(); c++) { due.put(nids.get(c), (long) (start + c * step)); } int high = start + step * (nids.size() - 1); // shift? if (shift) { int low = mCol.getDb().queryScalar( "SELECT min(due) FROM cards WHERE due >= " + start + " AND type = 0 AND id NOT IN " + scids, false); if (low != 0) { int shiftby = high - low + 1; mCol.getDb().execute("UPDATE cards SET mod = " + now + ", usn = " + mCol.usn() + ", due = due + " + shiftby + " WHERE id NOT IN " + scids + " AND due >= " + low + " AND queue = 0"); } } // reorder cards ArrayList<Object[]> d = new ArrayList<Object[]>(); Cursor cur = null; try { cur = mCol.getDb().getDatabase().rawQuery("SELECT id, nid FROM cards WHERE type = 0 AND id IN " + scids, null); while (cur.moveToNext()) { long nid = cur.getLong(1); d.add(new Object[] { due.get(nid), now, mCol.usn(), cur.getLong(0) }); } } finally { if (cur != null && !cur.isClosed()) { cur.close(); } } mCol.getDb().executeMany("UPDATE cards SET due = ?, mod = ?, usn = ? WHERE id = ?", d); }
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.capeTown.SyntheticPopCT.java
/**
 * Allocates every employed person in the synthetic population to a workplace.
 * Workers are processed in random order (via {@link Collections#shuffle}) to
 * avoid geographical bias; when no vacant job of the selected type remains in
 * the study area, the worker is marked as working outside it (workplace = -2).
 */
private void assignJobs() {
    //Method to allocate workers at workplaces
    //todo. Things to consider:
    //If there are no more workplaces of the specific job type, the worker is sent outside the area (workplace = -2; distance = 1000 km)
    //Workers that also attend school are considered only as workers (educational place is not selected for them)

    //Calculate distance impedance
    alphaJob = ResourceUtil.getDoubleProperty(rb, PROPERTIES_JOB_ALPHA);
    gammaJob = ResourceUtil.getDoubleProperty(rb, PROPERTIES_JOB_GAMMA);
    distanceImpedance = new Matrix(distanceMatrix.getRowCount(), distanceMatrix.getColumnCount());
    // NOTE(review): Matrix indices here are 1-based, matching the 1..rowCount loop bounds.
    for (int i = 1; i <= distanceMatrix.getRowCount(); i++) {
        for (int j = 1; j <= distanceMatrix.getColumnCount(); j++) {
            // Double-exponential impedance: exp(alpha * exp(distance * gamma)).
            distanceImpedance.setValueAt(i, j,
                    (float) Math.exp(alphaJob * Math.exp(distanceMatrix.getValueAt(i, j) * gammaJob)));
        }
    }

    //Identify vacant jobs and schools by zone and type
    identifyVacantJobsByZoneType();

    //For validation - obtain the trip length distribution
    /* Frequency commuteDistance = new Frequency();
    validationCommutersFlow(); //Generates the validation tabledatasets
    int[] flow = SiloUtil.createArrayWithValue(odMunicipalityFlow.getRowCount(),0);
    int[] flowR = SiloUtil.createArrayWithValue(odCountyFlow.getRowCount(),0);
    int count = 0;
    odMunicipalityFlow.appendColumn(flow,Integer.toString(count));
    odCountyFlow.appendColumn(flowR,Integer.toString(count));*/

    //Produce one array list with workers' ID
    Collection<Person> persons = dataContainer.getHouseholdData().getPersons();
    ArrayList<Person> workerArrayList = new ArrayList<>();
    for (Person person : persons) {
        if (person.getOccupation() == Occupation.EMPLOYED) {
            workerArrayList.add(person);
        }
    }
    //Randomize the order of the worker array list
    Collections.shuffle(workerArrayList);

    //Job type probabilities
    probabilitiesJob = SiloUtil.readCSVfile(rb.getString(PROPERTIES_EMPLOYMENT_BY_GENDER_EDU));
    probabilitiesJob.buildStringIndex(1);

    //Start the selection of the jobs in random order to avoid geographical bias
    logger.info(" Started assigning workplaces");
    int assignedJobs = 0;
    RealEstateDataManager realEstate = dataContainer.getRealEstateData();
    JobDataManager jobDataManager = dataContainer.getJobData();
    for (Person pp : workerArrayList) {

        //Select the zones with vacant jobs for that person, given the job type
        int selectedJobType = selectJobType(pp);

        int[] keys = idZonesVacantJobsByType.get(selectedJobType);
        int lengthKeys = numberZonesByType.get(selectedJobType);
        // if there are still TAZ with vacant jobs in the region, select one of them. If not, assign them outside the area
        if (lengthKeys > 0) {

            //Select the workplace location (TAZ) for that person given his/her job type
            Household hh = pp.getHousehold();
            int origin = realEstate.getDwelling(hh.getDwellingId()).getZoneId();
            int[] workplace = selectWorkplace(origin, numberVacantJobsByZoneByType, keys, lengthKeys,
                    distanceImpedance);

            //Assign last vacant jobID from the TAZ
            int jobID = idVacantJobsByZoneType.get(workplace[0])[numberVacantJobsByZoneByType.get(workplace[0]) - 1];

            //Assign values to job and person
            jobDataManager.getJobFromId(jobID).setWorkerID(pp.getId());
            pp.setJobTAZ(jobDataManager.getJobFromId(jobID).getZoneId());
            pp.setWorkplace(jobID);
            //pp.setTravelTime(distanceMatrix.getValueAt(pp.getZone(), Job.getJobFromId(jobID).getZone()));

            //For validation OD TableDataSet
            /* commuteDistance.addValue((int) distanceMatrix.getValueAt(pp.getZone(), Job.getJobFromId(jobID).getZone()));
            int homeMun = (int) cellsMatrix.getIndexedValueAt(pp.getZone(), "smallID");
            int workMun = (int) cellsMatrix.getIndexedValueAt(pp.getWorkplace(), "smallID");
            int odPair = homeMun * 1000 + workMun;
            odMunicipalityFlow.setIndexedValueAt(odPair,Integer.toString(count),odMunicipalityFlow.getIndexedValueAt(odPair,Integer.toString(count))+ 1);
            homeMun = (int) cellsMatrix.getIndexedValueAt(pp.getZone(), "smallCenter");
            workMun = (int) cellsMatrix.getIndexedValueAt(pp.getWorkplace(), "smallCenter");
            odPair = homeMun * 1000 + workMun;
            odCountyFlow.setIndexedValueAt(odPair,Integer.toString(count),odCountyFlow.getIndexedValueAt(odPair,Integer.toString(count))+ 1); */

            //Update counts of vacant jobs
            numberVacantJobsByZoneByType.put(workplace[0], numberVacantJobsByZoneByType.get(workplace[0]) - 1);
            numberVacantJobsByType.put(selectedJobType, numberVacantJobsByType.get(selectedJobType) - 1);
            if (numberVacantJobsByZoneByType.get(workplace[0]) < 1) {
                // Zone exhausted for this job type: swap-remove it from the key array.
                keys[workplace[1]] = keys[numberZonesByType.get(selectedJobType) - 1];
                idZonesVacantJobsByType.put(selectedJobType, keys);
                numberZonesByType.put(selectedJobType, numberZonesByType.get(selectedJobType) - 1);
                if (numberZonesByType.get(selectedJobType) < 1) {
                    // Job type exhausted region-wide: remove it from the type arrays.
                    int w = 0;
                    // NOTE(review): non-short-circuit '&' evaluates both operands; if w ever
                    // reaches jobStringTypes.length this indexes out of bounds — confirm intended.
                    while (w < jobStringTypes.length & selectedJobType > jobIntTypes.get(jobStringTypes[w])) {
                        w++;
                    }
                    jobIntTypes.remove(jobStringTypes[w]);
                    jobStringTypes[w] = jobStringTypes[jobStringTypes.length - 1];
                    jobStringTypes = SiloUtil.removeOneElementFromZeroBasedArray(jobStringTypes,
                            jobStringTypes.length - 1);
                }
            }
            //logger.info("   Job " + assignedJobs + " assigned at " + workplace[0]);
            assignedJobs++;
        } else { //No more vacant jobs in the study area. This person will work outside the study area
            pp.setWorkplace(-2);
            //pp.setTravelTime(1000);
            logger.info(" No more jobs available of " + selectedJobType + " class. Person " + pp.getId()
                    + " has workplace outside the study area.");
        }
    }

    //For validation - trip length distribution
    //checkTripLengthDistribution(commuteDistance, alphaJob, gammaJob, "microData/interimFiles/tripLengthDistributionWork.csv", 1); //Trip length frequency distribution
    //checkodMatrix(odMunicipalityFlow, alphaJob, gammaJob, count,"microData/interimFiles/odMunicipalityDifference.csv");
    //SiloUtil.writeTableDataSet(odMunicipalityFlow,"microData/interimFiles/odMunicipalityFlow.csv");
    //SiloUtil.writeTableDataSet(odCountyFlow,"microData/interimFiles/odRegionFlow.csv");
    //count++;
}
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.capeTown.SyntheticPopCT.java
/**
 * Assigns a school location to every student in the synthetic population.
 * Students are processed in random order to avoid geographical bias; when no
 * school capacity of the required type remains, the student is marked as
 * studying outside the area (schoolplace = -2). University students
 * (schoolType == 3) use an impedance-weighted choice; primary/secondary
 * students are sent to the closest school.
 */
private void assignSchools() {
    //method to assign the school location for students. They should be registered on the microdata as students
    //todo. Things to consider:
    //The location of the school is stored under "schoolplace location"
    //Students from Berufschule are considered to be working full-time and therefore they don't attend class
    //If there are no more school places for the student, they are sent outside the area (schoolplace = -2)
    //For the following years, we school transition should be accomplished
    logger.info(" Started assigning schools");
    int count = 0;

    //Calculate distance impedance for students
    double alphaUniversity = ResourceUtil.getDoubleProperty(rb, PROPERTIES_UNIVERSITY_ALPHA);
    double gammaUniversity = ResourceUtil.getDoubleProperty(rb, PROPERTIES_UNIVERSITY_GAMMA);
    Matrix universityDistanceImpedance = new Matrix(distanceMatrix.getRowCount(), distanceMatrix.getColumnCount());
    Matrix schoolDistanceImpedance = new Matrix(distanceMatrix.getRowCount(), distanceMatrix.getColumnCount());
    for (int i = 1; i <= distanceMatrix.getRowCount(); i++) {
        for (int j = 1; j <= distanceMatrix.getColumnCount(); j++) {
            // University choice uses a double-exponential impedance; schools use raw distance.
            universityDistanceImpedance.setValueAt(i, j, (float) Math
                    .exp(alphaUniversity * Math.exp(distanceMatrix.getValueAt(i, j) * gammaUniversity)));
            schoolDistanceImpedance.setValueAt(i, j, distanceMatrix.getValueAt(i, j));
        }
    }

    //Identify vacant schools by zone and type
    identifyVacantSchoolsByZoneByType();

    //For validation - obtain the trip length distribution
    Frequency travelSecondary = new Frequency();
    Frequency travelUniversity = new Frequency();
    Frequency travelPrimary = new Frequency();
    validationCommutersFlow(); //Generates the validation tabledatasets
    int[] flow = SiloUtil.createArrayWithValue(odMunicipalityFlow.getRowCount(), 0);
    odMunicipalityFlow.appendColumn(flow, Integer.toString(count));

    //Produce one array list with students' ID
    // NOTE(review): getPersons() is cast to Map<Integer, Person> here, but the sibling
    // assignJobs() treats the same call as a Collection<Person> — confirm the actual
    // return type; this cast may fail at runtime.
    Map<Integer, Person> personMap = (Map<Integer, Person>) dataContainer.getHouseholdData().getPersons();
    ArrayList<Person> studentArrayList = new ArrayList<>();
    int[] studentsByType2 = new int[schoolTypes.length];
    for (Map.Entry<Integer, Person> pair : personMap.entrySet()) {
        int school = pair.getValue().getSchoolType();
        if (school > 0) { //They are studying
            studentArrayList.add(pair.getValue());
            studentsByType2[school - 1] = studentsByType2[school - 1] + 1;
        }
    }
    //Randomize the order of the students
    Collections.shuffle(studentArrayList);

    //Start the selection of schools in random order to avoid geographical bias
    // NOTE(review): duplicate of the log line at the top of this method.
    logger.info(" Started assigning schools");
    int assignedSchools = 0;
    RealEstateDataManager realEstate = dataContainer.getRealEstateData();
    int[] studentsOutside = new int[schoolTypes.length];
    int[] studentsByType = new int[schoolTypes.length];
    for (Person pp : studentArrayList) {

        //Select the zones with vacant schools for that person, given the school type
        int schoolType = pp.getSchoolType();
        studentsByType[schoolType - 1] = studentsByType[schoolType - 1] + 1;
        int[] keys = idZonesVacantSchoolsByType.get(schoolType);
        int lengthKeys = numberZonesWithVacantSchoolsByType.get(schoolType);
        if (lengthKeys > 0) { //if there are still TAZ with school capacity in the region, select one of them. If not, assign them outside the area

            //Select the school location (which raster cell) for that person given his/her job type
            int[] schoolPlace = new int[2];
            Household hh = pp.getHousehold();
            int origin = realEstate.getDwelling(hh.getDwellingId()).getZoneId();
            if (schoolType == 3) {
                schoolPlace = selectWorkplace(origin, numberVacantSchoolsByZoneByType, keys, lengthKeys,
                        universityDistanceImpedance);
                // schoolPlace[0] / 100 appears to decode a zone id from a combined key — TODO confirm encoding.
                travelUniversity.addValue((int) distanceMatrix.getValueAt(origin, schoolPlace[0] / 100));
            } else {
                schoolPlace = selectClosestSchool(origin, numberVacantSchoolsByZoneByType, keys, lengthKeys,
                        schoolDistanceImpedance);
                if (schoolType == 1) {
                    travelPrimary.addValue((int) distanceMatrix.getValueAt(origin, schoolPlace[0] / 100));
                } else if (schoolType == 2) {
                    travelSecondary.addValue((int) distanceMatrix.getValueAt(origin, schoolPlace[0] / 100));
                }
            }

            //Assign values to job and person
            pp.setSchoolPlace(schoolPlace[0] / 100);
            //pp.setTravelTime(distanceMatrix.getValueAt(pp.getZone(), pp.getSchoolPlace()));

            //For validation OD TableDataSet
            int homeMun = (int) cellsMatrix.getIndexedValueAt(origin, "smallID");
            int workMun = (int) cellsMatrix.getIndexedValueAt(pp.getSchoolPlace(), "smallID");
            int odPair = homeMun * 1000 + workMun;
            odMunicipalityFlow.setIndexedValueAt(odPair, Integer.toString(count),
                    odMunicipalityFlow.getIndexedValueAt(odPair, Integer.toString(count)) + 1);

            //Update counts of vacant school places
            numberVacantSchoolsByZoneByType.put(schoolPlace[0],
                    numberVacantSchoolsByZoneByType.get(schoolPlace[0]) - 1);
            if (numberVacantSchoolsByZoneByType.get(schoolPlace[0]) < 1) {
                // Zone exhausted for this school type: clamp to 0 and swap-remove the zone key.
                numberVacantSchoolsByZoneByType.put(schoolPlace[0], 0);
                keys[schoolPlace[1]] = keys[numberZonesWithVacantSchoolsByType.get(schoolType) - 1];
                idZonesVacantSchoolsByType.put(schoolType, keys);
                numberZonesWithVacantSchoolsByType.put(schoolType,
                        numberZonesWithVacantSchoolsByType.get(schoolType) - 1);
                if (numberZonesWithVacantSchoolsByType.get(schoolType) < 1) {
                    numberZonesWithVacantSchoolsByType.put(schoolType, 0);
                }
            }
            assignedSchools++;
        } else { //No more school capacity in the study area. This person will study outside the area
            pp.setSchoolPlace(-2); //they attend one school out of the area
            studentsOutside[schoolType - 1] = studentsOutside[schoolType - 1] + 1;
        }
    }

    //For validation - trip length distribution
    checkTripLengthDistribution(travelPrimary, 0, 0, "microData/interimFiles/tripLengthDistributionPrimary.csv",
            1);
    checkTripLengthDistribution(travelSecondary, 0, 0,
            "microData/interimFiles/tripLengthDistributionSecondary.csv", 1); //Trip length frequency distribution
    // NOTE(review): university distribution is checked against the *job* alpha/gamma
    // parameters, not alphaUniversity/gammaUniversity — confirm this is intentional.
    checkTripLengthDistribution(travelUniversity, alphaJob, gammaJob,
            "microData/interimFiles/tripLengthDistributionUniversity.csv", 1);
    SiloUtil.writeTableDataSet(odMunicipalityFlow, "microData/interimFiles/odMunicipalityFlow.csv");
    for (int i = 0; i < schoolTypes.length; i++) {
        logger.info(" School type: " + schoolTypes[i] + ". " + studentsOutside[schoolTypes[i] - 1]
                + " students out of " + studentsByType[schoolTypes[i] - 1] + " study outside the area");
    }
}
From source file:org.alienlabs.hatchetharry.view.page.HomePage.java
private void generateShuffleLibraryLink(final String id) { final AjaxLink<Void> insertDivisionLink = new AjaxLink<Void>(id) { private static final long serialVersionUID = 1L; @Override//from w w w . j ava2 s . co m public void onClick(final AjaxRequestTarget target) { final Long _gameId = HomePage.this.session.getGameId(); final List<BigInteger> allPlayersInGame = HomePage.this.persistenceService .giveAllPlayersFromGame(_gameId); final ConsoleLogStrategy logger = AbstractConsoleLogStrategy.chooseStrategy( ConsoleLogType.SHUFFLE_LIBRARY, null, null, null, null, HomePage.this.session.getPlayer().getName(), null, null, null, null, _gameId); final Player me = HomePage.this.session.getPlayer(); final NotifierCometChannel ncc = new NotifierCometChannel(NotifierAction.SHUFFLE_LIBRARY_ACTION, null, me.getId(), me.getName(), me.getSide().getSideName(), null, null, ""); final List<MagicCard> allCardsInLibrary = HomePage.this.persistenceService .getAllCardsInLibraryForDeckAndPlayer(HomePage.this.session.getGameId(), HomePage.this.session.getPlayer().getId(), HomePage.this.session.getPlayer().getDeck().getDeckId()); Collections.shuffle(allCardsInLibrary); Collections.shuffle(allCardsInLibrary); Collections.shuffle(allCardsInLibrary); for (int i = 0; i < allCardsInLibrary.size(); i++) { allCardsInLibrary.get(i).setZoneOrder(Long.valueOf(i)); } HomePage.this.persistenceService.saveOrUpdateAllMagicCards(allCardsInLibrary); EventBusPostService.post(allPlayersInGame, new ConsoleLogCometChannel(logger), ncc); } }; insertDivisionLink.setOutputMarkupId(true).setMarkupId(id); this.add(insertDivisionLink); }
From source file:com.github.podd.utils.test.OntologyUtilsTest.java
/**
 * Randomised test, to fuzz test the algorithm: shuffles the eight base-level
 * schema ontologies and checks that the resolved import order still respects
 * every pairwise dependency (dc < foaf < user < base, per version).
 *
 * @throws Exception on any failure
 */
@Test
public void testSchemaImportsRealisticPoddV1V2AllToBaseRandomOrder() throws Exception {
    final Model model = Rio.parse(this.getClass().getResourceAsStream("/test/test-podd-schema-manifest.ttl"), "",
            RDFFormat.TURTLE);

    final List<OWLOntologyID> imports = new ArrayList<OWLOntologyID>(Arrays.asList(
            OntologyConstant.testPoddBaseV1, OntologyConstant.testPoddUserV1, OntologyConstant.testPoddUserV2,
            OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddBaseV2, OntologyConstant.testPoddFoafV2,
            OntologyConstant.testPoddDcV1, OntologyConstant.testPoddDcV2));
    // Randomise the input order to fuzz test the algorithm.
    Collections.shuffle(imports);

    final List<OWLOntologyID> schemaManifestImports = OntologyUtils.schemaImports(model,
            new LinkedHashSet<OWLOntologyID>(imports), this.importsMap);

    Assert.assertNotNull(schemaManifestImports);
    Assert.assertEquals(8, schemaManifestImports.size());

    // Every ontology must be present in the result.
    final OWLOntologyID[] expectedMembers = { OntologyConstant.testPoddDcV1, OntologyConstant.testPoddDcV2,
            OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddFoafV2, OntologyConstant.testPoddUserV1,
            OntologyConstant.testPoddUserV2, OntologyConstant.testPoddBaseV1, OntologyConstant.testPoddBaseV2 };
    for (final OWLOntologyID member : expectedMembers) {
        Assert.assertTrue(schemaManifestImports.contains(member));
    }

    // Each pair {earlier, later} must appear in dependency order in the result.
    final OWLOntologyID[][] mustPrecede = {
            { OntologyConstant.testPoddDcV1, OntologyConstant.testPoddFoafV1 },
            { OntologyConstant.testPoddDcV1, OntologyConstant.testPoddUserV1 },
            { OntologyConstant.testPoddDcV1, OntologyConstant.testPoddBaseV1 },
            { OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddUserV1 },
            { OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddBaseV1 },
            { OntologyConstant.testPoddUserV1, OntologyConstant.testPoddBaseV1 },
            { OntologyConstant.testPoddDcV2, OntologyConstant.testPoddFoafV2 },
            { OntologyConstant.testPoddDcV2, OntologyConstant.testPoddUserV2 },
            { OntologyConstant.testPoddDcV2, OntologyConstant.testPoddBaseV2 },
            { OntologyConstant.testPoddFoafV2, OntologyConstant.testPoddUserV2 },
            { OntologyConstant.testPoddFoafV2, OntologyConstant.testPoddBaseV2 },
            { OntologyConstant.testPoddUserV2, OntologyConstant.testPoddBaseV2 }, };
    for (final OWLOntologyID[] pair : mustPrecede) {
        Assert.assertTrue(
                schemaManifestImports.indexOf(pair[0]) < schemaManifestImports.indexOf(pair[1]));
    }

    this.assertRealisticImportsMapV2(this.importsMap);
}
From source file:com.github.podd.utils.test.OntologyUtilsTest.java
/** * Randomised test, to fuzz test the algorithm. * * @throws Exception/* www.j a va 2 s . c om*/ */ @Test public void testSchemaImportsRealisticPoddV1V2AllToPlantRandomOrder() throws Exception { final Model model = Rio.parse(this.getClass().getResourceAsStream("/test/test-podd-schema-manifest.ttl"), "", RDFFormat.TURTLE); final List<OWLOntologyID> imports = new ArrayList<OWLOntologyID>(Arrays.asList( OntologyConstant.testPoddBaseV1, OntologyConstant.testPoddUserV1, OntologyConstant.testPoddUserV2, OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddPlantV1, OntologyConstant.testPoddPlantV2, OntologyConstant.testPoddBaseV2, OntologyConstant.testPoddFoafV2, OntologyConstant.testPoddDcV1, OntologyConstant.testPoddDcV2, OntologyConstant.testPoddScienceV1, OntologyConstant.testPoddScienceV2)); // Randomise the order to fuzz test the algorithm Collections.shuffle(imports); // DebugUtils.printContents(model); final List<OWLOntologyID> result = OntologyUtils.schemaImports(model, new LinkedHashSet<OWLOntologyID>(imports), this.importsMap); Assert.assertNotNull(result); Assert.assertEquals(12, result.size()); Assert.assertTrue(result.contains(OntologyConstant.testPoddDcV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddDcV2)); Assert.assertTrue(result.contains(OntologyConstant.testPoddFoafV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddFoafV2)); Assert.assertTrue(result.contains(OntologyConstant.testPoddUserV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddUserV2)); Assert.assertTrue(result.contains(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(result.contains(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddScienceV2)); Assert.assertTrue(result.contains(OntologyConstant.testPoddPlantV1)); Assert.assertTrue(result.contains(OntologyConstant.testPoddPlantV2)); Assert.assertTrue( 
result.indexOf(OntologyConstant.testPoddDcV1) < result.indexOf(OntologyConstant.testPoddFoafV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV1) < result.indexOf(OntologyConstant.testPoddUserV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV1) < result.indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV1) < result.indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV1) < result.indexOf(OntologyConstant.testPoddPlantV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV1) < result.indexOf(OntologyConstant.testPoddUserV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV1) < result.indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddFoafV1) < result .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV1) < result.indexOf(OntologyConstant.testPoddPlantV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddUserV1) < result.indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddUserV1) < result .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddUserV1) < result.indexOf(OntologyConstant.testPoddPlantV1)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddBaseV1) < result .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddBaseV1) < result.indexOf(OntologyConstant.testPoddPlantV1)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddScienceV1) < result .indexOf(OntologyConstant.testPoddPlantV1)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV2) < result.indexOf(OntologyConstant.testPoddFoafV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV2) < 
result.indexOf(OntologyConstant.testPoddUserV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV2) < result.indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV2) < result.indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddDcV2) < result.indexOf(OntologyConstant.testPoddPlantV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV2) < result.indexOf(OntologyConstant.testPoddUserV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV2) < result.indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddFoafV2) < result .indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddFoafV2) < result.indexOf(OntologyConstant.testPoddPlantV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddUserV2) < result.indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddUserV2) < result .indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddUserV2) < result.indexOf(OntologyConstant.testPoddPlantV2)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddBaseV2) < result .indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue( result.indexOf(OntologyConstant.testPoddBaseV2) < result.indexOf(OntologyConstant.testPoddPlantV2)); Assert.assertTrue(result.indexOf(OntologyConstant.testPoddScienceV2) < result .indexOf(OntologyConstant.testPoddPlantV2)); this.assertRealisticImportsMapV2(this.importsMap); }
From source file:com.github.podd.utils.test.OntologyUtilsTest.java
/** * Randomised test, to fuzz test the algorithm. * * @throws Exception/*from w w w . j a va2 s . co m*/ */ @Test public void testSchemaImportsRealisticPoddV1V2AllToScienceRandomOrder() throws Exception { final Model model = Rio.parse(this.getClass().getResourceAsStream("/test/test-podd-schema-manifest.ttl"), "", RDFFormat.TURTLE); final List<OWLOntologyID> imports = new ArrayList<OWLOntologyID>(Arrays.asList( OntologyConstant.testPoddBaseV1, OntologyConstant.testPoddUserV1, OntologyConstant.testPoddUserV2, OntologyConstant.testPoddFoafV1, OntologyConstant.testPoddBaseV2, OntologyConstant.testPoddFoafV2, OntologyConstant.testPoddDcV1, OntologyConstant.testPoddDcV2, OntologyConstant.testPoddScienceV1, OntologyConstant.testPoddScienceV2)); Collections.shuffle(imports); // DebugUtils.printContents(model); final List<OWLOntologyID> schemaManifestImports = OntologyUtils.schemaImports(model, new LinkedHashSet<OWLOntologyID>(imports), this.importsMap); Assert.assertNotNull(schemaManifestImports); Assert.assertEquals(10, schemaManifestImports.size()); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddDcV1)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddDcV2)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddFoafV1)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddFoafV2)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddUserV1)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddUserV2)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(schemaManifestImports.contains(OntologyConstant.testPoddScienceV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV1) < 
schemaManifestImports .indexOf(OntologyConstant.testPoddFoafV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddUserV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddUserV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddUserV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddUserV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddBaseV1) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV1)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddFoafV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddUserV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddDcV2) < schemaManifestImports 
.indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddUserV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddFoafV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddUserV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddBaseV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddUserV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV2)); Assert.assertTrue(schemaManifestImports.indexOf(OntologyConstant.testPoddBaseV2) < schemaManifestImports .indexOf(OntologyConstant.testPoddScienceV2)); this.assertRealisticImportsMapV2(this.importsMap); }
From source file:com.cloud.vm.VirtualMachineManagerImpl.java
/**
 * We will add a mapping of volume to storage pool if needed. The conditions to add a mapping are the following:
 * <ul>
 * <li> The current storage pool where the volume is allocated cannot be accessed by the target host
 * <li> If no storage pool is found to allocate the volume, we throw an exception.
 * </ul>
 */
private void createVolumeToStoragePoolMappingIfNeeded(VirtualMachineProfile profile, Host targetHost,
        Map<Volume, StoragePool> volumeToPoolObjectMap, VolumeVO volume, StoragePoolVO currentPool) {
    List<StoragePool> poolList = getCandidateStoragePoolsToMigrateLocalVolume(profile, targetHost, volume);

    // Shuffle so repeated migrations spread volumes across the candidate pools
    // instead of always picking the first one returned.
    Collections.shuffle(poolList);
    // The target host can keep using the current pool iff the current pool is
    // among the candidate pools reachable from the target host.
    boolean canTargetHostAccessVolumeStoragePool = false;
    for (StoragePool storagePool : poolList) {
        if (storagePool.getId() == currentPool.getId()) {
            canTargetHostAccessVolumeStoragePool = true;
            break;
        }
    }
    // No candidates at all and the current pool is unreachable: migration is impossible.
    // NOTE(review): message has a typo/grammar issue ("There is not storage pools avaliable") —
    // left untouched here because it is a runtime string.
    if (!canTargetHostAccessVolumeStoragePool && CollectionUtils.isEmpty(poolList)) {
        throw new CloudRuntimeException(String.format(
                "There is not storage pools avaliable at the target host [%s] to migrate volume [%s]",
                targetHost.getUuid(), volume.getUuid()));
    }
    // Current pool unreachable but alternatives exist: map the volume to the first
    // (randomly chosen) candidate, re-fetched by UUID from the DAO.
    if (!canTargetHostAccessVolumeStoragePool) {
        volumeToPoolObjectMap.put(volume, _storagePoolDao.findByUuid(poolList.get(0).getUuid()));
    }
    // NOTE(review): this guard appears unreachable — whenever
    // canTargetHostAccessVolumeStoragePool is false and poolList is non-empty, the
    // put() above has already inserted a key for the volume (containsKey is true even
    // for a null value), and the empty-list case threw earlier. Confirm before removing.
    if (!canTargetHostAccessVolumeStoragePool && !volumeToPoolObjectMap.containsKey(volume)) {
        throw new CloudRuntimeException(String.format(
                "Cannot find a storage pool which is available for volume [%s] while migrating virtual machine [%s] to host [%s]",
                volume.getUuid(), profile.getUuid(), targetHost.getUuid()));
    }
}
From source file:com.guardtrax.ui.screens.HomeScreen.java
/**
 * Creates, reads, or updates the "toursScanned.txt" checkpoint file for a tour.
 *
 * The file is a flat comma-separated list of triples, one triple per tag:
 * {@code scanned,included,order} — e.g. "false,true,0,false,false,1,...".
 * Exactly one of the four mode flags is expected to be set per call.
 *
 * @param count        number of tags (create/read modes) or, in write mode, the index of the
 *                     tag to mark as scanned — NOTE(review): the parameter is overloaded this
 *                     way by the callers; confirm before changing.
 * @param create       create a fresh file (or delete it when {@code count == 0})
 * @param readScanned  return the list of "scanned" flags from the file
 * @param readIncluded return the list of "included" flags from the file
 * @param write        mark entry {@code count} as scanned and rewrite the file
 * @return the requested Boolean list for the read modes; {@code null} for create/write
 *         modes and on any error (an error is also surfaced via a Toast)
 */
private static List<Boolean> refreshtourtagList(int count, boolean create, boolean readScanned,
        boolean readIncluded, boolean write) {
    try {
        List<Boolean> returnList = new ArrayList<Boolean>();
        List<Boolean> tempList = new ArrayList<Boolean>();
        List<Integer> orderList = new ArrayList<Integer>();
        String outString = "";
        int listMin = 0;
        int listMax = count;

        // Mode 1: create a new checkpoint file.
        if (create) {
            // count == 0 means we are cleaning up, so delete the file instead.
            if (count == 0) {
                Utility.delete_file(ctx, GTConstants.dardestinationFolder, "toursScanned.txt");
            } else {
                // Build an order list (identity order 0..count-1). It will be shuffled
                // below if this is a randomized tour.
                for (int i = 0; i < count; i++)
                    orderList.add(i, i);

                // Randomized tour: include only a random window [listMin, listMax] of the
                // tags and shuffle their visiting order. (isRandomTour() is the project's
                // tour-name convention check — a trailing space marks a randomized tour.)
                if (isRandomTour()) {
                    listMin = (int) (0.4 * count);
                    listMax = (int) (0.6 * count);
                    listMin = Utility.randInt(0, listMin);
                    listMax = Utility.randInt(listMax, count);
                    Collections.shuffle(orderList);
                }

                // Emit one "scanned,included,order" triple per tag: tags inside
                // [listMin, listMax] are included in the tour, the rest are excluded.
                for (int i = 0; i < count; i++) {
                    if (i >= listMin && i <= listMax)
                        outString = outString + "false,true," + String.valueOf(orderList.get(i)) + ",";
                    else
                        outString = outString + "false,false," + String.valueOf(orderList.get(i)) + ",";
                }
                // Drop the trailing comma before persisting.
                outString = outString.substring(0, outString.length() - 1);
                Utility.write_to_file(ctx, GTConstants.dardestinationFolder + "toursScanned.txt", outString,
                        false);
            }
            return null;
        }

        // Mode 2: read the file and return the "scanned" flag of each triple
        // (fields at offsets 0, 3, 6, ...).
        if (readScanned) {
            String inFile = Utility.read_from_file(ctx, GTConstants.dardestinationFolder + "toursScanned.txt");
            String[] parse = inFile.split(",");
            for (int i = 0; i < 3 * count; i += 3)
                returnList.add(Boolean.valueOf(parse[i]));
            return returnList;
        }

        // Mode 3: read the file and return the "included" flag of each triple
        // (fields at offsets 1, 4, 7, ...).
        if (readIncluded) {
            String inFile = Utility.read_from_file(ctx, GTConstants.dardestinationFolder + "toursScanned.txt");
            String[] parse = inFile.split(",");
            for (int i = 1; i < 3 * count; i += 3)
                returnList.add(Boolean.valueOf(parse[i]));
            return returnList;
        }

        // Mode 4: mark one tag as scanned and rewrite the whole file.
        if (write) {
            // Read the current file contents.
            String inFile = Utility.read_from_file(ctx, GTConstants.dardestinationFolder + "toursScanned.txt");
            String[] parse = inFile.split(",");
            int size = parse.length / 3;

            // Split the flat triples into the three parallel lists
            // (scanned, included, order).
            for (int i = 0; i < 3 * size; i += 3)
                returnList.add(Boolean.valueOf(parse[i]));
            for (int i = 1; i < 3 * size; i += 3)
                tempList.add(Boolean.valueOf(parse[i]));
            for (int i = 2; i < 3 * size; i += 3)
                orderList.add(Integer.parseInt(parse[i]));

            // In this mode 'count' is the tag index to flag as scanned.
            returnList.set(count, true);

            // Re-serialize the triples and overwrite the file.
            for (int i = 0; i < size; i++)
                outString = outString + String.valueOf(returnList.get(i)) + ","
                        + String.valueOf(tempList.get(i)) + "," + String.valueOf(orderList.get(i)) + ",";
            outString = outString.substring(0, outString.length() - 1);
            Utility.write_to_file(ctx, GTConstants.dardestinationFolder + "toursScanned.txt", outString, false);
            return null;
        }

        // No mode flag set: return the (empty) list.
        return returnList;
    } catch (Exception e) {
        // Best-effort UI feedback; callers treat null as failure.
        Toast.makeText(ctx, "Error " + e, Toast.LENGTH_LONG).show();
        return null;
    }
}