List of usage examples for the java.util.BitSet.get(int) method
public boolean get(int bitIndex)
From source file:com.microsoft.tfs.core.httpclient.URI.java
/** * Validate the URI characters within a specific component. The component * must be performed after escape encoding. Or it doesn't include escaped * characters.//from w ww. ja v a2s .co m * <p> * It's not that much strict, generous. The strict validation might be * performed before being called this method. * * @param component * the characters sequence within the component * @param soffset * the starting offset of the given component * @param eoffset * the ending offset of the given component if -1, it means the * length of the component * @param generous * those characters that are allowed within a component * @return if true, it's the correct URI character sequence */ protected boolean validate(final char[] component, final int soffset, int eoffset, final BitSet generous) { // validate each component by generous characters if (eoffset == -1) { eoffset = component.length - 1; } for (int i = soffset; i <= eoffset; i++) { if (!generous.get(component[i])) { return false; } } return true; }
From source file:com.microsoft.tfs.core.httpclient.URI.java
/** * Pre-validate the unescaped URI string within a specific component. * * @param component/*from w ww . ja va 2 s. com*/ * the component string within the component * @param disallowed * those characters disallowed within the component * @return if true, it doesn't have the disallowed characters if false, the * component is undefined or an incorrect one */ protected boolean prevalidate(final String component, final BitSet disallowed) { // prevalidate the given component by disallowed characters if (component == null) { return false; // undefined } final char[] target = component.toCharArray(); for (int i = 0; i < target.length; i++) { if (disallowed.get(target[i])) { return false; } } return true; }
From source file:edu.brown.hstore.PartitionExecutor.java
/**
 * Requests that the coordinator execute work on this site's behalf at
 * remote sites in the cluster: packs the given WorkFragments into one
 * TransactionWorkRequest per target HStoreSite and sends them out.
 * <p>
 * If the transaction was predicted single-partitioned but a fragment
 * targets a remote partition, or the fragment targets a partition the
 * transaction already declared itself "done" with, a
 * MispredictionException is thrown so the caller can abort and restart
 * the transaction as multi-partitioned.
 *
 * @param ts            the local transaction the fragments belong to
 * @param tasks         the WorkFragments to dispatch (must be non-empty)
 * @param parameterSets serialized ParameterSets attached to every
 *                      per-site request that is actually sent
 */
private void requestWork(LocalTransaction ts, Collection<WorkFragment> tasks, List<ByteString> parameterSets) {
    assert (!tasks.isEmpty());
    assert (ts != null);
    Long txn_id = ts.getTransactionId();
    if (t)
        LOG.trace(String.format("Wrapping %d WorkFragments into a TransactionWorkRequest for %s", tasks.size(),
                ts));

    // If this txn was originally designated single-partitioned, it must not
    // touch any partition other than the local one. If it does, we need to
    // abort it and restart it as multi-partitioned.
    boolean need_restart = false;
    boolean predict_singlepartition = ts.isPredictSinglePartition();
    BitSet done_partitions = ts.getDonePartitions();

    boolean new_done = false;
    if (hstore_conf.site.exec_speculative_execution) {
        new_done = ts.calculateDonePartitions(this.thresholds);
    }

    // Run through the fragments that were not blocked waiting on an input
    // dependency. Note that we pack all fragments for a site into a single
    // request rather than sending each one in its own message.
    for (WorkFragment ftask : tasks) {
        assert (!ts.isBlocked(ftask));
        int target_partition = ftask.getPartitionId();
        int target_site = hstore_site.getSiteIdForPartitionId(target_partition);

        // A predicted single-partition txn touching a remote partition
        // forces an abort-and-restart.
        if (predict_singlepartition && target_partition != this.partitionId) {
            if (d)
                LOG.debug(String.format(
                        "%s on partition %d is suppose to be single-partitioned, but it wants to execute a fragment on partition %d",
                        ts, this.partitionId, target_partition));
            need_restart = true;
            break;
        }
        // The txn must not revisit a partition it said it was done with.
        else if (done_partitions.get(target_partition)) {
            if (d)
                LOG.debug(String.format(
                        "%s on partition %d was marked as done on partition %d but now it wants to go back for more!",
                        ts, this.partitionId, target_partition));
            need_restart = true;
            break;
        }
        // Skip fragments that carry no work at all.
        else if (ftask.getFragmentIdCount() == 0) {
            LOG.warn(String.format("%s - Trying to send a WorkFragment request with 0 fragments", ts));
            continue;
        }

        // Get (lazily creating) the per-site request builder; it also
        // stores the serialized input dependencies for that site.
        TransactionWorkRequestBuilder requestBuilder = tmp_transactionRequestBuilders[target_site];
        if (requestBuilder == null) {
            requestBuilder = tmp_transactionRequestBuilders[target_site] = new TransactionWorkRequestBuilder();
        }
        TransactionWorkRequest.Builder builder = requestBuilder.getBuilder(ts);

        // Track which ParameterSet indexes the remote site needs.
        requestBuilder.addParamIndexes(ftask.getParamIndexList());

        // Serialize and attach any input dependencies this fragment needs,
        // de-duplicating per dependency id within a site's request.
        if (ftask.getNeedsInput()) {
            if (d)
                LOG.debug("Retrieving input dependencies for " + ts);

            tmp_removeDependenciesMap.clear();
            this.getFragmentInputs(ts, ftask, tmp_removeDependenciesMap);

            for (Entry<Integer, List<VoltTable>> e : tmp_removeDependenciesMap.entrySet()) {
                if (requestBuilder.hasInputDependencyId(e.getKey()))
                    continue; // already attached for this site
                if (d)
                    LOG.debug(String.format("%s - Attaching %d input dependencies to be sent to %s", ts,
                            e.getValue().size(), HStoreThreadManager.formatSiteName(target_site)));
                for (VoltTable vt : e.getValue()) {
                    // NOTE(review): this.fs appears to be a reusable
                    // serializer scratch buffer — cleared per table.
                    this.fs.clear();
                    try {
                        this.fs.writeObject(vt);
                        builder.addAttachedDepId(e.getKey().intValue());
                        builder.addAttachedData(ByteString.copyFrom(this.fs.getBBContainer().b));
                    } catch (Exception ex) {
                        String msg = String.format("Failed to serialize input dependency %d for %s", e.getKey(),
                                ts);
                        throw new ServerFaultException(msg, ts.getTransactionId());
                    }
                    if (d)
                        LOG.debug(String.format(
                                "%s - Storing %d rows for InputDependency %d to send to partition %d [bytes=%d]",
                                ts, vt.getRowCount(), e.getKey(), ftask.getPartitionId(),
                                CollectionUtil.last(builder.getAttachedDataList()).size()));
                } // FOR
                requestBuilder.addInputDependencyId(e.getKey());
            } // FOR
            this.fs.getBBContainer().discard();
        }
        builder.addFragments(ftask);
    } // FOR (tasks)

    // Throw a MispredictionException so the VoltProcedure catches it and the
    // error propagates all the way back to the HStoreSite. We don't send the
    // touched-partitions histogram here because VoltProcedure does it for us.
    if (need_restart) {
        if (t)
            LOG.trace(String.format("Aborting %s because it was mispredicted", ts));
        throw new MispredictionException(txn_id, null);
    }

    // Stick the ParameterSets each site needs onto its request and send it.
    for (int target_site = 0; target_site < tmp_transactionRequestBuilders.length; target_site++) {
        TransactionWorkRequestBuilder builder = tmp_transactionRequestBuilders[target_site];
        if (builder == null || builder.isDirty() == false) {
            continue; // no work queued for this site
        }
        assert (builder != null);
        builder.addParameterSets(parameterSets);

        // Bombs away!
        this.hstore_coordinator.transactionWork(ts, target_site, builder.build(), this.request_work_callback);
        if (d)
            LOG.debug(String.format("%s - Sent Work request to remote HStoreSites for %s", ts, target_site));
    } // FOR

    // TODO: check whether we need to notify other HStoreSites that we did
    // not send them a new fragment because we are done with their partitions.
    if (new_done) {
    }
}
From source file:org.unitime.timetable.solver.TimetableDatabaseLoader.java
private void load(org.hibernate.Session hibSession) throws Exception { iProgress.setStatus("Loading input data ..."); TravelTime.populateTravelTimes(getModel().getDistanceMetric(), iSessionId, hibSession); iSolverGroup = null;/*from w w w . j av a2s .c o m*/ iSession = null; if (iSolverGroup == null) { iSolverGroup = new SolverGroup[iSolverGroupId.length]; for (int i = 0; i < iSolverGroupId.length; i++) { iSolverGroup[i] = SolverGroupDAO.getInstance().get(iSolverGroupId[i], hibSession); if (iSolverGroup[i] == null) { iProgress.message(msglevel("loadFailed", Progress.MSGLEVEL_FATAL), "Unable to load solver group " + iSolverGroupId[i] + "."); return; } iProgress.debug("solver group[" + (i + 1) + "]: " + iSolverGroup[i].getName()); } } if (iSolverGroup == null || iSolverGroup.length == 0) { iProgress.message(msglevel("loadFailed", Progress.MSGLEVEL_FATAL), "No solver group loaded."); return; } iDepartmentIds = ""; for (int j = 0; j < iSolverGroup.length; j++) { for (Iterator i = iSolverGroup[j].getDepartments().iterator(); i.hasNext();) { Department d = (Department) i.next(); if (iDepartmentIds.length() > 0) iDepartmentIds += ","; iDepartmentIds += d.getUniqueId().toString(); } } getModel().getProperties().setProperty("General.DepartmentIds", iDepartmentIds); Hashtable<Long, Solution> solutions = null; if (iSolutionId != null && iSolutionId.length > 0) { solutions = new Hashtable<Long, Solution>(); String note = ""; for (int i = 0; i < iSolutionId.length; i++) { Solution solution = (new SolutionDAO()).get(iSolutionId[i], hibSession); if (solution == null) { iProgress.message(msglevel("loadFailed", Progress.MSGLEVEL_FATAL), "Unable to load solution " + iSolutionId[i] + "."); return; } iProgress.debug("solution[" + (i + 1) + "] version: " + solution.getUniqueId() + " (created " + solution.getCreated() + ", solver group " + solution.getOwner().getName() + ")"); if (solution.getNote() != null) { if (note.length() > 0) note += "\n"; note += solution.getNote(); } 
solutions.put(solution.getOwner().getUniqueId(), solution); } getModel().getProperties().setProperty("General.Note", note); String solutionIdStr = ""; for (int i = 0; i < iSolverGroupId.length; i++) { Solution solution = solutions.get(iSolverGroupId[i]); if (solution != null) { if (solutionIdStr.length() > 0) solutionIdStr += ","; solutionIdStr += solution.getUniqueId().toString(); } } getModel().getProperties().setProperty("General.SolutionId", solutionIdStr); } if (iSession == null) iSession = (new SessionDAO()).get(iSessionId, hibSession); if (iSession == null) { iProgress.message(msglevel("loadFailed", Progress.MSGLEVEL_FATAL), "No session loaded."); return; } iProgress.debug("session: " + iSession.getLabel()); getModel().getProperties().setProperty("Data.Term", iSession.getAcademicYearTerm()); getModel().getProperties().setProperty("Data.Initiative", iSession.getAcademicInitiative()); getModel().setYear(iSession.getSessionStartYear()); getModel().getProperties().setProperty("DatePattern.DayOfWeekOffset", String.valueOf(Constants.getDayOfWeek( DateUtils.getDate(1, iSession.getPatternStartMonth(), iSession.getSessionStartYear())))); if (iSession.getDefaultDatePattern() != null) { BitSet pattern = iSession.getDefaultDatePattern().getPatternBitSet(); String patternStr = ""; for (int i = 0; i < pattern.length(); i++) patternStr += (pattern.get(i) ? 
"1" : "0"); getModel().getProperties().setProperty("DatePattern.Default", patternStr); } iAllClasses = new TreeSet(new ClassComparator(ClassComparator.COMPARE_BY_HIERARCHY)); for (int i = 0; i < iSolverGroup.length; i++) { for (Iterator j = iSolverGroup[i].getDepartments().iterator(); j.hasNext();) { Department d = (Department) j.next(); iAllClasses.addAll(d.getClassesFetchWithStructure()); } } if (iAllClasses == null || iAllClasses.isEmpty()) { iProgress.message(msglevel("noClasses", Progress.MSGLEVEL_FATAL), "No classes to load."); return; } iProgress.debug("classes to load: " + iAllClasses.size()); iProgress.setPhase("Loading classes ...", iAllClasses.size()); int ord = 0; HashSet<SchedulingSubpart> subparts = new HashSet<SchedulingSubpart>(); for (Iterator i1 = iAllClasses.iterator(); i1.hasNext();) { Class_ clazz = (Class_) i1.next(); Lecture lecture = loadClass(clazz, hibSession); subparts.add(clazz.getSchedulingSubpart()); if (lecture != null) lecture.setOrd(ord++); iClasses.put(clazz.getUniqueId(), clazz); iProgress.incProgress(); } loadInstructorAvailabilities(hibSession); loadRoomAvailabilities(hibSession); iProgress.setPhase("Loading offerings ...", iAllClasses.size()); Set<Long> loadedOfferings = new HashSet<Long>(); for (Class_ clazz : iAllClasses) { Lecture lecture = (Lecture) iLectures.get(clazz.getUniqueId()); iProgress.incProgress(); if (lecture == null) continue; //skip classes that were not loaded InstructionalOffering offering = clazz.getSchedulingSubpart().getInstrOfferingConfig() .getInstructionalOffering(); if (!loadedOfferings.add(offering.getUniqueId())) continue; // already loaded iOfferings.put(offering, loadOffering(offering, false)); } List<DistributionPref> distPrefs = new ArrayList<DistributionPref>(); for (int i = 0; i < iSolverGroup.length; i++) { distPrefs.addAll(iSolverGroup[i].getDistributionPreferences()); } iProgress.setPhase("Loading distribution preferences ...", distPrefs.size()); for (Iterator i = distPrefs.iterator(); 
i.hasNext();) { DistributionPref distributionPref = (DistributionPref) i.next(); if (!PreferenceLevel.sNeutral.equals(distributionPref.getPrefLevel().getPrefProlog())) loadGroupConstraint(distributionPref); iProgress.incProgress(); } Set<Long> checkedDistPrefIds = new HashSet<Long>(); for (int i = 0; i < iSolverGroup.length; i++) { for (Iterator j = iSolverGroup[i].getDepartments().iterator(); j.hasNext();) { loadInstructorGroupConstraints((Department) j.next(), checkedDistPrefIds, hibSession); } } if (iAutoSameStudents) { iProgress.setPhase("Posting automatic same_students constraints ...", iAllClasses.size()); for (Iterator i1 = iAllClasses.iterator(); i1.hasNext();) { Class_ clazz = (Class_) i1.next(); Lecture lecture = (Lecture) iLectures.get(clazz.getUniqueId()); if (lecture == null) continue; if (!lecture.hasAnyChildren()) postSameStudentConstraint(clazz, iAutoSameStudentsConstraint); iProgress.incProgress(); } } if (iAutoPrecedence != null) { PreferenceLevel pref = PreferenceLevel.getPreferenceLevel(iAutoPrecedence); if (pref == null) { // Lookup preference if needed for (PreferenceLevel p : PreferenceLevel.getPreferenceLevelList()) if (iAutoPrecedence.equalsIgnoreCase(p.getPrefProlog()) || iAutoPrecedence.equalsIgnoreCase(p.getPrefName()) || iAutoPrecedence.equals(p.getAbbreviation())) { pref = p; break; } } if (pref == null) { iProgress.message(msglevel("autoPrecedence", Progress.MSGLEVEL_WARN), "Preference " + iAutoPrecedence + " not recognized."); } else if (!PreferenceLevel.sNeutral.equals(pref.getPrefProlog())) { iProgress.setPhase("Posting automatic precedence constraints ...", iAllClasses.size()); for (Iterator i1 = iAllClasses.iterator(); i1.hasNext();) { Class_ clazz = (Class_) i1.next(); Lecture lecture = (Lecture) iLectures.get(clazz.getUniqueId()); if (lecture == null) continue; if (!lecture.hasAnyChildren()) postPrecedenceConstraint(clazz, pref.getPrefProlog()); iProgress.incProgress(); } } } postAutomaticHierarchicalConstraints(); 
assignCommited(); iProgress.setPhase("Posting class limit constraints ...", iOfferings.size()); for (Map.Entry<InstructionalOffering, Hashtable<InstrOfferingConfig, Set<SchedulingSubpart>>> entry : iOfferings .entrySet()) { Hashtable<InstrOfferingConfig, Set<SchedulingSubpart>> topSubparts = entry.getValue(); for (Map.Entry<InstrOfferingConfig, Set<SchedulingSubpart>> subpartEntry : topSubparts.entrySet()) { InstrOfferingConfig config = subpartEntry.getKey(); Set<SchedulingSubpart> topSubpartsThisConfig = subpartEntry.getValue(); for (SchedulingSubpart subpart : topSubpartsThisConfig) { boolean isMakingSense = false; for (Class_ clazz : subpart.getClasses()) { Lecture lecture = iLectures.get(clazz.getUniqueId()); if (lecture == null) continue; createChildrenClassLimitConstraits(lecture); if (!lecture.isCommitted() && lecture.minClassLimit() != lecture.maxClassLimit()) isMakingSense = true; } if (!isMakingSense) continue; if (subpart.getParentSubpart() == null) { ClassLimitConstraint clc = new ClassLimitConstraint(config.getLimit(), getClassLimitConstraitName(subpart)); for (Class_ clazz : subpart.getClasses()) { Lecture lecture = iLectures.get(clazz.getUniqueId()); if (lecture == null || lecture.isCommitted()) { clc.setClassLimitDelta(clc.getClassLimitDelta() - clazz.getClassLimit()); continue; } clc.addVariable(lecture); } if (clc.variables().isEmpty()) continue; iProgress.trace("Added constraint " + clc.getName() + " between " + clc.variables()); getModel().addConstraint(clc); } else { Hashtable<Long, ClassLimitConstraint> clcs = new Hashtable<Long, ClassLimitConstraint>(); for (Class_ clazz : subpart.getClasses()) { Lecture lecture = iLectures.get(clazz.getUniqueId()); Class_ parentClazz = clazz.getParentClass(); ClassLimitConstraint clc = clcs.get(parentClazz.getUniqueId()); if (clc == null) { clc = new ClassLimitConstraint(parentClazz.getClassLimit(), parentClazz.getClassLabel()); clcs.put(parentClazz.getUniqueId(), clc); } if (lecture == null || 
lecture.isCommitted()) { clc.setClassLimitDelta(clc.getClassLimitDelta() - clazz.getClassLimit()); } else { clc.addVariable(lecture); } } for (ClassLimitConstraint clc : clcs.values()) { if (!clc.variables().isEmpty()) { iProgress .trace("Added constraint " + clc.getName() + " between " + clc.variables()); getModel().addConstraint(clc); } } } } } iProgress.incProgress(); } iStudentCourseDemands.init(hibSession, iProgress, iSession, iOfferings.keySet()); iProgress.setPhase("Loading students ...", iOfferings.size()); for (InstructionalOffering offering : iOfferings.keySet()) { boolean unlimitedOffering = false; int offeringLimit = 0; for (InstrOfferingConfig config : offering.getInstrOfferingConfigs()) if (config.isUnlimitedEnrollment()) unlimitedOffering = true; else offeringLimit += config.getLimit(); Double factor = null; if (!unlimitedOffering) { int totalCourseLimit = 0; for (CourseOffering course : offering.getCourseOfferings()) { int courseLimit = -1; if (course.getReservation() != null) courseLimit = course.getReservation(); if (courseLimit < 0) { if (offering.getCourseOfferings().size() == 1) courseLimit = offeringLimit; else { iProgress.message(msglevel("crossListWithoutReservation", Progress.MSGLEVEL_INFO), "Cross-listed course " + getOfferingLabel(course) + " does not have any course reservation."); if (course.getProjectedDemand() != null && offering.getProjectedDemand() > 0) courseLimit = course.getProjectedDemand(); else if (course.getDemand() != null && offering.getDemand() > 0) courseLimit = course.getDemand(); else courseLimit = offeringLimit / offering.getCourseOfferings().size(); } } totalCourseLimit += courseLimit; } if (totalCourseLimit < offeringLimit) iProgress.message( msglevel("courseReservationsBelowLimit", totalCourseLimit == 0 ? 
Progress.MSGLEVEL_INFO : Progress.MSGLEVEL_WARN), "Total number of course reservations is below the offering limit for instructional offering " + getOfferingLabel(offering) + " (" + totalCourseLimit + "<" + offeringLimit + ")."); if (totalCourseLimit > offeringLimit) iProgress.message(msglevel("courseReservationsOverLimit", Progress.MSGLEVEL_INFO), "Total number of course reservations exceeds the offering limit for instructional offering " + getOfferingLabel(offering) + " (" + totalCourseLimit + ">" + offeringLimit + ")."); if (totalCourseLimit == 0) continue; if (totalCourseLimit != offeringLimit) factor = new Double(((double) offeringLimit) / totalCourseLimit); } for (CourseOffering course : offering.getCourseOfferings()) { Set<WeightedStudentId> studentIds = iStudentCourseDemands.getDemands(course); float studentWeight = 0.0f; if (studentIds != null) for (WeightedStudentId studentId : studentIds) studentWeight += studentId.getWeight(); int courseLimit = -1; if (course.getReservation() != null) courseLimit = course.getReservation(); if (courseLimit < 0) { if (offering.getCourseOfferings().size() == 1 && !unlimitedOffering) courseLimit = offeringLimit; else { courseLimit = Math.round(studentWeight); } } if (factor != null) courseLimit = (int) Math.round(courseLimit * factor); if (studentIds == null || studentIds.isEmpty()) { iProgress.message(msglevel("offeringWithoutDemand", Progress.MSGLEVEL_INFO), "No student enrollments for course " + getOfferingLabel(course) + "."); continue; } if (courseLimit == 0 && offering.getCourseOfferings().size() > 1) { iProgress.message(msglevel("noCourseReservation", Progress.MSGLEVEL_WARN), "No reserved space for students of course " + getOfferingLabel(course) + "."); } double weight = (iStudentCourseDemands.isWeightStudentsToFillUpOffering() && courseLimit != 0 ? 
(double) courseLimit / studentWeight : 1.0); Set<Lecture> cannotAttendLectures = null; if (offering.getCourseOfferings().size() > 1) { Set<Long> reservedClasses = new HashSet<Long>(); int limit = 0; boolean unlimited = false; for (Reservation r : offering.getReservations()) { if (r instanceof CourseReservation && course.equals(((CourseReservation) r).getCourse())) { for (Class_ clazz : r.getClasses()) { limit += clazz.getMaxExpectedCapacity(); propagateReservedClasses(clazz, reservedClasses); Class_ parent = clazz.getParentClass(); while (parent != null) { reservedClasses.add(parent.getUniqueId()); parent = parent.getParentClass(); } } for (InstrOfferingConfig config : r.getConfigurations()) { if (config.isUnlimitedEnrollment()) unlimited = true; else limit += config.getLimit(); for (SchedulingSubpart subpart : config.getSchedulingSubparts()) for (Class_ clazz : subpart.getClasses()) reservedClasses.add(clazz.getUniqueId()); } } } if (!reservedClasses.isEmpty()) { iProgress.debug("Course requests for course " + getOfferingLabel(course) + " are " + reservedClasses); if (!unlimited && courseLimit > limit) iProgress.message(msglevel("insufficientCourseReservation", Progress.MSGLEVEL_WARN), "Too little space reserved in for course " + getOfferingLabel(course) + " (" + limit + "<" + courseLimit + ")."); cannotAttendLectures = new HashSet<Lecture>(); for (InstrOfferingConfig config : course.getInstructionalOffering() .getInstrOfferingConfigs()) { boolean hasConfigReservation = false; subparts: for (SchedulingSubpart subpart : config.getSchedulingSubparts()) for (Class_ clazz : subpart.getClasses()) if (reservedClasses.contains(clazz.getUniqueId())) { hasConfigReservation = true; break subparts; } for (SchedulingSubpart subpart : config.getSchedulingSubparts()) { boolean hasSubpartReservation = false; for (Class_ clazz : subpart.getClasses()) if (reservedClasses.contains(clazz.getUniqueId())) { hasSubpartReservation = true; break; } // !hasConfigReservation >> all 
lectures are cannot attend (there is a reservation on a different config) // otherwise if !hasSubpartReservation >> there is reservation on some other subpoart --> can attend any of the classes of this subpart if (!hasConfigReservation || hasSubpartReservation) for (Class_ clazz : subpart.getClasses()) { if (reservedClasses.contains(clazz.getUniqueId())) continue; Lecture lecture = iLectures.get(clazz.getUniqueId()); if (lecture != null && !lecture.isCommitted()) cannotAttendLectures.add(lecture); } } } if (!cannotAttendLectures.isEmpty()) { iProgress.debug("Prohibited lectures for course " + getOfferingLabel(course) + " are " + cannotAttendLectures); checkReservation(course, cannotAttendLectures, iAltConfigurations.get(offering)); } } } for (WeightedStudentId studentId : studentIds) { Student student = iStudents.get(studentId.getStudentId()); if (student == null) { student = new Student(studentId.getStudentId()); student.setAcademicArea(studentId.getArea()); student.setAcademicClassification(studentId.getClasf()); student.setMajor(studentId.getMajor()); student.setCurriculum(studentId.getCurriculum()); getModel().addStudent(student); iStudents.put(studentId.getStudentId(), student); } student.addOffering(offering.getUniqueId(), weight * studentId.getWeight(), iStudentCourseDemands.getEnrollmentPriority(studentId.getStudentId(), course.getUniqueId())); Set<Student> students = iCourse2students.get(course); if (students == null) { students = new HashSet<Student>(); iCourse2students.put(course, students); } students.add(student); student.addCanNotEnroll(offering.getUniqueId(), cannotAttendLectures); Set<Long> reservedClasses = new HashSet<Long>(); for (Reservation reservation : offering.getReservations()) { if (reservation.getClasses().isEmpty() && reservation.getConfigurations().isEmpty()) continue; if (reservation instanceof CourseReservation) continue; if (reservation instanceof CurriculumReservation) { CurriculumReservation cr = (CurriculumReservation) 
reservation; if (studentId.getArea() == null) continue; if (!studentId.hasArea(cr.getArea().getAcademicAreaAbbreviation())) continue; if (!cr.getClassifications().isEmpty()) { boolean match = false; for (AcademicClassification clasf : cr.getClassifications()) { if (studentId.hasClassification(cr.getArea().getAcademicAreaAbbreviation(), clasf.getCode())) { match = true; break; } } if (!match) continue; } if (!cr.getMajors().isEmpty()) { if (studentId.getMajor() == null) continue; boolean match = false; for (PosMajor major : cr.getMajors()) { if (studentId.hasMajor(cr.getArea().getAcademicAreaAbbreviation(), major.getCode())) { match = true; break; } } if (!match) continue; } } else continue; for (Class_ clazz : reservation.getClasses()) { propagateReservedClasses(clazz, reservedClasses); Class_ parent = clazz.getParentClass(); while (parent != null) { reservedClasses.add(parent.getUniqueId()); parent = parent.getParentClass(); } } for (InstrOfferingConfig config : reservation.getConfigurations()) { for (SchedulingSubpart subpart : config.getSchedulingSubparts()) for (Class_ clazz : subpart.getClasses()) reservedClasses.add(clazz.getUniqueId()); } } if (!reservedClasses.isEmpty()) { iProgress.debug(course.getCourseName() + ": Student " + student.getId() + " has reserved classes " + reservedClasses); Set<Lecture> prohibited = new HashSet<Lecture>(); for (InstrOfferingConfig config : course.getInstructionalOffering() .getInstrOfferingConfigs()) { boolean hasConfigReservation = false; subparts: for (SchedulingSubpart subpart : config.getSchedulingSubparts()) for (Class_ clazz : subpart.getClasses()) if (reservedClasses.contains(clazz.getUniqueId())) { hasConfigReservation = true; break subparts; } for (SchedulingSubpart subpart : config.getSchedulingSubparts()) { boolean hasSubpartReservation = false; for (Class_ clazz : subpart.getClasses()) if (reservedClasses.contains(clazz.getUniqueId())) { hasSubpartReservation = true; break; } // !hasConfigReservation >> all 
lectures are cannot attend (there is a reservation on a different config) // otherwise if !hasSubpartReservation >> there is reservation on some other subpoart --> can attend any of the classes of this subpart if (!hasConfigReservation || hasSubpartReservation) for (Class_ clazz : subpart.getClasses()) { if (reservedClasses.contains(clazz.getUniqueId())) continue; Lecture lecture = iLectures.get(clazz.getUniqueId()); if (lecture != null && !lecture.isCommitted()) prohibited.add(lecture); } } } iProgress.debug(course.getCourseName() + ": Student " + student.getId() + " cannot attend classes " + prohibited); student.addCanNotEnroll(offering.getUniqueId(), prohibited); } } } iProgress.incProgress(); } iProgress.debug(iStudents.size() + " students loaded."); if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); if (iCommittedStudentConflictsMode == CommittedStudentConflictsMode.Load && !iStudentCourseDemands.isMakingUpStudents()) loadCommittedStudentConflicts(hibSession, loadedOfferings); else if (iCommittedStudentConflictsMode != CommittedStudentConflictsMode.Ignore) makeupCommittedStudentConflicts(loadedOfferings); if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); Hashtable<Student, Set<Lecture>> iPreEnrollments = new Hashtable<Student, Set<Lecture>>(); if (iLoadStudentEnrlsFromSolution) { if (iStudentCourseDemands.canUseStudentClassEnrollmentsAsSolution()) { // Load real student enrollments (not saved last-like) List<Object[]> enrollments = (List<Object[]>) hibSession .createQuery("select distinct e.student.uniqueId, e.clazz.uniqueId from " + "StudentClassEnrollment e, Class_ c where " + "e.courseOffering.instructionalOffering = c.schedulingSubpart.instrOfferingConfig.instructionalOffering and " + "c.managingDept.solverGroup.uniqueId in (" + iSolverGroupIds + ")") .list(); iProgress.setPhase("Loading current 
student enrolments ...", enrollments.size()); int totalEnrollments = 0; for (Object[] o : enrollments) { Long studentId = (Long) o[0]; Long clazzId = (Long) o[1]; Student student = (Student) iStudents.get(studentId); if (student == null) continue; Lecture lecture = (Lecture) iLectures.get(clazzId); if (lecture != null) { Set<Lecture> preEnrollments = iPreEnrollments.get(student); if (preEnrollments == null) { preEnrollments = new HashSet<Lecture>(); iPreEnrollments.put(student, preEnrollments); } preEnrollments.add(lecture); if (student.hasOffering(lecture.getConfiguration().getOfferingId()) && student.canEnroll(lecture)) { student.addLecture(lecture); lecture.addStudent(getAssignment(), student); totalEnrollments++; } } iProgress.incProgress(); } iProgress.message(msglevel("enrollmentsLoaded", Progress.MSGLEVEL_INFO), "Loaded " + totalEnrollments + " enrollments of " + iPreEnrollments.size() + " students."); } else { // Load enrollments from selected / committed solutions for (int idx = 0; idx < iSolverGroupId.length; idx++) { Solution solution = (solutions == null ? 
null : solutions.get(iSolverGroupId[idx])); List studentEnrls = null; if (solution != null) { studentEnrls = hibSession.createQuery( "select distinct e.studentId, e.clazz.uniqueId from StudentEnrollment e where e.solution.uniqueId=:solutionId") .setLong("solutionId", solution.getUniqueId()).list(); } else { studentEnrls = hibSession.createQuery( "select distinct e.studentId, e.clazz.uniqueId from StudentEnrollment e where e.solution.owner.uniqueId=:sovlerGroupId and e.solution.commited = true") .setLong("sovlerGroupId", iSolverGroupId[idx]).list(); } iProgress.setPhase("Loading student enrolments [" + (idx + 1) + "] ...", studentEnrls.size()); for (Iterator i1 = studentEnrls.iterator(); i1.hasNext();) { Object o[] = (Object[]) i1.next(); Long studentId = (Long) o[0]; Long clazzId = (Long) o[1]; Student student = (Student) iStudents.get(studentId); if (student == null) continue; Lecture lecture = (Lecture) iLectures.get(clazzId); if (lecture != null && lecture.getConfiguration() != null) { Set<Lecture> preEnrollments = iPreEnrollments.get(student); if (preEnrollments == null) { preEnrollments = new HashSet<Lecture>(); iPreEnrollments.put(student, preEnrollments); } preEnrollments.add(lecture); if (student.hasOffering(lecture.getConfiguration().getOfferingId()) && student.canEnroll(lecture)) { student.addLecture(lecture); lecture.addStudent(getAssignment(), student); } } iProgress.incProgress(); } } if (getModel().getProperties().getPropertyBoolean("Global.LoadOtherCommittedStudentEnrls", true)) { // Other committed enrollments List<Object[]> enrollments = (List<Object[]>) hibSession .createQuery("select distinct e.studentId, e.clazz.uniqueId from " + "StudentEnrollment e, Class_ c where " + "e.solution.commited = true and e.solution.owner.uniqueId not in (" + iSolverGroupIds + ") and " + "e.clazz.schedulingSubpart.instrOfferingConfig.instructionalOffering = c.schedulingSubpart.instrOfferingConfig.instructionalOffering and " + "c.managingDept.solverGroup.uniqueId in 
(" + iSolverGroupIds + ")") .list(); iProgress.setPhase("Loading other committed student enrolments ...", enrollments.size()); for (Object[] o : enrollments) { Long studentId = (Long) o[0]; Long clazzId = (Long) o[1]; Student student = (Student) iStudents.get(studentId); if (student == null) continue; Lecture lecture = (Lecture) iLectures.get(clazzId); if (lecture != null && lecture.getConfiguration() != null) { Set<Lecture> preEnrollments = iPreEnrollments.get(student); if (preEnrollments == null) { preEnrollments = new HashSet<Lecture>(); iPreEnrollments.put(student, preEnrollments); } preEnrollments.add(lecture); if (student.hasOffering(lecture.getConfiguration().getOfferingId()) && student.canEnroll(lecture)) { student.addLecture(lecture); lecture.addStudent(getAssignment(), student); } } iProgress.incProgress(); } } } } if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); RoomAvailabilityInterface availability = null; if (SolverServerImplementation.getInstance() != null) availability = SolverServerImplementation.getInstance().getRoomAvailability(); else availability = RoomAvailability.getInstance(); if (availability != null) { Date[] startEnd = initializeRoomAvailability(availability); if (startEnd != null) { loadRoomAvailability(availability, startEnd); loadInstructorAvailability(availability, startEnd); } } if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); iProgress.setPhase("Initial sectioning ...", iOfferings.size()); for (InstructionalOffering offering : iOfferings.keySet()) { Set<Student> students = new HashSet<Student>(); for (CourseOffering course : offering.getCourseOfferings()) { Set<Student> courseStudents = iCourse2students.get(course); if (courseStudents != null) students.addAll(courseStudents); } if (students.isEmpty()) continue; 
getModel().getStudentSectioning().initialSectioning(getAssignment(), offering.getUniqueId(), offering.getCourseName(), students, iAltConfigurations.get(offering)); iProgress.incProgress(); } for (Enumeration e = iStudents.elements(); e.hasMoreElements();) { ((Student) e.nextElement()).clearDistanceCache(); } if (!iPreEnrollments.isEmpty()) { iProgress.setPhase("Checking loaded enrollments ....", iPreEnrollments.size()); for (Map.Entry<Student, Set<Lecture>> entry : iPreEnrollments.entrySet()) { iProgress.incProgress(); Student student = entry.getKey(); Set<Lecture> lectures = entry.getValue(); for (Lecture lecture : lectures) { if (!lecture.students().contains(student)) { iProgress.message(msglevel("studentNotEnrolled", Progress.MSGLEVEL_WARN), "Student " + student.getId() + " is supposed to be enrolled to " + getClassLabel(lecture)); } } for (Lecture lecture : student.getLectures()) { if (!lectures.contains(lecture)) { Lecture instead = null; if (lecture.sameStudentsLectures() != null) { for (Lecture other : lecture.sameStudentsLectures()) { if (lectures.contains(other)) instead = other; } } if (instead != null) iProgress.message(msglevel("studentEnrolled", Progress.MSGLEVEL_WARN), "Student " + student.getId() + " is NOT supposed to be enrolled to " + getClassLabel(lecture) + ", he/she should have " + getClassLabel(instead) + " instead."); else iProgress.message(msglevel("studentEnrolled", Progress.MSGLEVEL_INFO), "Student " + student.getId() + " is NOT supposed to be enrolled to " + getClassLabel(lecture) + "."); } } } } if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); if (iLoadStudentInstructorConflicts) loadInstructorStudentConflicts(hibSession); iProgress.setPhase("Computing jenrl ...", iStudents.size()); Hashtable jenrls = new Hashtable(); for (Iterator i1 = iStudents.values().iterator(); i1.hasNext();) { Student st = (Student) i1.next(); for (Iterator i2 = 
st.getLectures().iterator(); i2.hasNext();) { Lecture l1 = (Lecture) i2.next(); for (Iterator i3 = st.getLectures().iterator(); i3.hasNext();) { Lecture l2 = (Lecture) i3.next(); if (l1.getId() >= l2.getId()) continue; Hashtable x = (Hashtable) jenrls.get(l1); if (x == null) { x = new Hashtable(); jenrls.put(l1, x); } JenrlConstraint jenrl = (JenrlConstraint) x.get(l2); if (jenrl == null) { jenrl = new JenrlConstraint(); getModel().addConstraint(jenrl); jenrl.addVariable(l1); jenrl.addVariable(l2); x.put(l2, jenrl); } jenrl.incJenrl(getAssignment(), st); } } iProgress.incProgress(); } if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); if (solutions != null) { for (int idx = 0; idx < iSolverGroupId.length; idx++) { Solution solution = (Solution) solutions.get(iSolverGroupId[idx]); if (solution == null) continue; iProgress.setPhase("Creating initial assignment [" + (idx + 1) + "] ...", solution.getAssignments().size()); for (Iterator i1 = solution.getAssignments().iterator(); i1.hasNext();) { Assignment assignment = (Assignment) i1.next(); loadAssignment(assignment); iProgress.incProgress(); } } } else if (iLoadCommittedAssignments) { iProgress.setPhase("Creating initial assignment ...", getModel().variables().size()); for (Lecture lecture : getModel().variables()) { if (lecture.isCommitted()) continue; Class_ clazz = iClasses.get(lecture.getClassId()); if (clazz != null && clazz.getCommittedAssignment() != null) loadAssignment(clazz.getCommittedAssignment()); iProgress.incProgress(); } } if (!hibSession.isOpen()) iProgress.message(msglevel("hibernateFailure", Progress.MSGLEVEL_FATAL), "Hibernate session not open."); if (iSpread) { iProgress.setPhase("Posting automatic spread constraints ...", subparts.size()); for (SchedulingSubpart subpart : subparts) { if (subpart.getClasses().size() <= 1) { iProgress.incProgress(); continue; } if (!subpart.isAutoSpreadInTime().booleanValue()) { 
iProgress.debug("Automatic spread constraint disabled for " + getSubpartLabel(subpart)); iProgress.incProgress(); continue; } SpreadConstraint spread = new SpreadConstraint(getModel().getProperties(), subpart.getCourseName() + " " + subpart.getItypeDesc().trim()); for (Iterator i2 = subpart.getClasses().iterator(); i2.hasNext();) { Class_ clazz = (Class_) i2.next(); Lecture lecture = (Lecture) getLecture(clazz); if (lecture == null) continue; spread.addVariable(lecture); } if (spread.variables().isEmpty()) iProgress.message(msglevel("courseWithNoClasses", Progress.MSGLEVEL_WARN), "No class for course " + getSubpartLabel(subpart)); else getModel().addConstraint(spread); iProgress.incProgress(); } } if (iDeptBalancing) { iProgress.setPhase("Creating dept. spread constraints ...", getModel().variables().size()); Hashtable<Long, DepartmentSpreadConstraint> depSpreadConstraints = new Hashtable<Long, DepartmentSpreadConstraint>(); for (Lecture lecture : getModel().variables()) { if (lecture.getDepartment() == null) continue; DepartmentSpreadConstraint deptConstr = (DepartmentSpreadConstraint) depSpreadConstraints .get(lecture.getDepartment()); if (deptConstr == null) { deptConstr = new DepartmentSpreadConstraint(getModel().getProperties(), lecture.getDepartment(), (String) iDeptNames.get(lecture.getDepartment())); depSpreadConstraints.put(lecture.getDepartment(), deptConstr); getModel().addConstraint(deptConstr); } deptConstr.addVariable(lecture); iProgress.incProgress(); } } if (iSubjectBalancing) { iProgress.setPhase("Creating subject spread constraints ...", getModel().variables().size()); Hashtable<Long, SpreadConstraint> subjectSpreadConstraints = new Hashtable<Long, SpreadConstraint>(); for (Lecture lecture : getModel().variables()) { Class_ clazz = iClasses.get(lecture.getClassId()); if (clazz == null) continue; for (CourseOffering co : clazz.getSchedulingSubpart().getInstrOfferingConfig() .getInstructionalOffering().getCourseOfferings()) { Long subject = 
co.getSubjectArea().getUniqueId(); SpreadConstraint subjectSpreadConstr = subjectSpreadConstraints.get(subject); if (subjectSpreadConstr == null) { subjectSpreadConstr = new SpreadConstraint(getModel().getProperties(), co.getSubjectArea().getSubjectAreaAbbreviation()); subjectSpreadConstraints.put(subject, subjectSpreadConstr); getModel().addConstraint(subjectSpreadConstr); } subjectSpreadConstr.addVariable(lecture); } iProgress.incProgress(); } } if (getModel().getProperties().getPropertyBoolean("General.PurgeInvalidPlacements", true)) purgeInvalidValues(); /* for (Constraint c: getModel().constraints()) { if (c instanceof SpreadConstraint) ((SpreadConstraint)c).init(); if (c instanceof DiscouragedRoomConstraint) ((DiscouragedRoomConstraint)c).setEnabled(true); if (c instanceof MinimizeNumberOfUsedRoomsConstraint) ((MinimizeNumberOfUsedRoomsConstraint)c).setEnabled(true); if (c instanceof MinimizeNumberOfUsedGroupsOfTime) ((MinimizeNumberOfUsedGroupsOfTime)c).setEnabled(true); } */ iProgress.setPhase("Checking for inconsistencies...", getModel().variables().size()); for (Lecture lecture : getModel().variables()) { iProgress.incProgress(); for (Iterator i = lecture.students().iterator(); i.hasNext();) { Student s = (Student) i.next(); if (!s.canEnroll(lecture)) iProgress.message(msglevel("badStudentEnrollment", Progress.MSGLEVEL_INFO), "Invalid student enrollment of student " + s.getId() + " in class " + getClassLabel(lecture) + " found."); } //check same instructor constraint if (!lecture.values(getAssignment()).isEmpty() && lecture.timeLocations().size() == 1 && !lecture.getInstructorConstraints().isEmpty()) { for (Lecture other : getModel().variables()) { if (other.values(getAssignment()).isEmpty() || other.timeLocations().size() != 1 || lecture.getClassId().compareTo(other.getClassId()) <= 0) continue; Placement p1 = lecture.values(getAssignment()).get(0); Placement p2 = other.values(getAssignment()).get(0); if (!other.getInstructorConstraints().isEmpty()) { 
for (InstructorConstraint ic : lecture.getInstructorConstraints()) { if (!other.getInstructorConstraints().contains(ic)) continue; if (p1.canShareRooms(p2) && p1.sameRooms(p2)) continue; if (p1.getTimeLocation().hasIntersection(p2.getTimeLocation())) { iProgress.message(msglevel("reqInstructorOverlap", Progress.MSGLEVEL_WARN), "Same instructor and overlapping time required:" + "<br> " + getClassLabel(lecture) + " ← " + p1.getLongName(iUseAmPm) + "<br> " + getClassLabel(other) + " ← " + p2.getLongName(iUseAmPm)); } else if (ic.getDistancePreference(p1, p2) == PreferenceLevel.sIntLevelProhibited && lecture.roomLocations().size() == 1 && other.roomLocations().size() == 1) { iProgress.message(msglevel("reqInstructorBackToBack", Progress.MSGLEVEL_WARN), "Same instructor, back-to-back time and rooms too far (distance=" + Math.round(10.0 * Placement.getDistanceInMeters( getModel().getDistanceMetric(), p1, p2)) + "m) required:" + "<br> " + getClassLabel(lecture) + " ← " + p1.getLongName(iUseAmPm) + "<br> " + getClassLabel(other) + " ← " + p2.getLongName(iUseAmPm)); } } } } } if (!lecture.isSingleton()) continue; for (Lecture other : getModel().variables()) { if (!other.isSingleton() || lecture.getClassId().compareTo(other.getClassId()) <= 0) continue; Placement p1 = new Placement(lecture, lecture.timeLocations().get(0), lecture.roomLocations()); Placement p2 = new Placement(other, other.timeLocations().get(0), other.roomLocations()); if (p1.shareRooms(p2) && p1.getTimeLocation().hasIntersection(p2.getTimeLocation()) && !p1.canShareRooms(p2)) { iProgress.message(msglevel("reqRoomOverlap", Progress.MSGLEVEL_WARN), "Same room and overlapping time required:" + "<br> " + getClassLabel(lecture) + " ← " + p1.getLongName(iUseAmPm) + "<br> " + getClassLabel(other) + " ← " + p2.getLongName(iUseAmPm)); } } if (getAssignment().getValue(lecture) == null) { Placement placement = new Placement(lecture, lecture.timeLocations().get(0), lecture.roomLocations()); if (!placement.isValid()) { 
String reason = ""; for (InstructorConstraint ic : lecture.getInstructorConstraints()) { if (!ic.isAvailable(lecture, placement)) reason += "<br> instructor " + ic.getName() + " not available"; } if (lecture.getNrRooms() > 0) { if (placement.isMultiRoom()) { for (RoomLocation roomLocation : placement.getRoomLocations()) { if (!roomLocation.getRoomConstraint().isAvailable(lecture, placement.getTimeLocation(), lecture.getScheduler())) reason += "<br> room " + roomLocation.getName() + " not available"; } } else { if (!placement.getRoomLocation().getRoomConstraint().isAvailable(lecture, placement.getTimeLocation(), lecture.getScheduler())) reason += "<br> room " + placement.getRoomLocation().getName() + " not available"; } } Map<Constraint<Lecture, Placement>, Set<Placement>> conflictConstraints = getModel() .conflictConstraints(getAssignment(), placement); if (!conflictConstraints.isEmpty()) { for (Constraint<Lecture, Placement> c : conflictConstraints.keySet()) { Set<Placement> vals = conflictConstraints.get(c); for (Placement p : vals) { Lecture l = p.variable(); if (l.isCommitted()) reason += "<br> conflict with committed assignment " + getClassLabel(l) + " = " + p.getLongName(iUseAmPm) + " (in constraint " + c + ")"; if (p.equals(placement)) reason += "<br> constraint " + c; } } } iProgress.message(msglevel("reqInvalidPlacement", Progress.MSGLEVEL_WARN), "Class " + getClassLabel(lecture) + " requires an invalid placement " + placement.getLongName(iUseAmPm) + (reason.length() == 0 ? "." 
: ":" + reason)); } else if (iAssignSingleton && getModel().conflictValues(getAssignment(), placement).isEmpty()) getAssignment().assign(0, placement); } } getModel().createAssignmentContexts(getAssignment(), true); if (getModel().getProperties().getPropertyBoolean("General.EnrollmentCheck", true)) new EnrollmentCheck(getModel(), getAssignment(), msglevel("enrollmentCheck", Progress.MSGLEVEL_WARN)) .checkStudentEnrollments(iProgress); if (getModel().getProperties().getPropertyBoolean("General.SwitchStudents", true) && getAssignment().nrAssignedVariables() != 0 && !iLoadStudentEnrlsFromSolution) getModel().switchStudents(getAssignment()); iProgress.setPhase("Done", 1); iProgress.incProgress(); iProgress.message(msglevel("allDone", Progress.MSGLEVEL_INFO), "Model successfully loaded."); }
From source file:dendroscope.autumn.hybridnetwork.ComputeHybridizationNetwork.java
/**
 * Recursively computes the hybridization number for the two rooted trees and collects
 * all minimal hybridization networks found along the way.
 * <p>
 * The search first tries a subtree reduction, then a cluster reduction; if neither
 * applies (or the pair is already reduced), it branches by removing one candidate
 * hybrid taxon at a time and recursing with budget {@code k - 1}.
 *
 * @param root1                    root of the first tree
 * @param root2                    root of the second tree (must carry the same taxon set)
 * @param isReduced                true if subtree/cluster reductions were already applied to this pair
 * @param candidateHybridsOriginal taxa still allowed to become hybrid nodes; temporarily
 *                                 mutated during branching but restored before each loop
 *                                 iteration ends
 * @param k                        remaining budget on the hybridization number
 * @param totalResults             collector for the resulting networks
 * @param depth                    indentation prefix used only for verbose trace output
 * @return the hybridization number for this subproblem, or LARGE if it exceeds {@code k}
 * @throws IOException       if a leaf unexpectedly carries more than one taxon
 * @throws CanceledException if the user cancels via the progress listener
 */
private int computeRec(Root root1, Root root2, boolean isReduced, BitSet candidateHybridsOriginal, int k,
        Collection<Root> totalResults, String depth) throws IOException, CanceledException {
    if (verbose) {
        System.err.println(depth + "---------- ComputeRec:");
        System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
        System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
    }

    // Throttled progress reporting: only tick the progress bar every waitTime ms,
    // with the interval growing geometrically; otherwise just poll for cancellation.
    if (System.currentTimeMillis() > nextTime) {
        progressListener.incrementProgress();
        nextTime += waitTime;
        waitTime *= 1.5;
    } else
        progressListener.checkForCancel();

    // root1.reorderSubTree();
    // root2.reorderSubTree();

    // Optional sanity checks: both trees must be well-formed and share one taxon set.
    if (checking) {
        root1.checkTree();
        root2.checkTree();
        if (!root2.getTaxa().equals(root1.getTaxa()))
            throw new RuntimeException("Unequal taxon sets: X=" + Basic.toString(root1.getTaxa()) + " vs "
                    + Basic.toString(root2.getTaxa()));
    }

    if (!isReduced) {
        // 1. try to perform a subtree reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            List<Pair<Root, Root>> reducedSubtreePairs = new LinkedList<Pair<Root, Root>>();

            switch (SubtreeReduction.apply(root1, root2, reducedSubtreePairs, placeHolderTaxon)) {
            case ISOMORPHIC:
                Root isomorphicTree = MergeIsomorphicInducedTrees.apply(root1, root2);
                if (verbose) {
                    System.err.println(depth + "Trees are isomorphic");
                    System.err.println(depth + "Isomorphic tree: " + isomorphicTree.toStringFullTreeX());
                }
                totalResults.add(isomorphicTree);
                return 0; // two trees are isomorphic, no hybrid node needed
            case REDUCED: // a reduction was performed, cannot maintain lexicographical ordering in removal loop below
                // Merge each reduced isomorphic subtree pair into a single subtree,
                // to be re-attached onto the networks computed for the reduced pair.
                List<Root> subTrees = new LinkedList<Root>();
                for (Pair<Root, Root> pair : reducedSubtreePairs) {
                    subTrees.add(MergeIsomorphicInducedTrees.apply(pair.getFirst(), pair.getSecond()));
                }
                if (verbose) {
                    System.err.println(depth + "Trees are reducible:");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    for (Root root : subTrees) {
                        System.err.println(depth + "Merged reduced subtree: " + root.toStringFullTreeX());
                    }
                }

                // The placeholder taxon introduced by the reduction becomes an extra
                // hybrid candidate; work on a clone so the caller's set is untouched.
                BitSet candidateHybrids;
                if (false)
                    candidateHybrids = getAllAliveTaxa(root1, root2); // need to reconsider all possible hybrids
                else {
                    candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                    candidateHybrids.set(placeHolderTaxon.get(), true);
                }

                Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(root1, root2, false, candidateHybrids, k, currentResults, depth + " >");

                // Re-attach the merged subtrees onto every network found for the reduced pair.
                List<Root> merged = MergeNetworks.apply(currentResults, subTrees);
                if (verbose) {
                    for (Root r : merged) {
                        System.err.println(depth + "Result-merged: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(fixOrdering(merged));
                return h;
            case IRREDUCIBLE:
                if (verbose)
                    System.err.println(depth + "Trees are subtree-irreducible");
                break;
            }
        }

        // 2. try to perform a cluster reduction:
        {
            final Single<Integer> placeHolderTaxon = new Single<Integer>();
            Pair<Root, Root> clusterTrees = ClusterReduction.apply(root1, root2, placeHolderTaxon);

            if (clusterTrees != null) {
                // Solve the bottom (cluster) pair first; it was produced already reduced.
                Set<Root> resultBottomPair = new TreeSet<Root>(new NetworkComparator());
                int h = cacheComputeRec(clusterTrees.getFirst(), clusterTrees.getSecond(), true,
                        candidateHybridsOriginal, k, resultBottomPair, depth + " >");

                // for the top pair, we should reconsider the place holder in the top pair as a possible place holder
                BitSet candidateHybrids = (BitSet) candidateHybridsOriginal.clone();
                candidateHybrids.set(placeHolderTaxon.get(), true);

                // Solve the top pair with the remaining budget k - h; total is additive.
                Set<Root> resultTopPair = new TreeSet<Root>(new NetworkComparator());
                h += cacheComputeRec(root1, root2, false, candidateHybrids, k - h, resultTopPair, depth + " >");

                // Combine every bottom network with every top network.
                Set<Root> currentResults = new TreeSet<Root>(new NetworkComparator());
                for (Root r : resultBottomPair) {
                    currentResults.addAll(MergeNetworks.apply(resultTopPair, Arrays.asList(r)));
                }
                if (verbose) {
                    System.err.println(depth + "Cluster reduction applied::");
                    System.err.println(depth + "Tree1-reduced: " + root1.toStringFullTreeX());
                    System.err.println(depth + "Tree2-reduced: " + root2.toStringFullTreeX());
                    System.err.println(depth + "Subtree-1: " + clusterTrees.getFirst().toStringFullTreeX());
                    System.err
                            .println(depth + "Subtree-2: " + clusterTrees.getSecond().toStringFullTreeX());
                    for (Root r : resultBottomPair) {
                        System.err.println(depth + "Results for reduced-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : resultTopPair) {
                        System.err.println(depth + "Results for sub-trees: " + r.toStringNetworkFull());
                    }
                    for (Root r : currentResults) {
                        System.err
                                .println(depth + "Merged cluster-reduced networks: " + r.toStringNetworkFull());
                    }
                }
                totalResults.addAll(currentResults);
                // Free the temporary trees produced by the cluster reduction.
                clusterTrees.getFirst().deleteSubTree();
                clusterTrees.getSecond().deleteSubTree();
                return h;
            }
        }
    } else {
        if (verbose)
            System.err.println(depth + "Trees are already reduced");
    }

    if (k <= 0) // 1, if only interested in number or in finding only one network, 0 else
        return LARGE;

    int hBest = LARGE;

    List<Root> leaves1 = getAllAliveLeaves(root1);

    /*
    if (leaves1.size() <= 2) // try 2 rather than one...
    {
    totalResults.add(MergeNetworks.apply(root1,root2)); // todo: this needs to be fixed
    return 0;
    }
    */

    // Branching step: try removing each candidate leaf taxon in turn and recurse
    // with one less unit of budget; +1 accounts for the hybrid node that removal costs.
    for (Root leaf2remove : leaves1) {
        BitSet taxa2remove = leaf2remove.getTaxa();

        if (taxa2remove.cardinality() != 1)
            throw new IOException(depth + "Leaf taxa cardinality: " + taxa2remove.cardinality());

        int hybridTaxon = taxa2remove.nextSetBit(0);

        if (candidateHybridsOriginal.get(hybridTaxon)) {
            if (verbose) {
                System.err.println(depth + "Removing: " + hybridTaxon);
                System.err.println(depth + "candidateHybrids: " + Basic.toString(candidateHybridsOriginal));
                System.err.println(depth + "Tree1: " + root1.toStringFullTreeX());
                System.err.println(depth + "Tree2: " + root2.toStringFullTreeX());
            }

            // Work on copies so the originals survive for the next loop iteration.
            Root root1x = root1.copySubNetwork();
            Root root2x = root2.copySubNetwork();
            RemoveTaxon.apply(root1x, 1, hybridTaxon);
            RemoveTaxon.apply(root2x, 2, hybridTaxon); // now we keep removed taxa as separate sets

            if (verbose) {
                System.err.println(depth + "Tree1-x: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x: " + root2x.toStringFullTreeX());
            }

            Refine.apply(root1x, root2x);

            if (verbose) {
                System.err.println(depth + "Tree1-x-refined: " + root1x.toStringFullTreeX());
                System.err.println(depth + "Tree2-x-refined: " + root2x.toStringFullTreeX());
            }

            Collection<Root> currentResults = new TreeSet<Root>(new NetworkComparator());

            // Temporarily exclude this taxon from the candidate set for the recursion,
            // then restore it — the shared BitSet must be unchanged for later iterations.
            candidateHybridsOriginal.set(hybridTaxon, false);
            int h = cacheComputeRec(root1x, root2x, false, candidateHybridsOriginal, k - 1, currentResults,
                    depth + " >") + 1;
            candidateHybridsOriginal.set(hybridTaxon, true);

            if (h < k)
                k = h; // tighten the budget as soon as a better bound is found

            // System.err.println("Subproblem with " + Basic.toString(taxa2remove) + " removed, h=" + h);

            // New strict improvement: discard all previously collected networks.
            if (h < hBest && h <= k) {
                hBest = h;
                totalResults.clear();
            }
            // Ties with the current best: keep these networks too.
            if (h == hBest && h <= k) {
                if (verbose) {
                    for (Root r : currentResults) {
                        System.err.println(depth + "Result: " + r.toStringNetworkFull());
                    }
                }

                // add the hybrid node:
                currentResults = copyAll(currentResults);
                AddHybridNode.apply(currentResults, hybridTaxon);
                totalResults.addAll(fixOrdering(currentResults));
            }
            // Free the working copies regardless of outcome.
            root1x.deleteSubTree();
            root2x.deleteSubTree();
        }
    }
    return hBest;
}
From source file:org.apache.drill.exec.planner.logical.partition.PruneScanRule.java
/**
 * Performs partition pruning for a (Filter, optional Project, Scan) match: evaluates the
 * filter's partition-eligible conjuncts against every partition location using the
 * interpreter, and replaces the scan with one restricted to the qualifying partitions.
 * <p>
 * On any failure or when nothing can be pruned, the method returns early after marking
 * the metadata context NOT_PRUNED, leaving the original plan untouched.
 *
 * @param call       the planner rule call used to emit the transformed plan
 * @param filterRel  the filter whose condition drives pruning
 * @param projectRel optional projection between filter and scan; may be null
 * @param scanRel    the table scan being pruned
 */
protected void doOnMatch(RelOptRuleCall call, Filter filterRel, Project projectRel, TableScan scanRel) {
    final String pruningClassName = getClass().getName();
    logger.info("Beginning partition pruning, pruning class: {}", pruningClassName);
    Stopwatch totalPruningTime = Stopwatch.createStarted();

    final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
    PartitionDescriptor descriptor = getPartitionDescriptor(settings, scanRel);
    final BufferAllocator allocator = optimizerContext.getAllocator();

    // The metadata context (if any) records the prune status for metadata-cache handling.
    final Object selection = getDrillTable(scanRel).getSelection();
    MetadataContext metaContext = null;
    if (selection instanceof FormatSelection) {
        metaContext = ((FormatSelection) selection).getSelection().getMetaContext();
    }

    RexNode condition = null;
    if (projectRel == null) {
        condition = filterRel.getCondition();
    } else {
        // get the filter as if it were below the projection.
        condition = RelOptUtil.pushFilterPastProject(filterRel.getCondition(), projectRel);
    }

    // Normalize n-ary AND/OR into binary form so the condition analyzer can walk it.
    RewriteAsBinaryOperators visitor = new RewriteAsBinaryOperators(true,
            filterRel.getCluster().getRexBuilder());
    condition = condition.accept(visitor);

    // Identify which scan columns are partition columns, building both directions of
    // the (rel column index <-> partition index) mapping.
    Map<Integer, String> fieldNameMap = Maps.newHashMap();
    List<String> fieldNames = scanRel.getRowType().getFieldNames();
    BitSet columnBitset = new BitSet();
    BitSet partitionColumnBitSet = new BitSet();
    Map<Integer, Integer> partitionMap = Maps.newHashMap();

    int relColIndex = 0;
    for (String field : fieldNames) {
        final Integer partitionIndex = descriptor.getIdIfValid(field);
        if (partitionIndex != null) {
            fieldNameMap.put(partitionIndex, field);
            partitionColumnBitSet.set(partitionIndex);
            columnBitset.set(relColIndex);
            // mapping between the relColIndex and partitionIndex
            partitionMap.put(relColIndex, partitionIndex);
        }
        relColIndex++;
    }

    if (partitionColumnBitSet.isEmpty()) {
        logger.info("No partition columns are projected from the scan..continue. "
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // stop watch to track how long we spend in different phases of pruning
    Stopwatch miscTimer = Stopwatch.createUnstarted();

    // track how long we spend building the filter tree
    miscTimer.start();

    // Extract the sub-condition that references only partition columns.
    FindPartitionConditions c = new FindPartitionConditions(columnBitset,
            filterRel.getCluster().getRexBuilder());
    c.analyze(condition);
    RexNode pruneCondition = c.getFinalCondition();
    BitSet referencedDirsBitSet = c.getReferencedDirs();

    logger.info("Total elapsed time to build and analyze filter tree: {} ms",
            miscTimer.elapsed(TimeUnit.MILLISECONDS));
    miscTimer.reset();

    if (pruneCondition == null) {
        logger.info("No conditions were found eligible for partition pruning."
                + "Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
        setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
        return;
    }

    // set up the partitions
    List<PartitionLocation> newPartitions = Lists.newArrayList();
    long numTotal = 0; // total number of partitions
    int batchIndex = 0;
    PartitionLocation firstLocation = null;
    LogicalExpression materializedExpr = null;
    // spInfo/maxIndex/matchBitSet track the common directory prefix shared by all
    // qualifying composite partitions, used later to pick a metadata-cache file root.
    String[] spInfo = null;
    int maxIndex = -1;
    BitSet matchBitSet = new BitSet();

    // Outer loop: iterate over a list of batches of PartitionLocations
    for (List<PartitionLocation> partitions : descriptor) {
        numTotal += partitions.size();
        logger.debug("Evaluating partition pruning for batch {}", batchIndex);
        if (batchIndex == 0) { // save the first location in case everything is pruned
            firstLocation = partitions.get(0);
        }
        final NullableBitVector output = new NullableBitVector(
                MaterializedField.create("", Types.optional(MinorType.BIT)), allocator);
        final VectorContainer container = new VectorContainer();

        try {
            // One value vector per projected partition column, indexed by partition index.
            final ValueVector[] vectors = new ValueVector[descriptor.getMaxHierarchyLevel()];
            for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
                SchemaPath column = SchemaPath.getSimplePath(fieldNameMap.get(partitionColumnIndex));
                MajorType type = descriptor.getVectorType(column, settings);
                MaterializedField field = MaterializedField.create(column.getAsUnescapedPath(), type);
                ValueVector v = TypeHelper.getNewVector(field, allocator);
                v.allocateNew();
                vectors[partitionColumnIndex] = v;
                container.add(v);
            }

            // track how long we spend populating partition column vectors
            miscTimer.start();

            // populate partition vectors.
            descriptor.populatePartitionVectors(vectors, partitions, partitionColumnBitSet, fieldNameMap);

            logger.info("Elapsed time to populate partitioning column vectors: {} ms within batchIndex: {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex);
            miscTimer.reset();

            // materialize the expression; only need to do this once
            if (batchIndex == 0) {
                materializedExpr = materializePruneExpr(pruneCondition, settings, scanRel, container);
                if (materializedExpr == null) {
                    // continue without partition pruning; no need to log anything here since
                    // materializePruneExpr logs it already
                    logger.info("Total pruning elapsed time: {} ms",
                            totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
                    setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
                    return;
                }
            }

            output.allocateNew(partitions.size());

            // start the timer to evaluate how long we spend in the interpreter evaluation
            miscTimer.start();

            // Evaluate the prune condition over this batch; output bit i is 1 when
            // partition i satisfies the condition.
            InterpreterEvaluator.evaluate(partitions.size(), optimizerContext, container, output,
                    materializedExpr);

            logger.info(
                    "Elapsed time in interpreter evaluation: {} ms within batchIndex: {} with # of partitions : {}",
                    miscTimer.elapsed(TimeUnit.MILLISECONDS), batchIndex, partitions.size());
            miscTimer.reset();

            int recordCount = 0;
            int qualifiedCount = 0;

            if (descriptor.supportsMetadataCachePruning() && partitions.get(0)
                    .isCompositePartition() /* apply single partition check only for composite partitions */) {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    assert part.isCompositePartition();
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        // Rather than using the PartitionLocation, get the array of partition values for the directories that are
                        // referenced by the filter since we are not interested in directory references in other parts of the query.
                        Pair<String[], Integer> p = composePartition(referencedDirsBitSet, partitionMap,
                                vectors, recordCount);
                        String[] parts = p.getLeft();
                        int tmpIndex = p.getRight();
                        maxIndex = Math.max(maxIndex, tmpIndex);
                        if (spInfo == null) { // initialization
                            spInfo = parts;
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] != null) {
                                    matchBitSet.set(j);
                                }
                            }
                        } else {
                            // compare the new partition with existing partition
                            for (int j = 0; j <= tmpIndex; j++) {
                                if (parts[j] == null || spInfo[j] == null) { // nulls don't match
                                    matchBitSet.clear(j);
                                } else {
                                    if (!parts[j].equals(spInfo[j])) {
                                        matchBitSet.clear(j);
                                    }
                                }
                            }
                        }
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            } else {
                // Inner loop: within each batch iterate over the PartitionLocations
                for (PartitionLocation part : partitions) {
                    if (!output.getAccessor().isNull(recordCount)
                            && output.getAccessor().get(recordCount) == 1) {
                        newPartitions.add(part);
                        qualifiedCount++;
                    }
                    recordCount++;
                }
            }
            logger.debug("Within batch {}: total records: {}, qualified records: {}", batchIndex,
                    recordCount, qualifiedCount);
            batchIndex++;
        } catch (Exception e) {
            logger.warn("Exception while trying to prune partition.", e);
            logger.info("Total pruning elapsed time: {} ms",
                    totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
            setPruneStatus(metaContext, PruneStatus.NOT_PRUNED);
            return; // continue without partition pruning
        } finally {
            // Always release the direct-memory buffers allocated for this batch.
            container.clear();
            if (output != null) {
                output.clear();
            }
        }
    }

    try {
        if (newPartitions.size() == numTotal) {
            logger.info("No partitions were eligible for pruning");
            return;
        }

        // handle the case all partitions are filtered out.
        boolean canDropFilter = true;
        boolean wasAllPartitionsPruned = false;
        String cacheFileRoot = null;

        if (newPartitions.isEmpty()) {
            assert firstLocation != null;
            // Add the first non-composite partition location, since execution requires schema.
            // In such case, we should not drop filter.
            newPartitions.add(firstLocation.getPartitionLocationRecursive().get(0));
            canDropFilter = false;
            // NOTE: with DRILL-4530, the PruneScanRule may be called with only a list of
            // directories first and the non-composite partition location will still return
            // directories, not files. So, additional processing is done depending on this flag
            wasAllPartitionsPruned = true;
            logger.info(
                    "All {} partitions were pruned; added back a single partition to allow creating a schema",
                    numTotal);

            // set the cacheFileRoot appropriately
            if (firstLocation.isCompositePartition()) {
                cacheFileRoot = descriptor.getBaseTableLocation()
                        + firstLocation.getCompositePartitionPath();
            }
        }

        logger.info("Pruned {} partitions down to {}", numTotal, newPartitions.size());

        // Remove the conjuncts that pruning fully consumed; what remains becomes the
        // new (possibly trivial) filter condition.
        List<RexNode> conjuncts = RelOptUtil.conjunctions(condition);
        List<RexNode> pruneConjuncts = RelOptUtil.conjunctions(pruneCondition);
        conjuncts.removeAll(pruneConjuncts);
        RexNode newCondition = RexUtil.composeConjunction(filterRel.getCluster().getRexBuilder(), conjuncts,
                false);

        // Undo the binary-operator rewrite applied earlier.
        RewriteCombineBinaryOperators reverseVisitor = new RewriteCombineBinaryOperators(true,
                filterRel.getCluster().getRexBuilder());

        condition = condition.accept(reverseVisitor);
        pruneCondition = pruneCondition.accept(reverseVisitor);

        if (descriptor.supportsMetadataCachePruning() && !wasAllPartitionsPruned) {
            // if metadata cache file could potentially be used, then assign a proper cacheFileRoot
            int index = -1;
            if (!matchBitSet.isEmpty()) {
                String path = "";
                index = matchBitSet.length() - 1;

                for (int j = 0; j < matchBitSet.length(); j++) {
                    if (!matchBitSet.get(j)) {
                        // stop at the first index with no match and use the immediate
                        // previous index
                        index = j - 1;
                        break;
                    }
                }
                for (int j = 0; j <= index; j++) {
                    path += "/" + spInfo[j];
                }
                cacheFileRoot = descriptor.getBaseTableLocation() + path;
            }
            if (index != maxIndex) {
                // if multiple partitions are being selected, we should not drop the filter
                // since we are reading the cache file at a parent/ancestor level
                canDropFilter = false;
            }
        }

        RelNode inputRel = descriptor.supportsMetadataCachePruning()
                ? descriptor.createTableScan(newPartitions, cacheFileRoot, wasAllPartitionsPruned,
                        metaContext)
                : descriptor.createTableScan(newPartitions, wasAllPartitionsPruned);

        if (projectRel != null) {
            inputRel = projectRel.copy(projectRel.getTraitSet(), Collections.singletonList(inputRel));
        }

        if (newCondition.isAlwaysTrue() && canDropFilter) {
            // Filter fully subsumed by pruning: emit the (projected) scan alone.
            call.transformTo(inputRel);
        } else {
            final RelNode newFilter = filterRel.copy(filterRel.getTraitSet(),
                    Collections.singletonList(inputRel));
            call.transformTo(newFilter);
        }

        setPruneStatus(metaContext, PruneStatus.PRUNED);

    } catch (Exception e) {
        logger.warn("Exception while using the pruned partitions.", e);
    } finally {
        logger.info("Total pruning elapsed time: {} ms", totalPruningTime.elapsed(TimeUnit.MILLISECONDS));
    }
}