List of usage examples for java.util Hashtable containsKey
public synchronized boolean containsKey(Object key)
From source file:org.tinygroup.jspengine.compiler.TagLibraryInfoImpl.java
/**
 * Parses a Tag Library Descriptor (TLD) document and populates this
 * TagLibraryInfoImpl's fields (version, short name, URI, tags, tag files,
 * functions, validator) from its child elements.
 *
 * @param ctxt       compilation context (unused directly here, kept for callers)
 * @param uri        the URI of the TLD, used for parsing and error messages
 * @param in         stream over the TLD XML content
 * @param jarFileUrl URL of the containing JAR, passed through to tag-file handling
 * @throws JasperException on XML parse failure, duplicate function names, or
 *                         missing mandatory elements
 */
private void parseTLD(JspCompilationContext ctxt, String uri, InputStream in, URL jarFileUrl)
        throws JasperException {
    Vector tagVector = new Vector();
    Vector tagFileVector = new Vector();
    // keyed by function name so duplicates can be detected below
    Hashtable functionTable = new Hashtable();

    // Create an iterator over the child elements of our <taglib> element
    ParserUtils pu = new ParserUtils();
    TreeNode tld = pu.parseXMLDocument(uri, in);

    // Check to see if the <taglib> root element contains a 'version'
    // attribute, which was added in JSP 2.0 to replace the <jsp-version>
    // subelement
    this.jspversion = tld.findAttribute("version");

    // Process each child element of our <taglib> element
    Iterator list = tld.findChildren();
    while (list.hasNext()) {
        TreeNode element = (TreeNode) list.next();
        String tname = element.getName();
        if ("tlibversion".equals(tname) // JSP 1.1
                || "tlib-version".equals(tname)) { // JSP 1.2
            this.tlibversion = element.getBody();
        } else if ("jspversion".equals(tname) || "jsp-version".equals(tname)) {
            this.jspversion = element.getBody();
        } else if ("shortname".equals(tname) || "short-name".equals(tname))
            this.shortname = element.getBody();
        else if ("uri".equals(tname))
            this.urn = element.getBody();
        else if ("info".equals(tname) || "description".equals(tname))
            this.info = element.getBody();
        else if ("validator".equals(tname))
            this.tagLibraryValidator = createValidator(element);
        else if ("tag".equals(tname))
            tagVector.addElement(createTagInfo(element, jspversion));
        else if ("tag-file".equals(tname)) {
            TagFileInfo tagFileInfo = createTagFileInfo(element, uri, jarFileUrl);
            tagFileVector.addElement(tagFileInfo);
        } else if ("function".equals(tname)) { // JSP2.0
            FunctionInfo funcInfo = createFunctionInfo(element);
            String funcName = funcInfo.getName();
            // a function name may appear at most once per taglib
            if (functionTable.containsKey(funcName)) {
                err.jspError("jsp.error.tld.fn.duplicate.name", funcName, uri);
            }
            functionTable.put(funcName, funcInfo);
        } else if ("display-name".equals(tname) || // Ignored elements
                "small-icon".equals(tname) || "large-icon".equals(tname)
                || "listener".equals(tname)) {
            ;
        } else if ("taglib-extension".equals(tname)) {
            // Recognized but ignored
        } else {
            err.jspError("jsp.error.unknown.element.in.taglib", tname);
        }
    }

    // tlib-version and jsp-version are mandatory in all TLD versions
    if (tlibversion == null) {
        err.jspError("jsp.error.tld.mandatory.element.missing", "tlib-version");
    }
    if (jspversion == null) {
        err.jspError("jsp.error.tld.mandatory.element.missing", "jsp-version");
    }

    // copy the accumulated entries into the typed arrays exposed by this class
    this.tags = new TagInfo[tagVector.size()];
    tagVector.copyInto(this.tags);
    this.tagFiles = new TagFileInfo[tagFileVector.size()];
    tagFileVector.copyInto(this.tagFiles);
    this.functions = new FunctionInfo[functionTable.size()];
    int i = 0;
    Enumeration enumeration = functionTable.elements();
    while (enumeration.hasMoreElements()) {
        this.functions[i++] = (FunctionInfo) enumeration.nextElement();
    }
}
From source file:oscar.dms.actions.DmsInboxManageAction.java
/**
 * Struts action that gathers lab results, scanned documents and HRM documents
 * for the inbox content page, filters them by provider/queue privileges and
 * date range, de-duplicates them by accession number, aggregates per-patient
 * and per-type counts, and stores everything as request attributes for the
 * "dms_page" forward.
 *
 * NOTE(review): this method is very long and order-dependent; comments below
 * mark the major phases rather than restructuring it.
 */
public ActionForward prepareForContentPage(ActionMapping mapping, ActionForm form,
        HttpServletRequest request, HttpServletResponse response) {
    HttpSession session = request.getSession();
    // redirect to logout if there is no role in the session (not logged in)
    try {
        if (session.getAttribute("userrole") == null)
            response.sendRedirect("../logout.jsp");
    } catch (Exception e) {
        logger.error("Error", e);
    }
    // can't use userrole from session, because it changes if provider A search for provider B's documents
    // oscar.oscarMDS.data.MDSResultsData mDSData = new oscar.oscarMDS.data.MDSResultsData();
    CommonLabResultData comLab = new CommonLabResultData();
    // String providerNo = request.getParameter("providerNo");
    String providerNo = (String) session.getAttribute("user");
    String searchProviderNo = request.getParameter("searchProviderNo");
    String ackStatus = request.getParameter("status");
    // used when searching for labs by patient instead of provider
    String demographicNo = request.getParameter("demographicNo");
    String scannedDocStatus = request.getParameter("scannedDocument");

    // --- paging parameters (1-based in the request, 0-based internally) ---
    Integer page = 0;
    try {
        page = Integer.parseInt(request.getParameter("page"));
        if (page > 0) {
            page--;
        }
    } catch (NumberFormatException nfe) {
        page = 0;
    }
    Integer pageSize = 20;
    try {
        String tmp = request.getParameter("pageSize");
        pageSize = Integer.parseInt(tmp);
    } catch (NumberFormatException nfe) {
        pageSize = 20;
    }
    // NOTE(review): the request parameter read above is immediately overwritten here
    scannedDocStatus = "I";

    // --- view / date-range filters ---
    String startDateStr = request.getParameter("startDate");
    String endDateStr = request.getParameter("endDate");
    String view = request.getParameter("view");
    if (view == null || "".equals(view)) {
        view = "all";
    }
    boolean mixLabsAndDocs = "normal".equals(view) || "all".equals(view);
    Date startDate = null;
    Date endDate = null;
    try {
        if (startDateStr != null && startDateStr.length() > 0) {
            startDateStr = startDateStr.trim();
            startDate = UtilDateUtilities.StringToDate(startDateStr);
        }
        if (endDateStr != null && endDateStr.length() > 0) {
            endDateStr = endDateStr.trim();
            endDate = UtilDateUtilities.StringToDate(endDateStr);
        }
    } catch (Exception e) {
        // invalid dates disable the date-range filter entirely
        startDate = null;
        endDate = null;
    }
    Boolean isAbnormal = null;
    if ("abnormal".equals(view))
        isAbnormal = new Boolean(true);
    if ("normal".equals(view))
        isAbnormal = new Boolean(false);
    if (ackStatus == null) {
        ackStatus = "N";
    } // default to new labs only
    if (providerNo == null) {
        providerNo = "";
    }
    if (searchProviderNo == null) {
        searchProviderNo = providerNo;
    }

    // --- build a comma-separated role list for privilege checks below ---
    String roleName = "";
    List<SecUserRole> roles = secUserRoleDao.getUserRoles(searchProviderNo);
    for (SecUserRole r : roles) {
        if (r != null) {
            if (roleName.length() == 0) {
                roleName = r.getRoleName();
            } else {
                roleName += "," + r.getRoleName();
            }
        }
    }
    roleName += "," + searchProviderNo;
    // mDSData.populateMDSResultsData2(searchProviderNo, demographicNo, request.getParameter("fname"), request.getParameter("lname"), request.getParameter("hnum"), ackStatus);
    // HashMap<String,String> docQueue=comLab.getDocumentQueueLinks();

    // map of document id -> queue id, used for per-queue privilege checks
    List<QueueDocumentLink> qd = queueDocumentLinkDAO.getQueueDocLinks();
    HashMap<String, String> docQueue = new HashMap();
    for (QueueDocumentLink qdl : qd) {
        Integer i = qdl.getDocId();
        Integer n = qdl.getQueueId();
        docQueue.put(i.toString(), n.toString());
    }
    InboxResultsDao inboxResultsDao = (InboxResultsDao) SpringUtils.getBean("inboxResultsDao");
    String patientFirstName = request.getParameter("fname");
    String patientLastName = request.getParameter("lname");
    String patientHealthNumber = request.getParameter("hnum");

    // --- phase 1: fetch documents and/or labs depending on the view ---
    ArrayList<LabResultData> labdocs = new ArrayList<LabResultData>();
    if (!"labs".equals(view) && !"abnormal".equals(view)) {
        labdocs = inboxResultsDao.populateDocumentResultsData(searchProviderNo, demographicNo,
                patientFirstName, patientLastName, patientHealthNumber, ackStatus, true, page,
                pageSize, mixLabsAndDocs, isAbnormal);
    }
    if (!"documents".equals(view)) {
        labdocs.addAll(comLab.populateLabResultsData(searchProviderNo, demographicNo,
                patientFirstName, patientLastName, patientHealthNumber, ackStatus,
                scannedDocStatus, true, page, pageSize, mixLabsAndDocs, isAbnormal));
    }
    labdocs = (ArrayList<LabResultData>) filterLabDocsForSuperSite(labdocs, providerNo);

    // --- phase 2: keep only documents the searching provider may see ---
    ArrayList<LabResultData> validlabdocs = new ArrayList<LabResultData>();
    DocumentResultsDao documentResultsDao = (DocumentResultsDao) SpringUtils
            .getBean("documentResultsDao");
    // check privilege for documents only
    for (LabResultData data : labdocs) {
        if (data.isDocument()) {
            String docid = data.getSegmentID();
            String queueid = docQueue.get(docid);
            if (queueid != null) {
                queueid = queueid.trim();
                int queueIdInt = Integer.parseInt(queueid);
                // if doc sent to default queue and no valid provider, do NOT include it
                if (queueIdInt == Queue.DEFAULT_QUEUE_ID
                        && !documentResultsDao.isSentToValidProvider(docid)
                        && isSegmentIDUnique(validlabdocs, data)) {
                    // validlabdocs.add(data);
                }
                // if doc sent to default queue && valid provider, check if it's sent to this provider, if yes include it
                else if (queueIdInt == Queue.DEFAULT_QUEUE_ID
                        && documentResultsDao.isSentToValidProvider(docid)
                        && documentResultsDao.isSentToProvider(docid, searchProviderNo)
                        && isSegmentIDUnique(validlabdocs, data)) {
                    validlabdocs.add(data);
                }
                // if doc setn to non-default queue and valid provider, check if provider is in the queue or equal to the provider
                else if (queueIdInt != Queue.DEFAULT_QUEUE_ID
                        && documentResultsDao.isSentToValidProvider(docid)) {
                    Vector vec = OscarRoleObjectPrivilege.getPrivilegeProp("_queue." + queueid);
                    if (OscarRoleObjectPrivilege.checkPrivilege(roleName,
                            (Properties) vec.get(0), (Vector) vec.get(1))
                            || documentResultsDao.isSentToProvider(docid, searchProviderNo)) {
                        // labs is in provider's queue,do nothing
                        if (isSegmentIDUnique(validlabdocs, data)) {
                            validlabdocs.add(data);
                        }
                    }
                }
                // if doc sent to non default queue and no valid provider, check if provider is in the non default queue
                // NOTE(review): this branch compares a String to Queue.DEFAULT_QUEUE_ID via
                // equals(); if DEFAULT_QUEUE_ID is an int (as the == comparisons above suggest)
                // this is always true for any queueid — likely intended: queueIdInt != Queue.DEFAULT_QUEUE_ID. Verify.
                else if (!queueid.equals(Queue.DEFAULT_QUEUE_ID)
                        && !documentResultsDao.isSentToValidProvider(docid)) {
                    Vector vec = OscarRoleObjectPrivilege.getPrivilegeProp("_queue." + queueid);
                    if (OscarRoleObjectPrivilege.checkPrivilege(roleName,
                            (Properties) vec.get(0), (Vector) vec.get(1))) {
                        // labs is in provider's queue,do nothing
                        if (isSegmentIDUnique(validlabdocs, data)) {
                            validlabdocs.add(data);
                        }
                    }
                }
            }
        } else {// add lab
            if (isSegmentIDUnique(validlabdocs, data)) {
                validlabdocs.add(data);
            }
        }
    }

    // --- phase 3: HRM documents, bounded by the oldest/newest lab dates ---
    // Find the oldest lab returned in labdocs, use that as the limit date for the HRM query
    Date oldestLab = null;
    Date newestLab = null;
    if (request.getParameter("newestDate") != null) {
        try {
            SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            newestLab = formatter.parse(request.getParameter("newestDate"));
        } catch (Exception e) {
            logger.error("Couldn't parse date + " + request.getParameter("newestDate"), e);
        }
    }
    for (LabResultData result : labdocs) {
        if (result != null && result.getDateObj() != null) {
            if (oldestLab == null || oldestLab.compareTo(result.getDateObj()) > 0)
                oldestLab = result.getDateObj();
            if (request.getParameter("newestDate") != null
                    && (newestLab == null || newestLab.compareTo(result.getDateObj()) < 0))
                newestLab = result.getDateObj();
        }
    }
    HRMResultsData hrmResult = new HRMResultsData();
    Collection<LabResultData> hrmDocuments = hrmResult.populateHRMdocumentsResultsData(
            searchProviderNo, ackStatus, newestLab, oldestLab);
    if (oldestLab == null) {
        for (LabResultData hrmDocument : hrmDocuments) {
            if (oldestLab == null || (hrmDocument.getDateObj() != null
                    && oldestLab.compareTo(hrmDocument.getDateObj()) > 0))
                oldestLab = hrmDocument.getDateObj();
        }
    }
    // labdocs is already filtered for super site access.. not just filter hrmDocuments
    hrmDocuments = filterLabDocsForSuperSite(hrmDocuments, providerNo);
    labdocs.addAll(hrmDocuments);
    Collections.sort(labdocs);

    // --- phase 4: de-duplicate by accession number + lab type ---
    HashMap<String, LabResultData> labMap = new HashMap<String, LabResultData>();
    LinkedHashMap<String, ArrayList<String>> accessionMap = new LinkedHashMap<String, ArrayList<String>>();
    int accessionNumCount = 0;
    for (LabResultData result : labdocs) {
        if (startDate != null && startDate.after(result.getDateObj())) {
            continue;
        }
        if (endDate != null && endDate.before(result.getDateObj())) {
            continue;
        }
        // suffix disambiguates documents ("d") and HRM ("h") from plain labs
        String segmentId = result.getSegmentID();
        if (result.isDocument())
            segmentId += "d";
        else if (result.isHRM())
            segmentId += "h";
        labMap.put(segmentId, result);
        ArrayList<String> labNums = new ArrayList<String>();
        if (result.accessionNumber == null || result.accessionNumber.equals("")) {
            // no accession number: synthesize a unique key so nothing collapses
            labNums.add(segmentId);
            accessionNumCount++;
            accessionMap.put("noAccessionNum" + accessionNumCount + result.labType, labNums);
        } else if (!accessionMap.containsKey(result.accessionNumber + result.labType)) {
            labNums.add(segmentId);
            accessionMap.put(result.accessionNumber + result.labType, labNums);
            // Different MDS Labs may have the same accession Number if they are seperated
            // by two years. So accession numbers are limited to matching only if their
            // labs are within one year of eachother
        } else {
            labNums = accessionMap.get(result.accessionNumber + result.labType);
            boolean matchFlag = false;
            for (int j = 0; j < labNums.size(); j++) {
                LabResultData matchingResult = labMap.get(labNums.get(j));
                Date dateA = result.getDateObj();
                Date dateB = matchingResult.getDateObj();
                int monthsBetween = 0;
                if (dateA == null || dateB == null) {
                    // unknown dates are treated as "too far apart" (>= 4 months)
                    monthsBetween = 5;
                } else if (dateA.before(dateB)) {
                    monthsBetween = UtilDateUtilities.getNumMonths(dateA, dateB);
                } else {
                    monthsBetween = UtilDateUtilities.getNumMonths(dateB, dateA);
                }
                if (monthsBetween < 4) {
                    matchFlag = true;
                    break;
                }
            }
            if (!matchFlag) {
                labNums.add(segmentId);
                accessionMap.put(result.accessionNumber + result.labType, labNums);
            }
        }
    }
    // rebuild labdocs from the accession groups
    labdocs.clear();
    for (ArrayList<String> labNums : accessionMap.values()) {
        // must sort through in reverse to keep the labs in the correct order
        for (int j = labNums.size() - 1; j >= 0; j--) {
            labdocs.add(labMap.get(labNums.get(j)));
        }
    }
    logger.debug("labdocs.size()=" + labdocs.size());

    // --- phase 5: aggregate lookup tables for the index.jsp page ---
    /* find all data for the index.jsp page */
    Hashtable patientDocs = new Hashtable(); // patient id -> list of segment ids
    Hashtable patientIdNames = new Hashtable(); // patient id -> display name
    String patientIdNamesStr = "";
    Hashtable docStatus = new Hashtable(); // segment id -> acknowledged status
    Hashtable docType = new Hashtable(); // segment id -> lab type
    Hashtable<String, List<String>> ab_NormalDoc = new Hashtable();
    for (int i = 0; i < labdocs.size(); i++) {
        LabResultData data = labdocs.get(i);
        List<String> segIDs = new ArrayList<String>();
        String labPatientId = data.getLabPatientId();
        if (labPatientId == null || labPatientId.equals("-1"))
            labPatientId = "-1";
        if (data.isAbnormal()) {
            List<String> abns = ab_NormalDoc.get("abnormal");
            if (abns == null) {
                abns = new ArrayList<String>();
                abns.add(data.getSegmentID());
            } else {
                abns.add(data.getSegmentID());
            }
            ab_NormalDoc.put("abnormal", abns);
        } else {
            List<String> ns = ab_NormalDoc.get("normal");
            if (ns == null) {
                ns = new ArrayList<String>();
                ns.add(data.getSegmentID());
            } else {
                ns.add(data.getSegmentID());
            }
            ab_NormalDoc.put("normal", ns);
        }
        if (patientDocs.containsKey(labPatientId)) {
            segIDs = (List) patientDocs.get(labPatientId);
            segIDs.add(data.getSegmentID());
            patientDocs.put(labPatientId, segIDs);
        } else {
            segIDs.add(data.getSegmentID());
            patientDocs.put(labPatientId, segIDs);
            patientIdNames.put(labPatientId, data.patientName);
            patientIdNamesStr += ";" + labPatientId + "=" + data.patientName;
        }
        docStatus.put(data.getSegmentID(), data.getAcknowledgedStatus());
        docType.put(data.getSegmentID(), data.labType);
    }

    // --- phase 6: per-type ("DOC"/"HL7") id lists and counts ---
    Integer totalDocs = 0;
    Integer totalHL7 = 0;
    Hashtable<String, List<String>> typeDocLab = new Hashtable();
    Enumeration keys = docType.keys();
    while (keys.hasMoreElements()) {
        String keyDocLabId = ((String) keys.nextElement());
        String valType = (String) docType.get(keyDocLabId);
        if (valType.equalsIgnoreCase("DOC")) {
            if (typeDocLab.containsKey("DOC")) {
                List<String> docids = typeDocLab.get("DOC");
                docids.add(keyDocLabId);// add doc id to list
                typeDocLab.put("DOC", docids);
            } else {
                List<String> docids = new ArrayList<String>();
                docids.add(keyDocLabId);
                typeDocLab.put("DOC", docids);
            }
            totalDocs++;
        } else if (valType.equalsIgnoreCase("HL7")) {
            if (typeDocLab.containsKey("HL7")) {
                List<String> hl7ids = typeDocLab.get("HL7");
                hl7ids.add(keyDocLabId);
                typeDocLab.put("HL7", hl7ids);
            } else {
                List<String> hl7ids = new ArrayList<String>();
                hl7ids.add(keyDocLabId);
                typeDocLab.put("HL7", hl7ids);
            }
            totalHL7++;
        }
    }

    // --- phase 7: per-patient document counts ---
    Hashtable patientNumDoc = new Hashtable();
    Enumeration patientIds = patientDocs.keys();
    String patientIdStr = "";
    Integer totalNumDocs = 0;
    while (patientIds.hasMoreElements()) {
        String key = (String) patientIds.nextElement();
        patientIdStr += key;
        patientIdStr += ",";
        List<String> val = (List<String>) patientDocs.get(key);
        Integer numDoc = val.size();
        patientNumDoc.put(key, numDoc);
        totalNumDocs += numDoc;
    }
    List<String> normals = ab_NormalDoc.get("normal");
    List<String> abnormals = ab_NormalDoc.get("abnormal");
    logger.debug("labdocs.size()=" + labdocs.size());

    // set attributes
    request.setAttribute("pageNum", page);
    request.setAttribute("docType", docType);
    request.setAttribute("patientDocs", patientDocs);
    request.setAttribute("providerNo", providerNo);
    request.setAttribute("searchProviderNo", searchProviderNo);
    request.setAttribute("patientIdNames", patientIdNames);
    request.setAttribute("docStatus", docStatus);
    request.setAttribute("patientIdStr", patientIdStr);
    request.setAttribute("typeDocLab", typeDocLab);
    request.setAttribute("demographicNo", demographicNo);
    request.setAttribute("ackStatus", ackStatus);
    request.setAttribute("labdocs", labdocs);
    request.setAttribute("patientNumDoc", patientNumDoc);
    request.setAttribute("totalDocs", totalDocs);
    request.setAttribute("totalHL7", totalHL7);
    request.setAttribute("normals", normals);
    request.setAttribute("abnormals", abnormals);
    request.setAttribute("totalNumDocs", totalNumDocs);
    request.setAttribute("patientIdNamesStr", patientIdNamesStr);
    request.setAttribute("oldestLab",
            oldestLab != null ? DateUtils.formatDate(oldestLab, "yyyy-MM-dd HH:mm:ss") : null);
    return mapping.findForward("dms_page");
}
From source file:com.rhfung.P2PDictionary.DataConnection.java
/**
 * Processes one inbound P2P message: reads the body (if any), extracts the
 * sender list and response path from the headers, and dispatches to the
 * handler matching the verb — but only if this node has not already stamped
 * the message (loop suppression via the P2P-Sender-List header).
 *
 * @param verb            request verb (PUT/POST/PUSH/DELETE/proxy response codes)
 * @param contentLocation target resource path of the message
 * @param reader          stream positioned at the message body
 * @param headers         parsed request headers
 */
private void HandleReadOne(String verb, String contentLocation, InputStreamReader reader,
        Hashtable<String, String> headers) {
    byte[] readData = null;
    ListInt senders = new ListInt(10);

    // do a bunch of checks before processing the packet
    // assign remote UID
    DetectBrowser(headers);

    // read data if GET request; PUSH carries no body even with Content-Length
    if (headers.containsKey("Content-Length") && !verb.equals(PUSH)) {
        int length = Integer.parseInt(headers.get("Content-Length"));
        readData = ReadBytes(reader, length);
        if (debugBuffer != null) {
            MemoryStream s = new MemoryStream(readData);
            debugBuffer.Log(0, s);
        }
    } else {
        // no data was sent; this is a notification
    }

    // inspect the sender list
    int lastSender = 0;
    if (headers.containsKey("P2P-Sender-List")) {
        // save the list of senders
        senders.addAll(GetArrayOf(headers.get("P2P-Sender-List")));
        lastSender = senders.getLastItem();
    } else {
        lastSender = 0;
    }

    // inspect for a response path
    ListInt responsePath = null;
    if (headers.containsKey("P2P-Response-Path")) {
        responsePath = new ListInt(GetArrayOf(headers.get("P2P-Response-Path")));
    }

    // inspect for a closing command issued by the caller,
    // which happens when this is a duplicate connection
    if (headers.containsKey("Connection")) {
        if (headers.get("Connection").equals("close")) {
            this.state = ConnectionState.Closing;
        }
    }

    WriteDebug(this.local_uid + " read " + verb + " " + contentLocation + " from "
            + this.remote_uid + "Senders: " + headers.get("P2P-Sender-List"));

    // !senders.Contains(this.local_uid) --> if message hasn't been stamped by this node before...
    if (!senders.contains(this.local_uid) && verb.equals(DELETE) && headers.containsKey("ETag")) {
        HandleReadDelete(contentLocation, headers.get("ETag"), senders, responsePath);
    } else if (!senders.contains(this.local_uid) && contentLocation.equals(CLOSE_MESSAGE)) {
        // remote side asked us to shut this connection down
        this.state = ConnectionState.Closing;
        this.killBit = true;
    } else if (!senders.contains(this.local_uid) && verb.equals(PUT)) {
        HandleReadPut(contentLocation, headers.get("Content-Type"), readData,
                headers.get("ETag"), senders, responsePath);
    } else if (!senders.contains(this.local_uid) && verb.equals(PUSH)) {
        HandleReadPush(contentLocation, headers.get("Content-Type"), headers.get("ETag"),
                senders, lastSender, responsePath);
    } else if (!senders.contains(this.local_uid) && verb.equals(POST)) {
        HandleReadPost(contentLocation, headers.get("Content-Type"), headers.get("Accept"),
                senders, readData);
    } else if (!senders.contains(this.local_uid) && verb.equals(RESPONSECODE_PROXY)) {
        SendMemoryToPeer mem = RespondOrForwardProxy(PROXY_PREFIX + contentLocation,
                new ListInt());
        if (mem != null) {
            // well, I already have the result so why is 305 being used
            // should broadcast the result
            controller.onSendToPeer(mem);
        } else {
            // TODO: figure out what this does
        }
    } else if (!senders.contains(this.local_uid) && verb.equals(RESPONSECODE_PROXY2)) {
        SendMemoryToPeer mem = RespondOrForwardProxy(contentLocation, new ListInt(senders));
        if (mem != null) {
            // well, I already have the result so why is 305 being used
            // should broadcast the result
            controller.onSendToPeer(mem);
        } else {
            // TODO: figure out what this does
        }
    } else if (senders.contains(this.local_uid)) {
        // drop packet , already read
    } else {
        // unknown verb on an un-stamped message
        throw new NotImplementedException();
    }
}
From source file:org.unitime.timetable.solver.exam.ExamSolver.java
/**
 * Computes, for every period placement of the given exam, an assignment info
 * describing what assigning the exam to that period would look like, optionally
 * under a hypothetical set of changes which is applied first and fully undone
 * before returning.
 *
 * @param examId id of the exam to evaluate
 * @param change proposed assignments/conflicts to apply temporarily; may be null
 * @return one ExamAssignmentInfo per period placement, or null if the exam is unknown
 */
public Collection<ExamAssignmentInfo> getPeriods(long examId, ExamProposedChange change) {
    // read lock: we temporarily mutate the assignment but restore it before returning
    Lock lock = currentSolution().getLock().readLock();
    lock.lock();
    try {
        //lookup exam
        Exam exam = getExam(examId);
        if (exam == null)
            return null;

        //assign change
        // undoAssign: exams whose placement must be restored afterwards
        // undoUnassing: exams that were unassigned before and must end up unassigned
        Hashtable<Exam, ExamPlacement> undoAssign = new Hashtable();
        HashSet<Exam> undoUnassing = new HashSet();
        if (change != null) {
            for (ExamAssignment assignment : change.getConflicts()) {
                ExamPlacement placement = getPlacement(assignment);
                if (placement == null)
                    continue;
                undoAssign.put((Exam) placement.variable(), placement);
                currentSolution().getAssignment().unassign(0, placement.variable());
            }
            for (ExamAssignment assignment : change.getAssignments()) {
                ExamPlacement placement = getPlacement(assignment);
                if (placement == null)
                    continue;
                // unassign everything that conflicts with the proposed placement,
                // remembering the original placements so they can be restored
                for (Iterator i = placement.variable().getModel()
                        .conflictValues(currentSolution().getAssignment(), placement).iterator(); i
                        .hasNext();) {
                    ExamPlacement conflict = (ExamPlacement) i.next();
                    if (conflict.variable().equals(placement.variable()))
                        continue;
                    Exam conflictingExam = (Exam) conflict.variable();
                    if (!undoAssign.containsKey(conflictingExam)
                            && !undoUnassing.contains(conflictingExam))
                        undoAssign.put(conflictingExam, conflict);
                    currentSolution().getAssignment().unassign(0, conflict.variable());
                }
                if (currentSolution().getAssignment().getValue(placement.variable()) != null)
                    undoAssign.put((Exam) placement.variable(),
                            currentSolution().getAssignment().getValue(placement.variable()));
                else
                    undoUnassing.add((Exam) placement.variable());
                currentSolution().getAssignment().assign(0, placement);
            }
        }

        // evaluate every candidate period with the best rooms available for it
        Vector<ExamAssignmentInfo> periods = new Vector<ExamAssignmentInfo>();
        for (ExamPeriodPlacement period : exam.getPeriodPlacements()) {
            Set rooms = exam.findBestAvailableRooms(currentSolution().getAssignment(), period);
            if (rooms == null)
                rooms = new HashSet();
            boolean conf = !exam.checkDistributionConstraints(currentSolution().getAssignment(),
                    period);
            ExamAssignmentInfo assignment = new ExamAssignmentInfo(
                    new ExamPlacement(exam, period, rooms), currentSolution().getAssignment());
            // "P" marks a prohibited period (distribution constraint violated)
            if (conf)
                assignment.setPeriodPref("P");
            periods.add(assignment);
        }

        //undo change
        // order matters: unassign everything first, then restore saved placements
        for (Exam undoExam : undoUnassing)
            if (currentSolution().getAssignment().getValue(undoExam) != null)
                currentSolution().getAssignment().unassign(0, undoExam);
        for (Map.Entry<Exam, ExamPlacement> entry : undoAssign.entrySet())
            currentSolution().getAssignment().unassign(0, entry.getKey());
        for (Map.Entry<Exam, ExamPlacement> entry : undoAssign.entrySet())
            currentSolution().getAssignment().assign(0, entry.getValue());
        return periods;
    } finally {
        lock.unlock();
    }
}
From source file:org.unitime.timetable.solver.exam.ExamSolver.java
/**
 * Lists the rooms available for the given exam in the given period, filtered by
 * size range and name, optionally under a hypothetical change set that is
 * applied first and fully undone before returning. Conflicting rooms are either
 * excluded or included with a +100 penalty, depending on allowConflicts.
 *
 * @param examId         id of the exam to evaluate
 * @param periodId       id of the period to place the exam in
 * @param change         proposed assignments/conflicts to apply temporarily; may be null
 * @param minRoomSize    minimum room capacity, or negative for no minimum
 * @param maxRoomSize    maximum room capacity, or negative for no maximum
 * @param filter         room-name filter expression
 * @param allowConflicts when true, conflicting rooms are returned with a penalty
 * @return candidate rooms with penalties, empty if the exam needs no room,
 *         or null if the exam or period is unknown
 */
public Vector<ExamRoomInfo> getRooms(long examId, long periodId, ExamProposedChange change,
        int minRoomSize, int maxRoomSize, String filter, boolean allowConflicts) {
    // read lock: we temporarily mutate the assignment but restore it before returning
    Lock lock = currentSolution().getLock().readLock();
    lock.lock();
    try {
        //lookup exam, period etc.
        Exam exam = getExam(examId);
        if (exam == null)
            return null;
        ExamPeriodPlacement period = null;
        for (ExamPeriodPlacement p : exam.getPeriodPlacements()) {
            if (periodId == p.getId()) {
                period = p;
                break;
            }
        }
        if (period == null)
            return null;
        Vector<ExamRoomInfo> rooms = new Vector<ExamRoomInfo>();
        if (exam.getMaxRooms() == 0)
            return rooms;

        //assign change
        // same temporary-apply/undo bookkeeping as in getPeriods
        Hashtable<Exam, ExamPlacement> undoAssign = new Hashtable();
        HashSet<Exam> undoUnassing = new HashSet();
        if (change != null) {
            for (ExamAssignment assignment : change.getConflicts()) {
                ExamPlacement placement = getPlacement(assignment);
                if (placement == null)
                    continue;
                undoAssign.put((Exam) placement.variable(), placement);
                currentSolution().getAssignment().unassign(0, placement.variable());
            }
            for (ExamAssignment assignment : change.getAssignments()) {
                ExamPlacement placement = getPlacement(assignment);
                if (placement == null)
                    continue;
                for (Iterator i = placement.variable().getModel()
                        .conflictValues(currentSolution().getAssignment(), placement).iterator(); i
                        .hasNext();) {
                    ExamPlacement conflict = (ExamPlacement) i.next();
                    if (conflict.variable().equals(placement.variable()))
                        continue;
                    Exam conflictingExam = (Exam) conflict.variable();
                    if (!undoAssign.containsKey(conflictingExam)
                            && !undoUnassing.contains(conflictingExam))
                        undoAssign.put(conflictingExam, conflict);
                    currentSolution().getAssignment().unassign(0, conflict.variable());
                }
                if (currentSolution().getAssignment().getValue(placement.variable()) != null)
                    undoAssign.put((Exam) placement.variable(),
                            currentSolution().getAssignment().getValue(placement.variable()));
                else
                    undoUnassing.add((Exam) placement.variable());
                currentSolution().getAssignment().assign(0, placement);
            }
        }

        ExamRoomSharing sharing = ((ExamModel) currentSolution().getModel()).getRoomSharing();

        //compute rooms
        for (ExamRoomPlacement room : exam.getRoomPlacements()) {
            int cap = room.getSize(exam.hasAltSeating());
            if (minRoomSize >= 0 && cap < minRoomSize)
                continue;
            if (maxRoomSize >= 0 && cap > maxRoomSize)
                continue;
            if (!ExamInfoModel.match(room.getName(), filter))
                continue;
            if (!room.isAvailable(period.getPeriod()))
                continue;
            boolean conf = !exam.checkDistributionConstraints(currentSolution().getAssignment(),
                    room);
            // without room sharing, any other exam in the room is a conflict;
            // with sharing, delegate the conflict decision to the sharing model
            if (sharing == null) {
                for (ExamPlacement p : room.getRoom().getPlacements(
                        currentSolution().getAssignment(), period.getPeriod()))
                    if (!p.variable().equals(exam))
                        conf = true;
            } else {
                if (sharing.inConflict(exam, room.getRoom().getPlacements(
                        currentSolution().getAssignment(), period.getPeriod()), room.getRoom()))
                    conf = true;
            }
            if (!allowConflicts && conf)
                continue;
            rooms.add(new ExamRoomInfo(room.getRoom(),
                    (conf ? 100 : 0) + room.getPenalty(period.getPeriod())));
        }

        //undo change
        // order matters: unassign everything first, then restore saved placements
        for (Exam undoExam : undoUnassing)
            if (currentSolution().getAssignment().getValue(undoExam) != null)
                currentSolution().getAssignment().unassign(0, undoExam);
        for (Map.Entry<Exam, ExamPlacement> entry : undoAssign.entrySet())
            currentSolution().getAssignment().unassign(0, entry.getKey());
        for (Map.Entry<Exam, ExamPlacement> entry : undoAssign.entrySet())
            currentSolution().getAssignment().assign(0, entry.getValue());
        return rooms;
    } finally {
        lock.unlock();
    }
}
From source file:org.kepler.kar.KARFile.java
/**
 * Here we go through all the KAREntries and call the open method of the
 * appropriate KAREntryHandlers. It is assumed that cacheKARContents() has
 * been called at some point before openKAR() In other words everything in
 * the kar is already cached when calling the open() method of the
 * KAREntryHandlers.
 *
 * Entries are opened in dependency order: an entry with LSID dependencies is
 * only opened once every entry it depends on has been opened. The outer loop
 * makes repeated passes until no further progress is possible.
 *
 * Note: There is some issue with having this method here, since it is
 * really a gui specific function it probably does not belong here in the
 * core module.
 *
 * @param tableauFrame frame passed through to each entry's open handler
 * @param forceOpen    when true, skip the isOpenable() check
 * @throws Exception wrapping any failure during opening
 *                   (NOTE(review): the original cause is dropped — only its
 *                   message is preserved; consider chaining the exception)
 * @returns true if at least one of the entries in the KAR was opened
 */
public boolean openKARContents(TableauFrame tableauFrame, boolean forceOpen) throws Exception {
    if (isDebugging)
        log.debug("openKAR: " + this.toString());
    if (!forceOpen && !isOpenable()) {
        return false;
    }
    try {
        /**
         * Loop through the kar entries and call the open method of the
         * appropriate KAREntryHandler
         */
        Vector<KAREntry> unopenedEntries = (Vector<KAREntry>) karEntries();
        Hashtable<KeplerLSID, KAREntry> openedEntries = new Hashtable<KeplerLSID, KAREntry>();

        // keep cycling through the unopened entries until the list is empty
        while (unopenedEntries.size() > 0) {
            // keep track of the entries that were opened during this pass
            Vector<KAREntry> openedThisPass = new Vector<KAREntry>(unopenedEntries.size());

            // cycle through all of the remaining, unopened entries
            for (KAREntry entry : unopenedEntries) {
                if (isDebugging) {
                    log.debug(entry.getName());
                }
                // get the dependency list for this entry
                List<KeplerLSID> depList = entry.getLsidDependencies();

                if (depList.size() == 0) {
                    // if there are no dependencies we just open it up
                    boolean success = open(entry, tableauFrame);
                    if (success) {
                        openedEntries.put(entry.getLSID(), entry);
                        openedThisPass.add(entry);
                        // at most one entry is opened per pass; restart scanning
                        break;
                    }
                    if (isDebugging)
                        log.debug(success);
                } else {
                    // if there are dependencies then we check to make sure
                    // that all of the dependencies have already been opened
                    boolean allDependenciesHaveBeenOpened = true;
                    for (KeplerLSID lsid : depList) {
                        // if any of the dependencies have not been opened,
                        // set false
                        if (!openedEntries.containsKey(lsid)) {
                            allDependenciesHaveBeenOpened = false;
                        }
                    }
                    if (allDependenciesHaveBeenOpened) {
                        // dependencies have been opened so OK to open this
                        // one
                        boolean success = open(entry, tableauFrame);
                        if (success) {
                            openedEntries.put(entry.getLSID(), entry);
                            openedThisPass.add(entry);
                            break;
                        }
                        if (isDebugging)
                            log.debug(success);
                    }
                }
            }
            if (openedThisPass.size() == 0) {
                // Bad news, nothing is getting opened
                // break out to avoid infinite loop
                break;
            }
            // remove the entries that were opened during this pass
            for (KAREntry entry : openedThisPass) {
                unopenedEntries.remove(entry);
            }
        }
        if (openedEntries.size() == 0) {
            return false;
        }
    } catch (Exception e) {
        // NOTE(review): this loses the original stack trace/cause; prefer
        // new Exception("Error on Open: " + e.getMessage(), e)
        throw new Exception("Error on Open: " + e.getMessage());
    }
    return true;
}
From source file:org.accada.reader.rprm.core.Source.java
/**
 * Adds a tag with its fields to the report.
 *
 * Only the fields requested by the DataSelector (or ALL/ALL_TAG) are filled
 * in; unrequested fields are explicitly cleared to null on the tag entry.
 *
 * @param tagId
 *            The tag to add
 * @param report
 *            The report to modify
 * @param dataSelector
 *            The data to report
 * @param closure
 *            The closure
 * @param trigger
 *            The trigger that caused the event (not referenced in this method)
 * @throws TagMemoryServiceException
 *             Problems with the tag memory
 * @throws HardwareException
 *             Problems with the HAL
 */
protected void addTagToReport(final String tagId, final SourceReport report,
        final DataSelector dataSelector, final Vector closure, final Trigger trigger)
        throws HardwareException {
    // reuse the tag entry if the report already contains it
    TagType curTag;
    if (!report.containsTag(tagId)) {
        curTag = new TagType();
        curTag.setId(tagId);
        report.addTag(curTag);
    } else {
        curTag = report.getTag(tagId);
    }
    Hashtable fieldNames = dataSelector.getFieldNames();

    // TAG_TYPE
    if (fieldNames.containsKey(FieldName.TAG_TYPE) || fieldNames.containsKey(FieldName.ALL_TAG)
            || fieldNames.containsKey(FieldName.ALL)) {
        // Not supported in HardwareAbstraction
        curTag.setTagType("not supported");
    } else {
        curTag.setTagType(null);
    }

    // TAG_ID_AS_PURE_URI
    if (fieldNames.containsKey(FieldName.TAG_ID_AS_PURE_URI)
            || fieldNames.containsKey(FieldName.ALL_TAG)
            || fieldNames.containsKey(FieldName.ALL)) {
        // Only for non-EPC tags
        // final int num = 4;
        // int numOfBits = num * curTag.getId().length();
        // curTag.setIdAsPureURI("urn:epc:raw:" + numOfBits + ".x"
        // + curTag.getId());
        curTag.setIdAsPureURI(getTagId(tagId, 0, 64, 8, LevelTypeList.PURE_IDENTITY));
    } else {
        curTag.setIdAsPureURI(null);
    }

    // TAG_ID_AS_TAG_URI
    if (fieldNames.containsKey(FieldName.TAG_ID_AS_TAG_URI)
            || fieldNames.containsKey(FieldName.ALL_TAG)
            || fieldNames.containsKey(FieldName.ALL)) {
        // Only for non-EPC tags
        // NOTE(review): num/numOfBits are computed but never used (leftovers of
        // the commented-out raw-URN formatting below) — dead code candidates
        final int num = 4;
        int numOfBits = num * curTag.getId().length();
        // curTag.setIdAsTagURI("urn:epc:raw:" + numOfBits + ".x"
        // + curTag.getId());
        curTag.setIdAsTagURI(getTagId(tagId, 0, 64, 8, LevelTypeList.TAG_ENCODING));
    } else {
        curTag.setIdAsTagURI(null);
    }

    // OTHER_FIELDS
    // tag field names
    Enumeration tagFieldNameIterator = dataSelector.getTagFieldNames().elements();
    String curTagFieldName;
    String curTagFieldValue;
    while (tagFieldNameIterator.hasMoreElements()) {
        curTagFieldName = (String) tagFieldNameIterator.nextElement();
        // check if already in report
        if (!curTag.getAllTagFields().containsKey(curTagFieldName)) {
            TagField tf;
            try {
                tf = readerDevice.getTagField(curTagFieldName);
                curTagFieldValue = readTagField(curTag.getId(), closure, tf);
                // add to report
                TagFieldValueParamType tfvp = new TagFieldValueParamType();
                tfvp.setTagFieldName(curTagFieldName);
                tfvp.setTagFieldValue(curTagFieldValue);
                curTag.addTagField(tfvp);
            } catch (ReaderProtocolException e) {
                // NOTE(review): exception deliberately ignored — an unknown tag
                // field is simply skipped; consider at least logging it
            }
        }
    }

    // events
    Hashtable tagEvents = ((TagState) currentState.get(curTag.getId())).getTagEvents();
    curTag.setTagEvents(tagEvents);
}
From source file:trendanalisis.main.tools.weka.CoreWekaTFIDF.java
/** * determines the dictionary.//from w w w . j a va2 s . c o m */ private void determineDictionary() { // Operate on a per-class basis if class attribute is set int classInd = getInputFormat().classIndex(); int values = 1; if (!m_doNotOperateOnPerClassBasis && (classInd != -1)) { values = getInputFormat().attribute(classInd).numValues(); } // TreeMap dictionaryArr [] = new TreeMap[values]; @SuppressWarnings("unchecked") TreeMap<String, Count>[] dictionaryArr = new TreeMap[values]; for (int i = 0; i < values; i++) { dictionaryArr[i] = new TreeMap<String, Count>(); } // Make sure we know which fields to convert determineSelectedRange(); // Tokenize all training text into an orderedMap of "words". long pruneRate = Math.round((m_PeriodicPruningRate / 100.0) * getInputFormat().numInstances()); for (int i = 0; i < getInputFormat().numInstances(); i++) { Instance instance = getInputFormat().instance(i); int vInd = 0; if (!m_doNotOperateOnPerClassBasis && (classInd != -1)) { vInd = (int) instance.classValue(); } // Iterate through all relevant string attributes of the current instance Hashtable<String, Integer> h = new Hashtable<String, Integer>(); for (int j = 0; j < instance.numAttributes(); j++) { if (m_SelectedRange.isInRange(j) && (instance.isMissing(j) == false)) { // Get tokenizer m_Tokenizer.tokenize(instance.stringValue(j)); // Iterate through tokens, perform stemming, and remove stopwords // (if required) while (m_Tokenizer.hasMoreElements()) { String word = m_Tokenizer.nextElement().intern(); if (this.m_lowerCaseTokens == true) { word = word.toLowerCase(); } word = m_Stemmer.stem(word); if (m_StopwordsHandler.isStopword(word)) { continue; } if (!(h.containsKey(word))) { h.put(word, new Integer(0)); } Count count = dictionaryArr[vInd].get(word); if (count == null) { dictionaryArr[vInd].put(word, new Count(1)); } else { count.count++; } } } } // updating the docCount for the words that have occurred in this // instance(document). 
Enumeration<String> e = h.keys(); while (e.hasMoreElements()) { String word = e.nextElement(); Count c = dictionaryArr[vInd].get(word); if (c != null) { c.docCount++; } else { System.err.println( "Warning: A word should definitely be in the " + "dictionary.Please check the code"); } } if (pruneRate > 0) { if (i % pruneRate == 0 && i > 0) { for (int z = 0; z < values; z++) { ArrayList<String> d = new ArrayList<String>(1000); Iterator<String> it = dictionaryArr[z].keySet().iterator(); while (it.hasNext()) { String word = it.next(); Count count = dictionaryArr[z].get(word); if (count.count <= 1) { d.add(word); } } Iterator<String> iter = d.iterator(); while (iter.hasNext()) { String word = iter.next(); dictionaryArr[z].remove(word); } } } } } // Figure out the minimum required word frequency int totalsize = 0; int prune[] = new int[values]; for (int z = 0; z < values; z++) { totalsize += dictionaryArr[z].size(); int array[] = new int[dictionaryArr[z].size()]; int pos = 0; Iterator<String> it = dictionaryArr[z].keySet().iterator(); while (it.hasNext()) { String word = it.next(); Count count = dictionaryArr[z].get(word); array[pos] = count.count; pos++; } // sort the array sortArray(array); if (array.length < m_WordsToKeep) { // if there aren't enough words, set the threshold to // minFreq prune[z] = m_minTermFreq; } else { // otherwise set it to be at least minFreq prune[z] = Math.max(m_minTermFreq, array[array.length - m_WordsToKeep]); } } // Convert the dictionary into an attribute index // and create one attribute per word ArrayList<Attribute> attributes = new ArrayList<Attribute>(totalsize + getInputFormat().numAttributes()); // Add the non-converted attributes int classIndex = -1; for (int i = 0; i < getInputFormat().numAttributes(); i++) { if (!m_SelectedRange.isInRange(i)) { if (getInputFormat().classIndex() == i) { classIndex = attributes.size(); } attributes.add((Attribute) getInputFormat().attribute(i).copy()); } } // Add the word vector attributes 
(eliminating duplicates // that occur in multiple classes) TreeMap<String, Integer> newDictionary = new TreeMap<String, Integer>(); int index = attributes.size(); for (int z = 0; z < values; z++) { Iterator<String> it = dictionaryArr[z].keySet().iterator(); while (it.hasNext()) { String word = it.next(); Count count = dictionaryArr[z].get(word); if (count.count >= prune[z]) { if (newDictionary.get(word) == null) { newDictionary.put(word, new Integer(index++)); attributes.add(new Attribute(m_Prefix + word)); } } } } // Compute document frequencies global_tf = new double[attributes.size()]; df_prob = new double[attributes.size()]; m_DocsCounts = new int[attributes.size()]; IG = new double[attributes.size()]; Iterator<String> it = newDictionary.keySet().iterator(); while (it.hasNext()) { String word = it.next(); int idx = newDictionary.get(word).intValue(); int docsCount = 0; for (int j = 0; j < values; j++) { Count c = dictionaryArr[j].get(word); if (c != null) { docsCount += c.docCount; } } m_DocsCounts[idx] = docsCount; } // Trim vector and set instance variables attributes.trimToSize(); m_Dictionary = newDictionary; m_NumInstances = getInputFormat().numInstances(); // Set the filter's output format Instances outputFormat = new Instances(getInputFormat().relationName(), attributes, 0); outputFormat.setClassIndex(classIndex); setOutputFormat(outputFormat); }
From source file:org.accada.reader.rprm.core.Source.java
/** * Adds information about the reader to a repot. * @param report/*from ww w . j a va 2 s. c o m*/ * The report to modify * @param dataSelector * The dataSelector to use */ protected void addReaderInfo(final ReadReport report, final DataSelector dataSelector) { Hashtable fieldNames = dataSelector.getFieldNames(); if (!report.containsReaderInfo()) { ReaderInfoType newReaderInfo = new ReaderInfoType(); report.setReaderInfo(newReaderInfo); } // READER_EPC if (fieldNames.containsKey(FieldName.READER_EPC) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { report.getReaderInfo().setEpc(this.readerDevice.getEPC()); } else { report.getReaderInfo().setEpc(null); } // READER_HANDLE if (fieldNames.containsKey(FieldName.READER_HANDLE) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { report.getReaderInfo().setHandle(this.readerDevice.getHandle()); } else { report.getReaderInfo().setHandle(-1); } // READER_NAME if (fieldNames.containsKey(FieldName.READER_NAME) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { report.getReaderInfo().setName(this.readerDevice.getName()); } else { report.getReaderInfo().setName(null); } // READER_ROLE if (fieldNames.containsKey(FieldName.READER_ROLE) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { report.getReaderInfo().setRole(this.readerDevice.getRole()); } else { report.getReaderInfo().setRole(null); } // READER_NOW_TICK if (fieldNames.containsKey(FieldName.READER_NOW_TICK) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { report.getReaderInfo().setNowTick(this.readerDevice.getTimeTicks()); } else { report.getReaderInfo().setNowTick(0); } // READER_NOW_UTC if (fieldNames.containsKey(FieldName.READER_NOW_UTC) || fieldNames.containsKey(FieldName.ALL_READER) || fieldNames.containsKey(FieldName.ALL)) { 
report.getReaderInfo().setNowUTC(this.readerDevice.getTimeUTC()); } else { report.getReaderInfo().setNowUTC(null); } }
From source file:org.unitime.timetable.solver.TimetableSolver.java
public Hashtable conflictInfo(Collection hints) { Hashtable conflictTable = new Hashtable(); Lock lock = currentSolution().getLock().readLock(); lock.lock();//from w ww. ja v a2 s . c o m try { HashSet done = new HashSet(); for (Iterator i = hints.iterator(); i.hasNext();) { Hint hint = (Hint) i.next(); Placement p = hint.getPlacement((TimetableModel) currentSolution().getModel()); if (p == null) continue; for (Constraint constraint : p.variable().hardConstraints()) { HashSet conflicts = new HashSet(); constraint.computeConflicts(currentSolution().getAssignment(), p, conflicts); if (conflicts != null && !conflicts.isEmpty()) { for (Iterator j = conflicts.iterator(); j.hasNext();) { Placement conflict = (Placement) j.next(); Hint confHint = new Hint(this, conflict); if (done.contains(confHint)) continue; if (!conflictTable.containsKey(confHint)) { String name = constraint.getName(); if (constraint instanceof RoomConstraint) { name = "Room " + constraint.getName(); } else if (constraint instanceof InstructorConstraint) { name = "Instructor " + constraint.getName(); } else if (constraint instanceof GroupConstraint) { name = "Distribution " + constraint.getName(); } else if (constraint instanceof DepartmentSpreadConstraint) { name = "Balancing of department " + constraint.getName(); } else if (constraint instanceof SpreadConstraint) { name = "Same subpart spread " + constraint.getName(); } else if (constraint instanceof ClassLimitConstraint) { name = "Class limit " + constraint.getName(); } conflictTable.put(confHint, name); } } } } done.add(hint); } } finally { lock.unlock(); } return conflictTable; }