List of usage examples for java.util.Vector.get(int)
public synchronized E get(int index)
From source file:Admin_Thesaurus.ImportData.java
public String getXMLMiddle(Vector<String> thesaurusVector, String importThesaurusMessage, String thesaurusOrBulkMode) { // get the active sessions String resultTagName = "importThesaurusMessage"; if (thesaurusOrBulkMode.compareTo("bulkImport") == 0) { resultTagName = "bulkImportThesaurusMessage"; }//from www .ja v a 2 s . com int OtherActiveSessionsNO = SessionListener.activesessionsNO - 1; String XMLMiddleStr = "<content_Admin_Thesaurus>"; XMLMiddleStr += "<CurrentShownDIV>ImportExport_Data_DIV</CurrentShownDIV>"; XMLMiddleStr += "<OtherActiveSessionsNO>" + OtherActiveSessionsNO + "</OtherActiveSessionsNO>"; // write the existing Thesaurus in DB int thesaurusVectorCount = thesaurusVector.size(); XMLMiddleStr += "<existingThesaurus>"; for (int i = 0; i < thesaurusVectorCount; i++) { XMLMiddleStr += "<Thesaurus>" + thesaurusVector.get(i) + "</Thesaurus>"; } XMLMiddleStr += "</existingThesaurus>"; XMLMiddleStr += "<" + resultTagName + ">" + importThesaurusMessage + "</" + resultTagName + ">"; XMLMiddleStr += "</content_Admin_Thesaurus>"; return XMLMiddleStr; }
From source file:marytts.tools.dbselection.WikipediaMarkupCleaner.java
void processWikipediaSQLTablesDebug() throws Exception { DBHandler wikiToDB = new DBHandler(locale); wikiToDB.createDBConnection(mysqlHost, mysqlDB, mysqlUser, mysqlPasswd); String text;/*from ww w . j a v a 2s.com*/ StringBuilder textId = new StringBuilder(); int numPagesUsed = 0; PrintWriter pw = null; if (wikiLog != null) pw = new PrintWriter(new FileWriter(new File(wikiLog))); // get text from the DB text = wikiToDB.getTextFromWikiPage(debugPageId, minPageLength, textId, pw); System.out.println("\nPAGE SIZE=" + text.length() + " text:\n" + text); Vector<String> textList; if (text != null) { textList = removeMarkup(text); System.out.println("\nCLEANED TEXT:"); for (int i = 0; i < textList.size(); i++) System.out.println("text(" + i + "): \n" + textList.get(i)); } else System.out.println("NO CLEANED TEXT."); if (pw != null) pw.close(); wikiToDB.closeDBConnection(); }
From source file:edu.ku.brc.specify.datamodel.CollectionObjectAttribute.java
@Override @Transient//from www . java 2 s .c o m public Integer getParentId() { Vector<Object> ids = BasicSQLUtils.querySingleCol( "SELECT CollectionObjectID FROM collectionobject WHERE CollectionObjectAttributeID = " + collectionObjectAttributeId); if (ids.size() == 1) { return (Integer) ids.get(0); } return null; }
From source file:com.xpn.xwiki.user.impl.xwiki.XWikiRightServiceImpl.java
/**
 * Checks whether the given user or group is granted (allow=true) or denied
 * (allow=false) the given access level on the given document.
 *
 * The document's XWiki.XWikiRights (or XWiki.XWikiGlobalRights when global)
 * objects are scanned first; a match on the access level plus the entity name
 * (fully qualified, short, or "very short" without the XWiki. prefix when the
 * document and user wikis coincide) returns true immediately. If no direct
 * match is found, the check recurses into the entity's member groups.
 *
 * Return/throw contract (as implemented below): returns true on a match;
 * returns false when a right object with the requested allow/deny flag and
 * level existed but did not name this entity or its groups ("found" flag);
 * otherwise throws XWikiRightNotFoundException so the caller can distinguish
 * "explicitly not matched" from "no rule at all".
 *
 * NOTE(review): the empty catch of XWikiRightNotFoundException inside the
 * group loop is deliberate — a group without a matching rule is simply skipped.
 *
 * @param userOrGroupName possibly wiki-prefixed ("wiki:Space.Name") entity name
 * @param doc the document whose rights objects are inspected
 * @param accessLevel the level being checked (e.g. "view", "edit")
 * @param user true when the entity is a user, false for a group
 * @param allow true to look for allow rules, false for deny rules
 * @param global true to inspect XWikiGlobalRights instead of XWikiRights
 * @param context current XWiki context (also caches the group list)
 * @throws XWikiRightNotFoundException when no rule at all applies
 * @throws XWikiException on lookup failure
 */
public boolean checkRight(String userOrGroupName, XWikiDocument doc, String accessLevel, boolean user, boolean allow, boolean global, XWikiContext context) throws XWikiRightNotFoundException, XWikiException { String className = global ? "XWiki.XWikiGlobalRights" : "XWiki.XWikiRights"; String fieldName = user ? "users" : "groups"; boolean found = false; // Here entity is either a user or a group DocumentReference userOrGroupDocumentReference = this.currentMixedDocumentReferenceResolver .resolve(userOrGroupName); String prefixedFullName = this.entityReferenceSerializer.serialize(userOrGroupDocumentReference); String shortname = userOrGroupName; int i0 = userOrGroupName.indexOf(":"); if (i0 != -1) { shortname = userOrGroupName.substring(i0 + 1); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking right: " + userOrGroupName + "," + doc.getFullName() + "," + accessLevel + "," + user + "," + allow + "," + global); } Vector<BaseObject> vobj = doc.getObjects(className); if (vobj != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking objects " + vobj.size()); } for (int i = 0; i < vobj.size(); i++) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking object " + i); } BaseObject bobj = vobj.get(i); if (bobj == null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Bypass object " + i); } continue; } String users = bobj.getStringValue(fieldName); String levels = bobj.getStringValue("levels"); boolean allowdeny = (bobj.getIntValue("allow") == 1); if (allowdeny == allow) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking match: " + accessLevel + " in " + levels); } String[] levelsarray = StringUtils.split(levels, " ,|"); if (ArrayUtils.contains(levelsarray, accessLevel)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Found a right for " + allow); } found = true; if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking match: " + userOrGroupName + " in " + users); } String[] userarray = GroupsClass.getListFromString(users).toArray(new 
String[0]); for (int ii = 0; ii < userarray.length; ii++) { String value = userarray[ii]; if (value.indexOf(".") == -1) { userarray[ii] = "XWiki." + value; } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Checking match: " + userOrGroupName + " in " + StringUtils.join(userarray, ",")); } // In the case where the document database and the user database is the same // then we allow the usage of the short name, otherwise the fully qualified // name is requested if (doc.getWikiName().equals(userOrGroupDocumentReference.getWikiReference().getName())) { if (ArrayUtils.contains(userarray, shortname)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Found matching right in " + users + " for " + shortname); } return true; } // We should also allow to skip "XWiki." from the usernames and group // lists String veryshortname = shortname.substring(shortname.indexOf(".") + 1); if (ArrayUtils.contains(userarray, veryshortname)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Found matching right in " + users + " for " + shortname); } return true; } } if ((context.getDatabase() != null) && (ArrayUtils.contains(userarray, userOrGroupName))) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Found matching right in " + users + " for " + userOrGroupName); } return true; } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Failed match: " + userOrGroupName + " in " + users); } } } else { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Bypass object " + i + " because wrong allow/deny"); } } } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for matching rights at group level"); } // Didn't found right at this level.. 
Let's go to group level Map<String, Collection<String>> grouplistcache = (Map<String, Collection<String>>) context.get("grouplist"); if (grouplistcache == null) { grouplistcache = new HashMap<String, Collection<String>>(); context.put("grouplist", grouplistcache); } Collection<String> grouplist = new HashSet<String>(); // Get member groups from document's wiki addMemberGroups(doc.getWikiName(), prefixedFullName, userOrGroupDocumentReference, grouplist, context); // Get member groups from member's wiki if (context.getWiki().isVirtualMode() && !context.getDatabase() .equalsIgnoreCase(userOrGroupDocumentReference.getWikiReference().getName())) { addMemberGroups(userOrGroupDocumentReference.getWikiReference().getName(), prefixedFullName, userOrGroupDocumentReference, grouplist, context); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for matching rights for [" + grouplist.size() + "] groups: " + grouplist); } for (String group : grouplist) { try { // We need to construct the full group name to make sure the groups are // handled separately boolean result = checkRight(group, doc, accessLevel, false, allow, global, context); if (result) { return true; } } catch (XWikiRightNotFoundException e) { } catch (Exception e) { LOGGER.error("Failed to chech right [" + accessLevel + "] for group [" + group + "] on document [" + doc.getPrefixedFullName() + "]", e); } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Finished searching for rights for " + userOrGroupName + ": " + found); } if (found) { return false; } else { throw new XWikiRightNotFoundException(); } }
From source file:it.classhidra.core.controller.bsController.java
/**
 * Resolves the list of streams configured for the given action id.
 *
 * Order of resolution: an unknown action yields an empty Vector; a cached
 * list on the action is returned as-is; otherwise the list is assembled from
 * the wildcard ("*") streams plus the action-specific ones (honouring their
 * "excluded" flag), sorted by int_order, and cached on the action.
 */
public static Vector getActionStreams(String id_action) {
    info_action mappedAction = (info_action) getAction_config().get_actions().get(id_action);
    if (mappedAction == null)
        return new Vector();
    // Reuse the list cached on the action when present.
    if (mappedAction.getVm_streams() != null)
        return mappedAction.getVm_streams();
    Vector result = new Vector();
    // Streams registered for every action ("*") come first.
    Vector wildcardStreams = (Vector) getAction_config().get_streams_apply_to_actions().get("*");
    if (wildcardStreams != null)
        result.addAll(wildcardStreams);
    Vector actionStreams = (Vector) getAction_config().get_streams_apply_to_actions().get(id_action);
    if (actionStreams != null) {
        Vector toAdd = new Vector();
        HashMap toRemove = new HashMap();
        // Partition the action-specific streams into additions and exclusions.
        for (int i = 0; i < actionStreams.size(); i++) {
            info_stream stream = (info_stream) actionStreams.get(i);
            if (stream.get_apply_to_action() != null) {
                info_apply_to_action rule = (info_apply_to_action) stream.get_apply_to_action()
                        .get(id_action);
                if (rule.getExcluded() != null && rule.getExcluded().equalsIgnoreCase("true"))
                    toRemove.put(stream.getName(), stream.getName());
                else
                    toAdd.add(stream);
            }
        }
        result.addAll(toAdd);
        // Drop any stream (wildcard or specific) that was marked excluded.
        if (toRemove.size() > 0) {
            int i = 0;
            while (i < result.size()) {
                info_stream candidate = (info_stream) result.get(i);
                if (toRemove.get(candidate.getName()) != null)
                    result.remove(i);
                else
                    i++;
            }
        }
        result = new util_sort().sort(result, "int_order", "A");
    }
    // Cache the assembled list for subsequent calls.
    mappedAction.setVm_streams(result);
    return result;
}
From source file:adams.ml.Dataset.java
/**
 * Produces a text summary of a numeric column: counts of usable, ignored,
 * missing and erroneous rows, the number of distinct values, and basic
 * statistics (min/max, mean, standard deviation, median).
 *
 * Fixes over the original: StringBuilder instead of StringBuffer (no shared
 * state here), the redundant trailing {@code continue} removed, and values
 * recovered via the string-parse fallback now also count towards the
 * "Different" (distinct-value) total, consistent with the primary path.
 *
 * @param columnname the column to summarise
 * @return a human-readable multi-line summary
 */
protected String summariseNumeric(String columnname) {
    Vector<Float> values = new Vector<>();
    // Tracks distinct values for the "Different" count.
    Hashtable<Float, Boolean> distinct = new Hashtable<>();
    int numErrors = 0;
    int missing = 0;
    int ignored = 0;
    for (int i = 0; i < count(); i++) {
        DataRow dr = get(i);
        if (dr.get(columnname) == null) {
            missing++;
            continue;
        }
        dr = getSafe(i);
        if (dr == null) {
            ignored++;
            continue;
        }
        try {
            Number val = (Number) dr.get(columnname).getData();
            float fv = val.floatValue();
            values.add(fv);
            if (distinct.get(fv) == null) {
                distinct.put(fv, true);
            }
        } catch (Exception e) {
            // Fall back to parsing the string form of the cell.
            try {
                Float parsed = Float.parseFloat(dr.getAsString(columnname));
                values.add(parsed);
                // Consistency fix: fallback values now count as distinct too.
                if (distinct.get(parsed) == null) {
                    distinct.put(parsed, true);
                }
            } catch (Exception e1) {
                numErrors++;
            }
        }
    }
    double[] data = new double[values.size()];
    for (int i = 0; i < data.length; i++) {
        data[i] = values.get(i);
    }
    StringBuilder ret = new StringBuilder();
    ret.append("Total OK: " + values.size() + "\n");
    ret.append("Ignored Rows: " + ignored + "\n");
    ret.append("Missing: " + missing + "\n");
    ret.append("Errors: " + numErrors + "\n");
    ret.append("Different: " + distinct.size() + "\n");
    // NOTE(review): with zero usable values StatUtils is fed an empty array —
    // confirm NaN output is acceptable for empty columns.
    double min = StatUtils.min(data);
    double max = StatUtils.max(data);
    double mean = StatUtils.mean(data);
    double stdev = Math.sqrt(StatUtils.variance(data));
    double median = StatUtils.percentile(data, 50);
    ret.append("Min,Max: " + min + "," + max + "\n");
    ret.append("Mean: " + mean + "\n");
    ret.append("Standard Deviation: " + stdev + "\n");
    ret.append("Median: " + median + "\n");
    return (ret.toString());
}
From source file:fr.inrialpes.exmo.align.cli.GenPlot.java
public void iterateAlignments(File dir, Vector<GraphEvaluator> evaluators) { //logger.trace( "Directory : {}", dir ); String prefix = dir.toURI().toString() + "/"; AlignmentParser aparser = new AlignmentParser(); Alignment refalign = null;/* w w w. j a v a 2s . c om*/ try { // Load the reference alignment... refalign = aparser.parse(prefix + "refalign.rdf"); //logger.trace(" Reference alignment parsed"); } catch (Exception aex) { logger.error("GenPlot cannot parse refalign", aex); System.exit(-1); } // for all alignments there, for (int i = 0; i < size; i++) { String algo = listAlgo[i]; Alignment al = null; //logger.trace(" Considering result {} ({})", algo, i ); try { aparser.initAlignment(null); al = aparser.parse(prefix + algo + ".rdf"); //logger.trace(" Alignment {} parsed", algo ); } catch (Exception ex) { logger.error("IGNORED Exception", ex); } // even if empty, declare refalign evaluators.get(i).ingest(al, refalign); } // Unload the ontologies. try { OntologyFactory.clear(); } catch (OntowrapException owex) { // only report logger.error("IGNORED Exception", owex); } }
From source file:edu.ku.brc.dbsupport.MySQLDMBSUserMgr.java
/**
 * Verifies that the given database's tables use the InnoDB engine and a UTF-8
 * collation. On failure, errMsg is set and false is returned; on success
 * errMsg stays null and true is returned.
 *
 * @param dbName schema name to inspect via information_schema
 * @return true when engine and charset are acceptable
 */
@Override
public boolean verifyEngineAndCharSet(final String dbName) {
    errMsg = null;
    String sql = "select ENGINE,TABLE_COLLATION FROM information_schema.tables WHERE table_schema = '"
            + dbName + "'";
    Vector<Object[]> rows = BasicSQLUtils.query(connection, sql);
    if (rows == null || rows.size() == 0) {
        errMsg = "Error checking the database engine and character set.";
    } else {
        // Only the first table's engine/collation is inspected.
        Object[] first = rows.get(0);
        if (first[0] != null && !first[0].toString().equalsIgnoreCase("InnoDB")) {
            errMsg = "The engine is not InnoDB.";
        }
        if (first[1] != null && !StringUtils.contains(first[1].toString(), "utf8")) {
            // Append to any engine error already recorded.
            errMsg = (errMsg == null ? "" : errMsg + "\n") + "The character set is not UTF-8.";
        }
    }
    return errMsg == null;
}
From source file:oscar.dms.actions.DmsInboxManageAction.java
/**
 * Struts action that assembles the provider inbox page: collects lab results,
 * scanned documents and HRM reports for the requested provider (or patient),
 * filters documents by queue privilege and super-site access, de-duplicates
 * and groups results by accession number (entries with the same accession
 * number and lab type within ~4 months are treated as one group), and finally
 * publishes the resulting maps/lists as request attributes consumed by the
 * dms_page JSP.
 *
 * Request parameters read: searchProviderNo, status, demographicNo,
 * scannedDocument, page, pageSize, startDate, endDate, view, fname, lname,
 * hnum, newestDate. The logged-in provider comes from the "user" session
 * attribute; a missing "userrole" session attribute redirects to logout.
 *
 * NOTE(review): scannedDocStatus is unconditionally overwritten with "I"
 * right after being read from the request — confirm the parameter is
 * intentionally ignored.
 *
 * @return the "dms_page" forward with all display data set as request attributes
 */
public ActionForward prepareForContentPage(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) { HttpSession session = request.getSession(); try { if (session.getAttribute("userrole") == null) response.sendRedirect("../logout.jsp"); } catch (Exception e) { logger.error("Error", e); } // can't use userrole from session, because it changes if provider A search for provider B's documents // oscar.oscarMDS.data.MDSResultsData mDSData = new oscar.oscarMDS.data.MDSResultsData(); CommonLabResultData comLab = new CommonLabResultData(); // String providerNo = request.getParameter("providerNo"); String providerNo = (String) session.getAttribute("user"); String searchProviderNo = request.getParameter("searchProviderNo"); String ackStatus = request.getParameter("status"); String demographicNo = request.getParameter("demographicNo"); // used when searching for labs by patient instead of provider String scannedDocStatus = request.getParameter("scannedDocument"); Integer page = 0; try { page = Integer.parseInt(request.getParameter("page")); if (page > 0) { page--; } } catch (NumberFormatException nfe) { page = 0; } Integer pageSize = 20; try { String tmp = request.getParameter("pageSize"); pageSize = Integer.parseInt(tmp); } catch (NumberFormatException nfe) { pageSize = 20; } scannedDocStatus = "I"; String startDateStr = request.getParameter("startDate"); String endDateStr = request.getParameter("endDate"); String view = request.getParameter("view"); if (view == null || "".equals(view)) { view = "all"; } boolean mixLabsAndDocs = "normal".equals(view) || "all".equals(view); Date startDate = null; Date endDate = null; try { if (startDateStr != null && startDateStr.length() > 0) { startDateStr = startDateStr.trim(); startDate = UtilDateUtilities.StringToDate(startDateStr); } if (endDateStr != null && endDateStr.length() > 0) { endDateStr = endDateStr.trim(); endDate = 
UtilDateUtilities.StringToDate(endDateStr); } } catch (Exception e) { startDate = null; endDate = null; } Boolean isAbnormal = null; if ("abnormal".equals(view)) isAbnormal = new Boolean(true); if ("normal".equals(view)) isAbnormal = new Boolean(false); if (ackStatus == null) { ackStatus = "N"; } // default to new labs only if (providerNo == null) { providerNo = ""; } if (searchProviderNo == null) { searchProviderNo = providerNo; } String roleName = ""; List<SecUserRole> roles = secUserRoleDao.getUserRoles(searchProviderNo); for (SecUserRole r : roles) { if (r != null) { if (roleName.length() == 0) { roleName = r.getRoleName(); } else { roleName += "," + r.getRoleName(); } } } roleName += "," + searchProviderNo; // mDSData.populateMDSResultsData2(searchProviderNo, demographicNo, request.getParameter("fname"), request.getParameter("lname"), request.getParameter("hnum"), ackStatus); // HashMap<String,String> docQueue=comLab.getDocumentQueueLinks(); List<QueueDocumentLink> qd = queueDocumentLinkDAO.getQueueDocLinks(); HashMap<String, String> docQueue = new HashMap(); for (QueueDocumentLink qdl : qd) { Integer i = qdl.getDocId(); Integer n = qdl.getQueueId(); docQueue.put(i.toString(), n.toString()); } InboxResultsDao inboxResultsDao = (InboxResultsDao) SpringUtils.getBean("inboxResultsDao"); String patientFirstName = request.getParameter("fname"); String patientLastName = request.getParameter("lname"); String patientHealthNumber = request.getParameter("hnum"); ArrayList<LabResultData> labdocs = new ArrayList<LabResultData>(); if (!"labs".equals(view) && !"abnormal".equals(view)) { labdocs = inboxResultsDao.populateDocumentResultsData(searchProviderNo, demographicNo, patientFirstName, patientLastName, patientHealthNumber, ackStatus, true, page, pageSize, mixLabsAndDocs, isAbnormal); } if (!"documents".equals(view)) { labdocs.addAll(comLab.populateLabResultsData(searchProviderNo, demographicNo, patientFirstName, patientLastName, patientHealthNumber, ackStatus, 
scannedDocStatus, true, page, pageSize, mixLabsAndDocs, isAbnormal)); } labdocs = (ArrayList<LabResultData>) filterLabDocsForSuperSite(labdocs, providerNo); ArrayList<LabResultData> validlabdocs = new ArrayList<LabResultData>(); DocumentResultsDao documentResultsDao = (DocumentResultsDao) SpringUtils.getBean("documentResultsDao"); // check privilege for documents only for (LabResultData data : labdocs) { if (data.isDocument()) { String docid = data.getSegmentID(); String queueid = docQueue.get(docid); if (queueid != null) { queueid = queueid.trim(); int queueIdInt = Integer.parseInt(queueid); // if doc sent to default queue and no valid provider, do NOT include it if (queueIdInt == Queue.DEFAULT_QUEUE_ID && !documentResultsDao.isSentToValidProvider(docid) && isSegmentIDUnique(validlabdocs, data)) { // validlabdocs.add(data); } // if doc sent to default queue && valid provider, check if it's sent to this provider, if yes include it else if (queueIdInt == Queue.DEFAULT_QUEUE_ID && documentResultsDao.isSentToValidProvider(docid) && documentResultsDao.isSentToProvider(docid, searchProviderNo) && isSegmentIDUnique(validlabdocs, data)) { validlabdocs.add(data); } // if doc setn to non-default queue and valid provider, check if provider is in the queue or equal to the provider else if (queueIdInt != Queue.DEFAULT_QUEUE_ID && documentResultsDao.isSentToValidProvider(docid)) { Vector vec = OscarRoleObjectPrivilege.getPrivilegeProp("_queue." 
+ queueid); if (OscarRoleObjectPrivilege.checkPrivilege(roleName, (Properties) vec.get(0), (Vector) vec.get(1)) || documentResultsDao.isSentToProvider(docid, searchProviderNo)) { // labs is in provider's queue,do nothing if (isSegmentIDUnique(validlabdocs, data)) { validlabdocs.add(data); } } } // if doc sent to non default queue and no valid provider, check if provider is in the non default queue else if (!queueid.equals(Queue.DEFAULT_QUEUE_ID) && !documentResultsDao.isSentToValidProvider(docid)) { Vector vec = OscarRoleObjectPrivilege.getPrivilegeProp("_queue." + queueid); if (OscarRoleObjectPrivilege.checkPrivilege(roleName, (Properties) vec.get(0), (Vector) vec.get(1))) { // labs is in provider's queue,do nothing if (isSegmentIDUnique(validlabdocs, data)) { validlabdocs.add(data); } } } } } else {// add lab if (isSegmentIDUnique(validlabdocs, data)) { validlabdocs.add(data); } } } // Find the oldest lab returned in labdocs, use that as the limit date for the HRM query Date oldestLab = null; Date newestLab = null; if (request.getParameter("newestDate") != null) { try { SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); newestLab = formatter.parse(request.getParameter("newestDate")); } catch (Exception e) { logger.error("Couldn't parse date + " + request.getParameter("newestDate"), e); } } for (LabResultData result : labdocs) { if (result != null && result.getDateObj() != null) { if (oldestLab == null || oldestLab.compareTo(result.getDateObj()) > 0) oldestLab = result.getDateObj(); if (request.getParameter("newestDate") != null && (newestLab == null || newestLab.compareTo(result.getDateObj()) < 0)) newestLab = result.getDateObj(); } } HRMResultsData hrmResult = new HRMResultsData(); Collection<LabResultData> hrmDocuments = hrmResult.populateHRMdocumentsResultsData(searchProviderNo, ackStatus, newestLab, oldestLab); if (oldestLab == null) { for (LabResultData hrmDocument : hrmDocuments) { if (oldestLab == null || (hrmDocument.getDateObj() != 
null && oldestLab.compareTo(hrmDocument.getDateObj()) > 0)) oldestLab = hrmDocument.getDateObj(); } } //labdocs is already filtered for super site access.. not just filter hrmDocuments hrmDocuments = filterLabDocsForSuperSite(hrmDocuments, providerNo); labdocs.addAll(hrmDocuments); Collections.sort(labdocs); HashMap<String, LabResultData> labMap = new HashMap<String, LabResultData>(); LinkedHashMap<String, ArrayList<String>> accessionMap = new LinkedHashMap<String, ArrayList<String>>(); int accessionNumCount = 0; for (LabResultData result : labdocs) { if (startDate != null && startDate.after(result.getDateObj())) { continue; } if (endDate != null && endDate.before(result.getDateObj())) { continue; } String segmentId = result.getSegmentID(); if (result.isDocument()) segmentId += "d"; else if (result.isHRM()) segmentId += "h"; labMap.put(segmentId, result); ArrayList<String> labNums = new ArrayList<String>(); if (result.accessionNumber == null || result.accessionNumber.equals("")) { labNums.add(segmentId); accessionNumCount++; accessionMap.put("noAccessionNum" + accessionNumCount + result.labType, labNums); } else if (!accessionMap.containsKey(result.accessionNumber + result.labType)) { labNums.add(segmentId); accessionMap.put(result.accessionNumber + result.labType, labNums); // Different MDS Labs may have the same accession Number if they are seperated // by two years. 
So accession numbers are limited to matching only if their // labs are within one year of eachother } else { labNums = accessionMap.get(result.accessionNumber + result.labType); boolean matchFlag = false; for (int j = 0; j < labNums.size(); j++) { LabResultData matchingResult = labMap.get(labNums.get(j)); Date dateA = result.getDateObj(); Date dateB = matchingResult.getDateObj(); int monthsBetween = 0; if (dateA == null || dateB == null) { monthsBetween = 5; } else if (dateA.before(dateB)) { monthsBetween = UtilDateUtilities.getNumMonths(dateA, dateB); } else { monthsBetween = UtilDateUtilities.getNumMonths(dateB, dateA); } if (monthsBetween < 4) { matchFlag = true; break; } } if (!matchFlag) { labNums.add(segmentId); accessionMap.put(result.accessionNumber + result.labType, labNums); } } } labdocs.clear(); for (ArrayList<String> labNums : accessionMap.values()) { // must sort through in reverse to keep the labs in the correct order for (int j = labNums.size() - 1; j >= 0; j--) { labdocs.add(labMap.get(labNums.get(j))); } } logger.debug("labdocs.size()=" + labdocs.size()); /* find all data for the index.jsp page */ Hashtable patientDocs = new Hashtable(); Hashtable patientIdNames = new Hashtable(); String patientIdNamesStr = ""; Hashtable docStatus = new Hashtable(); Hashtable docType = new Hashtable(); Hashtable<String, List<String>> ab_NormalDoc = new Hashtable(); for (int i = 0; i < labdocs.size(); i++) { LabResultData data = labdocs.get(i); List<String> segIDs = new ArrayList<String>(); String labPatientId = data.getLabPatientId(); if (labPatientId == null || labPatientId.equals("-1")) labPatientId = "-1"; if (data.isAbnormal()) { List<String> abns = ab_NormalDoc.get("abnormal"); if (abns == null) { abns = new ArrayList<String>(); abns.add(data.getSegmentID()); } else { abns.add(data.getSegmentID()); } ab_NormalDoc.put("abnormal", abns); } else { List<String> ns = ab_NormalDoc.get("normal"); if (ns == null) { ns = new ArrayList<String>(); 
ns.add(data.getSegmentID()); } else { ns.add(data.getSegmentID()); } ab_NormalDoc.put("normal", ns); } if (patientDocs.containsKey(labPatientId)) { segIDs = (List) patientDocs.get(labPatientId); segIDs.add(data.getSegmentID()); patientDocs.put(labPatientId, segIDs); } else { segIDs.add(data.getSegmentID()); patientDocs.put(labPatientId, segIDs); patientIdNames.put(labPatientId, data.patientName); patientIdNamesStr += ";" + labPatientId + "=" + data.patientName; } docStatus.put(data.getSegmentID(), data.getAcknowledgedStatus()); docType.put(data.getSegmentID(), data.labType); } Integer totalDocs = 0; Integer totalHL7 = 0; Hashtable<String, List<String>> typeDocLab = new Hashtable(); Enumeration keys = docType.keys(); while (keys.hasMoreElements()) { String keyDocLabId = ((String) keys.nextElement()); String valType = (String) docType.get(keyDocLabId); if (valType.equalsIgnoreCase("DOC")) { if (typeDocLab.containsKey("DOC")) { List<String> docids = typeDocLab.get("DOC"); docids.add(keyDocLabId);// add doc id to list typeDocLab.put("DOC", docids); } else { List<String> docids = new ArrayList<String>(); docids.add(keyDocLabId); typeDocLab.put("DOC", docids); } totalDocs++; } else if (valType.equalsIgnoreCase("HL7")) { if (typeDocLab.containsKey("HL7")) { List<String> hl7ids = typeDocLab.get("HL7"); hl7ids.add(keyDocLabId); typeDocLab.put("HL7", hl7ids); } else { List<String> hl7ids = new ArrayList<String>(); hl7ids.add(keyDocLabId); typeDocLab.put("HL7", hl7ids); } totalHL7++; } } Hashtable patientNumDoc = new Hashtable(); Enumeration patientIds = patientDocs.keys(); String patientIdStr = ""; Integer totalNumDocs = 0; while (patientIds.hasMoreElements()) { String key = (String) patientIds.nextElement(); patientIdStr += key; patientIdStr += ","; List<String> val = (List<String>) patientDocs.get(key); Integer numDoc = val.size(); patientNumDoc.put(key, numDoc); totalNumDocs += numDoc; } List<String> normals = ab_NormalDoc.get("normal"); List<String> abnormals = 
ab_NormalDoc.get("abnormal"); logger.debug("labdocs.size()=" + labdocs.size()); // set attributes request.setAttribute("pageNum", page); request.setAttribute("docType", docType); request.setAttribute("patientDocs", patientDocs); request.setAttribute("providerNo", providerNo); request.setAttribute("searchProviderNo", searchProviderNo); request.setAttribute("patientIdNames", patientIdNames); request.setAttribute("docStatus", docStatus); request.setAttribute("patientIdStr", patientIdStr); request.setAttribute("typeDocLab", typeDocLab); request.setAttribute("demographicNo", demographicNo); request.setAttribute("ackStatus", ackStatus); request.setAttribute("labdocs", labdocs); request.setAttribute("patientNumDoc", patientNumDoc); request.setAttribute("totalDocs", totalDocs); request.setAttribute("totalHL7", totalHL7); request.setAttribute("normals", normals); request.setAttribute("abnormals", abnormals); request.setAttribute("totalNumDocs", totalNumDocs); request.setAttribute("patientIdNamesStr", patientIdNamesStr); request.setAttribute("oldestLab", oldestLab != null ? DateUtils.formatDate(oldestLab, "yyyy-MM-dd HH:mm:ss") : null); return mapping.findForward("dms_page"); }
From source file:gate.util.reporting.DocTimeReporter.java
/** * A method to ensure that the required line is read from the given file part. * * @param bytearray/*w ww . j a v a 2 s . c o m*/ * A part of a file being read upside down. * @param lastNlines * A vector containing the lines extracted from file part. * @param fromPos * A long value indicating the start of a file part. * * @return true if marker indicating the logical start of run is found; false * otherwise. */ private boolean parseLinesFromLast(byte[] bytearray, Vector<String> lastNlines, long fromPos) { String lastNChars = new String(bytearray); StringBuffer sb = new StringBuffer(lastNChars); lastNChars = sb.reverse().toString(); StringTokenizer tokens = new StringTokenizer(lastNChars, NL); while (tokens.hasMoreTokens()) { StringBuffer sbLine = new StringBuffer(tokens.nextToken()); lastNlines.add(sbLine.reverse().toString()); if ((lastNlines.get(lastNlines.size() - 1)).trim().endsWith(getLogicalStart())) { return true; } } return false; }