List of usage examples for java.lang StringBuffer delete
@Override public synchronized StringBuffer delete(int start, int end)
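Before the longer examples below, here is a minimal sketch of how delete(int start, int end) behaves: it removes the characters from start (inclusive) to end (exclusive), and delete(0, sb.length()) is the common idiom for clearing a buffer so it can be reused, which several of the examples below rely on. The class name and values are illustrative only, not taken from any of the source files listed here.

public class StringBufferDeleteDemo {
    public static void main(String[] args) {
        StringBuffer sb = new StringBuffer("hello, world");
        sb.delete(5, 12);                 // removes ", world"; sb is now "hello"
        sb.append(" again");              // sb is now "hello again"
        sb.delete(0, sb.length());        // clears the whole buffer for reuse
        System.out.println(sb.length());  // prints 0
    }
}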
From source file:dao.CarryonDaoDb.java
/**
 * updates caption for the blob in carryon.
 * @param btitle - the btitle
 * @param zoom - zoom applies to photos
 * @param entryid - the entry id
 * @param memberId - the member id
 * @param category - the category
 * @param member - the member
 * @param def - the default
 * @param usertags - usertags
 * @param caption - caption
 * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect
 */
public void updateCaption(String btitle, String zoom, String entryid, String memberId, String category,
        String member, boolean def, String usertags, String caption) throws BaseDaoException {

    if (RegexStrUtil.isNull(memberId) || RegexStrUtil.isNull(btitle) || RegexStrUtil.isNull(entryid)
            || RegexStrUtil.isNull(member)) {
        throw new BaseDaoException("null parameters passed");
    }

    boolean isFile = true;
    // zoom applies only to photos currently
    if ((category != null) && (category.equalsIgnoreCase("1"))) {
        isFile = false;
        if (RegexStrUtil.isNull(zoom)) {
            throw new BaseDaoException("zoom parameter is null");
        }
    }

    /** Set the source based on scalability */
    String sourceName = scalabilityManager.getWriteBlobScalability(memberId);
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds is null for sourceName = " + sourceName);
    }

    boolean exists = false;
    try {
        Object[] params = { (Object) entryid };
        List result = defaultQuery.execute(params);
        if (result != null && result.size() > 0) {
            exists = true;
        }
    } catch (Exception e) {
        throw new BaseDaoException("error while" + defaultQuery.getSql());
    }

    if (WebUtil.isSanEnabled()) {
        Photo photo = getPhoto(memberId, entryid, DbConstants.READ_FROM_SLAVE);
        if (photo != null) {
            String srcFileName = photo.getValue(DbConstants.BTITLE);
            if (!RegexStrUtil.isNull(srcFileName) && !srcFileName.equals(btitle)) {
                try {
                    SanUtils sanUtils = new SanUtils();
                    sanUtils.renameSanFile(member, SanConstants.sanUserPath, srcFileName, btitle);
                } catch (SanException e) {
                    throw new BaseDaoException("renameSanFile() in CarryonDaoDb " + member + " srcFileName "
                            + srcFileName + " destFileName " + btitle, e);
                }
            }
        }
    }

    Connection conn = null;
    try {
        conn = ds.getConnection();
        if (conn != null) {
            /* delete the photo, if this entry exists */
            conn.setAutoCommit(false);
            if (RegexStrUtil.isNull(caption)) {
                caption = btitle;
            }
            updateQuery.run(conn, btitle, zoom, entryid, memberId, isFile, caption);
            /** if this is the default photo and this photo does not exist, add this entry */
            if (def) {
                if (!exists) {
                    deleteDefQuery.run(conn, memberId);
                    addDefQuery.run(conn, entryid, memberId);
                }
            } else {
                /** no more a default photo, delete this entry */
                if (exists) {
                    deleteDefQuery.run(conn, memberId);
                }
            }
        }
    } catch (Exception e) {
        try {
            conn.rollback();
        } catch (Exception e1) {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                    conn.close();
                }
            } catch (Exception e2) {
                throw new BaseDaoException("connection close exception", e2);
            }
            throw new BaseDaoException("error occured while rollingback entries from carryon/defcarryon", e1);
        }
    }

    try {
        conn.commit();
    } catch (Exception e3) {
        throw new BaseDaoException("commit exception", e3);
    }

    try {
        if (conn != null) {
            conn.setAutoCommit(true);
            conn.close();
        }
    } catch (Exception e4) {
        throw new BaseDaoException("connection close exception", e4);
    }

    // update the blob title
    updateTitleCarryonHits(entryid, memberId, caption);

    /** updateTags */
    sourceName = scalabilityManager.getWriteZeroScalability();
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds is null for sourceName = " + sourceName);
    }

    conn = null;
    try {
        conn = ds.getConnection();
        if (conn != null) {
            updateTagsQuery.run(conn, caption, entryid, memberId, usertags);
            updateRecentQuery.run(conn, btitle, zoom, entryid, memberId, isFile, caption);
        }
    } catch (Exception e) {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (Exception e1) {
            throw new BaseDaoException("connection close exception for updateTagsQuery()", e1);
        }
        StringBuffer sb = new StringBuffer("error occured while executing in updateCaption() caption = ");
        sb.append(caption);
        sb.append(" zoom = ");
        sb.append(zoom);
        sb.append(" entryid = ");
        sb.append(entryid);
        sb.append(" memberId = ");
        sb.append(memberId);
        throw new BaseDaoException(sb.toString(), e);
    }

    // close the connection
    try {
        if (conn != null) {
            conn.close();
        }
    } catch (Exception e1) {
        throw new BaseDaoException("connection close exception", e1);
    }

    /** remove from userstreamblob based on the key (memberId+entryid) */
    StringBuffer sb = new StringBuffer(memberId);
    sb.append("-");
    sb.append(entryid);
    String key = sb.toString();
    Fqn fqn = cacheUtil.fqn(DbConstants.USER_STREAM_BLOB_ENTRY);
    if (treeCache.exists(fqn, key)) {
        treeCache.remove(fqn, key);
    }
    fqn = cacheUtil.fqn(DbConstants.USER_STREAM_BLOB_DATA);
    if (treeCache.exists(fqn, key)) {
        treeCache.remove(fqn, key);
    }

    /** uses memberid-category key */
    sb.delete(0, sb.length());
    sb.append(memberId);
    sb.append("-");
    sb.append(category);
    fqn = cacheUtil.fqn(DbConstants.USER_STREAM_BLOBS_CAT);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }

    /** remove all entries for this user, key based on memberId */
    fqn = cacheUtil.fqn(DbConstants.USER_STREAM_BLOB);
    if (treeCache.exists(fqn, memberId)) {
        treeCache.remove(fqn, memberId);
    }
    fqn = cacheUtil.fqn(DbConstants.USER_PAGE);
    if (treeCache.exists(fqn, member)) {
        treeCache.remove(fqn, member);
    }
    fqn = cacheUtil.fqn(DbConstants.DEFAULT_PHOTO);
    if (treeCache.exists(fqn, memberId)) {
        treeCache.remove(fqn, memberId);
    }

    /* remove this from cache only when the blogger matches one of the recent blog entry matches */
    /*
    Fqn fqn = cacheUtil.fqn(DbConstants.RECENT_BLOGS);
    if (treeCache.exists(fqn, DbConstants.RECENT_BLOG_KEY)) {
        treeCache.remove(fqn, DbConstants.RECENT_BLOG_KEY);
    }
    */
}
From source file:org.etudes.component.app.melete.ModuleDB.java
public void archiveModules(List selModBeans, List moduleDateBeans, String courseId) throws Exception {
    Transaction tx = null;
    StringBuffer moduleIds = new StringBuffer();
    moduleIds.append("(");
    ModuleDateBean mdbean = null;
    for (ListIterator i = selModBeans.listIterator(); i.hasNext();) {
        mdbean = (ModuleDateBean) i.next();
        moduleIds.append(mdbean.getModule().getModuleId().toString());
        moduleIds.append(", ");
    }
    // drop the trailing ", " before closing the id list
    moduleIds.delete(moduleIds.toString().length() - 2, moduleIds.toString().length());
    moduleIds.append(")");

    try {
        Session session = hibernateUtil.currentSession();
        tx = session.beginTransaction();
        Date currentDate = Calendar.getInstance().getTime();
        String updCourseModuleStr = "update CourseModule cm set cm.seqNo=-1, cm.archvFlag=1,cm.dateArchived=:currentDate where cm.moduleId in "
                + moduleIds.toString();
        int updatedEntities = session.createQuery(updCourseModuleStr).setParameter("currentDate", currentDate)
                .executeUpdate();
        logger.debug("course module updated " + updatedEntities);
        String updMshdatesStr = "update ModuleShdates mshdates set mshdates.addtoSchedule=0 where mshdates.moduleId in "
                + moduleIds.toString();
        updatedEntities = session.createQuery(updMshdatesStr).executeUpdate();
        logger.debug("ModuleShdates updated " + updatedEntities);

        moduleDateBeans.removeAll(selModBeans);
        List<CourseModule> courseModules = new ArrayList<CourseModule>(0);
        for (ListIterator i = moduleDateBeans.listIterator(); i.hasNext();) {
            mdbean = (ModuleDateBean) i.next();
            courseModules.add((CourseModule) mdbean.getCmod());
        }
        logger.debug("Updating sequence for all other modules");
        assignSeqs(session, courseModules);
        tx.commit();
    } catch (HibernateException he) {
        if (tx != null)
            tx.rollback();
        logger.error(he.toString());
        throw he;
    } catch (Exception e) {
        if (tx != null)
            tx.rollback();
        logger.error(e.toString());
        e.printStackTrace();
        throw e;
    } finally {
        try {
            hibernateUtil.closeSession();
        } catch (HibernateException he) {
            logger.error(he.toString());
            throw he;
        }
    }

    List modList = new ArrayList();
    try {
        Session session = hibernateUtil.currentSession();
        String queryString = "from Module as mod where mod.moduleId in " + moduleIds.toString();
        Query query = session.createQuery(queryString);
        modList = query.list();
    } catch (HibernateException he) {
        logger.error(he.toString());
    } finally {
        try {
            hibernateUtil.closeSession();
        } catch (HibernateException he) {
            logger.error(he.toString());
        }
    }

    for (ListIterator i = modList.listIterator(); i.hasNext();) {
        Module mod = (Module) i.next();
        updateCalendar(mod, (ModuleShdates) mod.getModuleshdate(), courseId);
    }
    logger.debug("Calendar updated");

    /*
    try {
        Session session = hibernateUtil.currentSession();
        tx = session.beginTransaction();
        String getArchvQueryString = "select cmod from CourseModule as cmod where cmod.module.moduleId = :moduleId and cmod.courseId = :courseId";
        Query getArchvQuery = session.createQuery(getArchvQueryString);
        getArchvQuery.setParameter("moduleId", new Integer(archvModuleId));
        getArchvQuery.setParameter("courseId", courseId);
        CourseModule cmod = (CourseModule) getArchvQuery.uniqueResult();
        int modSeqNo = -1;
        modSeqNo = cmod.getSeqNo();
        cmod.setSeqNo(-1);
        cmod.setArchvFlag(true);
        Date currentDate = Calendar.getInstance().getTime();
        cmod.setDateArchived(currentDate);
        session.saveOrUpdate(cmod);
        String queryString = "from CourseModule cmod1 where cmod1.courseId = :courseId and cmod1.seqNo > :seqno";
        Query query = session.createQuery(queryString);
        query.setParameter("courseId", courseId);
        query.setParameter("seqno", new Integer(modSeqNo));
        Iterator itr = query.iterate();
        CourseModule cmodObj = null;
        while (itr.hasNext()) {
            cmodObj = (CourseModule) itr.next();
            cmodObj.setSeqNo(cmodObj.getSeqNo() - 1);
            session.saveOrUpdate(cmodObj);
        }
        tx.commit();
        //session.flush();
    } catch (HibernateException he) {
        logger.error(he.toString());
        throw he;
    } catch (Exception e) {
        if (tx != null)
            tx.rollback();
        logger.error(e.toString());
        throw e;
    } finally {
        try {
            hibernateUtil.closeSession();
        } catch (HibernateException he) {
            logger.error(he.toString());
            throw he;
        }
    }
    */
}
From source file:com.mysql.stresstool.RunnableQueryInsertPartRange.java
@Override
public boolean createSchema(StressTool sTool) {
    // Custom schema creation; this is the default for the stresstool but can be anything
    String DropTables1 = "Drop table IF EXISTS tbtest";
    String DropTables2 = "Drop table IF EXISTS tbtest_child";
    String TruncateTables1 = "Truncate table tbtest";
    String TruncateTables2 = "Truncate table tbtest_child";
    Connection conn = null;
    Statement stmt = null;
    try {
        if (jdbcUrlMap.get("dbType") != null && !((String) jdbcUrlMap.get("dbType")).equals("MySQL")) {
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("dbType"), "test", "test");
        } else
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("jdbcUrl"));

        conn.setAutoCommit(false);
        stmt = conn.createStatement();

        StringBuffer sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest" + iTable + "(");
            if (this.isUseAutoIncrement()) {
                sb.append("`autoInc` bigint(11) AUTO_INCREMENT NOT NULL,");
            }
            sb.append(" `a` int(11) NOT NULL,");
            sb.append(" `uuid` char(36) NOT NULL,");
            sb.append(" `b` varchar(100) NOT NULL,");
            sb.append(" `c` char(200) NOT NULL,");
            sb.append(" `counter` bigint(20) NULL, ");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            sb.append(" `date` DATE NOT NULL,");
            sb.append(" `strrecordtype` char(3) NULL");
            if (this.isUseAutoIncrement()) {
                if (this.partitionType.equals("range")) {
                    sb.append(", PRIMARY KEY (`autoInc`,`date`), INDEX `IDX_a` (a), INDEX `IDX_uuid` (uuid) ");
                } else {
                    sb.append(", PRIMARY KEY (`autoInc`,`partitionid`), INDEX `IDX_a` (a), INDEX `IDX_uuid` (uuid) ");
                }
            } else {
                if (!this.doSimplePk)
                    if (this.partitionType.equals("range")) {
                        sb.append(", PRIMARY KEY (`uuid`,`date`), INDEX `IDX_a` (a) ");
                    } else {
                        sb.append(", PRIMARY KEY (`uuid`,`partitionid`), INDEX `IDX_a` (a) ");
                    }
                else {
                    if (this.partitionType.equals("range")) {
                        sb.append(", PRIMARY KEY (`a`,`date`), INDEX `IDX_uuid` (uuid) ");
                    } else {
                        sb.append(", PRIMARY KEY (`a`,`partitionid`), INDEX `IDX_uuid` (uuid) ");
                    }
                }
            }
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts1 = sb.toString();

        sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest_child" + iTable);
            sb.append("(`a` int(11) NOT NULL,");
            sb.append("`bb` int(11) AUTO_INCREMENT NOT NULL,");
            sb.append(" `date` DATE NOT NULL,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            if (operationShort)
                sb.append(" `stroperation` VARCHAR(254) NULL,");
            else
                sb.append(" `stroperation` TEXT(41845) NULL,");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP");
            sb.append(", PRIMARY KEY (`a`,`bb`), UNIQUE(`bb`)");
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts2 = sb.toString();

        System.out.println(tbts1);
        if (!doSimplePk)
            System.out.println(tbts2);

        if (sTool.droptable) {
            System.out.println("****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println("**** Please wait DROP table tbtest" + iTable + " it could take a LOT of time *******");
                stmt.execute(DropTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait DROP table tbtest_child" + iTable + " it could take a LOT of time *******");
                    stmt.execute(DropTables2 + iTable);
                }
            }
            stmt.execute("COMMIT");
            System.out.println("**** DROP finished *******");
            System.out.println("****============================================================================*******");
        }

        if (sTool.createtable)
            stmt.executeBatch();

        if (sTool.truncate) {
            System.out.println("****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println("**** Please wait TRUNCATE table tbtest" + iTable + " it could take a LOT of time *******");
                stmt.execute(TruncateTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait TRUNCATE table tbtest_child" + iTable + " it could take a LOT of time *******");
                    stmt.execute(TruncateTables2 + iTable);
                }
            }
            System.out.println("**** TRUNCATE finish *******");
            System.out.println("****============================================================================*******");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return false;
    } finally {
        try {
            conn.close();
            return true;
        } catch (SQLException ex1) {
            ex1.printStackTrace();
            return false;
        }
    }
}
From source file:org.jmlspecs.util.QDoxUtil.java
private static String getFileEnumTypeErasureProcessingAsString(StringBuffer bufferedFile, String fileAsString,
        List<JavaClass> javaDeclEnumTypes) throws IOException {
    BufferedReader buffer = null;
    String line = "";
    List<com.github.antlrjavaparser.api.body.EnumDeclaration> javaDeclEnumTypes2 = QDoxUtil
            .getAllDeclaredJavaEnumConstantsInFile(fileAsString);
    fileAsString = fileAsString.replace("enum ", "final class ");
    // handling enumeration fields
    for (int i = 0; i < javaDeclEnumTypes.size(); i++) {
        JavaClass enumType = javaDeclEnumTypes.get(i);
        com.github.antlrjavaparser.api.body.EnumDeclaration enumType2 = javaDeclEnumTypes2.get(i);
        List<JavaField> enumTypeFields = QDoxUtil.getADeclaredJavaEnumerationFieldsInEnumType(enumType);
        if (enumTypeFields.size() > 0) {
            String enumFieldsConverted = QDoxUtil.getADeclaredJavaEnumerationFieldsInEnumTypeAsErasure(enumType);
            List<String> listEnumConstLineNumbers = getEnumConstantLineNumbersPerEnumDecl(enumType2);
            buffer = new BufferedReader(new StringReader(fileAsString));
            bufferedFile.delete(0, (bufferedFile.length() - 1)); // resetting buffer
            line = buffer.readLine();
            int lineStart = enumType2.getBeginLine();
            int lineEnd = enumType2.getEndLine();
            int lineNumber = 1;
            String lastLine = "";
            while (line != null) {
                if (listEnumConstLineNumbers.get(listEnumConstLineNumbers.size() - 1).equals(lineNumber + "")) {
                    lastLine = "/* " + line + " */#";
                    fileAsString = StringUtils.replaceOnce(fileAsString, line, lastLine);
                } else if (listEnumConstLineNumbers.contains(lineNumber + "")) {
                    String lineProcessed = "/* " + line + " */";
                    fileAsString = StringUtils.replaceOnce(fileAsString, line, lineProcessed);
                }
                line = buffer.readLine();
                lineNumber++;
            }
            buffer.close();
            fileAsString = StringUtils.replaceOnce(fileAsString, lastLine,
                    lastLine.replace("#", enumFieldsConverted));
        }
    }
    return fileAsString;
}
From source file:marytts.tools.dbselection.WikipediaMarkupCleaner.java
private StringBuffer removeSection(Scanner s, StringBuffer lineIn, String iniTag, String endTag) {
    String next;
    int index1 = 0, index2 = -1, endTagLength = 0, numRef = 0, lastEndTag = 0, lastIniTag = 0;
    boolean closeRef = true;
    StringBuffer line = new StringBuffer(lineIn);
    StringBuffer nextLine;

    if (debug)
        System.out.println("Removing tag: " + iniTag + "  LINE (BEFORE): " + line);

    while ((index1 = line.indexOf(iniTag)) >= 0) { // in one line can be more than one iniTag
        numRef++;
        if ((index2 = line.indexOf(endTag, index1)) >= 0)
            endTagLength = endTag.length() + index2;

        if (index2 == -1) { // the iniTag most be in the next lines, so get more lines until the endTag is found
            lastEndTag = 0; // start to look for the endTag in 0
            while (s.hasNext() && numRef != 0) {
                lastIniTag = 0;
                nextLine = new StringBuffer(s.nextLine());
                //if (debug)
                //    System.out.println("  NEXTLINE: " + nextLine);
                while ((index1 = nextLine.indexOf(iniTag, lastIniTag)) >= 0) {
                    numRef++;
                    lastIniTag = iniTag.length() + index1;
                }
                line.append(nextLine);
                // next time it will look for the endTag after the position of the last it found.
                while ((index2 = line.indexOf(endTag, lastEndTag)) >= 0) {
                    numRef--;
                    lastEndTag = index2 + endTag.length(); // I need to remember where the last endTag was found
                    endTagLength = endTag.length() + index2;
                }
                //if (debug)
                //    System.out.println("LINE (numRef=" + numRef + "): " + line);
            }
        } else // the endTag was found
            numRef--;

        if (numRef == 0) {
            index1 = line.indexOf(iniTag); // get again this because the position might change
            if (endTagLength > index1) {
                if (debug) {
                    System.out.println("    FINAL LINE: " + line);
                    System.out.print("iniTag: " + iniTag + "  index1=" + index1);
                    System.out.print("  endTagLength=" + endTagLength);
                    System.out.println("  line.length=" + line.length() + " line: " + line);
                    System.out.println("  line.length=" + line.length());
                }
                line.delete(index1, endTagLength);
            } else {
                if (debug) {
                    System.out.println("removeSection: WARNING endTagLength > length of line: ");
                    System.out.print("iniTag: " + iniTag + "  index1=" + index1);
                    System.out.print("  endTagLength=" + endTagLength);
                    System.out.println("  line.length=" + line.length() + " line: " + line);
                    System.out.println("removeSection: WARNING endTagLength > length of line: " + line);
                }
                line = new StringBuffer("");
            }
            //System.out.println("nextline=" + line);
        } else {
            if (debug)
                System.out.println("removeSection: WARNING no " + endTag);
            line = new StringBuffer("");
        }
    } // while this line contains iniTag-s

    if (debug)
        System.out.println("    LINE (AFTER): " + line);
    return line;
}
From source file:marytts.tools.dbselection.WikipediaMarkupCleaner.java
private StringBuffer removeSectionTable(Scanner s, StringBuffer lineIn, String iniTag, String endTag) {
    String next;
    int index1 = 0, index2 = -1, endTagLength = 0, numRef = 0, lastEndTag = 0, lastIniTag = 0;
    boolean closeRef = true;
    StringBuffer line = new StringBuffer(lineIn);
    StringBuffer nextLine;

    if (debug)
        System.out.println("Removing tag: " + iniTag + "  LINE (BEFORE): " + line);

    while ((index1 = line.indexOf(iniTag)) >= 0) { // in one line can be more than one iniTag
        numRef++;
        if ((index2 = line.indexOf(endTag, index1)) >= 0)
            endTagLength = endTag.length() + index2;

        if (index2 == -1) { // the iniTag most be in the next lines, so get more lines until the endTag is found
            lastEndTag = 0; // start to look for the endTag in 0
            while (s.hasNext() && numRef != 0) {
                lastIniTag = 0;
                nextLine = new StringBuffer(s.nextLine());
                //if (debug)
                //    System.out.println("  NEXTLINE: " + nextLine);
                while ((index1 = nextLine.indexOf(iniTag, lastIniTag)) >= 0) {
                    numRef++;
                    lastIniTag = iniTag.length() + index1;
                }
                // next time it will look for the endTag after the position of the last it found.
                //while( (index2 = line.indexOf(endTag, lastEndTag)) >= 0 ){
                if (nextLine.toString().startsWith(endTag)) {
                    numRef--;
                    //index2 = line.length();
                    //lastEndTag = index2 + endTag.length(); // I need to remember where the last endTag was found
                    endTagLength = line.length() + endTag.length();
                }
                line.append(nextLine);
                //if (debug)
                //    System.out.println("LINE (numRef=" + numRef + "): " + line);
            }
        } else // the endTag was found
            numRef--;

        if (numRef == 0) {
            index1 = line.indexOf(iniTag); // get again this because the position might change
            if (endTagLength > index1) {
                if (debug) {
                    System.out.println("    FINAL LINE: " + line);
                    System.out.print("iniTag: " + iniTag + "  index1=" + index1);
                    System.out.print("  endTagLength=" + endTagLength);
                    System.out.println("  line.length=" + line.length() + " line: " + line);
                    System.out.println("  line.length=" + line.length());
                }
                line.delete(index1, endTagLength);
            } else {
                if (debug) {
                    System.out.println("removeSection: WARNING endTagLength > length of line: ");
                    System.out.print("iniTag: " + iniTag + "  index1=" + index1);
                    System.out.print("  endTagLength=" + endTagLength);
                    System.out.println("  line.length=" + line.length() + " line: " + line);
                    System.out.println("removeSection: WARNING endTagLength > length of line: " + line);
                }
                line = new StringBuffer("");
            }
            //System.out.println("nextline=" + line);
        } else {
            if (debug)
                System.out.println("removeSection: WARNING no " + endTag);
            line = new StringBuffer("");
        }
    } // while this line contains iniTag-s

    if (debug)
        System.out.println("    LINE (AFTER): " + line);
    return line;
}
From source file:org.etudes.component.app.melete.ModuleDB.java
private void processViewSections(Map vsBeanMap, List vsBeanList, List xmlSecList, StringBuffer rowClassesBuf) {
    ViewSecBean vsBean = null;
    //SectionBean secBean = null;
    if ((vsBeanMap != null) && (xmlSecList != null)) {
        if (vsBeanMap.size() == xmlSecList.size()) {
            for (ListIterator k = xmlSecList.listIterator(); k.hasNext();) {
                SecLevelObj slObj = (SecLevelObj) k.next();
                if (slObj != null) {
                    vsBean = (ViewSecBean) vsBeanMap.get(new Integer(slObj.getSectionId()));
                    if (vsBean != null) {
                        vsBean.setDisplaySequence(slObj.getDispSeq());
                        vsBeanList.add(vsBean);
                        rowClassesBuf.append("secrow" + slObj.getLevel() + ",");
                    }
                }
            }
            // drop the trailing comma
            rowClassesBuf.delete(rowClassesBuf.toString().length() - 1, rowClassesBuf.toString().length());
        }
    }
}
From source file:com.juce.JuceAppActivity.java
public static final HTTPStream createHTTPStream(String address, boolean isPost, byte[] postData, String headers,
        int timeOutMs, int[] statusCode, StringBuffer responseHeaders, int numRedirectsToFollow,
        String httpRequestCmd) {
    // timeout parameter of zero for HttpUrlConnection is a blocking connect (negative value for juce::URL)
    if (timeOutMs < 0)
        timeOutMs = 0;
    else if (timeOutMs == 0)
        timeOutMs = 30000;

    // headers - if not empty, this string is appended onto the headers that are used for the request.
    // It must therefore be a valid set of HTML header directives, separated by newlines.
    // So convert headers string to an array, with an element for each line
    String headerLines[] = headers.split("\\n");

    for (;;) {
        try {
            HttpURLConnection connection = (HttpURLConnection) (new URL(address).openConnection());

            if (connection != null) {
                try {
                    connection.setInstanceFollowRedirects(false);
                    connection.setConnectTimeout(timeOutMs);
                    connection.setReadTimeout(timeOutMs);

                    // Set request headers
                    for (int i = 0; i < headerLines.length; ++i) {
                        int pos = headerLines[i].indexOf(":");

                        if (pos > 0 && pos < headerLines[i].length()) {
                            String field = headerLines[i].substring(0, pos);
                            String value = headerLines[i].substring(pos + 1);

                            if (value.length() > 0)
                                connection.setRequestProperty(field, value);
                        }
                    }

                    connection.setRequestMethod(httpRequestCmd);

                    if (isPost) {
                        connection.setDoOutput(true);

                        if (postData != null) {
                            OutputStream out = connection.getOutputStream();
                            out.write(postData);
                            out.flush();
                        }
                    }

                    HTTPStream httpStream = new HTTPStream(connection, statusCode, responseHeaders);

                    // Process redirect & continue as necessary
                    int status = statusCode[0];

                    if (--numRedirectsToFollow >= 0
                            && (status == 301 || status == 302 || status == 303 || status == 307)) {
                        // Assumes only one occurrence of "Location"
                        int pos1 = responseHeaders.indexOf("Location:") + 10;
                        int pos2 = responseHeaders.indexOf("\n", pos1);

                        if (pos2 > pos1) {
                            String newLocation = responseHeaders.substring(pos1, pos2);
                            // Handle newLocation whether it's absolute or relative URL
                            URL baseUrl = new URL(address);
                            URL newUrl = new URL(baseUrl, newLocation);
                            String transformedNewLocation = newUrl.toString();

                            if (transformedNewLocation != address) {
                                address = transformedNewLocation;
                                // Clear responseHeaders before next iteration
                                responseHeaders.delete(0, responseHeaders.length());
                                continue;
                            }
                        }
                    }

                    return httpStream;
                } catch (Throwable e) {
                    connection.disconnect();
                }
            }
        } catch (Throwable e) {
        }

        return null;
    }
}
From source file:org.kuali.ole.module.purap.document.PurchaseOrderDocument.java
/**
 * @see org.kuali.ole.sys.document.GeneralLedgerPostingDocumentBase#doRouteStatusChange()
 */
@Override
public void doRouteStatusChange(DocumentRouteStatusChange statusChangeEvent) {
    LOG.debug("doRouteStatusChange() started");
    super.doRouteStatusChange(statusChangeEvent);

    String currentDocumentTypeName = this.getDocumentHeader().getWorkflowDocument().getDocumentTypeName();
    // child classes need to call super, but we don't want to inherit the post-processing done by this PO class other than to the Split
    if (this.getFinancialSystemDocumentHeader().getWorkflowDocument().isFinal() && !(currentDocumentTypeName
            .equals(OLEConstants.FinancialDocumentTypeCodes.PURCHASE_ORDER_RETRANSMIT))) {
        OleDocstoreHelperService oleDocstoreHelperService = SpringContext.getBean(OleDocstoreHelperService.class);
        List<OlePurchaseOrderItem> items = this.getItems();
        StringBuffer cancellationNote = new StringBuffer();
        List<Note> noteList = new ArrayList<>();
        for (OlePurchaseOrderItem item : items) {
            if (item.getItemType().isQuantityBasedGeneralLedgerIndicator()) {
                if (currentDocumentTypeName.equalsIgnoreCase(PurchaseOrderDocTypes.PURCHASE_ORDER_VOID_DOCUMENT)) {
                    for (Note noteObj : this.getNotes()) {
                        if (noteObj.getNoteText().contains(PODocumentsStrings.VOID_NOTE_PREFIX)) {
                            noteList.add(noteObj);
                        }
                    }
                    // Both Cancellation reason and free text are saved in Docstore's item record.
                    if (noteList.size() > 1) {
                        for (int noteObj = 0; noteObj < noteList.size(); noteObj++) {
                            String[] attachedNote = noteList.get(noteObj).getNoteText()
                                    .split(PODocumentsStrings.VOID_NOTE_PREFIX);
                            cancellationNote = cancellationNote.append(attachedNote[1].trim());
                            if (noteObj == 0) {
                                cancellationNote = cancellationNote.append(OLEConstants.COMMA);
                            }
                        }
                    } else {
                        String[] attachedNote = noteList.get(0).getNoteText()
                                .split(PODocumentsStrings.VOID_NOTE_PREFIX);
                        cancellationNote = cancellationNote.append(attachedNote[1].trim());
                    }
                }
                if (currentDocumentTypeName.equals(OLEConstants.FinancialDocumentTypeCodes.PURCHASE_ORDER)) {
                    oleDocstoreHelperService.createOrUpdateDocStoreBasedOnLocation(this, item,
                            currentDocumentTypeName, cancellationNote.toString());
                }
                cancellationNote.delete(0, cancellationNote.length());
                noteList.clear();
            }
        }
    }

    if (PurapConstants.PurchaseOrderDocTypes.PURCHASE_ORDER_DOCUMENT.equals(currentDocumentTypeName)
            || PurapConstants.PurchaseOrderDocTypes.PURCHASE_ORDER_SPLIT_DOCUMENT.equals(currentDocumentTypeName)) {
        try {
            // DOCUMENT PROCESSED
            if (this.getFinancialSystemDocumentHeader().getWorkflowDocument().isProcessed()) {
                setPurchaseOrderLastTransmitTimestamp(
                        SpringContext.getBean(DateTimeService.class).getCurrentTimestamp());
                SpringContext.getBean(PurchaseOrderService.class).completePurchaseOrder(this);
                if (this.getFinancialSystemDocumentHeader().getWorkflowDocument().isProcessed()
                        && !this.getFinancialSystemDocumentHeader().getWorkflowDocument().isFinal()) {
                    SpringContext.getBean(WorkflowDocumentService.class)
                            .saveRoutingData(this.getFinancialSystemDocumentHeader().getWorkflowDocument());
                }
            }
            // DOCUMENT DISAPPROVED
            else if (this.getFinancialSystemDocumentHeader().getWorkflowDocument().isDisapproved()) {
                String nodeName = SpringContext.getBean(WorkflowDocumentService.class)
                        .getCurrentRouteLevelName(this.getFinancialSystemDocumentHeader().getWorkflowDocument());
                String disapprovalStatus = PurapConstants.PurchaseOrderStatuses
                        .getPurchaseOrderAppDocDisapproveStatuses().get(nodeName);
                if (ObjectUtils.isNotNull(disapprovalStatus)) {
                    // update the appDocStatus and save the workflow data
                    updateAndSaveAppDocStatus(disapprovalStatus);

                    RequisitionDocument req = getPurApSourceDocumentIfPossible();
                    String principalId = req.getFinancialSystemDocumentHeader().getWorkflowDocument()
                            .getRoutedByPrincipalId();
                    appSpecificRouteDocumentToUser(this.getFinancialSystemDocumentHeader().getWorkflowDocument(),
                            principalId,
                            "Notification of Order Disapproval for Requisition " + req.getPurapDocumentIdentifier()
                                    + "(document id " + req.getDocumentNumber() + ")",
                            "Requisition Routed By User");
                    return;
                }
                logAndThrowRuntimeException(
                        "No status found to set for document being disapproved in node '" + nodeName + "'");
            }
            // DOCUMENT CANCELED
            else if (this.getFinancialSystemDocumentHeader().getWorkflowDocument().isCanceled()) {
                updateAndSaveAppDocStatus(PurchaseOrderStatuses.APPDOC_CANCELLED);
            }
        } catch (WorkflowException e) {
            logAndThrowRuntimeException(
                    "Error saving routing data while saving document with id " + getDocumentNumber(), e);
        }
    }
}
From source file:org.etudes.component.app.melete.ModuleDB.java
private void processSections(Map sectionMap, List sectionBeanList, List xmlSecList, StringBuffer rowClassesBuf) {
    Section sec = null;
    SectionBean secBean = null;
    if ((sectionMap != null) && (xmlSecList != null)) {
        if (sectionMap.size() == xmlSecList.size()) {
            for (ListIterator k = xmlSecList.listIterator(); k.hasNext();) {
                SecLevelObj slObj = (SecLevelObj) k.next();
                if (slObj != null) {
                    sec = (Section) sectionMap.get(new Integer(slObj.getSectionId()));
                    if (sec != null) {
                        secBean = new SectionBean(sec);
                        secBean.setTruncTitle(createTruncstr(sec.getTitle()));
                        secBean.setDisplaySequence(slObj.getDispSeq());
                        sectionBeanList.add(secBean);
                        rowClassesBuf.append("secrow" + slObj.getLevel() + ",");
                    }
                }
            }
            // drop the trailing comma
            rowClassesBuf.delete(rowClassesBuf.toString().length() - 1, rowClassesBuf.toString().length());
        }
    }
}