List of usage examples for java.lang.StringBuffer.delete(int start, int end)
@Override public synchronized StringBuffer delete(int start, int end)
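StringBuffer.delete(start, end) removes the characters from index start (inclusive) up to end (exclusive) and returns the same buffer; if end is past the current length it is clamped. Nearly every example below uses the idiom sb.delete(0, sb.length()) to empty a buffer so it can be reused. A minimal, self-contained sketch of both uses:

// Minimal sketch of StringBuffer.delete semantics.
public class DeleteDemo {
    public static void main(String[] args) {
        StringBuffer sb = new StringBuffer("hello world");
        sb.delete(5, 11);                // removes " world" (start inclusive, end exclusive)
        System.out.println(sb);          // prints "hello"
        sb.delete(0, sb.length());       // common idiom: clear the buffer for reuse
        System.out.println(sb.length()); // prints 0
    }
}

Note that setLength(0) clears a buffer as well; the delete(0, length()) form is simply the one these examples favor.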
From source file: com.smart.smartrestfulw.controller.FileDepotController.java
/** * ??// w w w.j a va2s. c o m * * @param formFileData * @param strJson * @param isModify ??? * @return */ private String SaveUpLoadFile(List<MultipartFile> uploadFiles, FileDepotParamModel paramModel, boolean isModify) throws Exception { String strSvcFileLocalName = null, strUpFileName = null, strTempFilePath = null; StringBuffer sbTemp = new StringBuffer(); StringBuffer sbFilePathTemp = new StringBuffer(); boolean bSvcFileExist = false; Set<String> setStrSqls = new HashSet<String>(); ExecuteResultParam resultParam = null; DepotFileDetailModel tempFileDetailModel = null; int saveFlag = 1; if (paramModel == null) { return responseFormat.formationResultToString(ResponseResultCode.ErrorParam, "paramError"); } SignInformationModel signModel = SignCommon.verifySign(paramModel.getToken(), false); if (signModel == null) { return responseFormat.formationResultToString(ResponseResultCode.ErrorSignToken, "no authorize"); } try { for (MultipartFile tempFile : uploadFiles) { strUpFileName = tempFile.getOriginalFilename(); // root/rsid/date(yymmddhh)/Type // sbFilePathTemp.append(paramModel.rsid); sbTemp.append(DeployInfo.GetDeployFilePath()).append(File.separator).append(paramModel.rsid); FileHelper.CheckFileExist(sbTemp.toString()); // sbFilePathTemp.append(File.separator).append(UtileSmart.getCurrentDate()); sbTemp.append(File.separator).append(UtileSmart.getCurrentDate()); FileHelper.CheckFileExist(sbTemp.toString()); tempFileDetailModel = paramModel.getFileDetailModel(strUpFileName); if (tempFileDetailModel == null) { return responseFormat.formationResultToString(ResponseResultCode.ErrorParam, "param error."); // return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("? %s?", strUpFileName), paramModel.toStringInformation())); } //? (File.separator) if (tempFileDetailModel.fileOwnType.indexOf(File.separator) > 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileType, "file type error"); // return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("? %s?", strUpFileName), paramModel.toStringInformation())); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("??", strUpFileName), paramModel.toStringInformation())); } sbFilePathTemp.append(File.separator).append(tempFileDetailModel.fileOwnType); sbTemp.append(File.separator).append(tempFileDetailModel.fileOwnType); FileHelper.CheckFileExist(sbTemp.toString()); //? sbFilePathTemp.append(File.separator).append(strUpFileName).toString(); strSvcFileLocalName = sbTemp.append(File.separator).append(strUpFileName).toString(); bSvcFileExist = FileHelper.CheckFileExist(strSvcFileLocalName, false); if (bSvcFileExist && isModify == false) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileExist, "file exist"); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("????%s", strUpFileName), paramModel.toStringInformation())); } //?? ownid fpath??????? 
resultParam = DBHelper.ExecuteSqlOnceSelect(DeployInfo.MasterRSID, String.format( "SELECT COUNT(*) AS ROWSCOUNT FROM FILEDEPOT WHERE OWNID<>'%s' AND FPATH='%s'", paramModel.ownid, sbFilePathTemp.toString())); if (resultParam.ResultCode != 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorDB, resultParam.errMsg); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("????%s : Msg : %s", strUpFileName, resultParam.errMsg), paramModel.toStringInformation())); } //ROWSCOUNT ?0?? ROWSCOUNT ?0??????? if (resultParam.ResultJsonObject != null) { if (Integer.parseInt(resultParam.ResultJsonObject.getJSONObject(DeployInfo.ResultDataTag) .getString("ROWSCOUNT")) > 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileRepeat, "file binded "); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("%s,?????????????", strUpFileName), paramModel.toStringInformation())); } } tempFile.transferTo(new File(strSvcFileLocalName)); //?????? tempFileDetailModel.fileLocalPath = strSvcFileLocalName; //?sql???? if (isModify) { //todo ?sql?? // 1,?? // 2? uuid ???? //3? uuid ? setStrSqls.add(String.format( "INSERT INTO FILEDEPOT (FID,FNAME,FPATH,FSUMMARY,OWNID,OWNFILETYPE) VALUES ('%s','%s','%s','%s','%s','%s')", UUID.randomUUID().toString(), strUpFileName, sbFilePathTemp.toString(), "md5", paramModel.ownid, tempFileDetailModel.fileOwnType)); } else { setStrSqls.add(String.format( "INSERT INTO FILEDEPOT (FID,FNAME,FPATH,FSUMMARY,OWNID,OWNFILETYPE) VALUES ('%s','%s','%s','%s','%s','%s')", UUID.randomUUID().toString(), strUpFileName, sbFilePathTemp.toString(), "md5", paramModel.ownid, tempFileDetailModel.fileOwnType)); } sbTemp.delete(0, sbTemp.length()); sbFilePathTemp.delete(0, sbFilePathTemp.length()); } //??? resultParam = DBHelper.ExecuteSql(DeployInfo.MasterRSID, setStrSqls); if (resultParam.ResultCode >= 0) { saveFlag = 0; //???? resultParam = SelectDepotFileByOwn(new FileDepotParamModel(paramModel.ownid)); //return formationResult.formationResult(ResponseResultCode.Success, new ExecuteResultParam(resultParam.ResultJsonObject)); return responseFormat.formationSuccessResultToString(resultParam.ResultJsonObject); } else { return responseFormat.formationResultToString(ResponseResultCode.ErrorDB, resultParam.errMsg); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("??%s", resultParam.errMsg), paramModel.toStringInformation())); } } catch (Exception e) { return responseFormat.formationResultToString(ResponseResultCode.ErrorDB, e); // return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(e.getLocalizedMessage(), paramModel.toStringInformation(), e)); } finally { if (saveFlag == 1) { DeleteFile(paramModel.fileDetaile); } UtileSmart.FreeObjects(strSvcFileLocalName, strUpFileName, strTempFilePath, sbTemp, sbFilePathTemp, setStrSqls, resultParam, paramModel, tempFileDetailModel); } }
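The essential StringBuffer.delete usage in SaveUpLoadFile is resetting two reusable path buffers at the end of each loop iteration so they can be refilled for the next uploaded file. A condensed, hypothetical sketch of that pattern (buildPaths, rootDir and fileNames are illustrative names, not from the original controller):

import java.io.File;
import java.util.List;

// Hedged sketch of the reset-and-reuse pattern above; names are illustrative.
class PathBufferReuse {
    static void buildPaths(String rootDir, List<String> fileNames) {
        StringBuffer sbTemp = new StringBuffer();
        StringBuffer sbFilePathTemp = new StringBuffer();
        for (String name : fileNames) {
            sbTemp.append(rootDir).append(File.separator).append(name);
            sbFilePathTemp.append(File.separator).append(name);
            System.out.println(sbTemp + " -> " + sbFilePathTemp);
            sbTemp.delete(0, sbTemp.length());                 // clear for the next iteration
            sbFilePathTemp.delete(0, sbFilePathTemp.length());
        }
    }
}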
From source file: dao.DirectoryDaoDb.java
/** * This method updates blob (caption, zoom) for the directory * @param entryId - the entry id of this blob * @param directoryId - the directory id of this blob * @param userId - the user id /*from ww w. j a v a2s . c om*/ * @param userLogin - the user login * @param zoom - the zoom * @param btitle - the btitle * @param def - is this the default * @param caption - caption * @throws BaseDaoException - when error occurs */ public void updateStreamBlob(String entryId, String directoryId, String userId, String userLogin, String zoom, String btitle, boolean def, String caption, String dirPath, String dirName) { /** * check only dirname and owner. others can be null. */ if (RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** * isDiaryAdmin or isAuthor */ if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, userId))) { throw new BaseDaoException("User is neither a diaryAdmin nor author to update directory = " + directoryId + " userId =" + userId); } /** * Get scalability datasource for dirblob - partitioned on directoryId */ String sourceName = scalabilityManager.getWriteBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, updateStreamBlob() " + sourceName); } /** * Check if this entry exists in the default directory blob */ boolean exists = false; String defId = null; try { Object[] params = { (Object) directoryId }; List result = defaultQuery.execute(params); if (result != null && result.size() > 0) { defId = ((Photo) result.get(0)).getValue(DbConstants.ENTRYID); if (!RegexStrUtil.isNull(defId) && defId.equals(entryId)) { exists = true; } } } catch (Exception e) { throw new BaseDaoException("error while" + defaultQuery.getSql(), e); } if (WebUtil.isSanEnabled()) { Photo photo = getPhoto(entryId, directoryId); String srcFileName = null; if (photo != null) { srcFileName = photo.getValue(DbConstants.BTITLE); } if ((!RegexStrUtil.isNull(srcFileName)) && !srcFileName.equals(btitle)) { try { getSanUtils(); sanUtils.renameSanFile(dirPath, dirName, srcFileName, SanConstants.sanPath, btitle); } catch (SanException e) { throw new BaseDaoException("directory updateStreamBlob()renameSanFile error", e); } } } List result = null; Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); blobUpdateQuery.run(conn, entryId, directoryId, zoom, btitle, caption); if (def) { /** * so delete the default entry that does not match with this entryid, add the new entryid */ if (!exists) { deleteDefaultQuery.run(conn, directoryId); addDefaultQuery.run(conn, entryId, directoryId); } } else { /** * A default record matching entryid exists, * Delete this entry as the user does not want this to be a default entry */ if (exists) { deleteDefaultQuery.run(conn, directoryId); } } } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for updateStreamBlob() directoryId =" + directoryId + " entryId = " + entryId, e2); } throw new BaseDaoException("rollback() exception, for updateStreamBlob() directoryId =" + directoryId + " entryId = " + entryId, e1); } } /** * connection commit * */ try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException( "commit() exception updateStreamBlob, directoryId= " + directoryId + " entryId = " + entryId, e3); } try { if 
(conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), updateStreamBlob() directoryId = " + directoryId + " entryId = " + entryId, e4); } updateDirImage(entryId, directoryId, zoom, btitle, caption); /** * generate a key for the directory stream blob (directoryid + entryid) * remove blobstream of directory, from cache * use the same key for both the caches (DIR_PHOTO, DIR_STREAM_BLOB with blobdata) */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_STREAM_BLOB); StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(entryId); String key = sb.toString(); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.DIR_PHOTO); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } /** remove the existing default photo from cache */ fqn = cacheUtil.fqn(DbConstants.DIR_PHOTO); sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(defId); key = sb.toString(); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.DIR_CAT); sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(DbConstants.PHOTO_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } /* fqn = cacheUtil.fqn(DbConstants.DIRECTORY_STREAM_BLOBS); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } */ }
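The cache-eviction tail of updateStreamBlob builds several cache keys from a single StringBuffer, clearing it with delete(0, length()) between keys. A condensed sketch of that idiom, with a hypothetical evict(...) standing in for the treeCache.remove calls:

// Hedged sketch: one StringBuffer producing several cache keys; evict(...) is a placeholder.
class CacheKeyBuilder {
    static void evictKeys(String directoryId, String entryId, String defId) {
        StringBuffer sb = new StringBuffer(directoryId);
        sb.append("-").append(entryId);
        evict(sb.toString());              // key "directoryId-entryId"

        sb.delete(0, sb.length());         // reset and build the next key
        sb.append(directoryId).append("-").append(defId);
        evict(sb.toString());              // key "directoryId-defId"
    }

    static void evict(String key) {        // placeholder for treeCache.remove(fqn, key)
        System.out.println("evict " + key);
    }
}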
From source file: com.smart.smartrestfulw.controller.FileDepotController.java
@RequestMapping(value = "/ModifyBase64File", method = RequestMethod.POST, consumes = { org.springframework.http.MediaType.APPLICATION_JSON_VALUE }, produces = { org.springframework.http.MediaType.APPLICATION_JSON_VALUE }) @ResponseBody/* w w w . j a v a 2 s . c o m*/ public String ModifyBase64File(@RequestParam("param") String param) { String strUpFileName = null, strSvcFileLocalName = null; StringBuffer sbFilePathTemp = new StringBuffer(), sbTemp = new StringBuffer(); boolean bSvcFileExist; ExecuteResultParam resultParam = null; List<String> strSqls = new ArrayList<String>(); int saveFlag = 1; FileDepotParamModel paramModel = null; try { //?? paramModel = analyzeBase64Param(param); if (paramModel == null) { return responseFormat.formationResultToString(ResponseResultCode.ErrorParam, "paramError"); } SignInformationModel signModel = SignCommon.verifySign(paramModel.getToken(), false); if (signModel == null) { return responseFormat.formationResultToString(ResponseResultCode.ErrorSignToken, "no authorize"); } if (paramModel.fileDetaile == null || paramModel.fileDetaile.isEmpty()) { // return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam("base64", param)); return responseFormat.formationResultToString(ResponseResultCode.ErrorBase64Error, "base64"); } for (DepotFileDetailModel fileDetaile : paramModel.fileDetaile) { //? strUpFileName = fileDetaile.fileName; // root/rsid/date(yymmddhh)/Type // sbFilePathTemp.append(paramModel.rsid); sbTemp.append(DeployInfo.GetDeployFilePath()).append(File.separator).append(paramModel.rsid); FileHelper.CheckFileExist(sbTemp.toString()); // sbFilePathTemp.append(File.separator).append(UtileSmart.getCurrentDate()); sbTemp.append(File.separator).append(UtileSmart.getCurrentDate()); FileHelper.CheckFileExist(sbTemp.toString()); sbFilePathTemp.append(File.separator).append(fileDetaile.fileOwnType); sbTemp.append(File.separator).append(fileDetaile.fileOwnType); FileHelper.CheckFileExist(sbTemp.toString()); //? sbFilePathTemp.append(File.separator).append(strUpFileName).toString(); strSvcFileLocalName = sbTemp.append(File.separator).append(strUpFileName).toString(); bSvcFileExist = FileHelper.CheckFileExist(strSvcFileLocalName, false); if (bSvcFileExist) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileExist, "file exist can not change .please contact system manger"); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam("????", param)); } //?? ownid fpath??????? resultParam = DBHelper.ExecuteSqlOnceSelect(DeployInfo.MasterRSID, String.format( "SELECT COUNT(*) AS ROWSCOUNT FROM FILEDEPOT WHERE OWNID<>'%s' AND FPATH='%s'", paramModel.ownid, sbFilePathTemp.toString())); if (resultParam.ResultCode != 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorDB, resultParam.errMsg); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("????%s", resultParam.errMsg), param)); } //ROWSCOUNT ?0?? ROWSCOUNT ?0??????? if (resultParam.ResultJsonObject != null) { if (Integer.parseInt(resultParam.ResultJsonObject.getJSONObject(DeployInfo.ResultDataTag) .getString("ROWSCOUNT")) > 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileRepeat, "file binded "); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("%s,?????????????", strUpFileName), param)); } } //? 
(File.separator) if (fileDetaile.fileOwnType.indexOf(File.separator) > 0) { return responseFormat.formationResultToString(ResponseResultCode.ErrorFileType, "file type error. "); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("??", strUpFileName), paramModel.toStringInformation())); } //? int baseIndex = fileDetaile.fileBase64Value.indexOf(";base64,"); if (!FileHelper.ConvertBase64ToImage( fileDetaile.fileBase64Value.substring(baseIndex + 8, fileDetaile.fileBase64Value.length()), strSvcFileLocalName)) { return responseFormat.formationResultToString(ResponseResultCode.ErrorBase64ConvertFile, String.format("%s: convert image failed", fileDetaile.fileName)); //return formationResult.formationResult(ResponseResultCode.Error, new ExecuteResultParam(String.format("%s: convert image failed", fileDetaile.fileName), param)); } //?????? fileDetaile.fileLocalPath = strSvcFileLocalName; if (fileDetaile.fileId != null && !fileDetaile.fileId.isEmpty()) { strSqls.add(String.format( "insert into FILEDEPOT_LS (FID,FNAME,FPATH,FSUMMARY,OWNID,OWNFILETYPE,UPLOADDATE) select FID,FNAME,FPATH,FSUMMARY,OWNID,OWNFILETYPE,UPLOADDATE from FILEDEPOT as t_f where t_f.OWNID='%s' and t_f.FID ='%s' ", paramModel.ownid, fileDetaile.fileId)); //?sql???? strSqls.add(String.format( "update FILEDEPOT set FNAME='%s',FPATH='%s',OWNFILETYPE='%s',UPLOADDATE=getdate() where FID='%s' and OWNID='%s'", strUpFileName, sbFilePathTemp.toString(), fileDetaile.fileOwnType, fileDetaile.fileId, paramModel.ownid)); } else { strSqls.add(String.format( "INSERT INTO FILEDEPOT (FID,FNAME,FPATH,FSUMMARY,OWNID,OWNFILETYPE) VALUES ('%s','%s','%s','%s','%s','%s')", UUID.randomUUID().toString(), strUpFileName, sbFilePathTemp.toString(), "md5", paramModel.ownid, fileDetaile.fileOwnType)); } sbTemp.delete(0, sbTemp.length()); sbFilePathTemp.delete(0, sbFilePathTemp.length()); } //??? resultParam = DBHelper.ExecuteSql(DeployInfo.MasterRSID, strSqls); if (resultParam.ResultCode >= 0) { saveFlag = 0; //???? resultParam = SelectDepotFileByOwn(new FileDepotParamModel(paramModel.ownid)); // return formationResult.formationResult(ResponseResultCode.Success, new ExecuteResultParam(resultParam.ResultJsonObject)); return responseFormat.formationSuccessResultToString(resultParam.ResultJsonObject); } else { //TODO ?????? return responseFormat.formationResultToString(ResponseResultCode.Error, resultParam.errMsg); } } catch (Exception e) { return responseFormat.formationResultToString(ResponseResultCode.Error, e); } finally { if (saveFlag == 1 && paramModel != null) { DeleteFile(paramModel.fileDetaile); } UtileSmart.FreeObjects(strUpFileName, strSvcFileLocalName, sbFilePathTemp, sbTemp, resultParam, strSqls, paramModel); } }
From source file: com.wabacus.system.component.application.report.ListReportType.java
private void showCommonRowGroupDataPart(List<ColBean> lstColBeans) { List<RowGroupDataBean> lstHasDisplayedRowGroupCols = null; Map<String, AbsListReportRowGroupSubDisplayRowBean> mStatiRowGroupBeans = null; if (this.alrbean.getSubdisplaybean() != null && this.alrbean.getSubdisplaybean().getMRowGroupSubDisplayRowBeans() != null && this.alrbean.getSubdisplaybean().getMRowGroupSubDisplayRowBeans().size() > 0) { lstHasDisplayedRowGroupCols = new ArrayList<RowGroupDataBean>(); mStatiRowGroupBeans = this.alrbean.getSubdisplaybean().getMRowGroupSubDisplayRowBeans(); }//from ww w .j a va2 s .c o m boolean isDisplayInPage = rrequest.getShowtype() == Consts.DISPLAY_ON_PAGE; RowGroupDataBean rgdbean; ColDisplayData colDisplayData; StringBuffer tdPropsBuf; AbsReportDataPojo rowDataObjTmp; int[] displayrowinfo = this.getDisplayRowInfo(); if (displayrowinfo[1] <= 0) return; RowDataBean rowInterceptorObjTmp = null; String trstylepropertyTmp = null; boolean isReadonlyByRowInterceptor; for (int i = displayrowinfo[0]; i < displayrowinfo[1]; i++) { if (i >= this.lstReportData.size()) { dataPartStringBuffer.append(showDataRowInAddMode(lstColBeans, i)); checkAndPrintBufferData(i); continue; } isReadonlyByRowInterceptor = false; rowInterceptorObjTmp = null; rowDataObjTmp = lstReportData.get(i); trstylepropertyTmp = rowDataObjTmp.getRowValuestyleproperty(); if (this.rbean.getInterceptor() != null) { rowInterceptorObjTmp = new RowDataBean(this, trstylepropertyTmp, lstColBeans, rowDataObjTmp, i, this.cacheDataBean.getTotalColCount()); this.rbean.getInterceptor().beforeDisplayReportDataPerRow(this.rrequest, this.rbean, rowInterceptorObjTmp); if (rowInterceptorObjTmp.getInsertDisplayRowHtml() != null) dataPartStringBuffer.append(rowInterceptorObjTmp.getInsertDisplayRowHtml()); if (!rowInterceptorObjTmp.isShouldDisplayThisRow()) { this.global_rowindex++; continue; } trstylepropertyTmp = rowInterceptorObjTmp.getRowstyleproperty(); isReadonlyByRowInterceptor = rowInterceptorObjTmp.isReadonly(); } dataPartStringBuffer.append(showDataRowTrStart(rowInterceptorObjTmp, trstylepropertyTmp, i, true)) .append(" "); dataPartStringBuffer.append(" parentCommonGroupTdId=\"") .append(getDirectParentGroupId(this.mAllParentRowGroupDataBeansForPerDataRow.get(i))) .append("\""); dataPartStringBuffer.append(" grouprow=\"true\"");//<tr/>grouprow=true?<tr/>??????? dataPartStringBuffer.append(">"); if (isDisplayInPage) { for (RowGroupDataBean parentObjTmp : this.mAllParentRowGroupDataBeansForPerDataRow.get(i)) {//?????<td/> if (parentObjTmp.getDisplay_rowidx() != i) {//????<tr/>????<td/>??? dataPartStringBuffer.append(showHiddenCol(parentObjTmp.getCbean(), rowDataObjTmp, i)); } } } boolean isReadonlyByColInterceptor; for (ColBean cbean : lstColBeans) { if (Consts.COL_DISPLAYTYPE_HIDDEN.equals(cbean.getDisplaytype(isDisplayInPage)) || this.cacheDataBean.getColDisplayModeAfterAuthorize(cbean, isDisplayInPage) <= 0) {//???? 
if (mRowGroupCols.containsKey(cbean) && this.cacheDataBean.getColDisplayModeAfterAuthorize(cbean, isDisplayInPage) < 0) { throw new WabacusRuntimeException("" + rbean.getPath() + "????"); } dataPartStringBuffer.append(showHiddenCol(cbean, rowDataObjTmp, i)); continue; } isReadonlyByColInterceptor = false; int rowspan = 1; boolean isRowGroup = false; tdPropsBuf = new StringBuffer(); if (mRowGroupCols.containsKey(cbean)) { rgdbean = getCommonRowGroupDataBean(mRowGroupCols.get(cbean), mStatiRowGroupBeans, lstHasDisplayedRowGroupCols, cbean, i); if (rgdbean == null) continue; isRowGroup = true; rowspan = rgdbean.getRowspan(); String childIdSuffix = rgdbean.getAllChildDataRowIdxsAsString(); if (!childIdSuffix.equals("")) {//??? tdPropsBuf.append(" childDataIdSuffixes=\"").append(childIdSuffix).append("\""); } childIdSuffix = rgdbean.getAllChildGroupIdxsAsString(); if (!childIdSuffix.equals("")) { tdPropsBuf.append(" childGroupIdSuffixes=\"").append(childIdSuffix).append("\""); } if (rgdbean.getParentGroupIdSuffix() != null && !rgdbean.getParentGroupIdSuffix().trim().equals("")) { tdPropsBuf.append(" parentCommonGroupTdId=\"").append(rgdbean.getParentGroupIdSuffix()) .append("\"");//?<td/>id } } Object colDataObj = initDisplayCol(cbean, rowDataObjTmp); dataPartStringBuffer.append("<td ").append(getTdPropertiesForCol(cbean, colDataObj, i, isRowGroup));//<td/>?; String col_displayvalue = getColDisplayValue(cbean, rowDataObjTmp, rowInterceptorObjTmp, tdPropsBuf, colDataObj, i, isReadonlyByRowInterceptor); colDisplayData = ColDisplayData.getColDataFromInterceptor(this, cbean, rowDataObjTmp, i, getColValuestyleproperty(cbean, rowDataObjTmp), col_displayvalue); isReadonlyByColInterceptor = colDisplayData.getColdataByInterceptor() != null && colDisplayData.getColdataByInterceptor().isReadonly(); if (!isReadonlyByRowInterceptor && isReadonlyByColInterceptor) { tdPropsBuf.delete(0, tdPropsBuf.length()); col_displayvalue = getColDisplayValue(cbean, rowDataObjTmp, rowInterceptorObjTmp, tdPropsBuf, colDataObj, i, true); } else { col_displayvalue = colDisplayData.getValue(); } dataPartStringBuffer.append(" class='" + getDataTdClassName() + "' rowspan=\"").append(rowspan) .append("\" "); dataPartStringBuffer.append(tdPropsBuf.toString()); dataPartStringBuffer.append(" ").append(colDisplayData.getStyleproperty()); if (isRowGroup) { dataPartStringBuffer.append(" groupcol=\"true\""); if (alrbean.getLstRoworderTypes() != null && alrbean.getLstRoworderTypes().contains(Consts.ROWORDER_DRAG)) {//??? dataPartStringBuffer.append( " onmouseover=\"dragrow_enabled=false;\" onmouseout=\"dragrow_enabled=true;\""); } } dataPartStringBuffer.append(">").append(getColDisplayValueWithWrap(cbean, col_displayvalue, colDataObj, isReadonlyByRowInterceptor || isReadonlyByColInterceptor)); dataPartStringBuffer.append("</td>"); } dataPartStringBuffer.append("</tr>"); this.global_rowindex++; this.global_sequence++; dataPartStringBuffer.append(showStatisticForCommonRowGroup(lstHasDisplayedRowGroupCols, i)); checkAndPrintBufferData(i); } }
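In showCommonRowGroupDataPart, delete is used to throw away the <td/> attributes accumulated for the editable rendering once an interceptor marks the column read-only, after which the buffer is refilled for the read-only rendering. A stripped-down sketch of that discard-and-rebuild pattern (the attribute strings are illustrative, not the report engine's actual markup):

// Hedged sketch: discard accumulated attributes and rebuild when a read-only flag flips.
class TdPropsExample {
    static String buildTdProps(boolean readonlyByInterceptor) {
        StringBuffer tdPropsBuf = new StringBuffer();
        tdPropsBuf.append(" contenteditable=\"true\"");   // attributes for the editable case
        if (readonlyByInterceptor) {
            tdPropsBuf.delete(0, tdPropsBuf.length());    // drop everything accumulated so far
            tdPropsBuf.append(" class=\"readonly\"");     // rebuild for the read-only case
        }
        return tdPropsBuf.toString();
    }
}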
From source file: org.jmlspecs.util.QDoxUtil.java
public static String getTypeErasureForTypeDeclsInFile(File file, boolean isGenericSource) throws FileNotFoundException, IOException, PositionedError { BufferedReader buffer = null; StringBuffer bufferedFile = null; String fileAsString = ""; String line = ""; bufferedFile = new StringBuffer(""); buffer = new BufferedReader(new FileReader(file)); line = buffer.readLine();/*from w w w .j a v a2s. co m*/ while (line != null) { bufferedFile.append(line); bufferedFile.append("\n"); line = buffer.readLine(); } buffer.close(); fileAsString = bufferedFile.toString(); AspectUtil.getInstance().addJavaFileAsString(fileAsString); JavaDocBuilder qDoxFile = new JavaDocBuilder(); qDoxFile.addSource(new FileReader(file)); // handling JavaDocTags in File List<DocletTag> javaMethsWithDocletTagsFile = QDoxUtil.getAllJavaDocTagsInFile(qDoxFile); if (javaMethsWithDocletTagsFile.size() > 0) { buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer fileAsStringJavaDocProcessed = new StringBuffer(""); line = buffer.readLine(); int fileLineNumber = 1; while (line != null) { // if any String jmlClause = QDoxUtil.getJavaDocTagAsJMLClause(javaMethsWithDocletTagsFile, fileLineNumber); fileAsStringJavaDocProcessed.append(line).append(jmlClause); fileAsStringJavaDocProcessed.append("\n"); line = buffer.readLine(); fileLineNumber++; } buffer.close(); fileAsString = StringUtils.replaceOnce(fileAsString, fileAsString, fileAsStringJavaDocProcessed.toString()); } // handling javadoc tags in Java types that should be shifted List<JavaClass> javaDeclTypeWithJavadocTags = QDoxUtil .getAllDeclaredJavaTypesWithJavaDocTagsInFile(qDoxFile); for (Iterator<JavaClass> iterator = javaDeclTypeWithJavadocTags.iterator(); iterator.hasNext();) { JavaClass javaTypeWithJavadoc = iterator.next(); String jmlClausesToShift = QDoxUtil.getJavaDocTagAsJMLClauseForTypeToShift( QDoxUtil.getAllJavaDocTagsInAJavaTypedDecl(javaTypeWithJavadoc)); if (jmlClausesToShift.equals("")) { continue; } buffer = new BufferedReader(new StringReader(fileAsString)); int lineStart = javaTypeWithJavadoc.getLineNumber(); int fileLineNumber = 1; StringBuffer TypeDeclUtilOpenBrace = new StringBuffer(""); line = buffer.readLine(); fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (line.contains("{")) { int indexOpenBrace = line.indexOf('{'); String lineTmp = line.substring(0, (indexOpenBrace + 1)); TypeDeclUtilOpenBrace.append(lineTmp); break; } else { TypeDeclUtilOpenBrace.append(line); TypeDeclUtilOpenBrace.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } buffer.close(); String TypeDeclUtilOpenBraceStr = TypeDeclUtilOpenBrace.toString().trim(); // processing java field tags // placing them where the JML compiler can understand - [[[hemr]]] fileAsString = StringUtils.replaceOnce(fileAsString, TypeDeclUtilOpenBraceStr, TypeDeclUtilOpenBraceStr + jmlClausesToShift); } // end for // handling javadoc tags in Java fields that should be shifted List<JavaField> javaDeclFieldsWithJavadocTags = QDoxUtil .getAllDeclaredJavaFieldsWithJavaDocTagsInFile(qDoxFile); for (Iterator<JavaField> iterator = javaDeclFieldsWithJavadocTags.iterator(); iterator.hasNext();) { JavaField javaFieldWithJavadoc = iterator.next(); String jmlClausesToShift = QDoxUtil.getJavaDocTagAsJMLClauseForFieldToShift( QDoxUtil.getAllJavaDocTagsInAJavaFieldDecl(javaFieldWithJavadoc)); if (jmlClausesToShift.equals("")) { continue; } buffer = new BufferedReader(new StringReader(fileAsString)); int lineStart = javaFieldWithJavadoc.getLineNumber(); int 
fileLineNumber = 1; StringBuffer fieldDecl = new StringBuffer(""); line = buffer.readLine(); fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (line.contains(";")) { int indexSemiColon = line.lastIndexOf(';'); fieldDecl.append(line.substring(0, indexSemiColon + 1)); break; } else { fieldDecl.append(line); fieldDecl.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } buffer.close(); String fieldDeclStr = fieldDecl.toString().trim(); // processing java field tags // placing them where the JML compiler can understand - [[[hemr]]] fileAsString = StringUtils.replaceOnce(fileAsString, fieldDeclStr, fieldDeclStr + jmlClausesToShift); } // end for // Generic Source or any Java 5+ features (hopefully :-) if (isGenericSource) { String actualFileAsString = fileAsString; bufferedFile.delete(0, (bufferedFile.length() - 1)); // reset for later use // handling enum types List<JavaClass> javaDeclEnumTypes = QDoxUtil.getAllDeclaredJavaEnumTypesInFile(qDoxFile); if (javaDeclEnumTypes.size() > 0) { fileAsString = QDoxUtil.getFileEnumTypeErasureProcessingAsString(bufferedFile, actualFileAsString, javaDeclEnumTypes); } // collecting all methods that lexically occur within a file bufferedFile.delete(0, (bufferedFile.length() - 1)); // reset for later use List<JavaMethod> javaDeclMeths = QDoxUtil.getAllDeclaredJavaMethodsInFile(qDoxFile); if (file.getCanonicalPath().endsWith(".jml")) { actualFileAsString = QDoxUtil.handleConstructDeclInJMLFileMode(bufferedFile, actualFileAsString, javaDeclMeths); } List<com.github.antlrjavaparser.api.body.BodyDeclaration> members = QDoxUtil .getAllDeclaredJavaMethodsInFile(actualFileAsString); List<String> fileMeths = QDoxUtil.getAllJavaMethodDeclLexicallyInFile(bufferedFile, actualFileAsString, javaDeclMeths, members); if (fileMeths.size() != javaDeclMeths.size()) { System.out.println("file = " + file.getCanonicalPath()); System.out.println("processed ---> " + fileMeths.size()); System.out.println("really contains = " + javaDeclMeths.size()); } fileAsString = QDoxUtil.stripMethBodies(fileAsString, javaDeclMeths, fileMeths); // method bodies stripped... 
[[[hemr]]] // eliminating the pattern --> default {return null;}; fileAsString = fileAsString.replaceAll("default(\\s)*\\{(\\s)*return [\\w;]+(\\s)*\\};", ";"); // handling annotated Java fields List<JavaField> javaDeclAnnotatedFields = QDoxUtil.getAllDeclaredAnnotatedJavaFieldsInFile(qDoxFile); List<com.github.antlrjavaparser.api.body.FieldDeclaration> javaDeclAnnotatedFields2 = QDoxUtil .getAllDeclaredAnnotatedJavaFieldsInFile(actualFileAsString); for (int i = 0; i < javaDeclAnnotatedFields.size(); i++) { JavaField annotatedJavaField = javaDeclAnnotatedFields.get(i); com.github.antlrjavaparser.api.body.FieldDeclaration annotatedJavaField2 = javaDeclAnnotatedFields2 .get(i); buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer annotationArea = new StringBuffer(""); StringBuffer annotationAreaCommented = new StringBuffer(""); int lineStart = annotatedJavaField2.getAnnotations().get(0).getBeginLine(); int lineEnd = annotatedJavaField2.getAnnotations() .get(annotatedJavaField2.getAnnotations().size() - 1).getEndLine(); line = buffer.readLine(); int fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (fileLineNumber == lineEnd) { annotationArea.append(line); annotationAreaCommented .append("/*" + fileLineNumber + "*/" + "/* " + line.replace("@", "#") + "*/"); break; } else { annotationArea.append(line); annotationArea.append("\n"); annotationAreaCommented .append("/*" + fileLineNumber + "*/" + "/* " + line.replace("@", "#") + "*/"); annotationAreaCommented.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } // end while buffer.close(); // pre field annotations if any StringBuffer fieldDeclAnnotations = new StringBuffer(""); if (annotationArea.toString().contains("@SpecPublic")) { fieldDeclAnnotations.append("spec_public "); } if (annotationArea.toString().contains("@SpecProtected")) { fieldDeclAnnotations.append("spec_protected "); } if (annotationArea.toString().contains("@NonNull")) { fieldDeclAnnotations.append("non_null "); } if (annotationArea.toString().contains("@Nullable")) { fieldDeclAnnotations.append("nullable "); } if (annotationArea.toString().contains("@Model")) { fieldDeclAnnotations.append("model "); } // processing java field annotations String annotationAreaCommentedStr = QDoxUtil.getFieldAnnotationAreaCommentedProcessedWithJML( annotatedJavaField, annotationAreaCommented.toString()); // doing the replacement int newLineEnd = annotatedJavaField2.getEndLine(); lineStart = lineEnd; lineEnd = newLineEnd; buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer fileAsStringReplacement = new StringBuffer(""); line = buffer.readLine(); fileLineNumber = 1; while (line != null) { if (fileLineNumber == (lineStart + 1)) { // placing them where the JML compiler can understand - [[[hemr]]] fileAsStringReplacement .append("/*@ " + fieldDeclAnnotations.toString() + "@*/" + " " + line); fileAsStringReplacement.append("\n"); } else if (fileLineNumber == (lineEnd)) { // getting the end of field line // placing them where the JML compiler can understand - [[[hemr]]] fileAsStringReplacement.append(line + annotationAreaCommentedStr); fileAsStringReplacement.append("\n"); } else { fileAsStringReplacement.append(line); fileAsStringReplacement.append("\n"); } line = buffer.readLine(); fileLineNumber++; } // end while buffer.close(); fileAsString = fileAsStringReplacement.toString(); // updating the current field annotations in the file // removing field annotations fileAsString = StringUtils.replaceOnce(fileAsString, 
annotationArea.toString(), ""); } // end for // handling annotated Java Methods List<String> javaDeclMethsAnnotationArea = new ArrayList<String>(); List<JavaMethod> javaDeclAnnotatedMeths = QDoxUtil.getAllDeclaredAnnotatedJavaMethodsInFile(qDoxFile); List<com.github.antlrjavaparser.api.body.BodyDeclaration> javaDeclAnnotatedMeths2 = QDoxUtil .getAllDeclaredAnnotatedJavaMethodsInFile(actualFileAsString); for (int i = 0; i < javaDeclAnnotatedMeths.size(); i++) { JavaMethod annotatedJavaMethod = javaDeclAnnotatedMeths.get(i); com.github.antlrjavaparser.api.body.BodyDeclaration annotatedJavaMethod2 = javaDeclAnnotatedMeths2 .get(i); buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer annotationArea = new StringBuffer(""); StringBuffer annotationAreaCommented = new StringBuffer(""); int lineStart = annotatedJavaMethod2.getAnnotations().get(0).getBeginLine(); int lineEnd = annotatedJavaMethod2.getAnnotations() .get(annotatedJavaMethod2.getAnnotations().size() - 1).getEndLine(); line = buffer.readLine(); int fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (fileLineNumber == lineEnd) { annotationArea.append(line); annotationArea.append("\n"); annotationAreaCommented.append("/*" + fileLineNumber + "*/" + "/* " + line + "*/"); annotationAreaCommented.append("\n"); break; } else { annotationArea.append(line); annotationArea.append("\n"); annotationAreaCommented.append("/*" + fileLineNumber + "*/" + "/* " + line + "*/"); annotationAreaCommented.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } // end while // processing java method annotations String annotationAreaCommentedStr = QDoxUtil.getMethodAnnotationAreaCommentedProcessedWithJML( annotatedJavaMethod, annotationAreaCommented); // updating the current meth annotations in the file buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer fileStartCurrentMeth = new StringBuffer(""); lineStart = annotatedJavaMethod.getLineNumber(); lineEnd = annotatedJavaMethod2.getEndLine(); line = buffer.readLine(); fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (fileLineNumber == lineEnd) { fileStartCurrentMeth.append(line); fileStartCurrentMeth.append("\n"); break; } else { fileStartCurrentMeth.append(line); fileStartCurrentMeth.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } String fileAsStringTmp = StringUtils.replaceOnce(fileStartCurrentMeth.toString(), annotationArea.toString(), annotationAreaCommentedStr.toString()); fileAsString = StringUtils.replaceOnce(fileAsString, fileStartCurrentMeth.toString(), fileAsStringTmp); javaDeclMethsAnnotationArea.add(annotationAreaCommentedStr); } // end for // handling annotated Java Types List<JavaClass> javaDeclAnnotatedTypes = QDoxUtil.getAllDeclaredAnnotatedJavaTypesInFile(qDoxFile); List<com.github.antlrjavaparser.api.body.TypeDeclaration> javaDeclAnnotatedTypes2 = QDoxUtil .getAllDeclaredAnnotatedJavaTypesInFile(actualFileAsString); for (int i = 0; i < javaDeclAnnotatedTypes.size(); i++) { JavaClass annotatedJavaType = javaDeclAnnotatedTypes.get(i); com.github.antlrjavaparser.api.body.TypeDeclaration annotatedJavaType2 = javaDeclAnnotatedTypes2 .get(i); buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer annotationArea = new StringBuffer(""); StringBuffer annotationAreaCommented = new StringBuffer(""); int lineStart = annotatedJavaType2.getAnnotations().get(0).getBeginLine(); int lineEnd = annotatedJavaType2.getAnnotations() 
.get(annotatedJavaType2.getAnnotations().size() - 1).getEndLine(); // System.out.println("lineStart = "+lineStart); // System.out.println("lineEnd = "+lineEnd); line = buffer.readLine(); int fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (fileLineNumber == lineEnd) { annotationArea.append(line); annotationArea.append("\n"); annotationAreaCommented .append("/*" + fileLineNumber + "*/" + "/* " + line.replace("@", "#") + "*/"); annotationAreaCommented.append("\n"); break; } else { annotationArea.append(line); annotationArea.append("\n"); annotationAreaCommented .append("/*" + fileLineNumber + "*/" + "/* " + line.replace("@", "#") + "*/"); annotationAreaCommented.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } // end while // selecting the entire type decl buffer = new BufferedReader(new StringReader(fileAsString)); StringBuffer typeDecl = new StringBuffer(""); int newEnd = annotatedJavaType2.getEndLine(); lineStart = lineEnd + 1; lineEnd = newEnd; // System.out.println("lineStart<new> = "+lineStart); // System.out.println("lineEnd<new> = "+lineEnd); line = buffer.readLine(); fileLineNumber = 1; while (line != null) { if (fileLineNumber >= lineStart) { if (fileLineNumber == lineEnd) { typeDecl.append(line); typeDecl.append("\n"); } else { typeDecl.append(line); typeDecl.append("\n"); } } line = buffer.readLine(); fileLineNumber++; } String typeJMLAnno = ""; if (annotationArea.toString().contains("@NonNullByDefault")) { typeJMLAnno = "/*@ non_null_by_default @*/"; } if (annotationArea.toString().contains("@NullableByDefault")) { typeJMLAnno = "/*@ nullable_by_default @*/" + typeJMLAnno; } if (annotationArea.toString().contains("@SpecPublic")) { typeJMLAnno = "/*@ spec_public @*/" + typeJMLAnno; } if (annotationArea.toString().contains("@SpecProtected")) { typeJMLAnno = "/*@ spec_protected @*/" + typeJMLAnno; } // processing java type annotations String annotationAreaCommentedStr = QDoxUtil.getTypeAnnotationAreaCommentedProcessedWithJML( annotatedJavaType, annotationAreaCommented.toString()); // placing them where the JML compiler can understand - [[[hemr]]] String typeDeclStr = StringUtils.replaceOnce(typeDecl.toString(), "{", "{" + annotationAreaCommentedStr); // updating the current type annotations in the file // removing type annotations fileAsString = StringUtils.replaceOnce(fileAsString, annotationArea.toString(), typeJMLAnno); fileAsString = StringUtils.replaceOnce(fileAsString, typeDecl.toString(), typeDeclStr); } // end for // // collecting all methods that lexically occur within a file bufferedFile.delete(0, (bufferedFile.length() - 1)); // reset for later use javaDeclMeths = QDoxUtil.getAllDeclaredJavaMethodsInFile(qDoxFile); fileMeths = QDoxUtil.getAllJavaMethodDeclLexicallyInFile(bufferedFile, fileAsString, javaDeclMeths, members); // System.out.println("fileMeths = "+fileMeths.size()); // for (Iterator<String> iterator = fileMeths.iterator(); iterator.hasNext();) { // String currentMeth = iterator.next(); // System.out.println("matchedMeth = "+currentMeth); // } fileAsString = QDoxUtil.getFileMethDeclsProcessed(fileAsString, javaDeclMeths, fileMeths, file); // final issues about JML type annotations Pattern jmlAnnoPattern = Pattern.compile("@(\\s)*Pure(\\b)(\\s)*(\\((\\s)*\\))?"); Matcher jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ 
pure @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } jmlAnnoMatcher.reset(); jmlAnnoPattern = Pattern.compile("@(\\s)*Helper(\\b)(\\s)*(\\((\\s)*\\))?"); jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ helper @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } jmlAnnoMatcher.reset(); jmlAnnoPattern = Pattern.compile("@(\\s)*Nullable(\\b)(\\s)*(\\((\\s)*\\))?"); jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ nullable @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } jmlAnnoMatcher.reset(); jmlAnnoPattern = Pattern.compile("@(\\s)*NonNull(\\b)(\\s)*(\\((\\s)*\\))?"); jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ non_null @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } jmlAnnoMatcher.reset(); jmlAnnoPattern = Pattern.compile("@(\\s)*SpecPublic(\\b)(\\s)*(\\((\\s)*\\))?"); jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ spec_public @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } jmlAnnoMatcher.reset(); jmlAnnoPattern = Pattern.compile("@(\\s)*SpecProtected(\\b)(\\s)*(\\((\\s)*\\))?"); jmlAnnoMatcher = jmlAnnoPattern.matcher(fileAsString); while (jmlAnnoMatcher.find()) { int numberOfNewLines = QDoxUtil.getLineNumbersQtd(jmlAnnoMatcher.group()); fileAsString = StringUtils.replaceOnce(fileAsString, jmlAnnoMatcher.group(), "/*@ spec_protected @*/" + QDoxUtil.getNewLinesCaracter(numberOfNewLines)); } fileAsString = fileAsString.replaceAll("/\\*(\\s)*/\\*@", "/*@"); fileAsString = fileAsString.replaceAll("@\\*/(\\s)*\\*/", "@*/"); // final issues due to annotation types support fileAsString = fileAsString.replace("@interface", "interface"); // fileAsString = fileAsString.replaceAll("\"[\\w\"\\s+\\{\\}\\(\\)\\.]*(\\b)default(\\b)[\\w\"\\s+\\{\\}\\(\\)\\.]*\"", "\"\""); // improve! 
[[[hemr]]] ** Pattern defaultTextPattern = Pattern .compile("\"[\\w\"\\s+\\{\\}\\(\\)\\.]*(\\b)default(\\b)[\\@\\-\\w\"\\s+\\{\\}\\(\\)\\.]*\""); Matcher defaultTextMatcher = defaultTextPattern.matcher(fileAsString); while (defaultTextMatcher.find()) { fileAsString = StringUtils.replaceOnce(fileAsString, defaultTextMatcher.group(), defaultTextMatcher.group().replace("default", "")); } // handling default stmt in annotation types fileAsString = fileAsString .replaceAll("(\\b)default(\\b)(\\s)*[\\@\\-\\w\"\\s+\\{\\}\\(\\)\\.]*(\\s)*;", ";"); // handling static imports to support java 5 mode Pattern staticImportPattern = Pattern.compile("import static (.)*;"); Matcher staticImportMatcher = staticImportPattern.matcher(fileAsString); while (staticImportMatcher.find()) { String staticImport = staticImportMatcher.group(); staticImport = staticImport.substring(0, staticImport.lastIndexOf(".")).replace("static ", "") + ";"; fileAsString = StringUtils.replaceOnce(fileAsString, staticImportMatcher.group(), staticImport); } // handling standard Java 5 annotations if (fileAsString.contains("@Inherited")) { fileAsString = fileAsString.replaceAll("@Inherited((\\s)*\\((\\s)*\\))?", ""); } if (fileAsString.contains("@Override")) { fileAsString = fileAsString.replaceAll("@Override((\\s)*\\((\\s)*\\))?", ""); } if (fileAsString.contains("@Deprecated")) { fileAsString = fileAsString.replaceAll("@Deprecated((\\s)*\\((\\s)*\\))?", ""); } if (fileAsString.contains("@SuppressWarnings")) { fileAsString = fileAsString.replaceAll("@SuppressWarnings((\\s)*\\(\"(.)*\"\\))?", ""); fileAsString = fileAsString.replaceAll("@SuppressWarnings((\\s)*\\((\\s)*\\))?", ""); } // handling dot dot dot in Java 5 if (fileAsString.contains("...")) { fileAsString = fileAsString.replace("...", "[]"); } } // final generic treatment // for debuggin purposes remove the following comment - [[[hemr]]] // System.err.println(fileAsString); return fileAsString; }
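Note that the buffer resets in getTypeErasureForTypeDeclsInFile call delete(0, bufferedFile.length() - 1); because the end index is exclusive, that form leaves the final character in the buffer, whereas delete(0, length()) empties it completely. A two-line check of that difference:

// Demonstrates that delete's end index is exclusive.
class DeleteEndExclusive {
    public static void main(String[] args) {
        StringBuffer b = new StringBuffer("abc");
        b.delete(0, b.length() - 1);                     // leaves "c" behind
        System.out.println(b.length() + " '" + b + "'"); // prints: 1 'c'
        b.delete(0, b.length());                         // empties the buffer completely
        System.out.println(b.length());                  // prints: 0
    }
}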
From source file: geva.Mapper.ContextFreeGrammar.java
/** * Reads in the BNF grammar specified by its argument text. * Returns true if loading of grammar was successful, false otherwise. * @param bnfString grammar as a string//from w w w . j ava 2 s .co m * @return if parsing was correct */ @SuppressWarnings({ "ConstantConditions" }) boolean readBNFString(String bnfString) throws MalformedGrammarException { if (bnfString == null) { return false; } Rule newRule = new Rule(); // Used to create new rules for grammar boolean insertRule = false;// If newRule is to be inserted onto grammar Rule currentRule = null;// Used in pass 2 to add productions to current rule Production newProduction = new Production();// Used to create new productions for grammar Symbol newSymbol = new Symbol();// Used to create new symbols for grammar String symbolString; Symbol newTokenSeparator = new Symbol();// Used to create token separators for grammar int bnfString_size = bnfString.length(); char currentChar;// Current char of input char separated = 0;// If there was a separator between previous token and current one boolean skip = false;// Skip an iteration on parser (for escaped newlines) boolean quoted = false;// If current char is quoted boolean non_terminal = false;// If current text is a non-terminal symbol StringBuffer currentBuffer = new StringBuffer(bnfString_size);// Buffer used to add new symbols to grammar // States of parser final int START = 0; final int START_RULE = 1; final int LHS_READ = 2; final int PRODUCTION = 3; final int START_OF_LINE = 4; int state = START;// Current state of parser int i; try { for (int pass = 0; pass < 2; pass++) { //Do 2 passes over the string i = 0; while (i < bnfString_size) { if (i < bnfString_size) { currentChar = bnfString.charAt(i); } else { // Simulate presence of endl at end of grammar currentChar = '\n'; } if (bnfString.charAt(i) == '\\') { // Escape sequence i++; if (i >= bnfString_size) {// Escape sequence as last char is invalid throw new MalformedGrammarException("Escape sequence as last char is invalid"); } else { if ((non_terminal) && (bnfString.charAt(i) != '\n')) { // Only escaped newline allowed inside non-terminal throw new MalformedGrammarException( "Only escaped newline allowed inside non-terminal"); } } if (bnfString.charAt(i) == '\'') {// Single quote currentChar = '\''; } else if (bnfString.charAt(i) == '\'') {// Double quote currentChar = '\''; } else if (bnfString.charAt(i) == '\\') {// Backslash currentChar = '\\'; } else if (bnfString.charAt(i) == '0') {// Null character currentChar = '\0'; } else if (bnfString.charAt(i) == 'a') {// Audible bell currentChar = '\007'; } else if (bnfString.charAt(i) == 'b') {// Backspace currentChar = '\b'; } else if (bnfString.charAt(i) == 'f') {// Formfeed currentChar = '\f'; } else if (bnfString.charAt(i) == 'n') {// Newline currentChar = '\n'; } else if (bnfString.charAt(i) == 'r') {// Carriage return currentChar = '\r'; } else if (bnfString.charAt(i) == 't') {// Horizontal tab currentChar = '\t'; } else if (bnfString.charAt(i) == 'v') {// Vertical tab currentChar = '\013'; } else if (bnfString.charAt(i) == '\n') {// Escaped newline skip = true;// Ignore newline } else if (bnfString.charAt(i) == '\r') {// Escaped DOS return skip = true;// Ignore newline if (bnfString.charAt(++i) != '\n') { throw new MalformedGrammarException("No newlinwe"); } } else {// Normal character currentChar = bnfString.charAt(i); } if ((!skip) && (pass > 0)) { if (currentBuffer.length() == 0) {//Empty newSymbol = new Symbol(); newSymbol.setType(Enums.SymbolType.TSymbol); } 
currentBuffer.append(currentChar); } } else { switch (state) { case (START): if (currentChar == '\r') { break;// Ignore DOS newline first char } if (currentChar == '#') { // this line is a comment in the grammar so skip to end of line while (i < bnfString_size && bnfString.charAt(i) != '\n') { //System.out.println("charAt:" + bnfString.charAt(i)); i++; } // we have skipped to end of line, so exit the switch // next time round, it will see the "\n" (or "\r\n") at end of line break; } switch (currentChar) { case ' ':// Ignore whitespaces case '\t':// Ignore tabs case '\n':// Ignore newlines break; case '<':// START OF RULE newSymbol = new Symbol(); newSymbol.setType(Enums.SymbolType.NTSymbol); currentBuffer.append(currentChar); state = START_RULE; break; default: // Illegal throw new MalformedGrammarException( "Illegal character `" + currentChar + "' found at start of grammar"); } break; case (START_RULE):// Read the lhs Non-terminal symbol if (currentChar == '\r') { break;// Ignore DOS newline first char } switch (currentChar) { case '\n':// Newlines are illegal here throw new MalformedGrammarException("Misplaced newline"); case '>': // Possible end of non-terminal symbol currentBuffer.append(currentChar); symbolString = currentBuffer.toString(); if (pass == 0) {// First pass // Check if new symbol definition if (findRule(newSymbol) == null) {// Create new rule for symbol insertRule = true;//We will add the newRule to Grammar.Rules newRule.setLHS(new Symbol(symbolString, Enums.SymbolType.NTSymbol)); } else { insertRule = true;//We will not add a rule this time } } else { // Second pass // Point currentRule to previously defined rule currentRule = findRule(symbolString); if (currentRule == null) { throw new MalformedGrammarException( "Current rule is null: " + symbolString); } } currentBuffer.delete(0, currentBuffer.length());// Reset the buffer state = LHS_READ;// lhs for this rule has been read break; default:// Check for non-escaped special characters if (((currentChar == '"') || (currentChar == '|') || (currentChar == '<'))) { throw new MalformedGrammarException("Non escaped special character"); } currentBuffer.append(currentChar); } break; case (LHS_READ):// Must read ::= token if (currentChar == '\r') { break;// Ignore DOS newline first char } switch (currentChar) { case ' ':// Ignore whitespaces case '\t':// Ignore tabs case '\n':// Ignore newlines break; case ':':// Part of ::= token currentBuffer.append(currentChar); break; case '=':// Should be end of ::= token currentBuffer.append(currentChar); String s = currentBuffer.toString(); if (s.compareTo("::=") != 0) {// Something other than ::= was read throw new MalformedGrammarException("Something other than ::= was read"); } currentBuffer.delete(0, currentBuffer.length()); // START OF PRODUCTION newProduction.clear(); state = PRODUCTION; break; default: // Illegal throw new MalformedGrammarException( "Illegal character `" + currentChar + "' found in ::= token"); } break; case (PRODUCTION):// Read everything until | token or \n, or EOL if (currentChar == '\r') { break;// Ignore DOS newline first char } if (pass == 0) { if (currentChar == '\n') { state = START_OF_LINE; } break; } else { switch (currentChar) { case '|':// Possible end of production if (quoted) {// Normal character currentBuffer.append(currentChar); break; } case '\n':// End of production (and possibly rule) separated = 0;// Reset separator marker if ((currentBuffer.length() != 0) || (newProduction.size() == 0)) {// There is a symbol to add if (currentBuffer.length() == 
0) { // No symbol exists; create terminal empty symbol newSymbol.setType(Enums.SymbolType.TSymbol); } if (non_terminal) {// Current non-terminal symbol isn't finished throw new MalformedGrammarException( "Current non-terminal symbol isn't finished"); } symbolString = currentBuffer.toString(); newSymbol.setSymbolString(symbolString); if (newSymbol.getType() == Enums.SymbolType.NTSymbol) { // Find rule that defines this symbol Rule tempRule = findRule(newSymbol); if (tempRule != null) { newProduction.add(new Symbol(newSymbol)); } else {// Undefined symbol, insert anyway newProduction.add(new Symbol(newSymbol)); } } else {// Add terminal symbol newProduction.add(new Symbol(newSymbol)); } newSymbol.clear();// Reset the symbol } // END OF PRODUCTION // Add production to current rule currentRule.add(new Production(newProduction)); currentBuffer.delete(0, currentBuffer.length());// Reset the buffer if (currentChar == '\n') state = START_OF_LINE; else { // START OF PRODUCTION newProduction.clear(); } break; case '<':// Possible start of non-terminal symbol case '>':// Possible end of non-terminal symbol case ' ':// Possible token separator case '\t':// Possible token separator if ((quoted) || (((currentChar == ' ') || (currentChar == '\t')) && (non_terminal))) {// Spaces inside non-terminals are accepted currentBuffer.append(currentChar); if (!non_terminal) { newSymbol.setType(Enums.SymbolType.TSymbol); } break; } if (currentChar == '>') {// This is also the end of a non-terminal symbol currentBuffer.append(currentChar); non_terminal = false; } if (currentBuffer.length() != 0) { if (non_terminal) {// Current non-terminal symbol isn't finished throw new MalformedGrammarException( "Current non-terminal symbol isn't finished"); } if ((currentChar == ' ') || (currentChar == '\t')) {// Token separator separated = 1; } symbolString = currentBuffer.toString(); newSymbol.setSymbolString(symbolString); if (newSymbol.getType() == Enums.SymbolType.NTSymbol) { // Find rule that defines this symbol Rule tempRule = findRule(newSymbol); if (tempRule != null) { newProduction.add(new Symbol(newSymbol)); } else { // Undefined symbol, insert anyway newProduction.add(new Symbol(newSymbol)); } } else {// Add terminal symbol newProduction.add(new Symbol(newSymbol)); } newSymbol.clear();// Reset the symbol } else {// Empty buffer if (((currentChar == ' ') || (currentChar == '\t')) && (newProduction.size() != 0)) { // Probably a token separator after a non-terminal symbol separated = 1; } } currentBuffer.delete(0, currentBuffer.length());// Reset the buffer if (currentChar == '<') {// This is also the start of a non-terminal symbol // Special case; must create new Symbol here newSymbol.clear(); newSymbol.setType(Enums.SymbolType.NTSymbol); currentBuffer.append(currentChar); non_terminal = true;// Now reading a non-terminal symbol if (separated == '1') {// Insert a token separator separated = 0; newTokenSeparator.clear(); newTokenSeparator.setSymbolString(" "); newTokenSeparator.setType(Enums.SymbolType.TSymbol); newProduction.add(new Symbol(newTokenSeparator)); } } break; default: // Add character to current buffer if (separated == '1') {// Insert a token separator separated = 0; newTokenSeparator.clear(); newTokenSeparator.setSymbolString(" "); newTokenSeparator.setType(Enums.SymbolType.TSymbol); newProduction.add(new Symbol(newTokenSeparator)); } if (currentChar == '"') {// Start (or end) quoted section quoted = !quoted; newSymbol.setType(Enums.SymbolType.TSymbol); break; } if (currentBuffer.length() == 0) { 
newSymbol.setType(Enums.SymbolType.TSymbol); } currentBuffer.append(currentChar); } break; } case (START_OF_LINE): if (currentChar == '#') { // this line is a comment in the grammar so skip to end of line while (i < bnfString_size && bnfString.charAt(i) != '\n') { //System.out.println("charAt:" + bnfString.charAt(i)); i++; } // we have skipped to end of line, so exit the switch // next time round, it will see the "\n" (or "\r\n") at end of line break; } if (currentChar == '\r') { break;// Ignore DOS newline first char } switch (currentChar) { case ' ':// Ignore whitespaces case '\t':// Ignore tabs case '\n':// Ignore newlines break; case '|':// Start of new production state = PRODUCTION; if (pass == 1) { // START OF PRODUCTION newProduction.clear(); } break; case '<':// Start of lhs non-terminal symbol // END OF RULE if (pass == 0) { // Add current rule if (insertRule) { rules.add(new Rule(newRule)); } } // START OF RULE newSymbol.setType(Enums.SymbolType.NTSymbol); currentBuffer.append(currentChar); state = START_RULE; break; default: // Illegal throw new MalformedGrammarException( "Illegal character `" + currentChar + "' found at start of line"); } break; default://Impossible error, quit the program now! throw new MalformedGrammarException("Impossible error, quit the program now!"); } } skip = false; i++; } // END OF PASS if (state != START_OF_LINE) {// This must be the state of the parser throw new MalformedGrammarException("START_OF_LINE must be the state of the parser"); } if (pass == 0) { // Add current rule if (insertRule) { this.rules.add(new Rule(newRule)); } } } checkInfiniteRecursion(); } catch (MalformedGrammarException ex) { this.setValidGrammar(false); logger.error("Exception parsing grammar", ex); throw ex; } updateRuleFields(); setValidGrammar(true); genotype2Phenotype(); return true; }
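The BNF reader above reuses a single currentBuffer for every token it scans, emitting the token and then clearing the buffer with delete(0, currentBuffer.length()) before reading the next one. A minimal tokenizer sketch in the same spirit (whitespace-separated tokens only; the method and variable names are illustrative):

import java.util.ArrayList;
import java.util.List;

// Hedged sketch: one reusable buffer per token, cleared with delete between tokens.
class SimpleTokenizer {
    static List<String> tokenize(String input) {
        List<String> tokens = new ArrayList<String>();
        StringBuffer currentBuffer = new StringBuffer(input.length());
        for (int i = 0; i <= input.length(); i++) {
            char c = i < input.length() ? input.charAt(i) : ' ';    // trailing separator flushes the last token
            if (c == ' ' || c == '\t' || c == '\n') {
                if (currentBuffer.length() != 0) {
                    tokens.add(currentBuffer.toString());
                    currentBuffer.delete(0, currentBuffer.length()); // reset for the next token
                }
            } else {
                currentBuffer.append(c);
            }
        }
        return tokens;
    }
}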
From source file:com.wabacus.system.component.application.report.ListReportType.java
private void showTreeRowGroupDataPart(List<ColBean> lstColBeans) {
    String trgroupid = rbean.getGuid() + "_trgroup_";
    List<RowGroupDataBean> lstTreeGroupDataBeans;
    TreeRowGroupDataBean trgdbean;
    RowDataBean rowInterceptorObjTmp = null;
    ColDisplayData colDisplayData;
    StringBuffer tdPropsBuf;
    AbsReportDataPojo rowDataObjTmp = null;
    int[] displayrowinfo = this.getDisplayRowInfo();
    if (displayrowinfo[1] <= 0)
        return;
    boolean isReadonlyByRowInterceptor;
    boolean isDisplayInPage = rrequest.getShowtype() == Consts.DISPLAY_ON_PAGE;
    for (int i = displayrowinfo[0]; i < displayrowinfo[1]; i++) {
        if (i >= this.lstReportData.size()) {
            dataPartStringBuffer.append(showDataRowInAddMode(lstColBeans, i));
            checkAndPrintBufferData(i);
        }
        rowDataObjTmp = lstReportData.get(i);
        ColBean cbeanTmp;
        String trstylepropertyTmp = null;
        for (String colcolumn : alrdbean.getLstRowgroupColsColumn()) {
            if (colcolumn == null)
                continue;
            cbeanTmp = rbean.getDbean().getColBeanByColColumn(colcolumn);
            if (this.cacheDataBean.getColDisplayModeAfterAuthorize(cbeanTmp, isDisplayInPage) < 0) {
                throw new WabacusRuntimeException("" + rbean.getPath() + "????");
            }
            lstTreeGroupDataBeans = this.mRowGroupCols.get(cbeanTmp);
            trgdbean = getTreeRowGroupDataBean(lstTreeGroupDataBeans, i);
            if (trgdbean == null)
                continue;
            dataPartStringBuffer.append(showTreeRowGroupTrStart(trgroupid + trgdbean.getLayer() + "_" + i, trgdbean));
            dataPartStringBuffer.append("<td class='cls-data-td-list' ");
            Object colDataObj = initDisplayCol(cbeanTmp, rowDataObjTmp);
            tdPropsBuf = new StringBuffer();
            tdPropsBuf.append(getTdPropertiesForCol(cbeanTmp, colDataObj, i, false));
            String col_displayvalue = getColDisplayValue(cbeanTmp, rowDataObjTmp, null, tdPropsBuf, colDataObj, i, false);
            colDisplayData = ColDisplayData.getColDataFromInterceptor(this, cbeanTmp, rowDataObjTmp, i,
                    getColValuestyleproperty(cbeanTmp, rowDataObjTmp), col_displayvalue);
            col_displayvalue = colDisplayData.getValue();
            dataPartStringBuffer.append(" ")
                    .append(getTreeNodeTdValueStyleProperty(trgdbean, colDisplayData.getStyleproperty(), i, cbeanTmp))
                    .append(">");
            String childIds = trgdbean.getAllChildDataRowIdxsAsString();
            if (!childIds.equals("")) {
                tdPropsBuf.append(" childDataIdSuffixes=\"").append(childIds).append("\"");
            }
            dataPartStringBuffer.append(showTreeNodeContent(trgroupid + trgdbean.getLayer() + "_" + i, trgdbean,
                    getColDisplayValueWithWrap(cbeanTmp, col_displayvalue, colDataObj, true), tdPropsBuf.toString()));
            dataPartStringBuffer.append("</td>");
            dataPartStringBuffer.append(showOtherTdInTreeGroupRow(trgdbean, i, cbeanTmp));
            dataPartStringBuffer.append("</tr>");
        }
        isReadonlyByRowInterceptor = false;
        trstylepropertyTmp = rowDataObjTmp.getRowValuestyleproperty();
        rowInterceptorObjTmp = null;
        if (this.rbean.getInterceptor() != null) {
            rowInterceptorObjTmp = new RowDataBean(this, trstylepropertyTmp, lstColBeans, rowDataObjTmp, i,
                    this.cacheDataBean.getTotalColCount());
            this.rbean.getInterceptor().beforeDisplayReportDataPerRow(this.rrequest, this.rbean, rowInterceptorObjTmp);
            if (rowInterceptorObjTmp.getInsertDisplayRowHtml() != null)
                dataPartStringBuffer.append(rowInterceptorObjTmp.getInsertDisplayRowHtml());
            if (!rowInterceptorObjTmp.isShouldDisplayThisRow()) {
                this.global_rowindex++;
                continue;
            }
            trstylepropertyTmp = rowInterceptorObjTmp.getRowstyleproperty();
            isReadonlyByRowInterceptor = rowInterceptorObjTmp.isReadonly();
        }
        dataPartStringBuffer.append(showTreeDataRowTrStart(rowInterceptorObjTmp,
                this.mAllParentRowGroupDataBeansForPerDataRow, trstylepropertyTmp, i));
        dataPartStringBuffer.append(">");
        dataPartStringBuffer.append(showTreeNodeTdInDataTr(i));
        for (RowGroupDataBean parentObjTmp : this.mAllParentRowGroupDataBeansForPerDataRow.get(i)) {
            dataPartStringBuffer.append(showHiddenCol(parentObjTmp.getCbean(), rowDataObjTmp, i));
        }
        boolean isReadonlyByColInterceptor;
        for (ColBean cbean : lstColBeans) {
            if (alrdbean.getLstRowgroupColsColumn().contains(cbean.getColumn()))
                continue;
            if (Consts.COL_DISPLAYTYPE_HIDDEN.equals(cbean.getDisplaytype(isDisplayInPage))
                    || this.cacheDataBean.getColDisplayModeAfterAuthorize(cbean, isDisplayInPage) <= 0) {
                dataPartStringBuffer.append(showHiddenCol(cbean, rowDataObjTmp, i));
                continue;
            }
            isReadonlyByColInterceptor = false;
            Object colDataObj = initDisplayCol(cbean, rowDataObjTmp);
            tdPropsBuf = new StringBuffer();
            dataPartStringBuffer.append("<td ").append(getTdPropertiesForCol(cbean, colDataObj, i, false));
            String col_displayvalue = getColDisplayValue(cbean, rowDataObjTmp, rowInterceptorObjTmp, tdPropsBuf,
                    colDataObj, i, isReadonlyByRowInterceptor);
            colDisplayData = ColDisplayData.getColDataFromInterceptor(this, cbean, rowDataObjTmp, i,
                    getColValuestyleproperty(cbean, rowDataObjTmp), col_displayvalue);
            isReadonlyByColInterceptor = colDisplayData.getColdataByInterceptor() != null
                    && colDisplayData.getColdataByInterceptor().isReadonly();
            if (!isReadonlyByRowInterceptor && isReadonlyByColInterceptor) {
                tdPropsBuf.delete(0, tdPropsBuf.length());
                col_displayvalue = getColDisplayValue(cbean, rowDataObjTmp, rowInterceptorObjTmp, tdPropsBuf,
                        colDataObj, i, true);
            } else {
                col_displayvalue = colDisplayData.getValue();
            }
            dataPartStringBuffer.append(" class='" + getDataTdClassName() + "' ");
            dataPartStringBuffer.append(tdPropsBuf.toString()).append(" ");
            dataPartStringBuffer.append(getTreeDataTdValueStyleProperty(cbean, colDisplayData.getStyleproperty(), i));
            dataPartStringBuffer.append(">").append(getColDisplayValueWithWrap(cbean, col_displayvalue, colDataObj,
                    isReadonlyByRowInterceptor || isReadonlyByColInterceptor));
            dataPartStringBuffer.append("</td>");
        }
        dataPartStringBuffer.append("</tr>");
        this.global_rowindex++;
        this.global_sequence++;
        checkAndPrintBufferData(i);
    }
}
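When the column interceptor marks a cell read-only, the report code above throws away whatever tdPropsBuf has accumulated with tdPropsBuf.delete(0, tdPropsBuf.length()) and recomputes the cell. A standalone sketch of that discard-and-rebuild pattern follows; the DiscardAndRebuild class, the renderCell method, and the attribute names are invented for illustration and are not part of the report engine.

public class DiscardAndRebuild {
    // Builds the attribute string for a cell; if a later check decides the cell
    // is read-only, the partially built attributes are thrown away and rebuilt.
    static String renderCell(String value, boolean readonly) {
        StringBuffer attrs = new StringBuffer();
        attrs.append(" contenteditable='true'");
        attrs.append(" data-value='").append(value).append("'");
        if (readonly) {
            attrs.delete(0, attrs.length());                          // discard everything built so far
            attrs.append(" data-value='").append(value).append("'");  // rebuild without the editable flag
        }
        return "<td" + attrs + ">" + value + "</td>";
    }

    public static void main(String[] args) {
        System.out.println(renderCell("42", false)); // <td contenteditable='true' data-value='42'>42</td>
        System.out.println(renderCell("42", true));  // <td data-value='42'>42</td>
    }
}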
From source file:dao.CollabrumDaoDb.java
/**
 * updates the stream blob (title, zoom)
 *
 * @param entryId - entry id
 * @param collabrumId - collabrum id
 * @param userId - user Id
 * @param userLogin - user login
 * @param zoom - the zoom
 * @param btitle - the blob title
 * @param def - the default blob
 * @param caption - caption
 */
public void updateStreamBlob(String entryId, String collabrumId, String userId, String userLogin, String zoom,
        String btitle, boolean def, String caption) throws BaseDaoException {
    if (RegexStrUtil.isNull(entryId) || RegexStrUtil.isNull(collabrumId) || RegexStrUtil.isNull(userId)) {
        throw new BaseDaoException("params are null");
    }
    if (RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(btitle)) {
        throw new BaseDaoException("params are null");
    }

    /**
     * check authority to update - diaryAdmin & isOrganizer are checked
     */
    if (!isOrganizer(collabrumId, userLogin, userId)) {
        StringBuffer sb = new StringBuffer("user does not have permission to update streamblobs in collabrum ");
        sb.append(collabrumId);
        sb.append(" userId ");
        sb.append(userId);
        throw new BaseDaoException(sb.toString());
    }

    /**
     * Get scalability datasource, collblob is partitioned on collabrumId
     */
    String sourceName = scalabilityManager.getWriteBlobScalability(collabrumId);
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        StringBuffer sb = new StringBuffer("ds null, in updateStreamBlob() in collabrumDaoDb ");
        sb.append(sourceName);
        sb.append(" collabrumId = ");
        sb.append(collabrumId);
        sb.append(" entryid = ");
        sb.append(entryId);
        throw new BaseDaoException(sb.toString());
    }

    boolean exists = false;
    try {
        Object[] params = { (Object) entryId };
        List result = defaultQuery.execute(params);
        if (result != null && result.size() > 0) {
            exists = true;
        }
    } catch (Exception e) {
        throw new BaseDaoException("error while " + defaultQuery.getSql());
    }

    if (RegexStrUtil.isNull(caption)) {
        caption = btitle;
    }

    Connection conn = null;
    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);
        updateStreamblobQuery.run(conn, entryId, collabrumId, zoom, btitle, caption);
        /** if this is the default photo and this photo does not exist, add this entry */
        if (def) {
            if (!exists) {
                deleteDefQuery.run(conn, collabrumId);
                addDefQuery.run(conn, entryId, collabrumId);
            }
        } else {
            /** no longer a default photo, delete this entry */
            if (exists) {
                deleteDefQuery.run(conn, collabrumId);
            }
        }
    } catch (Exception e) {
        try {
            conn.rollback();
        } catch (Exception e1) {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                    conn.close();
                }
            } catch (Exception e2) {
                throw new BaseDaoException("connection close exception in add/delete default collabrum blob", e2);
            }
            throw new BaseDaoException("error occurred while rolling back entries from default collabrum stream blob", e1);
        }
    }

    try {
        conn.commit();
    } catch (Exception e3) {
        throw new BaseDaoException("commit exception", e3);
    }

    try {
        if (conn != null) {
            conn.setAutoCommit(true);
            conn.close();
        }
    } catch (Exception e4) {
        throw new BaseDaoException("connection close exception", e4);
    }

    /*
    try {
        if (conn != null) {
            conn.close();
        }
    } catch (Exception e2) {
        StringBuffer sb = new StringBuffer("updateBlob error, collabrumId ");
        sb.append(collabrumId);
        sb.append(" entryId = ");
        sb.append(entryId);
        sb.append(" btitle = ");
        sb.append(btitle);
        sb.append(" userId = ");
        sb.append(userId);
        throw new BaseDaoException(sb.toString(), e2);
    }
    StringBuffer sb = new StringBuffer("updateBlob error, collabrumId ");
    sb.append(collabrumId);
    sb.append(" entryId = ");
    sb.append(entryId);
    sb.append(" btitle = ");
    sb.append(btitle);
    sb.append(" userId = ");
    sb.append(userId);
    throw new BaseDaoException(sb.toString(), e);
    }
    try {
        if (conn != null) {
            conn.close();
        }
    } catch (Exception e) {
        StringBuffer sb = new StringBuffer("updateBlob error collabrumId ");
        sb.append(collabrumId);
        sb.append(" entryId = ");
        sb.append(entryId);
        sb.append(" btitle = ");
        sb.append(btitle);
        sb.append(" userId = ");
        sb.append(userId);
        throw new BaseDaoException(sb.toString(), e);
    }
    */

    /**
     * Jboss methods
     * fqn - fully qualified name (key = collabrumId + entryId);
     * check if the entryId is already set in the cache.
     * If it exists, return the entryId from the cache.
     */
    Fqn fqn = cacheUtil.fqn(DbConstants.COL_STREAM_BLOB);
    StringBuffer sb = new StringBuffer(collabrumId);
    sb.append("-");
    sb.append(entryId);
    String key = sb.toString();
    if (treeCache.exists(fqn, key)) {
        treeCache.remove(fqn, key);
    }
    fqn = cacheUtil.fqn(DbConstants.DEFAULT_PHOTO);
    if (treeCache.exists(fqn, key)) {
        treeCache.remove(fqn, key);
    }
    fqn = cacheUtil.fqn(DbConstants.COLLABRUM_STREAM_BLOBS);
    if (treeCache.exists(fqn, collabrumId)) {
        treeCache.remove(fqn, collabrumId);
    }
    fqn = cacheUtil.fqn(DbConstants.COLLABRUM);
    if (treeCache.exists(fqn, collabrumId)) {
        treeCache.remove(fqn, collabrumId);
    }
    fqn = cacheUtil.fqn(DbConstants.COLL_CAT);
    sb.delete(0, sb.length());
    sb.append(collabrumId);
    sb.append("-");
    sb.append(DbConstants.PHOTO_CATEGORY);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }
}
From source file:cn.com.sinosoft.cimp.recordsummarize.datadeduplication.service.impl.DataDeduplicationService.java
public Integer findTotalPage(String sql, String crCode, String boxT, List<Map<String, Object>> params) {
    StringBuffer finalSql = new StringBuffer();
    StringBuffer sb = new StringBuffer();
    String colTableName = "";
    StringBuffer temp = new StringBuffer();
    colTableName = queryCreditItemTableFieldName(
            "select CODE_COLLECTION_TABLE from dic_credit_record_management where cr_code='" + crCode + "'");
    // String s1 = colTableName.substring(8);
    // String s2 = "CIMP_" + s1;
    // sql = sql.replace(s2, colTableName);
    String ruleMin = "";
    String ruleMax = "";
    if (boxT != null && boxT.length() > 0) {
        ruleMin = boxT.split(",")[0];
        ruleMax = boxT;
        sb.append("select t.* from " + colTableName + " t where " + ruleMin);
        sb.append(" in (select c." + ruleMin + " from " + colTableName + " c where c.state_valid = '02' ");
        if (colTableName.equals("CIMP_CO_FOOD_LICENSE")) {
            if (crCode.equals("0103000000")) {
                sb.append(" and c.product_type like '?' ");
            }
            if (crCode.equals("0104000000")) {
                sb.append(" and c.product_type like '???' ");
            }
            if (crCode.equals("0105000000")) {
                sb.append(" and c.product_type like '????' ");
            }
            if (crCode.equals("0106000000")) {
                sb.append(" and c.product_type like '?' ");
            }
        }
        if (colTableName.equals("CIMP_CO_WATER")) {
            if (crCode.equals("0111000000")) {
                sb.append(" and c.comp_type like '0203%' ");
            }
        }
        if (colTableName.equals("CIMP_CO_DISCIPLINE")) {
            if (crCode.equals("0201010000")) {
                sb.append(" and c.specialty_primary like '01%' ");
            }
            if (crCode.equals("0206080000")) {
                sb.append(" and c.specialty_primary like '08%' ");
            }
            if (crCode.equals("0207020000")) {
                sb.append(" and c.specialty_primary like '0203%' ");
            }
            if (crCode.equals("0203050000")) {
                sb.append(" and c.specialty_primary like '05%' ");
            }
            if (crCode.equals("0204030000")) {
                sb.append(" and c.specialty_primary like '03%' ");
            }
            if (crCode.equals("0205040000")) {
                sb.append(" and c.specialty_primary like '04%' ");
            }
            if (crCode.equals("0208070000")) {
                sb.append(" and c.specialty_primary like '07%' ");
            }
            if (crCode.equals("0202020000")) {
                sb.append(" and c.specialty_primary like '02%' ");
            }
        }
        // sb.append(" group by " + ruleMax + " having count(1) > 1) and state_valid = '02' order by " + ruleMin);
        sb.append(" group by " + ruleMax + " having count(1) > 1) and state_valid = '02' ");
        if (colTableName.equals("CIMP_CO_FOOD_LICENSE")) {
            if (crCode.equals("0103000000")) {
                sb.append(" and t.product_type like '?' ");
            }
            if (crCode.equals("0104000000")) {
                sb.append(" and t.product_type like '???' ");
            }
            if (crCode.equals("0105000000")) {
                sb.append(" and t.product_type like '????' ");
            }
            if (crCode.equals("0106000000")) {
                sb.append(" and t.product_type like '?' ");
            }
        }
        if (colTableName.equals("CIMP_CO_WATER")) {
            if (crCode.equals("0111000000")) {
                sb.append(" and t.comp_type like '0203%' ");
            }
        }
        if (colTableName.equals("CIMP_CO_DISCIPLINE")) {
            if (crCode.equals("0201010000")) {
                sb.append(" and t.specialty_primary like '01%' ");
            }
            if (crCode.equals("0206080000")) {
                sb.append(" and t.specialty_primary like '08%' ");
            }
            if (crCode.equals("0207020000")) {
                sb.append(" and t.specialty_primary like '0203%' ");
            }
            if (crCode.equals("0203050000")) {
                sb.append(" and t.specialty_primary like '05%' ");
            }
            if (crCode.equals("0204030000")) {
                sb.append(" and t.specialty_primary like '03%' ");
            }
            if (crCode.equals("0205040000")) {
                sb.append(" and t.specialty_primary like '04%' ");
            }
            if (crCode.equals("0208070000")) {
                sb.append(" and t.specialty_primary like '07%' ");
            }
            if (crCode.equals("0202020000")) {
                sb.append(" and t.specialty_primary like '02%' ");
            }
        }
        sb.append(" order by " + ruleMin);
    }
    if (!params.isEmpty() || params.size() != 0) {
        int sum = 0;
        temp.append(" select distinct ");
        temp.append(ruleMin);
        temp.append(" from ( ");
        temp.append(sb);
        temp.append(" ) where 1 =1 ");
        temp.append(sql);
        Query query = hibernateTemplate.getSessionFactory().openSession().createSQLQuery(temp.toString());
        if (params.size() != 0) {
            for (Map<String, Object> map : params) {
                if (map.get("symbol").equals("R")) {
                    for (Entry<String, Object> entry : map.entrySet()) {
                        if (!(entry.getKey().equals("fuzzy") || entry.getKey().equals("symbol"))) {
                            query.setString(entry.getKey(), (String) entry.getValue());
                        }
                    }
                } else {
                    if (map.get("fuzzy").equals("T")) {
                        for (Entry<String, Object> entry : map.entrySet()) {
                            if (!(entry.getKey().equals("fuzzy") || entry.getKey().equals("symbol"))) {
                                query.setString(entry.getKey(), "%" + (String) entry.getValue() + "%");
                            }
                        }
                    } else {
                        for (Entry<String, Object> entry : map.entrySet()) {
                            if (!(entry.getKey().equals("fuzzy") || entry.getKey().equals("symbol"))) {
                                query.setString(entry.getKey(), (String) entry.getValue());
                            }
                        }
                    }
                }
            }
        }
        List<String> obj = query.list();
        if (obj.size() != 0) {
            String ruleStr = getSqlStrByList(obj, 999, ruleMin);
            // for (int i = 0; i < obj.size(); i++) {
            temp.delete(0, temp.length());
            temp.append("select count(*) quantity from ");
            temp.append(colTableName);
            temp.append(" where ");
            // temp.append(ruleMin);
            // temp.append(" in (:obj) ");
            temp.append(ruleStr);
            temp.append(" and state_valid = '02' ");
            if (temp != null && temp.length() > 0) {
                Query qry = hibernateTemplate.getSessionFactory().openSession().createSQLQuery(temp.toString());
                // qry.setParameterList("obj", obj);
                qry.setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
                Map map = (Map) qry.list().get(0);
                BigDecimal b = (BigDecimal) map.get("QUANTITY");
                sum = b.intValue();
            }
            // }
        }
        return sum;
    } else {
        temp.append("select count(*) quantity from ( ");
        temp.append(sb);
        temp.append(" ) ");
    }
    // StringBuffer sbSql = new StringBuffer(getSql(sql, crCode, boxT));
    // finalSql.append("select count(*) quantity from ( ");
    // finalSql.append(sbSql + " )");
    if (temp != null && temp.length() > 0) {
        Query query = hibernateTemplate.getSessionFactory().openSession().createSQLQuery(temp.toString());
        query.setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
        Map map = (Map) query.list().get(0);
        BigDecimal b = (BigDecimal) map.get("QUANTITY");
        return b.intValue();
    }
    return 0;
}
From source file:stg.pr.engine.CProcessRequestEngine.java
/**
 * <p>
 * The query picks up requests, from the table PROCESS_REQUEST, that have
 * Req_Stat='Q' and GRP_ST_IND='G' for the specified group. If requests are
 * found then each record is processed sequentially depending on the priority
 * defined in the group. If no records are found then the engine waits for a
 * specified time (picked up from the property file) and then resumes scanning.
 * </p>
 *
 * @param pconForGroupedRequest
 *            Connection object.
 * @param plGroupId
 *            Group id to be serviced.
 * @throws CProcessRequestEngineException
 */
private void startServiceForGroupedRequests(Connection pconForGroupedRequest, long plGroupId)
        throws CProcessRequestEngineException {
    boolean isQueuedReqFound = false;
    ProcessRequestController objPrCont_ = null;
    ProcessRequestController objPRC = null;
    ProcessReqParamsController objPRPC = null;
    ProcessReqParamsEntityBean objPRPEB = null;
    CDynamicDataContainer objCDC = null;
    StringBuffer reqLogFileName;
    StringBuffer reqLogFileUrl;
    try {
        reqLogFileName = new StringBuffer(50);
        reqLogFileUrl = new StringBuffer(50);
        objEngineLogger_.log(LogLevel.NOTICE, "Processing the Grouped Request for id " + plGroupId);
        if (objEngineLogger_.isInfoEnabled()) {
            objEngineLogger_.info("Starting service for scanning queued grouped requests ....");
        }
        CDynamicDataContainer objDdc_ = new CDynamicDataContainer();
        objDdc_.addWhereClause(FILTER_CONDITION);
        objDdc_.addOrderByClause(" Order By grp_id, grp_req_seq_no, priority");
        ProcessRequestEntityBean objPrEb_ = new ProcessRequestEntityBean();
        // ProcessReqParamsEntityBean objPrparamsEb_ = new ProcessReqParamsEntityBean();
        objPRC = new ProcessRequestController(pconForGroupedRequest);
        objPRPC = new ProcessReqParamsController(pconForGroupedRequest);
        objPRPEB = new ProcessReqParamsEntityBean();
        objCDC = new CDynamicDataContainer();
        objPrCont_ = new ProcessRequestController(pconForGroupedRequest);
        try {
            if (objEngineLogger_.isInfoEnabled()) {
                objEngineLogger_.info("Entered infinite loop, Initializing Request Entity Bean ....");
            }
            isQueuedReqFound = false;
            if (!bGroupedEngineTerminated_.get()) {
                objPrEb_.initialize();
                objPrEb_.setReqStat(REQUEST_STATUS.LAUNCHING.getID());
                objPrEb_.setGrpStInd(REQUEST_TYPE.GROUPED.getID());
                objPrEb_.setGrpId(plGroupId);
                objPrEb_.setScheduledTime(getCurrentTimestamp(pconForGroupedRequest));
                if (objEngineLogger_.isDebugEnabled()) {
                    objEngineLogger_.debug("Building query ....");
                }
                objDdc_.build(pconForGroupedRequest, objPrEb_, hmWhereCondition_); // This has been added later by Kedar on 3/1/2003
                if (objEngineLogger_.isDebugEnabled()) {
                    objEngineLogger_.debug("Querying for queued requests ...." + objDdc_.getQuery());
                }
                isQueuedReqFound = objDdc_.executeQuery(pconForGroupedRequest, objPrCont_, objPrEb_);
            }
            if (isQueuedReqFound) { // pending requests exist
                if (objEngineLogger_.isDebugEnabled()) {
                    objEngineLogger_.debug("Queued requests exists ....");
                }
                boolean bToProcessOthers = true;
                while (objDdc_.next()) {
                    ProcessRequestEntityBean objPrEb = (ProcessRequestEntityBean) objDdc_.get();
                    if (bToProcessOthers) {
                        if (tEngine_.isInterrupted() || bGroupedEngineTerminated_.get()) {
                            if (objEngineLogger_.isEnabledFor(LogLevel.NOTICE)) {
                                objEngineLogger_.log(LogLevel.NOTICE, "The Engine is being terminated. The Group Request #"
                                        + objPrEb.getGrpId()
                                        + " even though not complete will be allowed to be executed once PRE is restarted.");
                            }
                            break;
                        }
                    } else {
                        if (tEngine_.isInterrupted() || bGroupedEngineTerminated_.get()) {
                            if (objEngineLogger_.isEnabledFor(LogLevel.NOTICE)) {
                                objEngineLogger_.log(LogLevel.NOTICE,
                                        "The Engine is being terminated. Please wait till the remaining requests for the Group #"
                                                + objPrEb.getGrpId() + " are marked as Cancelled.");
                            }
                        }
                    }
                    objPRPEB.initialize();
                    reqLogFileName.delete(0, reqLogFileName.length());
                    reqLogFileUrl.delete(0, reqLogFileUrl.length());
                    reqLogFileName.append(strReqLogFilePath_);
                    reqLogFileName.append(objPrEb.getReqId());
                    reqLogFileName.append(".");
                    reqLogFileName.append(strReqLogFileExtension_);
                    reqLogFileUrl.append(strReqLogFileUrl_);
                    reqLogFileUrl.append(objPrEb.getReqId());
                    reqLogFileUrl.append(".");
                    reqLogFileUrl.append(strReqLogFileExtension_);
                    // objEngineLogger_.debug("Initialize Request Log File ....");
                    if (bToProcessOthers) {
                        ProcessRequestServicer objProcessRequest_ = null;
                        updateRequestStatus(pconForGroupedRequest, objPRC, objPrEb, REQUEST_STATUS.LAUNCHING,
                                reqLogFileUrl.toString());
                        if (objEngineLogger_.isInfoEnabled()) {
                            objEngineLogger_.info("Initialize Request Log File ....");
                        }
                        try {
                            if (objEngineLogger_.isInfoEnabled()) {
                                objEngineLogger_.info("Instantiate the Process Class which has been requested .....");
                            }
                            objProcessRequest_ = instantiateReqProcessObject(objPrEb_.getProcessClassNm());
                            bToProcessOthers = processEachRequest(pconForGroupedRequest, objPRC, objPrEb, objCDC,
                                    objPRPC, objPRPEB, objProcessRequest_, new File(reqLogFileName.toString()),
                                    reqLogFileUrl.toString());
                        } catch (RuntimeException re) {
                            try {
                                if (objEngineLogger_.isInfoEnabled()) {
                                    objEngineLogger_.info("Update Request Status to Error due to Exception.");
                                }
                                updateRequestStatus(staticConnection_, objPrCont_, objPrEb_, REQUEST_STATUS.ERROR,
                                        reqLogFileUrl.toString());
                            } catch (Exception e) {
                                objEngineLogger_.error("Caught exception while updating status to Error. ", e);
                            }
                            throw re;
                        } catch (Exception e) {
                            try {
                                if (objEngineLogger_.isInfoEnabled()) {
                                    objEngineLogger_.info("Update Request Status to Error due to Exception.");
                                }
                                updateRequestStatus(staticConnection_, objPrCont_, objPrEb_, REQUEST_STATUS.ERROR,
                                        reqLogFileUrl.toString());
                            } catch (Exception ex) {
                                objEngineLogger_.error("Caught exception while updating status to Error. ", ex);
                            }
                            throw e;
                        } catch (Error error) {
                            try {
                                if (objEngineLogger_.isInfoEnabled()) {
                                    objEngineLogger_.info("Update Request Status to Error due to Exception.");
                                }
                                updateRequestStatus(staticConnection_, objPrCont_, objPrEb_, REQUEST_STATUS.ERROR,
                                        reqLogFileUrl.toString());
                            } catch (Exception ex) {
                                objEngineLogger_.error("Caught exception while updating status to Error. ", ex);
                            }
                            throw error;
                        } finally {
                            objProcessRequest_ = null;
                        }
                    } // If bToProcessOthers
                    else {
                        try {
                            if (objEngineLogger_.isInfoEnabled()) {
                                objEngineLogger_.info("Update Request Status to " + REQUEST_STATUS.SUSPENDED.getDescription());
                            }
                            updateRequestStatus(pconForGroupedRequest, objPRC, objPrEb, REQUEST_STATUS.SUSPENDED,
                                    reqLogFileUrl.toString());
                        } catch (Exception e) {
                            objEngineLogger_.error("Caught exception while updating status to Error. ", e);
                            // do not throw, just log it.
                        }
                    }
                } // end of while(objDdc.next())

                // If the engine is terminated then update the status of the other grouped
                // requests to Q so that the remaining processes can be completed next time.
                // Added on 30/04/2004....Kedar.
                if ((tEngine_.isInterrupted() || bGroupedEngineTerminated_.get()) && bToProcessOthers) {
                    PreparedStatement psm = null;
                    try {
                        psm = pconForGroupedRequest.prepareStatement(
                                "UPDATE process_request set req_stat = ? WHERE grp_id = ? and req_stat = ?");
                        psm.setString(1, REQUEST_STATUS.QUEUED.getID());
                        psm.setLong(2, plGroupId);
                        psm.setString(3, REQUEST_STATUS.LAUNCHING.getID());
                        psm.executeUpdate();
                    } catch (Exception e) {
                        objEngineLogger_.error("Caught exception while updating status to Queued. ", e);
                        // Just log the exception. Need not be thrown.
                        // INFO Needs to be re-validated if this needs to be thrown.
                    } finally {
                        try {
                            if (psm != null) {
                                psm.close();
                            }
                        } catch (Exception e) {
                            if (objEngineLogger_.isEnabledFor(Level.WARN)) {
                                objEngineLogger_.warn("Exception can be ignored..Dummy exception.", e);
                            }
                        }
                    }
                } // ((tEngine.isInterrupted() || bGroupedEngineTerminated_))
            } // end of if objDdc.getTotalRows() > 0
        } catch (Exception e) {
            objEngineLogger_.error("Processing GroupedRequest: Exception Caught", e);
            // Just catch. No need to throw.
        }
    } catch (Exception e) {
        objEngineLogger_.error("Processing GroupedRequest: Exception Caught", e);
        // Just catch. No need to throw.
    } // end of 1st try catch block
    finally {
        objEngineLogger_.log(LogLevel.NOTICE, "Releasing Group Resources...");
        if (objPrCont_ != null) {
            try {
                objPrCont_.close();
            } catch (SQLException e) {
                objEngineLogger_.error("Processing GroupedRequest: Exception Caught while closing PRController", e);
            }
        }
        if (objPRC != null) {
            try {
                objPRC.close();
            } catch (SQLException e) {
                objEngineLogger_.error("Processing GroupedRequest: Exception Caught while closing PRController", e);
            }
        }
        if (objPRPC != null) {
            try {
                objPRPC.close();
            } catch (SQLException e) {
                objEngineLogger_.error("Processing GroupedRequest: Exception Caught while closing PRPController", e);
            }
        }
        reqLogFileName = null; // Nullifying the variables.
        reqLogFileUrl = null; // Nullifying the variables.
        objEngineLogger_.log(LogLevel.NOTICE, "Grouped Request processed for id " + plGroupId);
    }
}
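The engine above allocates reqLogFileName and reqLogFileUrl once with new StringBuffer(50) and empties them with delete(0, length()) on every pass of the loop. Unlike building a fresh buffer per request, delete keeps the already allocated backing array, so the loop avoids repeated reallocation; StringBuffer is also synchronized, so in purely single-threaded code StringBuilder would be the lighter choice. A small standalone sketch of that reuse follows; the path, extension, and request ids are invented for illustration.

public class ReusedLogFileNames {
    public static void main(String[] args) {
        String logPath = "/var/log/pre/";
        String extension = "log";
        long[] requestIds = { 101L, 102L, 103L };

        StringBuffer fileName = new StringBuffer(50); // sized once, reused for every request
        for (long reqId : requestIds) {
            fileName.delete(0, fileName.length());    // empty the buffer; capacity is retained
            fileName.append(logPath).append(reqId).append('.').append(extension);
            System.out.println(fileName);             // e.g. /var/log/pre/101.log
        }
    }
}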