Java tutorial
/**
 * Copyright (c) 2001-2012 "Redbasin Networks, INC" [http://redbasin.org]
 *
 * This file is part of Redbasin OpenDocShare community project.
 *
 * Redbasin OpenDocShare is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package dao;

import java.sql.Connection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import javax.sql.DataSource;

import model.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jboss.cache.Fqn;

import san.FileSystemImpl;
import san.SanException;

import util.*;

/**
 * <B>DirectoryDaoDb</B> <BR>
 * This object implements DirectoryDao interface
 *
 * @author Smitha Gudur (smitha@redbasin.com)
 * @version $Revision: 1.2 $
 */
public class DirectoryDaoDb extends DirectoryAbstractDao implements DirectoryDao {

    protected final Log logger = LogFactory.getLog(getClass());

    //private volatile DiaryAdmin diaryAdmin;
    //private volatile ExpiringObjectPool eop;

    /**
     * these objects are set by spring
     */
    // DirChildCountQuery dirChildCountQuery;
    // IsDirUserAllowedQuery isDirUserAllowedQuery;
    private volatile BasicQuery dirChildCountQuery;
    private volatile BasicQuery isDirUserAllowedQuery;
    private volatile BasicQuery directoryChildQuery;
    //private volatile DirectoryChildQuery directoryChildQuery;
    private volatile DirectoryScopeConstants dirScope;
    private volatile DirectoryQuery directoryQuery;
    private volatile DirectoryAllQuery directoryallQuery;
    private volatile DirectoryParentQuery directoryParentQuery;
    private volatile DirectorySubDirExistsQuery directorySubDirExistsQuery;
    private volatile DirectoryAddQuery directoryAddQuery;
    private volatile DirectoryDeleteQuery directoryDeleteQuery;
    private volatile DirectoryUpdateQuery directoryupdateQuery;
    private volatile DirectoryRenameQuery directoryrenameQuery;
    private volatile DirHitsQuery dirHitsQuery;
    private volatile DirectoryBlobUpdateQuery blobUpdateQuery;
    private volatile DirectoryTopQuery directoryTopQuery;
    private volatile DirAdminAddQuery addAdminQuery;
    private volatile DirAdminDeleteQuery deleteAdminQuery;
    // private volatile DirAdminExistsQuery diradminexistsQuery;
    private volatile DirectoryListAuthorQuery listAuthorQuery;
    private volatile DirectoryScopeQuery scopeQuery;
    private volatile DirectoryScopeAddQuery scopeAddQuery;
    private volatile DirectoryScopeDeleteQuery scopeDeleteQuery;
    private volatile DirectoryWebsiteQuery websiteQuery;
    private volatile DirectoryOneStreamBlobQuery onestreamblobQuery;
    private volatile DirectoryCollabrumQuery collabrumsQuery;
    private volatile DirectoryExistsQuery directoryExistsQuery;
    private volatile DirectoryIdQuery directoryIdQuery;
    private volatile DirectoryStreamDataQuery streamDataQuery;
    private volatile DirectoryBlobQuery blobQuery;
    private volatile DirectoryPhotosQuery photosQuery;
    //private volatile DirectoryWebsiteExistsQuery directoryWebsiteExistsQuery;
    // private volatile CollabrumAdminExistsQuery colladminexistsQuery;
    private volatile CollabrumOrganizersQuery getOrganizersQuery;
    private volatile ColTopicExistsQuery topicExistsQuery;
    private volatile DirMoveQuery dircanmoveQuery;
    private volatile DirMoveAddQuery dirMoveAddQuery;
    private volatile DirMoveDeleteQuery dirMoveDeleteQuery;
    private volatile DirMoveUpdateQuery dirMoveUpdateQuery;
    private volatile DirPasteQuery dirPasteQuery;
    private volatile DirCopyQuery dircancopyQuery;
    private volatile DirCopyAddQuery dirCopyAddQuery;
    private volatile DirCopyUpdateQuery dirCopyUpdateQuery;
    private volatile DirCopyDeleteQuery dirCopyDeleteQuery;
    private volatile DirMoveExistsQuery dirMoveExistsQuery;
    private volatile DeleteDirCopyQuery deleteDirCopyQuery;
    private volatile DirectoryBlockDeleteAllQuery deleteDirBlockAllQuery;
    private volatile DirectoryAllowDeleteAllQuery deleteDirAllowUsersAllQuery;
    // dirtree table -> child
    // private volatile DirectoryChildrenExistQuery directorychildrenexistQuery;
    private volatile DirUpdateChildQuery dirChildUpdateQuery;
    private volatile DirAddChildQuery addChildQuery;
    private volatile DirDeleteChildQuery deleteChildQuery;
    private volatile DirectoryChildrenQuery dirchildrenQuery;
    private volatile DirectoryNameQuery dirnameQuery;
    private volatile DirectoryScopeUpdateQuery scopeupdateQuery;
    // blocked members for a directory
    private volatile DirectoryBlockQuery blockDirectoryQuery;
    private volatile DirectoryCobrandQuery cobrandQuery;
    private volatile CollMemberBlockQuery listBlockedCollabrumsQuery;
    private volatile DefaultDirectoryBlobDeleteQuery deleteDefaultQuery;
    private volatile DefaultDirectoryBlobAddQuery addDefaultQuery;
    private volatile DefaultDirectoryBlobQuery defaultDirBlobQuery;
    private volatile DefaultDirectoryQuery defaultQuery;
    private volatile DirSearchQuery dirSearchQuery;
    private volatile UserSearchQuery userSearchQuery;
    private volatile PblogSearchQuery pblogSearchQuery;
    private volatile CarryonSearchQuery carryonSearchQuery;
    private volatile DirImageUpdateQuery dirImageUpdateQuery;
    private volatile BasicQuery dirRecentImagesQuery;
    private volatile BasicQuery dirRecentSelectImagesQuery;
    private volatile BasicQuery dirRandImagesQuery;
    private volatile BasicQuery showQuotaQuery;
    private volatile BasicQuery showGlobalQuotaQuery;
    private volatile BasicQuery getUserQuotaQuery;
    private volatile BasicQuery getSubDirsQuery;
    // private volatile BasicQuery quotaExistsQuery;
    private volatile BasicQuery dirAuthorsForOthersDirsQuery;
    private volatile BasicQuery dirUserForOthersDirsQuery;
    private volatile BasicQuery dirChildrenAlphabetQuery;
    private volatile BaseCommonQuery setQuotaQuery;
    private volatile BaseCommonQuery updateQuotaQuery;
    private volatile BaseCommonQuery updateGlobalQuotaQuery;
    private volatile BaseCommonQuery addGlobalQuotaQuery;
    // private volatile SetQuotaQuery setQuotaQuery;
    // private volatile ShowQuotaQuery showQuotaQuery;
    // private volatile DirImagesRecentQuery dirRecentImagesQuery;
    // private volatile DirImagesSelectRecentQuery dirRecentSelectImagesQuery;
    // private volatile DirImagesRandQuery dirRandImagesQuery;
    // delete collabrums queries
    private volatile CollabrumDefaultDeleteQuery deleteDefQuery;
    private volatile CollabrumDeleteQuery deleteQuery;
    private volatile CollabrumDeleteAdminQuery deleteCollAdminQuery;
    private volatile ColStreamBlobDeleteAllQuery deleteAllCollBlobsQuery;
    private volatile CollBlobTagsDeleteQuery deleteTagsQuery;
    private volatile DeleteColBlobTagsQuery deleteColBlobTagsQuery;
    private volatile CollBlockDeleteAllQuery deleteCollBlockQuery;
    private volatile DeleteColCobrandPerCollabrumQuery deleteColCobrandQuery;
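    /*
     * Illustrative sketch, not part of the original listing: the class comment
     * above notes that these query objects are "set by spring", and the DAO
     * elsewhere exposes plain setters (for example setJdbcSource) for container
     * injection. A setter of the following shape is assumed for each query
     * field; the method below is hypothetical and simply mirrors the field it
     * sets, so the Spring bean definition can wire it by property name.
     */
    public void setDirChildCountQuery(BasicQuery dirChildCountQuery) {
        this.dirChildCountQuery = dirChildCountQuery;
    }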
private volatile CollAllMembersDeleteQuery deleteAllMembersQuery; private volatile DeleteColTopicsPerCollabrumQuery deleteColTopicsQuery; private volatile DeleteColTopicAttrPerCollabrumQuery deleteColTopicsAttrQuery; private volatile DeleteColMessageAttrPerTidQuery deleteColMsgAttrQuery; private volatile DeleteColMessagePerTidQuery deleteColMessagesQuery; private volatile DirDefDirBlobDeleteAllQuery deleteDefDirBlobAllQuery; private volatile DirBlobDeleteAllQuery deleteDirBlobAllQuery; private volatile DirBlobTagsDeleteAllQuery deleteDirBlobTagsAllQuery; private volatile DirCobrandDeleteAllQuery deleteDirCobrandAllQuery; private volatile DirImagesDeleteAllQuery deleteDirImagesAllQuery; private volatile DirMoveDeleteAllQuery deleteDirMoveAllQuery; //private volatile ListDirAdminForAuthorQuery listDirAdminForAuthorQuery; //private volatile ListDirAllowUsersQuery listDirAllowUsersQuery; //private volatile DirBlobSizeQuery dirBlobSizeQuery; private volatile BasicQuery listDirAdminForAuthorQuery; private volatile BasicQuery listDirAllowUsersQuery; private volatile BasicQuery dirBlobSizeQuery; //private volatile ColTopicsIdQuery getTidsQuery; private volatile BasicQuery getTidsQuery; SanUtils sanUtils = null; /** * checkIfDirectory * This method checks if any directory exists * @return boolean - returns true if directory exists, otherwise false */ public boolean checkIfDirectoryExists() { try { List result = directoryExistsQuery.execute(); if ((result != null) && (result.size() > 0)) { return true; } else { return false; } } catch (Exception e) { throw new BaseDaoException("error in isDirectoryExists()" + directoryExistsQuery.getSql(), e); } } /** * getSanUtils * creates new SanUtils */ public void getSanUtils() { if (sanUtils == null) { sanUtils = new SanUtils(); } } /** * isAdmin - is this user an admin * This method checks if this user is an administrator * @return boolean - returns true if the user is an administrator, false otherwise */ public boolean isAdmin(String userLogin) { if (!RegexStrUtil.isNull(userLogin)) { if (diaryAdmin.isDiaryAdmin(userLogin)) { return true; } } return false; } /** * View directory returns a directory, reads from a slave. * This directory can be viewed by anyone i.e on the internet (without session) * @param directoryId * @param alphabet * @return Directory * @throws BaseDaoException */ public Directory viewDirectory(String directoryId, String alphabet) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { directoryId = getTopDirectory(); } return (getDiaryDirectory(directoryId, null, null, DbConstants.READ_FROM_SLAVE, DbConstants.BLOB_READ_FROM_SLAVE, DbConstants.READ_FROM_SLAVE, alphabet)); } /** * View directory returns a directory, reads from a slave. * This directory can be viewed by anyone i.e on the internet (without session) * @param directoryId * @return Directory * @throws BaseDaoException */ public Directory viewDirectory(String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { directoryId = getTopDirectory(); } return (getDiaryDirectory(directoryId, null, null, DbConstants.READ_FROM_SLAVE, DbConstants.BLOB_READ_FROM_SLAVE, DbConstants.READ_FROM_SLAVE, null)); } /** * View directory returns a directory. 
* This directory can be viewed by anyone i.e on the internet (without session) * @param directoryId * @param userId * @param login * @param accessFlag the access flag indicates the datasource master(1) or slave(0) * @param blobAccessFlag the blob access flag indicates the blob datasource master(1) or slave(0) * @param websiteAccessFlag websiteAccessFlag indicates directory website * datasource master(1) or slave(0) * @param alphabet * @return Directory * @throws BaseDaoException */ public Directory viewDirectory(String directoryId, String userId, String login, int accessFlag, int blobAccessFlag, int websiteAccessFlag, String alphabet) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { directoryId = getTopDirectory(); } return (getDiaryDirectory(directoryId, userId, login, accessFlag, blobAccessFlag, websiteAccessFlag, alphabet)); } /** * View directory returns a directory. * This directory can be viewed by anyone i.e on the internet (without session) * @param directoryId * @param userId * @param login * @param accessFlag the access flag indicates the datasource master(1) or slave(0) * @param blobAccessFlag the blob access flag indicates the blob datasource master(1) or slave(0) * @param websiteAccessFlag websiteAccessFlag indicates directory website * datasource master(1) or slave(0) * @return Directory * @throws BaseDaoException */ public Directory viewDirectory(String directoryId, String userId, String login, int accessFlag, int blobAccessFlag, int websiteAccessFlag) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { directoryId = getTopDirectory(); } return (getDiaryDirectory(directoryId, userId, login, accessFlag, blobAccessFlag, websiteAccessFlag, null)); } public void addDirectory(String dirname, String keywords, String parentId, String scopeid, String desc, String userId, String userLogin, String operations) throws BaseDaoException { boolean addSanFlag = true; addDirectory(dirname, keywords, parentId, scopeid, desc, userId, userLogin, operations, addSanFlag); } /** * Add a new subdirectory for a directory. * User permissions are checked before the user is allowed to add it. * If the user is the userid or administrator, add subdirectory. * If the parent directory (parentid) permission id is set to addchild, add subdirectory * if the user has permission to addchild for parentid, add subdirectory. 
* @param dirname - directory name * @param keywords - keywords for this directory * @param parentId - parent id * @param scopeid - scope identifier * @param desc - description of the directory * @param userId of the parent * @param userLogin - of the parent * @param operations - assign authors/all are authors * @param addSanFlag - true - add san directory, false otherwise * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public void addDirectory(String dirname, String keywords, String parentId, String scopeid, String desc, String userId, String userLogin, String operations, boolean addSanFlag) throws BaseDaoException { /** * An entry is added in the following tables: directory, dirtree, dirscope, diradmin */ if (RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(dirname)) { throw new BaseDaoException("params are null"); } boolean isAddable = false; String dirpath, dirlink, stateid; dirpath = dirlink = stateid = ""; /** * Does any directory exist in the database * New node is added as the root of the directory */ if (RegexStrUtil.isNull(parentId) && !checkIfDirectoryExists()) { stateid = "3"; if (addSanFlag) { try { addSanDir(dirpath, dirname); } catch (SanException e) { throw new BaseDaoException("addSanDir()" + e.getMessage(), e); } } addRootNode(dirname, keywords, desc, dirlink, dirpath, stateid, userId, scopeid); return; } else { if (diaryAdmin.isDiaryAdmin(userLogin)) { isAddable = true; } // allow the users who login for the first time to add directory to // the sanConstants.sanUserDirectory level with their login name as dirname if (GlobalConst.enableMyFilesDirectory) { if (dirname.equals(userLogin)) { isAddable = true; } } } /** * check global flag * operations: 1 (Designate Specific Members As Authors), 2 (Automatically Allow All Members To Be Authors) * status: 1 (Hidden), 2 (Ready) should we check for the status for operations (2) * if set to 1, check if this user is the designated author */ if (!isAddable) { DirScope dirscope = getDirectoryScope(parentId); //if ( dirscope.getValue(DbConstants.OPERATIONS).equals((Object)"2") ) { if (dirscope.getValue(DbConstants.OPERATIONS).equals(dirScope.getAllasauthorsoperation())) { isAddable = true; } else { if (isAuthor(parentId, userId)) { isAddable = true; } } } if (!isAddable) { throw new BaseDaoException("Donot have the permission to add to this directory, userId = " + userId); } /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } // dirpath ? // dirlink ? // stateid ? /** * Jboss methods * fqn - full qualified name * check if the parent direpath already set in the cache * If it exists, return the parent dirpath from the cache. */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, parentId); if (obj != null) { StringBuffer sb = new StringBuffer(); if (sb == null) { throw new BaseDaoException("new StringBufer() is null"); } String dpath = ((Directory) obj).getValue(DbConstants.DIRPATH); if (!RegexStrUtil.isNull(dpath)) { /** trims leading and trailing white spaces */ sb.append(dpath.trim()); } //logger.info("sb=" + sb.toString()); /** * This takes care of the root such as "Top" where dirpath is null. 
*/ if (!RegexStrUtil.isNull(sb.toString())) { sb.append(DbConstants.DIRPATH_COLON); } //logger.info("sb=" + sb.toString()); sb.append(((Directory) obj).getValue(DbConstants.DIRNAME)); //logger.info("sb=" + sb.toString()); sb.append(DbConstants.DIRPATH_PIPE); sb.append(parentId); dirpath = sb.toString(); } else { Directory parentInfo = getParentInfo(parentId); if (parentInfo != null) { StringBuffer sb = new StringBuffer(); if (sb == null) { throw new BaseDaoException("new StringBufer(), getParentInfo() is null"); } String dpath = parentInfo.getValue(DbConstants.DIRPATH); if (!RegexStrUtil.isNull(dpath)) { sb.append(dpath.trim()); } /** * This takes care of the root such as "Top" where dirpath is null. */ //logger.info("sb=" + sb.toString()); if (!RegexStrUtil.isNull(sb.toString())) { sb.append(DbConstants.DIRPATH_COLON); } sb.append(parentInfo.getValue(DbConstants.DIRNAME)); //logger.info("sb=" + sb.toString()); sb.append(DbConstants.DIRPATH_PIPE); sb.append(parentId); dirpath = sb.toString(); //dirpath = parentInfo.getValue(DbConstants.DIRPATH) + DbConstants.DIRPATH_COLON + parentInfo.getValue(DbConstants.DIRNAME) + DbConstants.DIRPATH_PIPE + parentId; } } if (addSanFlag && WebUtil.isSanEnabled()) { try { addSanDir(dirpath, dirname); } catch (SanException e) { throw new BaseDaoException("addSanDir()" + e.getMessage(), e); } } //logger.info("dirpath dirname = " + dirpath + dirname); /* if (WebUtil.isSanEnabled()) { logger.info("isSanEnabled"); getSanUtils(); if (sanUtils != null) { try { logger.info("dirpath = " + dirpath + " sanPath " + SanConstants.sanPath + " dirName = " + dirname); sanUtils.addSanDir(dirpath, SanConstants.sanPath, dirname); logger.info("addSanDir completed"); } catch(SanException e) { throw new SanException("addSanDirectory() error", e); } } else { throw new BaseDaoException("sanUtils is null in addDirectory()"); } } */ Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); directoryAddQuery.run(conn, dirname, keywords, desc, dirlink, dirpath, stateid, userId); /** * default dirscope table * Operations is 1 (assign authors) * Operations is 2 (allasauthors) * Scope is 1 (web) * Status is 2 (ready) * * Creator becomes admin in "diradmin" table * Date defaults to creation date */ addChildQuery.run(conn, "LAST_INSERT_ID()", parentId); scopeAddQuery.run(conn, "LAST_INSERT_ID()", operations, dirScope.getReadystatus(), scopeid); /** * This needs to be the last as the order is important, * sql behaviour changes for LAST_INSERT_ID. It will take entryId * for this table as the last_insert_id for the tables below it. 
*/ addAdminQuery.run(conn, "LAST_INSERT_ID()", userId); } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " parentId = " + parentId + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e2); } throw new BaseDaoException("rollback() exception, for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " parentId = " + parentId + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e1); } throw new BaseDaoException("for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " parentId = " + parentId + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e); } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for add directory/permatcat tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " parentId = " + parentId + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), add directory/scope tables " + " dirname = " + dirname + " keywords = " + keywords + " parentId = " + parentId + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e4); } /** * remove this directory from cache */ fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } fqn = cacheUtil.fqn(DbConstants.AUTHORS_DIRECTORIES); if (treeCache.exists(fqn, userLogin)) { treeCache.remove(fqn, userLogin); } fqn = cacheUtil.fqn(DbConstants.AUTHORS_LIST); if (treeCache.exists(fqn, userLogin)) { treeCache.remove(fqn, userLogin); } fqn = cacheUtil.fqn(DbConstants.USER_PAGE); if (treeCache.exists(fqn, userLogin)) { treeCache.remove(fqn, userLogin); } } /** * addRootNode - adds the root node. * @param dirname - directory name * @param keywords - keywords for this directory * @param desc - description of the directory * @param dirlink - the directory link * @param dirpath - the directory path * @param stateid - the state id * @param userId - the user id * @param scopeid - the scopeid * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public void addRootNode(String dirname, String keywords, String desc, String dirlink, String dirpath, String stateid, String userId, String scopeid) { Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); directoryAddQuery.run(conn, dirname, keywords, desc, dirlink, dirpath, stateid, userId); /** * default dirscope table * Operations is 2 (allasauthors) * Status is 2 (ready) * Scope is 1 (web) * * Creator becomes admin in "diradmin" table * Date defaults to creation date * Root does not have a parent, so don't add this to dirtree. */ //String parentId = "0"; //addChildQuery.run(conn, LAST_INSERT_ID(), parentId); scopeAddQuery.run(conn, "LAST_INSERT_ID()", dirScope.getAllasauthorsoperation(), dirScope.getReadystatus(), scopeid); /** * this needs to be the last as the order is important, * sql behaviour changes for LAST_INSERT_ID. 
it will take entryId * for this table as the last_insert_id for the tables below it. */ addAdminQuery.run(conn, "LAST_INSERT_ID()", userId); } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e2); } throw new BaseDaoException("rollback() exception, for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e1); } throw new BaseDaoException( "for add directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e); } /** * connection commit */ try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for add directory/permatcat tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), add directory/scope tables " + " dirname = " + dirname + " keywords = " + keywords + " scopeid = " + scopeid + " desc = " + desc + " userId = " + userId, e4); } } /** * This method checks if this directory has any subdirectories as its children * isChildASubDir * @param directoryId directoryid * @return boolean true if this has sub directories, false otherwise * @throws BaseDaoException when error occurs */ private boolean isChildASubDir(String directoryId) { /** * check if directoryId has children - for delete (look for only child directories) * Allow socialnetoworks/groupblogging/collabrum and websites to be deleted - ok to delete * otherwise barf! 
*/ try { Object[] params = { (Object) directoryId }; List dResult = directorySubDirExistsQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { return true; } return false; } catch (Exception e) { throw new BaseDaoException( "directorySubDirExistsQuery() exception " + directorySubDirExistsQuery.getSql(), e); } } /** * This is specific to SAN * isHomeDirectory * @param directoryId * @param userLogin * @return boolean - true (homedirectory) or false (not a home directory) */ private boolean isHomeDirectory(String directoryId, String userLogin, String userId) { /* if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { */ if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("directoryId is null"); } //logger.info("isHomeDirectory()"); Directory directory = getDirectory(directoryId, userId); if (directory != null) { String dirName = directory.getValue(DbConstants.DIRNAME); String dirPath = directory.getValue(DbConstants.DIRPATH); if (dirName.equals(userLogin)) { if (!RegexStrUtil.isNull(dirPath)) { String[] parentVals = null; String[] parentList = WebUtil.getDirPathVals(dirPath); if (parentList != null) { for (int i = 0; i < parentList.length; i++) { if (parentList[i] == null) { continue; } parentVals = parentList[i].split("\\|"); //logger.info("parentList = " + parentList[i]); //logger.info("parentVals = " + parentVals.toString()); // for example users|1 if (parentVals.length != 2) { //logger.info("parentvals is not 2"); return false; } else { if (parentVals != null) { if (parentVals.length == 2 && !RegexStrUtil.isNull(parentVals[0])) { //logger.info("parentVals = " + parentVals[0]); //logger.info("sanpath = " + SanConstants.sanUserPath); if (WebUtil.isSanEnabled()) { if (SanConstants.sanUserPath.lastIndexOf(parentVals[0]) == -1) { throw new BaseDaoException( parentVals[0] + " does not have the main parent " + SanConstants.sanUserPath); } else { return true; } } } } } } //for } //if } } } return false; } /** * Deletes directory * If the directory contains subdirectories or children or collabrum or urls, then * this directory is not deleted. * User permissions are checked before the user is allowed to delete directory. * If the user is the userid or admin, delete directory. * If the user has permission to delete in the permission directory, delete directory. 
* Returns parentId of this directory for controller * @param directoryId - the directoryid * @param userId - the user id * @param userLogin - user login * @return List -> has the parentid used by the controller to throw the viewDirectory * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public List deleteDirectory(String directoryId, String userId, String userLogin) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params are null"); } //logger.info("deleteDirectory" + directoryId); if (!WebUtil.isHomeDirDeleteEnabled()) { if (WebUtil.isSanEnabled()) { if (isHomeDirectory(directoryId, userLogin, userId)) { throw new BaseDaoException("Cannot delete, home directory, directoryId " + directoryId + " userLogin = " + userLogin); } } } /** * allow files, collabrums to be deleted */ if (!WebUtil.isDirTreeDeleteEnabled()) { if (isChildASubDir(directoryId)) { throw new BaseDaoException( "Cannot delete, subdirs exist, userId = " + userId + " directoryId = " + directoryId); } } /** * check authority to delete: isDiaryAdmin, isAuthor * We donot check for global scope as the author who has added this entry * is added as diradmin */ if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, userId))) { throw new BaseDaoException("User does not have permission to delete this directory, user = " + userId + " directoryId " + directoryId); } /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, deleteDirectory() " + sourceName); } /** * gets the parentid of the directoryid and the result is returned */ List dResult = null; try { Object[] params = { (Object) directoryId }; dResult = directoryParentQuery.execute(params); } catch (Exception e) { throw new BaseDaoException("directoryparentQuery() exception " + directoryParentQuery.getSql(), e); } Connection conn = null; HashSet dirSet = null; try { conn = ds.getConnection(); dirSet = listAuthorQuery.run(conn, directoryId); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("conn.close() exception " + directoryId, e1); } throw new BaseDaoException("listAuthorQuery, exception " + directoryId, e); } if (WebUtil.isSanEnabled()) { Directory currentDir = viewDirectory(directoryId, userId, userLogin, DbConstants.READ_FROM_SLAVE, DbConstants.BLOB_READ_FROM_SLAVE, DbConstants.READ_FROM_SLAVE); if (currentDir != null) { try { getSanUtils(); //logger.info("commenting deleteAllSanDir for the time being"); sanUtils.deleteAllSanDir(currentDir.getValue(DbConstants.DIRPATH), currentDir.getValue(DbConstants.DIRNAME), SanConstants.sanPath); } catch (SanException e) { throw new BaseDaoException("deleteSanDir() deleteDirectory() error, " + directoryId + " error message " + e.getMessage(), e); } } } try { if (conn == null) { conn = ds.getConnection(); } conn.setAutoCommit(false); /** delete all children, subdirs */ if (isChildASubDir(directoryId)) { //logger.info("calling deleteNode()"); deleteNode(directoryId, conn); } /** * the userId is not required to be checked * as long as the user either the diaryAdmin or is author */ deleteAllDirectory(directoryId, conn); //logger.info("deleteAllDirectory called " + directoryId); /* directoryDeleteQuery.run(conn, 
directoryId); scopeDeleteQuery.run(conn, directoryId); deleteChildQuery.run(conn, directoryId); deleteAdminQuery.run(conn, directoryId); deleteDirCopyQuery.run(conn, directoryId); deleteDirBlockAllQuery.run(conn, directoryId); deleteDirAllowUsersAllQuery.run(conn, directoryId); */ //logger.info("completed deleteDirectory queries "); /** * delete collabrum, delete websites coming soon! */ } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "connection close() exception for rollback, delete directory/dirscope/diradmin tables " + " params (2) " + " directoryId = " + directoryId + " userId = " + userId, e2); } throw new BaseDaoException("rollback() exception for delete directory/dirscope/diradmin tables " + " params (2) " + " directoryId = " + directoryId + " userId = " + userId, e1); } } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception for delete, directory/dirscope/diradmin tables " + " params (2) " + " directoryId = " + directoryId + " userId = " + userId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException( "connection close() exception for commit, delete directory/dirscope/diradmin tables " + " params (2) " + " directoryId = " + directoryId + " userId = " + userId, e4); } /** * remove this directory from cache */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } // check for each parent and remove the parent from cache if ((dResult != null) && (dResult.size() > 0)) { String parentId = ((Directory) dResult.get(0)).getValue(DbConstants.PARENT_ID); if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } } fqn = cacheUtil.fqn(DbConstants.DIR_COBRAND); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } fqn = cacheUtil.fqn(DbConstants.DIR_SCOPE); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } fqn = cacheUtil.fqn(DbConstants.DIR_CAT); StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(DbConstants.PHOTO_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(DbConstants.FILE_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } /* fqn = cacheUtil.fqn(DbConstants.DIRECTORY_STREAM_BLOBS); if (treeCache.exists(fqn, directoryId)) { Object obj = treeCache.get(fqn, directoryId); if (obj != null) { List entries = (List)obj; Fqn streamFqn = cacheUtil.fqn(DbConstants.DIR_STREAM_BLOB); //StringBuffer sb = new StringBuffer(); for (int i = 0; i < entries.size(); i++) { String entryId = ((Photo)entries.get(i)).getValue(DbConstants.ENTRYID); if (!RegexStrUtil.isNull(entryId)) { sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(entryId); if (treeCache.exists(streamFqn, sb.toString())) { treeCache.remove(streamFqn, sb.toString()); } } } } treeCache.remove(fqn, directoryId); } */ if (dirSet != null) { Iterator it = dirSet.iterator(); while (it.hasNext()) { Directory directory = (Directory) it.next(); String adminUser = directory.getValue(DbConstants.LOGIN); if (!RegexStrUtil.isNull(adminUser)) { fqn = cacheUtil.fqn(DbConstants.USER_PAGE); if (treeCache.exists(fqn, adminUser)) { 
treeCache.remove(fqn, adminUser); } fqn = cacheUtil.fqn(DbConstants.AUTHORS_LIST); if (treeCache.exists(fqn, adminUser)) { treeCache.remove(fqn, adminUser); } fqn = cacheUtil.fqn(DbConstants.AUTHORS_DIRECTORIES); if (treeCache.exists(fqn, adminUser)) { treeCache.remove(fqn, adminUser); } String adminId = directory.getValue(DbConstants.LOGIN_ID); fqn = cacheUtil.fqn(DbConstants.AUTHOR_BLOCKED_DIRS); if (treeCache.exists(fqn, adminId)) { treeCache.remove(fqn, adminId); } fqn = cacheUtil.fqn(DbConstants.DIR_COPY); if (treeCache.exists(fqn, adminId)) { treeCache.remove(fqn, adminId); } fqn = cacheUtil.fqn(DbConstants.DIR_MOVE); if (treeCache.exists(fqn, adminId)) { treeCache.remove(fqn, adminId); } sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(adminId); String key = sb.toString(); fqn = cacheUtil.fqn(DbConstants.DIR_AUTHOR); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } } } } removeUsersFromDirAuthorsCache(directoryId); return dResult; } /** * This property is setby spring automatically at web.xml startup * @param ds - this is JDBC datasource bean that is connection from the pool */ public void setJdbcSource(DataSource ds) { this.ds = ds; } /** * gets the Directory bean * @param directoryId - the directory id * @param userId - the user id * @throws BaseDaoException - when error occurs * @return Directory - directory bean */ public Directory getDirectory(String directoryId, String userId) throws BaseDaoException { return (getDirectory(directoryId, userId, DbConstants.READ_FROM_SLAVE)); } /** * getDirectory() method is called when a directory is edited * @param directoryId - the directory id * @param userId - the user id * @param accessFlag - the access flag * @return Directory - directory bean * @throws BaseDaoException - when error occurs */ public Directory getDirectory(String directoryId, String userId, int accessFlag) throws BaseDaoException { //logger.info("getDirectory() is called " + directoryId); if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("directoryId (params) are null"); } /** Jboss methods * fqn - full qualified name * check if the userpage already set in the cache * If it exists, return the userpage from the cache. 
*/ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { //logger.info("getting it from cache " + ((Directory)obj).toString()); return (Directory) obj; } Object[] params = { (Object) directoryId }; List result = null; /** * Get scalability datasource for directory - not partitioned * accessFlag - the access flag, use the datasource, master(0) or slave(1) */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } /** * Get the directory based on directoryId */ try { result = directoryQuery.execute(params); } catch (Exception e) { throw new BaseDaoException( "error getDirectory() , " + directoryQuery.getSql() + "directoryId = " + directoryId, e); } Directory dir = null; if (result != null && result.size() > 0) { dir = (Directory) result.get(0); if (dir == null) { throw new BaseDaoException( "directory null, result list, " + directoryQuery.getSql() + "directoryId = " + directoryId); } // get scopeid String scopeid = dir.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid null, directoryId " + directoryId); } // set scope strings if (scopeid.equals(dirScope.getWebscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getWebscope()); } else { if (scopeid.equals(dirScope.getUserspecificscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getUserspecificscope()); } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getUsergroupspecificscope()); } else { if (scopeid.equals(dirScope.getShareusergroupspecificscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getShareusergroupspecificscope()); } else { dir.setValue(DbConstants.SCOPE_STR, dirScope.getDiaryallscope()); } } } } /** * set leafnode (does this directory have children) * set default settings, that it is a leaf node. * this is used for cut + copy + paste + delete */ if (childrenExist(directoryId)) { dir.setValue(DbConstants.IS_LEAFNODE, "0"); } else { dir.setValue(DbConstants.IS_LEAFNODE, "1"); } } else { throw new BaseDaoException("Directory does not exist, " + directoryId); } return dir; } /** * Update an existing in a directory. * User permissions are checked before the user is allowed to update it. * If the user is the userid or admin, update directory. * if the user has permission to rename or modify in the permission directory, update directory. * @param directoryId - directory id * @param dirname - directory name (destination directory name) * @param keywords - keywords * @param scopeid - scope id * @param desc - description * @param operations - operations allowed in this directory * @param authorid - author id or login id of this author * @param dirPath - directory path * @param srcDirName - original or source directory name * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public void updateDirectory(String directoryId, String dirname, String keywords, String scopeid, String desc, String operations, String authorid, String userLogin, String dirPath, String srcDirName) throws BaseDaoException { /** * check only dirname and owner. others can be null. 
*/ if (RegexStrUtil.isNull(authorid) || RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(dirname)) { throw new BaseDaoException("params are null"); } /** * isDiaryAdmin or isAuthor */ if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, authorid))) { throw new BaseDaoException("User is neither a diaryAdmin nor author to update directory = " + directoryId + " authorid =" + authorid); } /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, updateDirectory() " + sourceName); } /** * children do not exist then, rename the dirname * isDirTreeRenameEnabled(), rename the dirname */ if (!childrenExist(directoryId) || WebUtil.isDirTreeRenameEnabled()) { if (WebUtil.isSanEnabled() && !RegexStrUtil.isNull(dirname)) { if (!RegexStrUtil.isNull(srcDirName) && !srcDirName.equals(dirname)) { try { getSanUtils(); /** * renames the directory from srcDirName to dirname */ sanUtils.renameSanDir(dirPath, srcDirName, SanConstants.sanPath, dirname); } catch (SanException e) { throw new BaseDaoException("error in renameSanDir(), updateDirectory()" + directoryId + " srcDirName " + srcDirName + " dirname = " + dirname + " dirPath = " + dirPath + " error message = " + e.getMessage(), e); } } } } // where do we get the stateid from ? String stateid = "null"; Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); /** * 1) isDirTreeRenameEnabled() is true, rename the directory * 2) if children do not exist, rename the directory */ if (childrenExist(directoryId)) { if (!WebUtil.isDirTreeRenameEnabled()) { directoryupdateQuery.run(conn, directoryId, keywords, desc); } else { /* * update the dirname */ directoryrenameQuery.run(conn, directoryId, dirname, keywords, desc); /* * update the dirname in the dirpath for the children * subdirectories * * directoryId is the mainDir that is being renamed * so set it to true */ StringBuffer oldPath = new StringBuffer(srcDirName); oldPath.append(DbConstants.DIRPATH_PIPE); oldPath.append(directoryId); StringBuffer newPath = new StringBuffer(dirname); newPath.append(DbConstants.DIRPATH_PIPE); newPath.append(directoryId); //logger.info("params to updatePath(), oldPath = " + oldPath + " newPath=" + newPath + " directoryId=" + directoryId); updatePath(directoryId, newPath.toString(), oldPath.toString(), conn, true); /** create new segment of dirpath **/ /* List subDirs = getListOfSubDirs(directoryId, DbConstants.READ_FROM_MASTER); if (subDirs != null && subDirs.size() > 0) { StringBuffer sb = new StringBuffer(); sb.append(DbConstants.DIRPATH_COLON); sb.append(dirname); sb.append(DbConstants.DIRPATH_PIPE); sb.append(directoryId); String replaceDirNamePath = sb.toString(); sb.delete(0, sb.length()); sb.append(DbConstants.DIRPATH_COLON); sb.append(srcDirName); sb.append(DbConstants.DIRPATH_PIPE); sb.append(directoryId); String oldDirNamePath = sb.toString(); for (int i = 0; i < subDirs.size(); i++) { if (subDirs.get(i) != null) { String subDirId = ((Directory)subDirs.get(i)).getValue(DbConstants.DIRECTORY_ID); Directory subDirWhole = getDirectory(subDirId, authorid); if (subDirWhole != null) { String subDirPath = subDirWhole.getValue(DbConstants.DIRPATH); logger.info("oldDirNamePath = " + oldDirNamePath + " newDirNamePath = " + replaceDirNamePath); subDirPath = RegexStrUtil.replaceString(subDirPath, oldDirNamePath, replaceDirNamePath); dirPasteQuery.run(conn, 
subDirId, subDirPath); } } } } */ } } else { /* * Children do not exist */ if (RegexStrUtil.isNull(dirname)) { throw new BaseDaoException( "dirname is null for update " + directoryId + " authorid = " + authorid); } directoryrenameQuery.run(conn, directoryId, dirname, keywords, desc); } scopeupdateQuery.run(conn, directoryId, scopeid, operations); } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for update directory/scope tables params (6) " + " dirname = " + dirname + " keywords = " + keywords + " desc = " + desc + " dirid = " + directoryId, e2); } throw new BaseDaoException("rollback() exception, for update directory/scope" + " dirname = " + dirname + " keywords = " + keywords + " desc = " + desc + " dirId = " + directoryId, e1); } throw new BaseDaoException("for update directory/scope tables params " + " dirname = " + dirname + " keywords = " + keywords + " desc = " + desc + " directoryId = " + directoryId, e); } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for update directory" + " dirname = " + dirname + " keywords = " + keywords + " desc = " + desc + " directoryId = " + directoryId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), updateDirectory" + " dirname = " + dirname + " keywords = " + keywords + " desc = " + desc + " directoryId = " + directoryId, e4); } /** * remove directory from cache */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } fqn = cacheUtil.fqn(DbConstants.DIR_SCOPE); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } /** * check for parent and remove the parent from cache */ String parentId = getParentId(directoryId); fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } } /** * get parentid of the directory * @param directoryId - directoryId * @return parentid - parentid * @throws BaseDaoException - if error occurs */ private String getParentId(String directoryId) throws BaseDaoException { /** * get the parent of directoryId */ try { Object[] params = { (Object) directoryId }; List dResult = directoryParentQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { return ((Directory) dResult.get(0)).getValue(DbConstants.PARENT_ID); } } catch (Exception e) { throw new BaseDaoException("DirectoryParentQuery, exception " + directoryParentQuery.getSql(), e); } return null; } /** * for test drive purposes */ private List getAllDirectories(String userId) throws BaseDaoException { if (RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getAllDirectories() " + sourceName); } List result = null; Object[] params = { (Object) userId }; try { result = directoryallQuery.execute(params); } catch (Exception e) { throw new BaseDaoException("error occured while getting all directories for this user , " + directoryallQuery.getSql() + " params (1) userId = " + userId, e); } 
return result; } /** * Checks if this userId has the scope for this directory * @param directoryId * @param userId * @return DirScope * @throws BaseDaoException */ private DirScope getDirectoryScope(String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null in getDirectoryScope() for DirectoryDaoDb"); } /** Jboss methods * fqn - full qualified name * check if the directory already set in the cache * If it exists, return the directory from the cache. */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_SCOPE); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { return (DirScope) obj; } /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getDirectoryScope() " + sourceName); } Object[] params = { (Object) directoryId }; List result = scopeQuery.execute(params); if (result == null) { throw new BaseDaoException("DirectoryScopeCategory db result is null, " + scopeQuery.getSql() + " params (1) directoryId = " + directoryId); } else { /** * add dirscope in the cache */ if (result.size() > 0 && result.get(0) != null) { treeCache.put(fqn, directoryId, (DirScope) result.get(0)); return (DirScope) result.get(0); } else { throw new BaseDaoException("DirectoryScopeCategory result is null, " + scopeQuery.getSql() + " params (1) directoryId = " + directoryId); } } } /** * Block list of directories * @param dirAuthorList - the directories list for an author * @return boolean - true or false (whether these directories can be blocked or not ) */ public boolean canDirsBeBlocked(List dirAuthorList) { if ((dirAuthorList != null) && (dirAuthorList.size() > 0)) { int cnt = 0; int i = 0; for (i = 0; i < dirAuthorList.size(); i++) { String directoryId = ((Directory) dirAuthorList.get(i)).getValue(DbConstants.DIRECTORY_ID); /** * checks only for the a child directory * if child exists, not a leaf node * if child does not exist, it is a leaf node */ if (childrenExist(directoryId)) { ((Directory) dirAuthorList.get(i)).setValue(DbConstants.IS_LEAFNODE, "0"); cnt++; } else { ((Directory) dirAuthorList.get(i)).setValue(DbConstants.IS_LEAFNODE, "1"); } } if (cnt == i) { return false; } } return true; } /** * This method updates blob (caption, zoom) for the directory * @param entryId - the entry id of this blob * @param directoryId - the directory id of this blob * @param userId - the user id * @param userLogin - the user login * @param zoom - the zoom * @param btitle - the btitle * @param def - is this the default * @param caption - caption * @throws BaseDaoException - when error occurs */ public void updateStreamBlob(String entryId, String directoryId, String userId, String userLogin, String zoom, String btitle, boolean def, String caption, String dirPath, String dirName) { /** * check only dirname and owner. others can be null. 
*/ if (RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** * isDiaryAdmin or isAuthor */ if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, userId))) { throw new BaseDaoException("User is neither a diaryAdmin nor author to update directory = " + directoryId + " userId =" + userId); } /** * Get scalability datasource for dirblob - partitioned on directoryId */ String sourceName = scalabilityManager.getWriteBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, updateStreamBlob() " + sourceName); } /** * Check if this entry exists in the default directory blob */ boolean exists = false; String defId = null; try { Object[] params = { (Object) directoryId }; List result = defaultQuery.execute(params); if (result != null && result.size() > 0) { defId = ((Photo) result.get(0)).getValue(DbConstants.ENTRYID); if (!RegexStrUtil.isNull(defId) && defId.equals(entryId)) { exists = true; } } } catch (Exception e) { throw new BaseDaoException("error while" + defaultQuery.getSql(), e); } if (WebUtil.isSanEnabled()) { Photo photo = getPhoto(entryId, directoryId); String srcFileName = null; if (photo != null) { srcFileName = photo.getValue(DbConstants.BTITLE); } if ((!RegexStrUtil.isNull(srcFileName)) && !srcFileName.equals(btitle)) { try { getSanUtils(); sanUtils.renameSanFile(dirPath, dirName, srcFileName, SanConstants.sanPath, btitle); } catch (SanException e) { throw new BaseDaoException("directory updateStreamBlob()renameSanFile error", e); } } } List result = null; Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); blobUpdateQuery.run(conn, entryId, directoryId, zoom, btitle, caption); if (def) { /** * so delete the default entry that does not match with this entryid, add the new entryid */ if (!exists) { deleteDefaultQuery.run(conn, directoryId); addDefaultQuery.run(conn, entryId, directoryId); } } else { /** * A default record matching entryid exists, * Delete this entry as the user does not want this to be a default entry */ if (exists) { deleteDefaultQuery.run(conn, directoryId); } } } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for updateStreamBlob() directoryId =" + directoryId + " entryId = " + entryId, e2); } throw new BaseDaoException("rollback() exception, for updateStreamBlob() directoryId =" + directoryId + " entryId = " + entryId, e1); } } /** * connection commit * */ try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException( "commit() exception updateStreamBlob, directoryId= " + directoryId + " entryId = " + entryId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), updateStreamBlob() directoryId = " + directoryId + " entryId = " + entryId, e4); } updateDirImage(entryId, directoryId, zoom, btitle, caption); /** * generate a key for the directory stream blob (directoryid + entryid) * remove blobstream of directory, from cache * use the same key for both the caches (DIR_PHOTO, DIR_STREAM_BLOB with blobdata) */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_STREAM_BLOB); StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(entryId); String key = sb.toString(); if 
(treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.DIR_PHOTO); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } /** remove the existing default photo from cache */ fqn = cacheUtil.fqn(DbConstants.DIR_PHOTO); sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(defId); key = sb.toString(); if (treeCache.exists(fqn, key)) { treeCache.remove(fqn, key); } fqn = cacheUtil.fqn(DbConstants.DIR_CAT); sb.delete(0, sb.length()); sb.append(directoryId); sb.append("-"); sb.append(DbConstants.PHOTO_CATEGORY); if (treeCache.exists(fqn, sb.toString())) { treeCache.remove(fqn, sb.toString()); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } /* fqn = cacheUtil.fqn(DbConstants.DIRECTORY_STREAM_BLOBS); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } */ } /** * gets the stream blob information with the blob data * This method is called when stream blob information is zipped * @param directoryId - the directory id * @param blobtype - the blob type * @return List of streamblobs belonging to this directory * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public List getAllStreamData(String directoryId, String blobType) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(blobType)) { throw new BaseDaoException("directoryId || blobType params are null in getAllStreamData()"); } /** * Get scalability datasource for dirblob, partitioned on directoryId */ String sourceName = scalabilityManager.getReadBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getAllStreamData() " + sourceName); } try { Object[] params = { (Object) directoryId, (Object) blobType }; List blobs = (List) streamDataQuery.execute(params); if (WebUtil.isSanEnabled()) { return getSanBlobsDir(directoryId, blobs); } else { return blobs; } //return (List)streamDataQuery.execute(params); } catch (BaseDaoException e) { throw new BaseDaoException( "getAllStreamData() exception, directoryId = " + streamDataQuery.getSql() + directoryId, e); } } /** * gets the stream blob information (without the blob data) * This method is called when stream blob information is displayed * @param directoryId - the directory id * @param blobType - the blobType * @param blobAcces - blob accessibility from datasource * @return List of streamblobs belonging to this directory * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public List getBlobsByCategory(String directoryId, String blobType, int blobAccessFlag) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(blobType)) { throw new BaseDaoException("params are null in getBlobsByCategory"); } /** * Get scalability datasource for dirblob - partitioned on directoryId */ /** * get stream blobs for a directory from cache, if it exists */ StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(blobType); Fqn fqn = cacheUtil.fqn(DbConstants.DIR_CAT); if (blobAccessFlag == DbConstants.BLOB_READ_FROM_SLAVE) { Object obj = treeCache.get(fqn, sb.toString()); if (obj != null) { return (List) obj; } } /** * Get scalability datasource for dirblob, partitioned on directoryId */ String sourceName = null; if (blobAccessFlag == DbConstants.BLOB_READ_FROM_MASTER) { sourceName = 
scalabilityManager.getWriteBlobScalability(directoryId); } else { sourceName = scalabilityManager.getReadBlobScalability(directoryId); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getBlobsByCategory() " + sourceName); } try { Object[] params = { (Object) directoryId, (Object) blobType }; List result = photosQuery.execute(params); if ((result != null) && (result.size() > 0)) { treeCache.put(fqn, sb.toString(), (List) result); } return result; } catch (BaseDaoException e) { throw new BaseDaoException("photosQuery exception, directoryId = " + photosQuery.getSql() + directoryId, e); } } /** * gets the stream blob information (without the blob data) * This method is called when stream blob information is displayed * @param directoryId - the directory id * @return List of streamblobs belonging to this directory * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ /* public List getStreamBlobs(String directoryId, String blobType) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(blobType)) { throw new BaseDaoException("directoryId params are null in getStreamBlobs"); } Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY_STREAM_BLOBS); StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(blobType); Object obj = treeCache.get(fqn, sb.toString()); if (obj != null) { return (List)obj; } String sourceName = scalabilityManager.getReadBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getStreamBlobs() " + sourceName); } try { Object[] params = {(Object)directoryId, blobType}; List result = streamblobQuery.execute(params); if ((result != null) && (result.size() > 0)) { treeCache.put(fqn, sb.toString(), (List)result); } return result; } catch(BaseDaoException e) { throw new BaseDaoException("getStreamBlobs exception, directoryId = " + streamblobQuery.getSql() + directoryId, e); } } */ /** * Sets IS_AUTHOR and CAN_ADD properties for this directory * @param directoryId - the directoryid * @param userId - the user id * @param userLogin - the user login * @param isLoginValid - is Login valid * @throws BaseDaoException when error occurs */ private void setDirAuthorAndScope(Directory dir, String directoryId, String userId, String userLogin, boolean isLoginValid) { boolean isAddable = false; if (isLoginValid && !RegexStrUtil.isNull(userId)) { if (userId.equals((Object) dir.getValue(DbConstants.OWNER_ID)) || isAuthor(directoryId, userId) || diaryAdmin.isDiaryAdmin(userLogin)) { dir.setValue(DbConstants.IS_AUTHOR, "1"); isAddable = true; } else { dir.setValue(DbConstants.IS_AUTHOR, "0"); } } else { dir.setValue(DbConstants.IS_AUTHOR, "0"); } /** * check global flag * operations: 1 (Designate Specific Members As Authors), 2 (Automatically Allow All Members To Be Authors) * status: 1 (Hidden), 2 (Ready) should we check for the status for operations (2) * if set to 1, check if this user is the designated author */ if (!isAddable) { DirScope dirscope = getDirectoryScope(directoryId); //if (dirscope.getValue(DbConstants.OPERATIONS).equals((Object)"2") ) { if (dirscope != null) { if (dirscope.getValue(DbConstants.OPERATIONS).equals(dirScope.getAllasauthorsoperation())) { isAddable = true; } } else { throw new BaseDaoException("dirscope is null for directorid=" + directoryId); } } if (isAddable) { dir.setValue(DbConstants.CAN_ADD, "1"); } else { dir.setValue(DbConstants.CAN_ADD, "0"); } } /** * 
isUserAuthor * checks if this user is the author of a directory or an admin * @param directoryId - the directoryid * @param loginId - the login id * @param userLogin - the user login * @returns boolean - true if the user is the author or admin * @throws BaseDaoException when error occurs */ public boolean isUserAuthor(String directoryId, String loginId, String userLogin) { /** * check params */ if (RegexStrUtil.isNull(loginId) || RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params are null"); } /** * isDiaryAdmin or isAuthor */ if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, loginId))) { return false; } return true; } /** * getWebsitesAndCollabrums for a directory * @param directoryId - the directoryid * @param loginId - the login id * @param userLogin - the user login * @returns Directory - that has websites and collabrums of this directory * @throws BaseDaoException when error occurs */ public Directory getWebsitesAndCollabrums(String directoryId, String userId, String userLogin) { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params directoryId is null"); } /** * get directory from cache, if it exists */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { return (Directory) obj; } /** * get it from db */ if (RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params userId || userLogin are null"); } Directory dir = getRBDirectory(directoryId, DbConstants.READ_FROM_MASTER); if (dir == null) { throw new BaseDaoException("dir is null"); } /** * set collabrums * loginValid is set to true */ List dResult = getCollabrums(directoryId, true, userId, userLogin); if (dResult != null) { dir.setObject(DbConstants.COLLABRUMS, dResult); } /** * Get scalability datasource for websites & dirblob - partitioned on directoryId */ String sourceName = scalabilityManager.getWriteScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException( "ds null, getWebsitesAndCollabrums() " + sourceName + " directoryId " + directoryId); } /** * get directory websites, dirwebsites partitioned on directoryId */ try { Object[] params = { (Object) directoryId }; dResult = websiteQuery.execute(params); if (dResult != null) { dir.setObject(DbConstants.WEBSITES, dResult); } } catch (Exception e) { throw new BaseDaoException("websiteQuery exception for directoryId = " + directoryId, e); } return dir; } /** * Get the directory based on directoryId */ private Directory getRBDirectory(String directoryId, int accessFlag) { /** * Get scalability datasource for directory, not partitioned */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getRBDirectory() " + sourceName); } /** * get the directory from database */ Object[] params = { (Object) directoryId }; Directory dir = null; try { List result = directoryQuery.execute(params); //logger.info("directoryQuery"); if ((result != null) && (result.size() > 0)) { dir = (Directory) result.get(0); } } catch (Exception e) { throw new BaseDaoException("error, directoryQuery(), getRBDirectory() " + directoryQuery.getSql() + " params (1) directoryId = " + directoryId, e); } /** * get directory 
 */
        if (dir == null) {
            throw new BaseDaoException("directory null, getRBDirectory(), " + directoryQuery.getSql()
                    + " params (1) directoryId = " + directoryId);
        }
        return dir;
    }

    /**
     * getDiaryDirectory
     * Get the directory with all the information attached to the directory
     * Attached are: streamblobs, author, subdirs, collabrums, and websites of the directory
     * This method is called by getViewDirectory()
     * @param directoryId
     * @param userId
     * @param userLogin
     * @param accessFlag accessFlag indicates datasource master(1) or slave(0)
     * @param blobAccessFlag blobAccessFlag indicates blob datasource master(1) or slave(0)
     * @param websiteAccessFlag websiteAccessFlag indicates directory website datasource master(1) or slave(0)
     * @param alphabet
     * @return Directory bean
     * @throws BaseDaoException
     **/
    private Directory getDiaryDirectory(String directoryId, String userId, String userLogin, int accessFlag,
            int blobAccessFlag, int websiteAccessFlag, String alphabet) {
        if (RegexStrUtil.isNull(directoryId)) {
            throw new BaseDaoException("params are null");
        }
        boolean isLoginValid = false;
        if (!RegexStrUtil.isNull(userLogin)) {
            isLoginValid = true;
        }
        //logger.info("isLoginValid " + isLoginValid);
        // check if user access is allowed for directory
        if (isLoginValid) {
            if (isThisMemberBlockedFromDirectory(directoryId, userId)) {
                return null;
            }
            if (!isAccessAllowed(directoryId, userLogin, userId, accessFlag)) {
                throw new BaseDaoException(
                        "Directory access is not allowed, for directoryid = " + directoryId + " for userLogin = "
                                + userLogin + " because this user is not the owner or author of this directory");
            }
        }
        /**
         * get directory from cache, if it exists
         */
        Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY);
        if (RegexStrUtil.isNull(alphabet)) {
            Object obj = treeCache.get(fqn, directoryId);
            if (obj != null) {
                Directory dir = (Directory) obj;
                //logger.info("directoryId=" + directoryId);
                //logger.info("directory=" + dir.toString());
                setDirAuthorAndScope(dir, directoryId, userId, userLogin, isLoginValid);
                List subDirs = (List) dir.getObject(DbConstants.SUBDIRS);
                if (subDirs != null) {
                    logger.info("subdirs.size()=" + subDirs.size());
                    logger.info("numhide=" + dir.getValue(DbConstants.NUM_HIDE));
                    logger.info("numdirs=" + dir.getValue(DbConstants.NUM_DIRS));
                    int numHide = setSubDirProperties(subDirs, isLoginValid, userId, userLogin, accessFlag);
                    //logger.info("caching stuff=" + numHide);
                } else {
                    //logger.info("subdirs is null, directoryid = " + directoryId);
                    String strNumChildren = getChildCount(directoryId, DbConstants.READ_FROM_MASTER);
                    logger.info("strNumChildren = " + strNumChildren);
                    dir.setValue(DbConstants.TOTAL_DIRS, strNumChildren);
                    List dResult = getSubDirs(dir, directoryId, isLoginValid, userId, userLogin, accessFlag, alphabet);
                    if (dResult != null) {
                        dir.setValue(DbConstants.NUM_DIRS, new Integer(dResult.size()).toString());
                        dir.setObject(DbConstants.SUBDIRS, dResult);
                        // use the freshly fetched subdirectory list; subDirs is null in this branch
                        int numHide = setSubDirProperties(dResult, isLoginValid, userId, userLogin, accessFlag);
                        dir.setValue(DbConstants.NUM_HIDE, new Integer(numHide).toString());
                    }
                }
                /**
                 * This attribute should be set, as it is relevant.
* can this member on the diarynet cut/copy/paste directories */ boolean isUserAuthor = false; if (isLoginValid && dir.getValue(DbConstants.IS_AUTHOR).equals("1")) { isUserAuthor = true; } if (childrenExist(directoryId)) { dir.setValue(DbConstants.IS_LEAFNODE, "0"); /** * cannot delete directory, if it has subdirectories * can delete directory, if it has only collabrums and websites */ /* if ((subDirs == null) || subDirs.size() > 0){ logger.info("CAN_DELETE= 1, subdirs are null"); dir.setValue(DbConstants.CAN_DELETE, "1"); } else { logger.info("CAN_DELETE= 0 "); dir.setValue(DbConstants.CAN_DELETE, "0"); } */ } else { /* logger.info("CAN_DELETE= 1, leafnode"); dir.setValue(DbConstants.CAN_DELETE, "1"); */ // as children dont exist, can this user move,copy this directory // check if this user has any directories that are in dirmove, dircopy, dirpaste tables dir.setValue(DbConstants.IS_LEAFNODE, "1"); if (isUserAuthor) { Object[] myparams = { (Object) userId }; dir.setValue(DbConstants.IS_MOVABLE, isMovable(myparams, directoryId, dir)); dir.setValue(DbConstants.IS_COPYABLE, isCopyable(myparams)); } else { dir.setValue(DbConstants.IS_MOVABLE, "0"); dir.setValue(DbConstants.IS_COPYABLE, "0"); dir.setValue(DbConstants.IS_CLONABLE, "0"); dir.setValue(DbConstants.IS_MOVE_PASTE, "0"); dir.setValue(DbConstants.DIR_MOVE, "0"); } } /** this allows the Paste link to appear for this directory */ if (isUserAuthor) { Object[] myparams = { (Object) userId }; dir.setValue(DbConstants.DIR_MOVE, isMovable(myparams, directoryId, dir)); dir.setValue(DbConstants.IS_CLONABLE, isCopyable(myparams)); } /** * Set if this member is blocked from a directory, in cache * Get subdirectories, get list of blocked members for each subdirectory (blockedList) * For each subdirectory, check if this member is blocked from a directory */ if (isLoginValid) { if (subDirs != null && (subDirs.size() > 0)) { for (int i = 0; i < subDirs.size(); i++) { String subDirId = ((Directory) subDirs.get(i)).getValue(DbConstants.DIRECTORY_ID); if (isThisMemberBlockedFromDirectory(subDirId, userId)) { ((Directory) subDirs.get(i)).setValue(DbConstants.BLOCK_MEMBER, "1"); } else { ((Directory) subDirs.get(i)).setValue(DbConstants.BLOCK_MEMBER, "0"); } } } } /* * needs to be set as the user may have been enabled to * upload the files */ if (!RegexStrUtil.isNull(userLogin)) { if (WebUtil.isUserQuotaEnabled()) { //logger.info("enabled quota"); String quotaSize = getQuotaSize(userId); if (RegexStrUtil.isNull(quotaSize) || quotaSize.equals("0")) { dir.setValue(DbConstants.ENABLE_USER_QUOTA, "0"); } else { //logger.info("enabling the userquota " + quotaSize); dir.setValue(DbConstants.ENABLE_USER_QUOTA, "1"); } } } /** * collabrum information and member informaiton for collabrum has to be set dynamically */ setCollInformation((List) dir.getObject(DbConstants.COLLABRUMS), isLoginValid, userId, userLogin); //logger.info("isLoginValid 4" + isLoginValid); return (Directory) obj; } } /** * Get the directory based on directoryId */ List dResult = null; String sourceName = null; Directory dir = getRBDirectory(directoryId, accessFlag); Object[] params = { (Object) directoryId }; String strNumChildren = getChildCount(directoryId, DbConstants.READ_FROM_MASTER); logger.info("strNumChilden = " + strNumChildren + " directoryId=" + directoryId); dir.setValue(DbConstants.TOTAL_DIRS, strNumChildren); /** * Get scalability datasource for directory, not partitioned */ /* if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = 
scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getDiaryDirectory() " + sourceName); } */ /** * get the directory from database */ /* try { result = directoryQuery.execute(params); if ((result != null) && (result.size() > 0 )) { dir = (Directory)result.get(0); } } catch (Exception e) { throw new BaseDaoException("error occured while getting directory , " + directoryQuery.getSql() + " params (1) directoryId = " + directoryId, e); } */ /** * get directory */ if (dir == null) { throw new BaseDaoException("directory is null, directoryId " + directoryId); } /** * Directory is not null */ if (WebUtil.isSanEnabled()) { try { getSanUtils(); String dirUsedQuota = sanUtils.getUsedQuota(dir.getValue(DbConstants.DIRNAME), SanConstants.sanPath, dir.getValue(DbConstants.DIRPATH)); //logger.info("dirUsedQuota = " + dirUsedQuota + " dirpath = " + dir.getValue(DbConstants.DIRPATH) + " directoryid = " + directoryId); dir.setValue(DbConstants.USED_QUOTA, dirUsedQuota); } catch (Exception e) { throw new BaseDaoException("Error in getting the quota size for directory=" + directoryId); } if (!RegexStrUtil.isNull(userLogin)) { if (WebUtil.isUserQuotaEnabled()) { String quotaSize = getQuotaSize(userId); if (RegexStrUtil.isNull(quotaSize) || quotaSize.equals("0")) { dir.setValue(DbConstants.ENABLE_USER_QUOTA, "0"); } else { dir.setValue(DbConstants.ENABLE_USER_QUOTA, "1"); } } } } /** * set isauthor and canadd values */ setDirAuthorAndScope(dir, directoryId, userId, userLogin, isLoginValid); if (!WebUtil.isHomeDirDeleteEnabled()) { if (WebUtil.isSanEnabled()) { if (isHomeDirectory(directoryId, userLogin, userId)) { dir.setValue(DbConstants.HOME_DIR, "1"); } else { dir.setValue(DbConstants.HOME_DIR, "0"); } } else { dir.setValue(DbConstants.HOME_DIR, "0"); } } /** * set scope strings */ String scopeid = dir.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid null, directoryId " + directoryId); } if (scopeid.equals(dirScope.getWebscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getWebscope()); } else { if (scopeid.equals(dirScope.getUserspecificscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getUserspecificscope()); } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { dir.setValue(DbConstants.SCOPE_STR, dirScope.getUsergroupspecificscope()); } else { dir.setValue(DbConstants.SCOPE_STR, dirScope.getDiaryallscope()); } } } /** * increment hits on this directory */ incrementHits(directoryId); /** * Get scalability datasource for * websites & dirblob - partitioned on directoryId */ if (websiteAccessFlag == 1) { sourceName = scalabilityManager.getWriteScalability(directoryId); } else { sourceName = scalabilityManager.getReadScalability(directoryId); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException( "ds null, getDiaryDirectory() " + sourceName + " directoryId " + directoryId); } /** * get directory websites, dirwebsites partitioned on directoryId */ try { params[0] = directoryId; dResult = websiteQuery.execute(params); if (dResult != null) { dir.setObject(DbConstants.WEBSITES, dResult); } } catch (Exception e) { throw new BaseDaoException("websiteQuery exception for directoryId = " + directoryId, e); } /** * set stream blobs (photos and files), * dirblob partitioned on directoryId */ 
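/*
 * Illustrative note (not part of the original flow): the two setObject() calls below go
 * through the public getBlobsByCategory() method which, when called with
 * DbConstants.BLOB_READ_FROM_SLAVE, first consults the DbConstants.DIR_CAT cache region
 * under the key "<directoryId>-<blobType>" and only then runs photosQuery. A hypothetical
 * external caller would look roughly like this, assuming "directoryDao" is the Spring-wired
 * DirectoryDao bean:
 *
 *   List files  = directoryDao.getBlobsByCategory(directoryId,
 *                     DbConstants.FILE_CATEGORY, DbConstants.BLOB_READ_FROM_SLAVE);
 *   List photos = directoryDao.getBlobsByCategory(directoryId,
 *                     DbConstants.PHOTO_CATEGORY, DbConstants.BLOB_READ_FROM_SLAVE);
 */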
dir.setObject(DbConstants.FILE_CATEGORY, getBlobsByCategory(directoryId, DbConstants.FILE_CATEGORY, blobAccessFlag)); dir.setObject(DbConstants.PHOTO_CATEGORY, getBlobsByCategory(directoryId, DbConstants.PHOTO_CATEGORY, blobAccessFlag)); /** * Get scalability datasource for dirblob - partitioned on directoryId */ if (blobAccessFlag == 1) { sourceName = scalabilityManager.getWriteBlobScalability(directoryId); } else { sourceName = scalabilityManager.getReadBlobScalability(directoryId); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException( "ds null, getDiaryDirectory() " + sourceName + " directoryId " + directoryId); } try { params[0] = directoryId; dResult = defaultDirBlobQuery.execute(params); if (dResult != null && dResult.size() > 0) { dir.setObject(DbConstants.DEFAULT_PHOTO, (Photo) dResult.get(0)); } } catch (Exception e) { throw new BaseDaoException("directoryId = " + directoryId + " defaultDirBlobQuery exception " + defaultDirBlobQuery.getSql(), e); } /** * set collabrums */ dResult = getCollabrums(directoryId, isLoginValid, userId, userLogin); if (dResult != null) { dir.setObject(DbConstants.COLLABRUMS, dResult); } /** * set subdirectories information, count the num subdirectories */ logger.info("alphabet = " + alphabet); dResult = getSubDirs(dir, directoryId, isLoginValid, userId, userLogin, accessFlag, alphabet); if (dResult != null) { dir.setValue(DbConstants.NUM_DIRS, new Integer(dResult.size()).toString()); dir.setObject(DbConstants.SUBDIRS, dResult); } /** * set leafnode and other attributes for this directory * If children exist, don't allow to copy and cut(move) * set ismovable, is copyable for this user, check if this user has any movable or copyable directories */ boolean isUserAuthor = false; if (isLoginValid && dir.getValue(DbConstants.IS_AUTHOR).equals("1")) { isUserAuthor = true; } /** * cannot delete directory, if it has subdirectories * can delete directory, if it has collabrums and websites */ if (childrenExist(directoryId)) { dir.setValue(DbConstants.IS_LEAFNODE, "0"); if (((dResult == null) || dResult.size() <= 0) || WebUtil.isDirTreeDeleteEnabled()) { dir.setValue(DbConstants.CAN_DELETE, "1"); } else { dir.setValue(DbConstants.CAN_DELETE, "0"); } } else { dir.setValue(DbConstants.IS_LEAFNODE, "1"); dir.setValue(DbConstants.CAN_DELETE, "1"); if (isUserAuthor) { Object[] myparams = { (Object) userId }; String move = isMovable(myparams, directoryId, dir); dir.setValue(DbConstants.IS_MOVABLE, move); dir.setValue(DbConstants.IS_COPYABLE, isCopyable(myparams)); } else { // not an author or invalid login dir.setValue(DbConstants.IS_MOVABLE, "0"); dir.setValue(DbConstants.IS_COPYABLE, "0"); dir.setValue(DbConstants.IS_CLONABLE, "0"); dir.setValue(DbConstants.DIR_MOVE, "0"); dir.setValue(DbConstants.IS_MOVE_PASTE, "0"); } } /** this allows the Paste link to appear for these directories */ if (isUserAuthor) { Object[] myparams = { (Object) userId }; dir.setValue(DbConstants.DIR_MOVE, isMovable(myparams, directoryId, dir)); dir.setValue(DbConstants.IS_CLONABLE, isCopyable(myparams)); } /** * set cobrand for directory */ getCobrand(directoryId, (Directory) dir); /** * add directory to cache */ treeCache.put(fqn, directoryId, (Directory) dir); return dir; } /** * undocopy directory - undoes the the copy of the directory * @param directoryId - the directory id * @param userLogin - the user login * @param userId - the user id * @throws BaseDaoException - when error occurs */ public void undoCopyDirectory(String directoryId, String 
userLogin, String userId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null in undoCopyDirectory"); } /** * does not have permission to undocopy for this directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) { return; } deleteCopyDirectory(directoryId, userId); Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } } /** * clones the directory as a new directory into the parent/current directory * @param parentId - the new parent id * @param userLogin - the user login * @param userId - the user id * @throws BaseDaoException - when error occurs */ public void cloneDirectory(String parentId, String userLogin, String userId) throws BaseDaoException { if (RegexStrUtil.isNull(parentId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null in cloneDirectory"); } /** * does not have permission to clone/paste the directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(parentId, userId)) { return; } /** * get the directoryid of the directory that needs to be cloned */ String directoryId = null; Fqn copyFqn = cacheUtil.fqn(DbConstants.DIR_COPY); Object obj = treeCache.get(copyFqn, userId); if (obj != null) { directoryId = ((Directory) obj).getValue(DbConstants.DIRECTORY_ID); } String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null" + sourceName); } if (RegexStrUtil.isNull(directoryId)) { try { Object[] myparams = { (Object) userId }; List dResult = dircancopyQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { directoryId = ((Directory) dResult.get(0)).getValue(DbConstants.DIRECTORY_ID); treeCache.put(copyFqn, userId, dResult.get(0)); } else { throw new BaseDaoException( "error dircancopyQuery, null directoryId " + dircancopyQuery.getSql()); } } catch (Exception e) { throw new BaseDaoException("dircancopyQuery error" + dircancopyQuery.getSql(), e); } } if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("directoryId is null " + dircancopyQuery.getSql()); } /** * setup the clone directory */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); obj = treeCache.get(fqn, directoryId); Directory dir = null; String dirname = null; String keywords = null; String scopeid = null; String operations = null; String desc = null; if (obj != null) { dir = (Directory) obj; dirname = dir.getValue(DbConstants.DIRNAME); keywords = dir.getValue(DbConstants.KEYWORDS); scopeid = dir.getValue(DbConstants.SCOPE_ID); operations = dir.getValue(DbConstants.OPERATIONS); desc = dir.getValue(DbConstants.DIRDESC); } else { DirScope dirscope = getDirectoryScope(directoryId); scopeid = dirscope.getValue(DbConstants.SCOPE_ID); operations = dirscope.getValue(DbConstants.OPERATIONS); try { Object[] params = { (Object) directoryId }; List result = directoryQuery.execute(params); if ((result != null) && (result.size() > 0)) { dir = (Directory) result.get(0); dirname = dir.getValue(DbConstants.DIRNAME); keywords = dir.getValue(DbConstants.KEYWORDS); desc = dir.getValue(DbConstants.DIRDESC); } else { throw new BaseDaoException("result is null in " + directoryQuery.getSql()); } } catch (BaseDaoException e) { throw new BaseDaoException("erorr in directoryQuery " + directoryQuery.getSql(), e); } } /** * get 
the directory path of the directory that is being cloned */ String dirName = null; String srcDirPath = null; if (!RegexStrUtil.isNull(directoryId)) { fqn = cacheUtil.fqn(DbConstants.DIRECTORY); obj = treeCache.get(fqn, directoryId); if (obj != null) { removeDirPathsFromCache(((Directory) obj).getValue(DbConstants.DIRPATH)); dirName = ((Directory) obj).getValue(DbConstants.DIRNAME); srcDirPath = ((Directory) obj).getValue(DbConstants.DIRPATH); } else { dir = getDirectory(directoryId, userId, DbConstants.READ_FROM_SLAVE); if (dir != null) { dirName = dir.getValue(DbConstants.DIRNAME); srcDirPath = dir.getValue(DbConstants.DIRPATH); } } } /** * Jboss methods * fqn - full qualified name * check if the parent dirpath already set in the cache * or get it from DB. * If it exists, return the parent dirpath from the cache. * This parent is the new parent for the above directoryId */ StringBuffer newDirPath = null; fqn = cacheUtil.fqn(DbConstants.DIRECTORY); obj = treeCache.get(fqn, parentId); if (obj != null) { newDirPath = new StringBuffer(((Directory) obj).getValue(DbConstants.DIRPATH)); newDirPath.append(DbConstants.DIRPATH_COLON); newDirPath.append(((Directory) obj).getValue(DbConstants.DIRNAME)); } else { Directory parentInfo = getParentInfo(parentId); if (parentInfo != null) { newDirPath = new StringBuffer(parentInfo.getValue(DbConstants.DIRPATH)); newDirPath.append(DbConstants.DIRPATH_COLON); newDirPath.append(parentInfo.getValue(DbConstants.DIRNAME)); } } String newPath = null; if (newDirPath != null) { newDirPath.append(DbConstants.DIRPATH_PIPE); newDirPath.append(parentId); newPath = newDirPath.toString(); } if (WebUtil.isDirTreeCopyEnabled()) { if (dir != null) { String newDirId = null; if (WebUtil.isSanEnabled()) { try { getSanUtils(); sanUtils.copySanDir(srcDirPath, dirName, SanConstants.sanPath, newPath, dirName); } catch (SanException e) { throw new BaseDaoException("copySanDir() cloneDirectory() error, " + directoryId + " error message " + e.getMessage(), e); } } /* * This method adds entries into blobs,tags, dirimages tables * add only empty tags for the time being * future add list of websites, collabrums (not added websites, collabrums) * dont add san directory as we do /bin/cp -r * get dirblobs from the existing directoryId */ boolean addSanFlag = false; addDirectory(dirname, keywords, parentId, scopeid, desc, userId, userLogin, operations, addSanFlag); List files = getBlobsByCategory(directoryId, DbConstants.FILE_CATEGORY, DbConstants.READ_FROM_MASTER); newDirId = getChildDirId(parentId, dirname); if (RegexStrUtil.isNull(newDirId)) { throw new BaseDaoException("newDirId is null, parentId=" + parentId + " dirname=" + dirname); } else { logger.info("newDirId = " + newDirId + " userLogin=" + userLogin + " userId=" + userId + " files.size()=" + files.size()); if (files != null && files.size() > 0) { addStreamBlobs(files, newDirId, userLogin, userId); } } /* recursion for db, cloneDirectory() * directoryId - clone this directoryid * newDirId - parentid of the new dirs that will be cloned */ logger.info("directoryId = " + directoryId + " newDirId = " + newDirId + " userId = " + userId + " userLogin = " + userLogin); cloneNode(directoryId, newDirId, userId, userLogin); deleteCopyDirectory(directoryId, userId); } } else { /** * create a clone from directoryId using its description, name, * addDirectory removes parentId directory from cache. 
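 *
 * For contrast, the tree-copy branch above (taken when WebUtil.isDirTreeCopyEnabled()
 * returns true) does roughly the following, using the same names as this method
 * (a summary sketch of the calls already made above, not new behaviour):
 *
 *   sanUtils.copySanDir(srcDirPath, dirName, SanConstants.sanPath, newPath, dirName); // only when SAN is enabled
 *   addDirectory(dirname, keywords, parentId, scopeid, desc, userId, userLogin,
 *                operations, false);                       // addSanFlag = false
 *   addStreamBlobs(files, newDirId, userLogin, userId);    // copy the file blobs
 *   cloneNode(directoryId, newDirId, userId, userLogin);   // recurse into the subtree
 *   deleteCopyDirectory(directoryId, userId);              // clear the copy buffer
 *
 * whereas the branch below clones only this one directory.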
*/ addDirectory(dirname, keywords, parentId, scopeid, desc, userId, userLogin, operations); List files = getBlobsByCategory(directoryId, DbConstants.FILE_CATEGORY, DbConstants.READ_FROM_MASTER); if (files != null && files.size() > 0) { String newDirId = getChildDirId(parentId, dirname); if (!RegexStrUtil.isNull(newDirId)) { addStreamBlobs(files, newDirId, userLogin, userId); } } deleteCopyDirectory(directoryId, userId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } } /** * undoCutDirectory - undoes the the cut of the directory * delete an entry in the dirmove table * @param directoryId - the directory id * @param userLogin - the user login * @param userId - the user id * @throws BaseDaoException - when error occurs */ public void undoCutDirectory(String directoryId, String userLogin, String userId) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } /** * does not have permission to clone the directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) { return; } /** * Get scalability datasource for dirmove - no partition */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null" + sourceName); } /** * delete an entry from dirmove table that matches the userid */ Connection conn = null; try { conn = ds.getConnection(); dirMoveDeleteQuery.run(conn, directoryId, userId); /** * dirmove has directoryid set in it. */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_MOVE); if (treeCache.exists(fqn, userId)) { treeCache.remove(fqn, userId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException( "undoCutDirectory() error occured while deleting directory from dirmove, directoryId = " + directoryId + " userId " + userId, e1); } throw new BaseDaoException( "undoCutDirectory() error occured while deleting directory from dirmove, directoryId = " + directoryId + " userId " + userId, e); } try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException( "undoCutDirectory() error occured while deleting directory from dirmove, directoryId = " + directoryId + " userId = " + userId, e1); } } private void deleteCopyDirectory(String directoryId, String userId) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for dircopy - no partition */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null" + sourceName); } /** * delete an entry from dircopy table that matches the userid */ Connection conn = null; try { conn = ds.getConnection(); dirCopyDeleteQuery.run(conn, directoryId, userId); Fqn fqn = cacheUtil.fqn(DbConstants.DIR_COPY); if (treeCache.exists(fqn, userId)) { treeCache.remove(fqn, userId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } } catch (Exception e) { try { if (conn != null) { conn.close(); } } 
catch (Exception e1) { throw new BaseDaoException("error occured while deleting directory from dircopy, directoryId = " + directoryId + " userId " + userId, e1); } throw new BaseDaoException("error occured while deleting directory from dircopy, directoryId = " + directoryId + " userId " + userId, e); } try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while deleting directory from dircopy, directoryId = " + directoryId + " userId = " + userId, e1); } } /** * paste the directory into the parent/current directory * @param parentId - the parent id where a directory is being moved * @param userLogin - the user login * @param userId - the user id whose directory is moved or who is the author of this directory * @throws BaseDaoException - when error occurs */ public void pasteDirectory(String parentId, String userLogin, String userId) throws BaseDaoException { if (RegexStrUtil.isNull(parentId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null in pasteDirectory"); } /** * does not have permission to paste the directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(parentId, userId)) { return; } /** * Get scalability datasource for dircopy - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null " + sourceName); } /** * only one directory per user is allowed to be moved, * we don't want to store more than one moves in DB * get the directoryId that needs to be pasted. */ String directoryId = null; String dirName = null; String srcDirPath = null; Object[] myparams = { (Object) userId }; try { Directory obj = isMoveExists(myparams); if (obj != null) { directoryId = ((Directory) obj).getValue(DbConstants.DIRECTORY_ID); } else { throw new BaseDaoException("dirmove for userId is null, userId " + userId); } } catch (BaseDaoException e) { throw new BaseDaoException("isMovExists() error=" + e.getMessage(), e); } if (directoryId == null) { throw new BaseDaoException("directoryId that is being pasted is null"); } /** * get the directory path of the directory that is being moved */ if (!RegexStrUtil.isNull(directoryId)) { Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { removeDirPathsFromCache(((Directory) obj).getValue(DbConstants.DIRPATH)); dirName = ((Directory) obj).getValue(DbConstants.DIRNAME); srcDirPath = ((Directory) obj).getValue(DbConstants.DIRPATH); } else { Directory dir = getDirectory(directoryId, userId, DbConstants.READ_FROM_SLAVE); if (dir != null) { dirName = dir.getValue(DbConstants.DIRNAME); srcDirPath = dir.getValue(DbConstants.DIRPATH); } } } /** * Jboss methods * fqn - full qualified name * check if the parent dirpath already set in the cache * or get it from DB. * If it exists, return the parent dirpath from the cache. 
* This parent is the new parent for the above directoryId */ StringBuffer newDirPath = null; Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, parentId); if (obj != null) { newDirPath = new StringBuffer(((Directory) obj).getValue(DbConstants.DIRPATH)); newDirPath.append(DbConstants.DIRPATH_COLON); newDirPath.append(((Directory) obj).getValue(DbConstants.DIRNAME)); } else { Directory parentInfo = getParentInfo(parentId); if (parentInfo != null) { newDirPath = new StringBuffer(parentInfo.getValue(DbConstants.DIRPATH)); newDirPath.append(DbConstants.DIRPATH_COLON); newDirPath.append(parentInfo.getValue(DbConstants.DIRNAME)); } } String newPath = null; if (newDirPath != null) { newDirPath.append(DbConstants.DIRPATH_PIPE); newDirPath.append(parentId); newPath = newDirPath.toString(); } /** * When a directory is moved, make sure you move all * the subdirectories that belong to subdirectory. */ Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); dirMoveDeleteQuery.run(conn, directoryId, userId); /** * Allowing moves + paste(s) when the directory has children * get from dirtree all the children of the directoryid * update the children (directories) dirpath */ if (WebUtil.isDirTreePasteEnabled()) { if (WebUtil.isSanEnabled()) { try { getSanUtils(); sanUtils.moveSanDir(srcDirPath, dirName, SanConstants.sanPath, newPath, dirName); } catch (SanException e) { throw new BaseDaoException( "moveSanDir() error, " + directoryId + " error message " + e.getMessage(), e); } } // complete isSanEnabled /** * If this directory is moving up in the parent tree * then the path replacement requires change * E dirpath = A|1::B|2::C|3::D|4 (srcDirPath) * Directory E moved upto level B: * E dirpath now will be: A|1::B|2 (newDirPath) * E's children dirpath: A|1::B|2::E|5 */ dirPasteQuery.run(conn, directoryId, newPath); /** * for the mainDirectory that is being moved, set it to true * for children directories, update the path */ updatePath(directoryId, newPath, srcDirPath, conn, true); } else { /** * * if there are no children, do not recurse */ dirPasteQuery.run(conn, directoryId, newPath); } /** * only the top level directory gets affected with the change * in the dirtree parentid, the rest below the directory i.e * subdirs parents remain the same. So the subdirs are not * changed. 
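 *
 * Judging by how newDirPath is assembled above, a dirpath appears to be a chain of
 * "name|id" segments joined with DbConstants.DIRPATH_PIPE and DbConstants.DIRPATH_COLON,
 * e.g. for a directory D nested under A, B and C (matching the example above):
 *
 *   A|1::B|2::C|3
 *
 * so dirPasteQuery and updatePath() rewrite the srcDirPath prefix to newPath for the
 * moved directory and its children, while dirChildUpdateQuery below only reparents
 * the top-level node in dirtree.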
*/ dirChildUpdateQuery.run(conn, directoryId, parentId); } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for pasteDirectory() directoryId =" + directoryId + " parentId = " + parentId, e2); } throw new BaseDaoException("rollback() exception, for pasteDirectory() directoryId =" + directoryId + " parentId = " + parentId, e1); } throw new BaseDaoException( "autocommit() exception, directoryId =" + directoryId + " parentId = " + parentId, e); } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for pasteDirectory() directoryId= " + directoryId + "parentId = " + parentId, e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for commit(), pasteDirectory(), directoryId=" + directoryId + " parentId =" + parentId, e4); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } Fqn moveFqn = cacheUtil.fqn(DbConstants.DIR_MOVE); if (treeCache.exists(moveFqn, userId)) { treeCache.remove(moveFqn, userId); } } /** * copies the directory into copy buffer * we maintain only either one copy or one move for each userid * we delete the move for this userid, if it exists. * @param directoryId - the directory id * @param userLogin - the user login * @param userId - the user id * @throws BaseDaoException - when error occurs */ public void copyDirectory(String directoryId, String userLogin, String userId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null in copyDirectory"); } /** * does not have permission to copy the directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) { return; } /** * Get scalability datasource for dircopy - no partition */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null" + sourceName); } /** * check if this user already has an entry in dirmove table * If it exists, delete the entry */ Object[] myparams = { (Object) userId }; try { Directory obj = isMoveExists(myparams); if (obj != null) { undoCutDirectory(directoryId, userLogin, userId); } } catch (BaseDaoException e) { throw new BaseDaoException("isMovExists() error=" + e.getMessage(), e); } /** * check if this user already has an entry in dircopy table. * If it exists, update the same entry. 
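 *
 * A typical copy-then-clone sequence from a hypothetical caller (assuming
 * "directoryDao" is the Spring-wired DirectoryDao bean) would be:
 *
 *   directoryDao.copyDirectory(directoryId, userLogin, userId);   // buffer the copy in dircopy
 *   directoryDao.cloneDirectory(parentId, userLogin, userId);     // paste it under parentId as a new directory
 *   // or back out with: directoryDao.undoCopyDirectory(directoryId, userLogin, userId);
 *
 * Because only one pending copy or move is kept per user, a repeated copyDirectory()
 * call updates the existing dircopy entry rather than adding a second one.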
*/ boolean updateCopy = false; Fqn fqn = cacheUtil.fqn(DbConstants.DIR_COPY); Object obj = treeCache.get(fqn, userId); if (obj != null) { updateCopy = true; } else { if (RegexStrUtil.isNull(directoryId)) { try { List dResult = dircancopyQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { updateCopy = true; //directoryId = ((Directory)dResult.get(0)).getValue(DbConstants.DIRECTORY_ID); treeCache.put(fqn, userId, dResult.get(0)); } else { throw new BaseDaoException( "error dircancopyQuery, null directoryId " + dircancopyQuery.getSql()); } } catch (Exception e) { throw new BaseDaoException("dircancopyQuery error" + dircancopyQuery.getSql(), e); } } } Connection conn = null; try { conn = ds.getConnection(); if (updateCopy) { dirCopyUpdateQuery.run(conn, directoryId, userId); } else { dirCopyAddQuery.run(conn, directoryId, userId); } fqn = cacheUtil.fqn(DbConstants.DIR_COPY); if (treeCache.exists(fqn, userId)) { treeCache.remove(fqn, userId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException( "error occured while adding/updating directory into dircopy, directoryId = " + directoryId + " updateCopy " + updateCopy, e1); } throw new BaseDaoException("error occured while adding/updating directory into dircopy, directoryId = " + directoryId + " updateCopy " + updateCopy, e); } try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while adding directory into dircopy, directoryId = " + directoryId + " updateCopy = " + updateCopy, e1); } } /** * moves the directory into move buffer * we maintain only either one copy or one move for each userid * we delete the copy for this userid if it exists * * @param directoryId - the directory id * @param userLogin - the user login * @param userId - the user id * @throws BaseDaoException - when error occurs */ public void moveDirectory(String directoryId, String userLogin, String userId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null in moveDirectory"); } /** * does not have permission to move the directory */ if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) { return; } /** * Get scalability datasource for dirmove - no partition */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null" + sourceName); } /** * check if this user already has an entry in dircopy table. * If it exists, remove this entry */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_COPY); Object obj = treeCache.get(fqn, userId); if (obj != null) { deleteCopyDirectory(directoryId, userId); } else { if (RegexStrUtil.isNull(directoryId)) { try { Object[] myparams = { (Object) userId }; List dResult = dircancopyQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { deleteCopyDirectory(directoryId, userId); } } catch (Exception e) { throw new BaseDaoException("error dircancopyQuery()/deleteCopyDirectory(), userLogin=" + userLogin + " directoryId=" + directoryId + " userId=" + userId + " dircancopyQuery= " + dircancopyQuery.getSql()); } } } /** * check if this user already has an entry in dirmove table. * If it exists, update the same entry. 
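 *
 * The matching cut-and-paste sequence from a hypothetical caller (again assuming
 * "directoryDao" is the Spring-wired DirectoryDao bean) looks like:
 *
 *   directoryDao.moveDirectory(directoryId, userLogin, userId);   // buffer the cut in dirmove
 *   directoryDao.pasteDirectory(parentId, userLogin, userId);     // reparent it under parentId
 *   // or back out with: directoryDao.undoCutDirectory(directoryId, userLogin, userId);
 *
 * As with dircopy, at most one dirmove entry is kept per user, so a repeated
 * moveDirectory() call updates the existing entry.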
*/ boolean updateMove = false; Object[] myparams = { (Object) userId }; Directory dir = isMoveExists(myparams); if (dir != null) { updateMove = true; } Connection conn = null; try { conn = ds.getConnection(); if (updateMove) { dirMoveUpdateQuery.run(conn, directoryId, userId); } else { dirMoveAddQuery.run(conn, directoryId, userId); } fqn = cacheUtil.fqn(DbConstants.DIR_MOVE); if (treeCache.exists(fqn, userId)) { treeCache.remove(fqn, userId); } fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while moving directory into dirmove, directoryId = " + directoryId + " updateMove " + updateMove, e1); } throw new BaseDaoException("error occured while moving directory into dirmove, directoryId = " + directoryId + " updateMove " + updateMove, e); } try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while moving directory into dirmove, directoryId = " + directoryId + " updateMove = " + updateMove, e1); } /* try { Object[] params = {(Object)directoryId}; List dResult = directoryParentQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { String parentId = ((Directory)dResult.get(0)).getValue(DbConstants.PARENT_ID); Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, parentId)) { treeCache.remove(fqn, parentId); } } } catch (Exception e) { throw new BaseDaoException("DirectoryParentQuery, dirMove, exception " + directoryParentQuery.getSql(), e); } */ } /** * isAuthor() method * checks if this user is the author of this directory and returns the value of either true or false * @param myparams * @param adminResult * @return boolean * @throws BaseDaoException */ /* private boolean isAuthor(String directoryId, String userId) { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("directoryId is null, isAuthor"); } */ /** * create the key, (directoryid + userId) * get isAuthor value for this userId in this directory */ /* StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(userId); String key = sb.toString(); Fqn fqn = cacheUtil.fqn(DbConstants.DIR_AUTHOR); Object obj = treeCache.get(fqn, key); if (obj != null) { String isAuthor = ((DirAuthor)obj).getValue(DbConstants.IS_AUTHOR); if (!RegexStrUtil.isNull(isAuthor)) { return (isAuthor.equals("1")); } } */ /** * Get scalability datasource for diradmin - no partition */ /* String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, isAuthor() " + sourceName); } List adminResult = null; boolean isAuthor = false; try { Object[] myparams = {(Object)directoryId, (Object)userId}; adminResult = diradminexistsQuery.execute(myparams); if ( (adminResult != null) && (adminResult.size() > 0)) { isAuthor = true; } } catch (BaseDaoException e) { throw new BaseDaoException("exception for isauthor " + diradminexistsQuery.getSql(), e); } DirAuthor author = (DirAuthor)eop.newObject(DbConstants.DIR_AUTHOR); if (isAuthor) { author.setValue(DbConstants.IS_AUTHOR, "1"); } else { author.setValue(DbConstants.IS_AUTHOR, "0"); } treeCache.put(fqn, key, (DirAuthor)author); return isAuthor; } */ /** * isMoveExists() method * checks if this user already has move directory setup * @param myparams - userId params of the object * @return Directory 
that contains the move object of userid and directoryid * @throws BaseDaoException */ private Directory isMoveExists(Object[] myparams) { if (myparams == null) { throw new BaseDaoException("params are null"); } /** * get isMovable value for this userId in this directory * Cannot move to the same directory, set IS_MOVE_PASTE="1" * Cannot move to the directories children, set IS_MOVE_PASTE="1" * if can be moved, set IS_MOVE_PASTE="0" */ Fqn fqn = cacheUtil.fqn(DbConstants.DIR_MOVE); Object obj = treeCache.get(fqn, myparams[0]); if (obj != null) { return (Directory) obj; } /** * Get scalability datasource for dirmove - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, isMovable() " + sourceName); } try { List dResult = dircanmoveQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { treeCache.put(fqn, myparams[0], dResult.get(0)); return (Directory) dResult.get(0); } else { return null; } } catch (Exception e) { throw new BaseDaoException("dirmoveQuery exception, " + dircanmoveQuery.getSql(), e); } } /** * isMovable() method * checks if this directory is movable and returns the value of either "1" or "0" * @param myparams - userId params of the object * @param directoryId - directoryId * @param Directory - the directory * @return String * @throws BaseDaoException */ private String isMovable(Object[] myparams, String directoryId, Directory dir) { if (myparams == null) { throw new BaseDaoException("params are null"); } Directory obj = isMoveExists(myparams); if (obj != null) { if (directoryId.equals(((Directory) obj).getValue(DbConstants.DIRECTORY_ID))) { dir.setValue(DbConstants.IS_MOVE_PASTE, ((Directory) obj).getValue(DbConstants.DIRECTORY_ID)); } else { /** * check if directoryId is the child of dirMove(DirectoryId). 
* It cannot be moved if it is part of the subchildren * So check for the dirpath of dir to make sure it is not * child of the dirMove(directoryId) */ dir.setValue(DbConstants.IS_MOVE_PASTE, "0"); } return "1"; } else { return "0"; } /** * Get scalability datasource for dirmove - not partitioned */ /* String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, isMovable() " + sourceName); } try { List dResult = dircanmoveQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { treeCache.put(fqn, myparams[0], dResult.get(0)); if (directoryId.equals( ((Directory)dResult.get(0)).getValue(DbConstants.DIRECTORY_ID))) { dir.setValue(DbConstants.IS_MOVE_PASTE, "1"); } else { dir.setValue(DbConstants.IS_MOVE_PASTE, "0"); } return("1"); } return("0"); } catch (Exception e) { throw new BaseDaoException("dirmoveQuery exception, " + dircanmoveQuery.getSql(), e); } */ } /** * isMoveAndPaste() method * checks if this user can paste this directory returns the value of either "1" or "0" * @param directoryId * @param userId * @return String * @throws BaseDaoException */ private String isMoveAndPaste(String directoryId, String userId) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null, in isMoveAndPaste in DirectoryDao "); } /** * Get scalability datasource for dirmove - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, isMoveAndPaste() " + sourceName); } try { Object params[] = { (Object) directoryId, (Object) userId }; List dResult = dirMoveExistsQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { return ("1"); } return ("0"); } catch (Exception e) { throw new BaseDaoException("dirMoveExistsQuery exception, " + dirMoveExistsQuery.getSql(), e); } } /** * isCopyable() method * checks if this user has any copyable directories, and returns the value of either "1" or "0" * @param myparams * @return String * @throws BaseDaoException */ private String isCopyable(Object[] myparams) { if (myparams == null) { throw new BaseDaoException("myparams are null, in isCopyable in DirectoryDao "); } /** * get isCopyable value for this userId in this directory */ Fqn copyFqn = cacheUtil.fqn(DbConstants.DIR_COPY); Object obj = treeCache.get(copyFqn, myparams[0]); if (obj != null) { return ("1"); } /** * Get scalability datasource for dircopy - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, isCopyable() " + sourceName); } try { List dResult = dircancopyQuery.execute(myparams); if ((dResult != null) && (dResult.size() > 0)) { treeCache.put(copyFqn, myparams[0], dResult.get(0)); return ("1"); } return ("0"); } catch (Exception e) { throw new BaseDaoException("dircopyQuery exception, " + dircancopyQuery.getSql(), e); } } /** * Get subdirectories based on directoryId. (sorted alphabetically) * The author previleges are set for each subdirectory. These are applicable only when the login is valid. 
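 * When no alphabet filter is supplied and the directory has more children than
 * GlobalConst.maxDisplayDirs, the listing falls back to the letter "A" and the
 * alphabet-filtered query is used instead; paraphrasing the body below:
 *
 *   if (RegexStrUtil.isNull(alphabet) && numChildren > GlobalConst.maxDisplayDirs) alphabet = "A";
 *   childrenResult = RegexStrUtil.isNull(alphabet)
 *                        ? dirchildrenQuery.execute(params)              // params = { directoryId }
 *                        : dirChildrenAlphabetQuery.execute(params);     // params = { directoryId, alphabet }
 *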
* @param directoryId * @param isLoginValid * @param alphabet * @return List * @throws BaseDaoException */ private List getSubDirs(Directory dir, String directoryId, boolean isLoginValid, String userId, String userLogin, int accessFlag, String alphabet) { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for dirtree, dirscope, directory - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSubDirs() " + sourceName); } /** * if the maxsize of this directory children is more than maxDisplayDirs * set the alphabet to "A" if it is not already set. */ logger.info("alphbet =" + alphabet); if (RegexStrUtil.isNull(alphabet)) { logger.info("alphbet is null"); String StrNumChildren = getChildCount(directoryId, DbConstants.READ_FROM_MASTER); if (!RegexStrUtil.isNull(StrNumChildren)) { int numChildren = new Integer(StrNumChildren).intValue(); logger.info("numChildren=" + numChildren + " Maxdisplaydirs = " + GlobalConst.maxDisplayDirs); if (numChildren > GlobalConst.maxDisplayDirs) { alphabet = "A"; logger.info("alphbet is set to A =" + alphabet); } } } List childrenResult = null; if (RegexStrUtil.isNull(alphabet)) { logger.info("alphbet is null calling all dirChildrenQuery"); try { Object[] params = { (Object) directoryId }; childrenResult = dirchildrenQuery.execute(params); } catch (BaseDaoException e) { throw new BaseDaoException("dirchildrenQuery exception, " + dirchildrenQuery.getSql() + " directoryId = " + directoryId + e.getMessage(), e); } } else { try { String queryName = scalabilityManager.getReadZeroScalability("dirchildrenalphabetquery"); logger.info("alphabet is not null, calling all dirChildrenAlphabetQuery"); dirChildrenAlphabetQuery = getQueryMapper().getQuery(queryName); if (dirChildrenAlphabetQuery == null) { throw new BaseDaoException("dirChildrenAlphabetQuery is null"); } Object[] params = { (Object) directoryId, (Object) alphabet }; childrenResult = dirChildrenAlphabetQuery.execute(params); } catch (BaseDaoException e) { throw new BaseDaoException("dirChildrenAlphabetQuery exception, " + dirChildrenAlphabetQuery.getSql() + " directoryId = " + directoryId + e.getMessage(), e); } } /** * Set properties for subdirectories (NUM_DIRS, IS_AUTHOR, IS_LEAFNODE) */ if (childrenResult != null) { int numHide = setSubDirProperties(childrenResult, isLoginValid, userId, userLogin, accessFlag); dir.setValue(DbConstants.NUM_HIDE, new Integer(numHide).toString()); } return childrenResult; } private int setSubDirProperties(List childrenResult, boolean isLoginValid, String userId, String userLogin, int accessFlag) throws BaseDaoException { /** * get subdirs admin information, used for setting isauthor, * if the user is a valid login */ Directory subdir = null; String subdir_id = null; int numHide = 0; if (childrenResult != null) { for (int i = 0; i < childrenResult.size(); i++) { if (childrenResult.get(i) != null) { /** * get child directoryId */ subdir = (Directory) childrenResult.get(i); if (subdir == null) { continue; } subdir_id = subdir.getValue(DbConstants.DIRECTORY_ID); if (!WebUtil.isHomeDirDeleteEnabled()) { if (WebUtil.isSanEnabled()) { if (isHomeDirectory(subdir_id, userLogin, userId)) { subdir.setValue(DbConstants.HOME_DIR, "1"); } else { subdir.setValue(DbConstants.HOME_DIR, "0"); } } else { subdir.setValue(DbConstants.HOME_DIR, "0"); } } if (WebUtil.isSanEnabled()) { try { String 
usedQuota = sanUtils.getUsedQuota(subdir.getValue(DbConstants.DIRNAME), SanConstants.sanPath, subdir.getValue(DbConstants.DIRPATH)); subdir.setValue(DbConstants.USED_QUOTA, usedQuota); } catch (Exception e) { throw new BaseDaoException( "Error in getting the quota size for subdirectory = " + subdir_id); } } /** * set cardinality (num children) */ subdir.setValue(DbConstants.NUM_DIRS, getChildCount(subdir_id, accessFlag)); if (isLoginValid) { /** * set isAuthor */ if (diaryAdmin.isDiaryAdmin(userLogin) || isAuthor(subdir_id, userId)) { subdir.setValue(DbConstants.IS_AUTHOR, "1"); } else { subdir.setValue(DbConstants.IS_AUTHOR, "0"); } /** * set leafnode (does this directory have subdirs/websites/collabrums) * set default settings, that it is a leaf node. */ if (childrenExist(subdir_id)) { /** * check for subdirectories * It has subdirectories, cannot delete * if it has only collabrums, websites, can delete */ if (isChildASubDir(subdir_id)) { subdir.setValue(DbConstants.CAN_DELETE, "0"); } else { subdir.setValue(DbConstants.CAN_DELETE, "1"); } subdir.setValue(DbConstants.IS_LEAFNODE, "0"); if (isAccessAllowed(subdir_id, userLogin, userId, accessFlag)) { subdir.setValue(DbConstants.STATUS, "0"); } else { subdir.setValue(DbConstants.STATUS, "1"); numHide++; } } else { // where it does not have children subdir.setValue(DbConstants.CAN_DELETE, "1"); subdir.setValue(DbConstants.IS_LEAFNODE, "1"); /** * A member can be blocked on a directory that contains no subdirectories * no collabrums, no websites(urls) etc */ if (isThisMemberBlockedFromDirectory(subdir_id, userId)) { subdir.setValue(DbConstants.BLOCK_MEMBER, "1"); } else { subdir.setValue(DbConstants.BLOCK_MEMBER, "0"); } if (isAccessAllowed(subdir_id, userLogin, userId, accessFlag)) { subdir.setValue(DbConstants.STATUS, "0"); } else { subdir.setValue(DbConstants.STATUS, "1"); numHide++; } } } // if validlogin } // if (childrenResult.get(i)) } // for (int) } // if (childrenResult != null) return numHide; } /** * gets list of directories that are blocked for this directory * @param loginId - the login id * @returns List - the list of members */ private List getBlockedMembersForDirectory(String loginId) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } /** * get directories value for this userId in this directory */ Fqn fqn = cacheUtil.fqn(DbConstants.AUTHOR_BLOCKED_DIRS); Object obj = treeCache.get(fqn, loginId); if (obj != null) { return (List) obj; } /** * Get scalability datasource for dirblock - partitioned on loginId */ String sourceName = scalabilityManager.getReadBlobScalability(loginId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, " + sourceName); } try { Object[] params = { (Object) loginId }; List result = blockDirectoryQuery.execute(params); if (result != null && (result.size() > 0)) { treeCache.put(fqn, loginId, result); } return result; } catch (Exception e) { throw new BaseDaoException("blockDirectoryQuery " + blockDirectoryQuery.getSql(), e); } } /** * Checks if this user is blocked for this directory * @param directoryId - directory Id * @param userId - user id * @returns boolean - true (blocked) or false (not blocked) */ private boolean isThisMemberBlockedFromDirectory(String directoryId, String userId) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } List blockedList = getBlockedMembersForDirectory(userId); if (RegexStrUtil.isNull(userId) || (blockedList == 
null)) { throw new BaseDaoException("params are null"); } boolean yes = false; if ((blockedList != null) && (blockedList.size() > 0)) { for (int i = 0; i < blockedList.size(); i++) { if (((DirAuthor) blockedList.get(i)).getValue(DbConstants.DIRECTORY_ID).equals(directoryId)) { yes = true; } } DirAuthor member = (DirAuthor) eop.newObject(DbConstants.DIR_AUTHOR); if (member != null) { member.setValue(DbConstants.DIRECTORY_ID, directoryId); if (blockedList.contains(member)) { return true; } } } if (yes) return true; return false; } /** * Checks if this user is allowed access for this directory * @param directoryId - directory Id * @param userLogin - user Login * @param userId - user id * @param accessFlag - access flag * @returns boolean - true (allowed) or false (not allowed) */ private boolean isAccessAllowed(String directoryId, String userLogin, String userId, int accessFlag) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } // access is allowed for admin and authors if (diaryAdmin.isDiaryAdmin(userLogin) || isAuthor(directoryId, userId)) { return true; } /* * check the scope to see if this directory is user assigned scope */ DirScope dirscope = getDirectoryScope(directoryId); String scopeid = null; if (dirscope != null) { scopeid = dirscope.getValue(DbConstants.SCOPE_ID); } else { throw new BaseDaoException("dirscope is null, for directoryid= " + directoryId); } if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid is null for directoryid= " + directoryId); } // check for specific user access for these directories boolean checkAccess = false; if (scopeid.equals(dirScope.getUserspecificscopeid())) { checkAccess = true; } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { checkAccess = true; } } if (checkAccess) { /** Do additional checks from ldap to sync in new users that belong to this group * scenarios: * 1. Check if the user is diradmin or diruser. * 2. If the user is not a diradmin or diruser, check the group that the * user belongs to, is it the same as diradmin's group. * 2.1 If diradmin group is same as this users group, add the user * 2.2 If not the same group, deny access to him. 
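 *
 * In outline, isAccessAllowed() decides as follows (a sketch, not verbatim code):
 *
 *   if (diaryAdmin.isDiaryAdmin(userLogin) || isAuthor(directoryId, userId))  -> allowed
 *   else if the scope is dirScope.getUserspecificscopeid() or dirScope.getUsergroupspecificscopeid()
 *       -> allowed only when isDirUserAllowedQuery returns at least one row for (directoryId, userId)
 *   else -> allowed (all other scopes are treated as open)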
*/ String queryName = null; if (accessFlag == 1) { queryName = scalabilityManager.getWriteZeroScalability("isdiruserallowedQuery"); } else { queryName = scalabilityManager.getReadZeroScalability("isdiruserallowedQuery"); } logger.info("directoryid = " + directoryId + " userId = " + userId); isDirUserAllowedQuery = getQueryMapper().getQuery(queryName); Object params[] = { directoryId, userId }; List result = null; try { result = isDirUserAllowedQuery.execute(params); } catch (Exception e) { throw new BaseDaoException( "error in IsDirUserAllowedQuery " + isDirUserAllowedQuery.getSql() + e.getMessage(), e); } if (result != null && result.size() > 0) { return true; // access is allowed } else { return false; // access is not allowed /** * For LDAP users, these scopes are valid and we need to add this * user either as author or user based on the directory permission * on the scope */ /* if (WebUtil.isLdapActive()) { boolean allowUser = false; if (scopeid.equals(dirScope.getShareusergroupspecificscopeid())) { allowUser = true; } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { if (isSameGroup(userLogin, userId, directoryId)) { allowUser = true; } } } if (allowUser) { return true; */ /** * Dont check permission to add this user as it is ldap based group * entries for adding (flag set to false) * if the operations for directory scope is allowing all users as authors * add this user as author otherwise add this userLogin as directory user. * Let the admin later add this user as author. */ /* if (dirscope.getValue(DbConstants.OPERATIONS).equals(dirScope.getAllasauthorsoperation())) { try { addAuthor(directoryId, userLogin, "" , "", false); return true; } catch (Exception e1) { throw new BaseDaoException("Error in DirectoryAbstractDao, addAuthor(), directoryId=" + directoryId + " error message=" + e1.getMessage()); } } else { try { addUser(directoryId, userLogin, "" , "", false); return true; } catch (Exception e2) { throw new BaseDaoException("Error in DirectoryAbstractDao, addUser(), directoryId=" + directoryId + " error message=" + e2.getMessage()); } } } else { return false; // access is denied } // allowUser } // LdapActive */ } // else } return true; } /** * isSameGroup - applies when the LDAP is enabled and the usergroup specific scopes are set. 
* This method checks if the user and the diaryauthor(s) * @param userLogin - user login * @param userId - user id * @param directoryid - directory id * @return boolean - true if the user is in the same group as the author of this directory * false otherwise * @throws - BaseDaoException when error */ public boolean isSameGroup(String userLogin, String userId, String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } String[] groups = getGroups(userLogin, userId, directoryId); if (groups != null && groups.length > 0) { String authorGroup = groups[0]; String userGroup = groups[1]; if ((!RegexStrUtil.isNull(authorGroup)) && authorGroup.equals(userGroup)) { return true; } else { return false; } } return false; } /** * getGroups - get the groups information * @param userLogin * @param userId * @param directoryId * @return String[] of groups (directory group and userLogin's group) * @return throws BaseDaoException */ private String[] getGroups(String userLogin, String userId, String directoryId) throws BaseDaoException { logger.info("getGroups() should be invoked when Ldap is enabled " + WebUtil.isLdapActive()); if (WebUtil.isLdapActive()) { try { List authors = getAuthors(directoryId, DbConstants.READ_FROM_SLAVE); if (authors == null) { throw new BaseDaoException("no authors for this directory were found, dirid =" + directoryId); } else { String dirAuthorId = (String) ((Directory) authors.get(0)).getValue(DbConstants.OWNER_ID); if (RegexStrUtil.isNull(dirAuthorId)) { throw new BaseDaoException( "getGroups(), dirAuthorId is null from dirAdmins for dirId = " + directoryId); } else { Hdlogin hdlogin = getLogin(dirAuthorId); if (hdlogin == null) { throw new BaseDaoException("getGroups() is null for dirAuthorId " + dirAuthorId + " directoryid " + directoryId); } else { Hdlogin userLoginHdLogin = getLoginid(userLogin); logger.info("invoking getUserGroupInfo()"); String[] groups = { LdapUtil.getUserGroupInfo(LdapConstants.ldapAttrMail, hdlogin.getValue(DbConstants.EMAIL)), LdapUtil.getUserGroupInfo(LdapConstants.ldapAttrMail, userLoginHdLogin.getValue(DbConstants.EMAIL)) }; if (groups != null) { for (int i = 0; i < groups.length; i++) { logger.info("groups[i] = " + groups[i]); } } return groups; } } } } catch (Exception e) { throw new BaseDaoException( "error either in getAuthors()/LdapUtil.getUserGroupInfo() for login or adminUser for a directoryid = " + directoryId + e.getMessage(), e); } } else { logger.info("ldap is not active or enabled, returning groups as null."); return null; } } /** * getDirectoryGroup() - get the group for this directory * @param userLogin * @returns String group * @throws Exception */ public String getDirectoryGroup(String userLogin) throws BaseDaoException { logger.info("getDirectoryGroup(String userlogin) applies only when Ldap is enabled."); if (WebUtil.isLdapActive()) { if (!RegexStrUtil.isNull(userLogin)) { Hdlogin userLoginHdLogin = getLoginid(userLogin); logger.info("invoking getUserGroupInfo()"); try { String group = LdapUtil.getUserGroupInfo(LdapConstants.ldapAttrMail, userLoginHdLogin.getValue(DbConstants.EMAIL)); return group; } catch (Exception e) { throw new BaseDaoException("LdapUtil.getUserGroupInfo() " + e.getMessage(), e); } } } return null; } /** * getDirectoryGroupAndAreaInfo() - get group and division info * for this directory user * @param userLogin * @param loginId * @param directoryId * @returns String authors group and division info * 
ou=<group>,divisionname * @throws Exception */ public String getDirectoryGroupAndAreaInfo(String userLogin, String loginId, String directoryId) throws BaseDaoException { logger.info("getDirectoryGroupAndAreaInfo entered"); if (!WebUtil.isLdapActive()) { throw new BaseDaoException( "getDirectoryGroupAndAreaInfo() cannot be accessed when ldap is not enabled, userLogin = " + userLogin); } if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, loginId))) { throw new BaseDaoException( "user is neither a diaryadmin nor an author to manage users/authors for DirectoryGroupAndAreaInfo, directoryId= " + directoryId + " userLogin= " + userLogin); } /* * check the scope to see if this directory is user assigned scope */ DirScope dirscope = getDirectoryScope(directoryId); String scopeid = dirscope.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid is null for directoryid= " + directoryId); } // check for specific user access for these directories // ldapaccess boolean getGroup = false; if (WebUtil.isLdapActive()) { if (scopeid.equals(dirScope.getShareusergroupspecificscopeid())) { getGroup = true; } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { getGroup = true; } } } logger.info("getGroup = " + getGroup); /** * This method returns the directory group as groups[0] and * the users group as group[1] */ if (getGroup) { if (WebUtil.isLdapActive()) { String[] groups = getGroupAndArea(userLogin, loginId, directoryId); if (groups != null && groups.length > 0) { logger.info("groups.length = " + groups.length); for (int i = 0; i < groups.length; i++) { logger.info("group[" + i + "]" + groups[i]); } // authorGroup and AreaInfo if (!RegexStrUtil.isNull(groups[0])) { logger.info("groups[0] = " + groups[0]); return groups[0]; } } } } return null; } /** * getGroupAndArea - get the group and area information * @param userLogin * @param userId * @param directoryId * @return String[] of group and division info * format is ou=<groupname>,divisionname * @return throws BaseDaoException */ private String[] getGroupAndArea(String userLogin, String userId, String directoryId) throws BaseDaoException { logger.info("getGroupAndArea() should be invoked when Ldap is enabled." 
+ WebUtil.isLdapActive()); if (WebUtil.isLdapActive()) { try { List authors = getAuthors(directoryId, DbConstants.READ_FROM_SLAVE); if (authors == null) { throw new BaseDaoException("no authors for this directory were found, dirid =" + directoryId); } else { String dirAuthorId = (String) ((Directory) authors.get(0)).getValue(DbConstants.OWNER_ID); if (RegexStrUtil.isNull(dirAuthorId)) { throw new BaseDaoException( "getGroupAndArea(), dirAuthorId is null from dirAdmins for dirId = " + directoryId); } else { Hdlogin hdlogin = getLogin(dirAuthorId); if (hdlogin == null) { throw new BaseDaoException("getGroupAndArea() is null for dirAuthorId " + dirAuthorId + " directoryid " + directoryId); } else { Hdlogin userLoginHdLogin = getLoginid(userLogin); String[] groups = { LdapUtil.getUserGroupAndAreaInfo(LdapConstants.ldapAttrMail, hdlogin.getValue(DbConstants.EMAIL)), LdapUtil.getUserGroupAndAreaInfo(LdapConstants.ldapAttrMail, userLoginHdLogin.getValue(DbConstants.EMAIL)) }; logger.info("getGroupAndArea = " + groups.toString()); return groups; } } } } catch (Exception e) { throw new BaseDaoException( "error either in getAuthors()/LdapUtil.getUserGroupInfo() for login or adminUser for a directoryid = " + directoryId + e.getMessage(), e); } } else { logger.info("ldap is not active, returning groups as null"); return null; } } /** * isShareGroupEnabled() - check if the scope of the directory * is share with other users in other groups * @param userLogin * @param loginId * @param directoryId * @returns boolean - true if group scope is enabled else false * @throws Exception */ public boolean isShareGroupEnabled(String userLogin, String loginId, String directoryId) throws BaseDaoException { if (!WebUtil.isLdapActive()) { throw new BaseDaoException( "isShareGroupEnabled() cannot be accessed when ldap is not enabled, userLogin = " + userLogin); } if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, loginId))) { throw new BaseDaoException( "user is neither a diaryadmin nor an author to manage users/authors for directory, directoryId= " + directoryId + " userLogin= " + userLogin); } /* * check the scope to see if this directory is user assigned scope */ DirScope dirscope = getDirectoryScope(directoryId); String scopeid = dirscope.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("isShareGroupEnabled() scopeid is null for directoryid= " + directoryId); } // check for specific user access for these directories if (scopeid.equals(dirScope.getShareusergroupspecificscopeid())) { return true; } else { return false; } } /** * isUserGroupEnabled() - check if the scope of the * directory is only user group enabled * not shared with other groups or users * @param userLogin * @param loginId * @param directoryId * @returns boolean - true if group scope is enabled else false * @throws Exception */ public boolean isUserGroupEnabled(String userLogin, String loginId, String directoryId) throws BaseDaoException { if (!WebUtil.isLdapActive()) { throw new BaseDaoException( "isUserGroupEnabled() cannot be accessed when ldap is not enabled, userLogin = " + userLogin); } if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, loginId))) { throw new 
BaseDaoException( "user is neither a diaryadmin nor an author to manage users/authors for directory, directoryId= " + directoryId + " userLogin= " + userLogin); } /* * check the scope to see if this directory is user assigned scope */ DirScope dirscope = getDirectoryScope(directoryId); String scopeid = dirscope.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid is null for directoryid= " + directoryId); } // check for specific user access for these directories if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { return true; } return false; } /** * getDirectoryGroup() - get the group for this directory * @param userLogin * @param loginId * @param directoryId * @returns String group type of this directory (i.e authors group) * @throws Exception */ public String getDirectoryGroup(String userLogin, String loginId, String directoryId) throws BaseDaoException { if (WebUtil.isLdapActive()) { if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } if ((!diaryAdmin.isDiaryAdmin(userLogin)) && (!isAuthor(directoryId, loginId))) { throw new BaseDaoException( "user is neither a diaryadmin nor an author to manage users/authors for directory, directoryId= " + directoryId + " userLogin= " + userLogin); } logger.info("getDirectoryGroup isAuthor"); /* * check the scope to see if this directory is user assigned scope */ DirScope dirscope = getDirectoryScope(directoryId); String scopeid = dirscope.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid is null for directoryid= " + directoryId); } // check for specific user access for these directories boolean getGroup = false; if (scopeid.equals(dirScope.getShareusergroupspecificscopeid())) { getGroup = true; } else { if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { getGroup = true; } } /** * This method returns the directory group as groups[0] and * the users group as group[1] */ if (getGroup) { logger.info("getGroup = " + getGroup); String[] groups = getGroups(userLogin, loginId, directoryId); if (groups != null && groups.length > 0) { logger.info("groups.length = " + groups.length); // authorGroup if (!RegexStrUtil.isNull(groups[0])) { return groups[0]; } else { return null; } } } else { return null; } } else { return null; } return null; } /** * gets the blob information, including default value without the blob data * This method is called when blob information needs to be update * @param entryId * @param directoryId * @return Photo - any stream blob * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public Photo getPhoto(String entryId, String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(entryId) || RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** Jboss methods * directoryid is not partitioned, so it is unique across all the partitions * entryId is not unique across all partitions (partitions are based on directoryid) * fqn - full qualified name made with (directoryid + entryId) * check if the userpage already set in the cache * If it exists, return the userpage from the cache. 
*/ StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(entryId); String key = sb.toString(); Fqn fqn = cacheUtil.fqn(DbConstants.DIR_PHOTO); Object obj = treeCache.get(fqn, key); if (obj != null) { return (Photo) obj; } /** * Get scalability datasource for dirblob - partitioned on directoryId */ String sourceName = scalabilityManager.getReadBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getPhoto() " + sourceName); } try { Object[] params = { (Object) entryId }; List result = blobQuery.execute(params); if ((result != null) && (result.size() > 0)) { treeCache.put(fqn, key, (Photo) result.get(0)); return (Photo) result.get(0); } } catch (Exception e) { throw new BaseDaoException("blobQuery exception, directoryid = " + directoryId + " entryId = " + entryId + " " + blobQuery.getSql(), e); } return null; } /** * gets the stream blob with blob data * This method is called when stream blob is to displayed * @param entryId * @param directoryId * @return Photo - any stream blob * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect */ public Photo getStreamBlob(String entryId, String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(entryId) || RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** Jboss methods * directoryid is not partitioned, so it is unique across all the partitions * entryId is not unique across all partitions (partitions are based on directoryid) * fqn - full qualified name made with (directoryid + entryId) * check if the userpage already set in the cache * If it exists, return the userpage from the cache. */ StringBuffer sb = new StringBuffer(directoryId); sb.append("-"); sb.append(entryId); String key = sb.toString(); Fqn fqn = cacheUtil.fqn(DbConstants.DIR_STREAM_BLOB); Object obj = treeCache.get(fqn, key); if (obj != null) { logger.info("getting it from cache, entryId" + entryId + " directoryid = " + directoryId); return (Photo) obj; } /** * Get scalability datasource for dirblob - partitioned on directoryId */ String sourceName = scalabilityManager.getReadBlobScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getStreamBlob() " + sourceName); } try { Object[] params = { (Object) entryId }; List result = onestreamblobQuery.execute(params); if ((result != null) && (result.size() > 0)) { if ((Photo) result.get(0) != null) { if (WebUtil.isSanEnabled()) { Photo photo = (Photo) result.get(0); photo.setBlob(getSanBlobDir(directoryId, photo.getValue(DbConstants.BTITLE))); } treeCache.put(fqn, key, (Photo) result.get(0)); return (Photo) result.get(0); } else { return null; } } } catch (Exception e) { throw new BaseDaoException("onestreamblobQuery exception, directoryid = " + directoryId + " entryId = " + entryId + " " + onestreamblobQuery.getSql(), e); } return null; } /** * This method checks if this member is blocked in any social networks * This method is used to display blocked member * userId - the user id * userLogin - the user login * @return List of collabrums that are blocked for this user * @throws BaseDaoException */ private List listBlockedCollabrums(String userId) throws BaseDaoException { if (RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } /** Jboss methods * fqn - full qualified name * check if the blocked collabrums already set in the cache * If it exists, 
return the blocked collabrums from the cache. */ Fqn fqn = cacheUtil.fqn(DbConstants.BLOCKED_COLLABRUM_LIST); Object obj = treeCache.get(fqn, userId); if (obj != null) { return (List) obj; } /** * Get scalability datasource for collblock partitioned on loginid */ String sourceName = scalabilityManager.getReadScalability(userId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, listBlockedCollabrums() " + sourceName + " userId = " + userId); } try { Object[] params = { (Object) userId }; List result = listBlockedCollabrumsQuery.execute(params); if (result != null && (result.size() > 0)) { treeCache.put(fqn, userId, result); } return result; } catch (Exception e) { throw new BaseDaoException( "listBlockedCollabrumsQuery for collabrum " + listBlockedCollabrumsQuery.getSql(), e); } } private boolean isThisMemberBlockedFromCollabrum(String collabrumId, String userId) { if (RegexStrUtil.isNull(collabrumId) || RegexStrUtil.isNull(userId)) { throw new BaseDaoException("params are null"); } List blockedSet = listBlockedCollabrums(userId); if ((blockedSet != null) && (blockedSet.size() > 0)) { for (int i = 0; i < blockedSet.size(); i++) { if (((Collabrum) blockedSet.get(i)).getValue(DbConstants.COLLABRUM_ID) != null) { if (((Collabrum) blockedSet.get(i)).getValue(DbConstants.COLLABRUM_ID).equals(collabrumId)) { return true; } } } } return false; } /* * Gets collabrums * This method is called by viewDirectory * @param directoryId * @param isLoginValid * @param userId * @return List of collabrums */ private List getCollabrums(String directoryId, boolean isLoginValid, String userId, String userLogin) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null in getCollabrums()"); } /** Jboss methods * fqn - full qualified name */ Fqn fqn = cacheUtil.fqn(DbConstants.COLLABRUM_LIST); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { if ((obj != null) && (((List) obj).size() > 0)) { setCollInformation((List) obj, isLoginValid, userId, userLogin); } return (List) obj; } /** * Did not find in cache, get it from DB * Get scalability datasource for dircoll, collabrum, colladmin - no partition */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getCollabrums() " + sourceName + " directoryId = " + directoryId); } try { Object[] params = { (Object) directoryId }; List dResult = collabrumsQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { setCollInformation(dResult, isLoginValid, userId, userLogin); treeCache.put(fqn, directoryId, dResult); return dResult; } } catch (BaseDaoException e) { throw new BaseDaoException( "collabrumQuery exception for directoryId = " + directoryId + collabrumsQuery.getSql(), e); } return null; } private void setCollInformation(List dResult, boolean isLoginValid, String userId, String userLogin) { if ((dResult != null) && (dResult.size() > 0)) { /** * As users can be different, we need to check for this userLogin */ for (int i = 0; i < dResult.size(); i++) { Collabrum collabrum = ((Collabrum) dResult.get(i)); if (collabrum == null) continue; String collabrumId = collabrum.getValue(DbConstants.COLLABRUM_ID); if (isLoginValid) { if (isThisMemberBlockedFromCollabrum(collabrumId, userId)) { collabrum.setValue(DbConstants.BLOCK_MEMBER, "1"); } else { collabrum.setValue(DbConstants.BLOCK_MEMBER, "0"); } } else { 
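                // no valid login (anonymous view): default the blocked-member flag to "0"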
collabrum.setValue(DbConstants.BLOCK_MEMBER, "0"); } /** * get list of organizers for each collabrum */ List organizersList = null; Fqn colFqn = cacheUtil.fqn(DbConstants.ORGANIZERS); Object obj = treeCache.get(colFqn, collabrumId); if (obj != null) { organizersList = (List) obj; } else { try { Object[] myparams = { (Object) collabrumId }; organizersList = getOrganizersQuery.execute(myparams); if ((organizersList != null) && (organizersList.size() > 0)) { treeCache.put(colFqn, collabrumId, organizersList); } } catch (Exception e) { throw new BaseDaoException("getOrganizersQuery, exception collabrumid = " + collabrumId + getOrganizersQuery.getSql(), e); } } /** * if organizers list is missing in cache, add it to cache */ if ((organizersList != null) && (organizersList.size() > 0)) { /** * check if this user is organizer for this collabrum */ if (isLoginValid) { if (diaryAdmin.isDiaryAdmin(userLogin) || isUserCollOrganizer(organizersList, userId)) { collabrum.setValue(DbConstants.IS_ORGANIZER, "1"); } } else { collabrum.setValue(DbConstants.IS_ORGANIZER, "0"); } } /** * msg exists */ if (doesTopicExist(collabrumId)) { collabrum.setValue(DbConstants.MSG_EXISTS, "1"); } else { collabrum.setValue(DbConstants.MSG_EXISTS, "0"); } } } } private boolean isUserCollOrganizer(List organizers, String userId) { if ((organizers != null) && (organizers.size() > 0)) { Member organizer = (Member) eop.newObject(DbConstants.MEMBER); if (organizer != null) { organizer.setValue(DbConstants.LOGIN_ID, userId); if (organizers.contains(organizer)) { return true; } } } return false; } /** * checks if this topic exists in this collabrum * @param collabrumId - the collabrum id * @return boolean * @throws BaseDaoException when there is an error in retrieving it from database */ private boolean doesTopicExist(String collabrumId) throws BaseDaoException { /** Jboss methods * fqn - full qualified name * check if the userpage already set in the cache * If it exists, return the userpage from the cache. 
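         * If a cached Collabrum is found, its MSG_EXISTS flag ("1"/"0") is reused
         * and the colltopics query below is skipped.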
*/ Fqn fqn = cacheUtil.fqn(DbConstants.COLLABRUM); Object obj = treeCache.get(fqn, collabrumId); if (obj != null) { Collabrum coll = (Collabrum) obj; String msgExists = coll.getValue(DbConstants.MSG_EXISTS); if (!RegexStrUtil.isNull(msgExists)) { return msgExists.equals("1"); } } /** * Get scalability datasource for colltopics - partitioned on collabrumId */ String sourceName = scalabilityManager.getReadScalability(collabrumId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, doesTopicExist() " + sourceName); } try { List result = topicExistsQuery.execute(collabrumId); if ((result != null) && (result.size() > 0)) { return true; } } catch (Exception e) { throw new BaseDaoException("exception error in ColTopicExistsQuery , " + topicExistsQuery.getSql() + " collabrumId = " + collabrumId, e); } return false; } /** * Gets the dirpath, dirname based on directoryId * @param directoryId the directoryId * @return Directory bean * @throws BaseDaoException when error occurs */ private Directory getParentInfo(String directoryId) throws BaseDaoException { /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() in getParentInfo " + sourceName); } /** * Get the directory based on directoryId */ List result = null; Object[] params = { (Object) directoryId }; try { result = dirnameQuery.execute(directoryId); } catch (Exception e) { throw new BaseDaoException( "error occured while getting directory, directoryId = " + directoryId + dirnameQuery.getSql(), e); } if (result != null && result.size() > 0) { return (Directory) result.get(0); } return null; } /** * getTopDirectory() - get the top level directoryid */ private String getTopDirectory() { /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getTopDirectory() " + sourceName); } try { List result = directoryTopQuery.execute(); if (result != null && result.size() > 0) { return ((Directory) result.get(0)).getValue(DbConstants.DIRECTORY_ID); } return null; } catch (Exception e) { throw new BaseDaoException("error in result " + directoryTopQuery.getSql()); } } private String getChildCount(String directoryId, int accessFlag) { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for directory - not partitioned */ String queryName = null; if (accessFlag == 1) { queryName = scalabilityManager.getWriteZeroScalability("dirchildcountQuery"); } else { queryName = scalabilityManager.getReadZeroScalability("dirchildcountQuery"); } dirChildCountQuery = getQueryMapper().getQuery(queryName); try { Object[] params = { (Object) directoryId }; List result = dirChildCountQuery.execute(params); if (result != null && result.size() > 0) { if ((Directory) result.get(0) != null) { String count = ((Directory) result.get(0)).getValue(DbConstants.COUNT_SUM); return count; } } else { return "0"; } } catch (Exception e) { throw new BaseDaoException("error in result " + dirChildCountQuery.getSql(), e); } return "0"; } public String getDirectoryId(String dirName) { if (RegexStrUtil.isNull(dirName)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for directory - 
not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getDirectoryId() " + sourceName); } try { Object[] params = { (Object) dirName }; List result = directoryIdQuery.execute(params); if (result != null && result.size() > 0) { return ((Directory) result.get(0)).getValue(DbConstants.DIRECTORY_ID); } return null; } catch (Exception e) { throw new BaseDaoException("error in result " + directoryIdQuery.getSql(), e); } } public void incrementHits(String directoryId) { /** * Get scalability datasource for directory - not partitioned */ String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource(), incrementHits() " + sourceName); } List result = null; Connection conn = null; try { conn = ds.getConnection(); dirHitsQuery.run(conn, directoryId); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException( "error occured while adding hits directory, directoryId = " + directoryId, e1); } throw new BaseDaoException("error occured while adding hits directory, directoryId = " + directoryId, e); } try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error occured while adding hits directory, directoryId = " + directoryId, e1); } } /** * getCobrand method - gets the cobranding information for this user and sets it in the directory * @throws BaseDaoException - when error occurs */ private void getCobrand(String directoryId, Directory directory) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId) || (directory == null)) { throw new BaseDaoException("params are null"); } Fqn fqn = cacheUtil.fqn(DbConstants.DIR_COBRAND); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { directory.setValue(DbConstants.COBRAND, "1"); directory.setValue(DbConstants.HEADER, ((Directory) obj).getValue(DbConstants.HEADER)); directory.setValue(DbConstants.FOOTER, ((Directory) obj).getValue(DbConstants.FOOTER)); return; } /** * Get scalability datasource for partitioned on directoryid */ String sourceName = scalabilityManager.getWriteScalability(directoryId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, Directory getCobrand() " + sourceName); } try { Object[] params = { (Object) directoryId }; List result = cobrandQuery.execute(params); if ((result != null) && (result.size() > 0)) { directory.setValue(DbConstants.COBRAND, "1"); directory.setValue(DbConstants.HEADER, ((Directory) result.get(0)).getValue(DbConstants.HEADER)); directory.setValue(DbConstants.FOOTER, ((Directory) result.get(0)).getValue(DbConstants.FOOTER)); treeCache.put(fqn, directoryId, result.get(0)); } } catch (Exception e) { throw new BaseDaoException("Directory, get cobrandQuery() " + cobrandQuery.getSql(), e); } } /** * Removes parent directories from cache, when a directory is pasted * @param dirPath - the directory path of child directory */ private void removeDirPathsFromCache(String dirPath) { if (!RegexStrUtil.isNull(dirPath)) { String[] parentVals = null; String[] parentList = WebUtil.getDirPathVals(dirPath); if (parentList != null) { for (int i = 0; i < parentList.length; i++) { if (parentList[i] == null) { continue; } parentVals = parentList[i].split("\\|"); if (parentVals != null) { if (parentVals.length > 1 && 
!RegexStrUtil.isNull(parentVals[1])) { Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, parentVals[1])) { treeCache.remove(fqn, parentVals[1]); } } } } } } } /** * This method searches in directories for all the strings in the searchText * @param searchText - search strings * @return HashSet - of results * @throws BaseDaoException If we have a problem interpreting the data or the data is missing * or incorrect */ public HashSet searchDirectory(int accessFlag, String searchText) { /** * Get scalability datasource for directory - not partitioned */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } Connection conn = null; HashSet result = null; try { conn = ds.getConnection(); result = dirSearchQuery.run(conn, searchText); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error in conn.close(), searchDirectory() ", e1); } throw new BaseDaoException("error in result, searchDirectory() ", e); } try { if (conn != null) { conn.close(); } } catch (Exception e) { throw new BaseDaoException("error in conn.close(), searchDirectory() ", e); } return result; } /** * This method searches in personal blogs for all the strings in the searchText * @param searchText - search strings * @return HashSet - of results * @throws BaseDaoException If we have a problem interpreting the data or the data is missing * or incorrect */ public HashSet searchPblogs(int accessFlag, String searchText) throws BaseDaoException { /** * Get scalability datasource for usertab - not partitioned */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } Connection conn = null; HashSet result = null; try { conn = ds.getConnection(); result = pblogSearchQuery.run(conn, searchText); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { } throw new BaseDaoException("error in result, searchPblogs() ", e); } try { if (conn != null) { conn.close(); } } catch (Exception e) { throw new BaseDaoException("error in conn.close(), searchPblogs() ", e); } return result; } /** * This method searches in users for all the strings in the searchText * @param searchText - search strings * @return HashSet - of results * @throws BaseDaoException If we have a problem interpreting the data or the data is missing * or incorrect */ public HashSet searchUsers(int accessFlag, String searchText) { /** * Get scalability datasource for usertab - not partitioned */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } Connection conn = null; HashSet result = null; try { conn = ds.getConnection(); result = userSearchQuery.run(conn, searchText); } catch (Exception e) { try { if (conn != null) { 
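                    // close the connection first; the original search failure is rethrown below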
conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error in conn.close(), searchUsers() ", e1); } throw new BaseDaoException("error in result, searchUsers() ", e); } try { if (conn != null) { conn.close(); } } catch (Exception e) { throw new BaseDaoException("error in conn.close(), searchUsers() ", e); } return result; } /** * This method searches in carryontag for all the strings in the searchText * @param searchText - search strings * @return HashSet - of results * @throws BaseDaoException If we have a problem interpreting the data or the data is missing * or incorrect */ public HashSet searchCarryon(int accessFlag, String searchText) { /** * Get scalability datasource for carryontag - not partitioned */ String sourceName = null; if (accessFlag == DbConstants.READ_FROM_MASTER) { sourceName = scalabilityManager.getWriteZeroScalability(); } else { sourceName = scalabilityManager.getReadZeroScalability(); } ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } Connection conn = null; HashSet result = null; try { conn = ds.getConnection(); result = carryonSearchQuery.run(conn, searchText); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error in conn.close(), searchCarryon() ", e1); } throw new BaseDaoException("error in result, searchCarryon()", e); } try { if (conn != null) { conn.close(); } } catch (Exception e) { throw new BaseDaoException("error in conn.close(), searchCarryon() ", e); } return result; } /** * getDirBlobsFromTags - gets the directory blobs based on tags * @param dirtags - directory with tags */ public HashSet getDirBlobsFromTags(HashSet dirtags) throws BaseDaoException { if (dirtags != null && dirtags.size() > 0) { Iterator it1 = dirtags.iterator(); while (it1.hasNext()) { Photo blob = (Photo) it1.next(); if (blob == null) continue; String entryid = blob.getValue(DbConstants.ENTRYID); blob.setObject(DbConstants.PHOTO, getPhoto(blob.getValue(DbConstants.ENTRYID), blob.getValue(DbConstants.DIRECTORY_ID))); } } return dirtags; } /** * setDirPhotos - sets the directory default photo for * a list of directories * @param topDirs - top 10 directories * @return List - top 10 directories with photos */ public List setDirPhotos(List topDirs) { if (topDirs != null && topDirs.size() > 0) { for (int i = 0; i < topDirs.size(); i++) { Directory topDir = (Directory) topDirs.get(i); if (topDir == null) continue; Directory dir = getDiaryDirectory(topDir.getValue(DbConstants.DIRECTORY_ID), null, null, DbConstants.READ_FROM_SLAVE, DbConstants.BLOB_READ_FROM_SLAVE, DbConstants.READ_FROM_SLAVE, null); if (dir == null) continue; topDir.setObject(DbConstants.DEFAULT_PHOTO, (Photo) dir.getObject(DbConstants.DEFAULT_PHOTO)); } } return topDirs; } private void updateDirImage(String entryId, String directoryId, String zoom, String btitle, String caption) { String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getSource() " + sourceName); } Connection conn = null; try { conn = ds.getConnection(); dirImageUpdateQuery.run(conn, entryId, directoryId, zoom, btitle, caption); } catch (Exception e) { try { if (conn != null) { conn.close(); } } catch (Exception e1) { throw new BaseDaoException("error conn.close(), updateDirImage() ", e1); } throw new BaseDaoException("error updateDirImage()", e); } try { if (conn != null) { 
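                // success path: close the connection, then evict the cached recent-images list below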
conn.close(); } } catch (Exception e) { throw new BaseDaoException("error conn.close(), updateDirImage() ", e); } Fqn fqn = cacheUtil.fqn(DbConstants.RECENT_DIR_IMAGES); if (treeCache.exists(fqn, DbConstants.RECENT_DIR_IMAGES)) { treeCache.remove(fqn, DbConstants.RECENT_DIR_IMAGES); } } /** * getRecentDirImages - get the recent directory images * @param accessFlag - accessFlag * @return List - list of recent directories that added images */ public List getRecentDirImages(int accessFlag) { /** Jboss methods * fqn - full qualified name * check if the userpage already set in the cache * If it exists, return the userpage from the cache. */ Fqn fqn = cacheUtil.fqn(DbConstants.RECENT_DIR_IMAGES); Object obj = treeCache.get(fqn, DbConstants.RECENT_DIR_IMAGES); if (obj != null) { return (List) obj; } /** * Get scalability datasource for directory - not partitioned * accessFlag - the access flag */ String queryName = scalabilityManager.getReadZeroScalability("dirrecentimagesQuery"); dirRecentImagesQuery = getQueryMapper().getQuery(queryName); if (dirRecentImagesQuery == null) { throw new BaseDaoException("dirRecentImagesQuery is null "); } /** * Get the directory based on directoryId */ try { List result = dirRecentImagesQuery.execute(); if (result != null && result.size() > 0) { treeCache.put(fqn, DbConstants.RECENT_DIR_IMAGES, result); } return result; } catch (Exception e) { throw new BaseDaoException("error dirRecentImagesQuery() , " + dirRecentImagesQuery.getSql(), e); } } /** * getRandDirImages - get random directory images * @param accessFlag - accessFlag * @return List - list of random dirimages */ public List getRandDirImages(int catVal, int directoryId, int accessFlag) { if ((catVal != DbConstants.FILE_CATEGORY_INT) && catVal != DbConstants.PHOTO_CATEGORY_INT) { throw new BaseDaoException("category is invalid"); } String queryName = scalabilityManager.getWriteZeroScalability("dirrandimagesQuery"); dirRandImagesQuery = getQueryMapper().getQuery(queryName); if (dirRandImagesQuery == null) { throw new BaseDaoException("dirRandImagesQuery is null "); } /** * Get the directory based on directoryId */ try { Object[] params = { (Object) catVal, (Object) directoryId }; List result = dirRandImagesQuery.execute(params); return result; } catch (Exception e) { throw new BaseDaoException("error dirRandImagesQuery() , " + dirRandImagesQuery.getSql(), e); } } /** * getRecentSelectDirImages - get exclusive recent directory images * @param excludeSelect - execluded from the select list * @param accessFlag - accessFlag * @return List - list of recent directories that have recent images */ public List getRecentSelectDirImages(String excludeSelect, int accessFlag) { /** Jboss methods * fqn - full qualified name * check if the userpage already set in the cache * If it exists, return the userpage from the cache. 
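         * Note: the result is cached under the single key DbConstants.RECENT_DIR_SELECT_IMAGES,
         * so excludeSelect does not vary the cached entry (the commented-out params
         * line below would be needed for that).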
        */
        Fqn fqn = cacheUtil.fqn(DbConstants.RECENT_DIR_SELECT_IMAGES);
        Object obj = treeCache.get(fqn, DbConstants.RECENT_DIR_SELECT_IMAGES);
        if (obj != null) {
            return (List) obj;
        }
        /**
         * Get scalability datasource for directory - not partitioned
         * accessFlag - the access flag
         */
        String queryName = scalabilityManager.getWriteZeroScalability("dirrecentselectimagesQuery");
        dirRecentSelectImagesQuery = getQueryMapper().getQuery(queryName);
        if (dirRecentSelectImagesQuery == null) {
            throw new BaseDaoException("dirRecentSelectImagesQuery is null ");
        }
        /**
         * Get recently selected directory images
         */
        try {
            //Object[] params = {excludeSelect};
            List result = dirRecentSelectImagesQuery.execute();
            if (result != null && result.size() > 0) {
                treeCache.put(fqn, DbConstants.RECENT_DIR_SELECT_IMAGES, result);
            }
            return result;
        } catch (Exception e) {
            throw new BaseDaoException("error dirRecentSelectImagesQuery() , " + dirRecentSelectImagesQuery.getSql(), e);
        }
    }

    /**
     * getSanBlobDir - get directory blob
     * @param directoryid - directory id
     * @param btitle - file name
     * @return byte[] - blob array
     * @throws BaseDaoException for errors
     */
    public byte[] getSanBlobDir(String directoryid, String btitle) {
        if (RegexStrUtil.isNull(directoryid) || RegexStrUtil.isNull(btitle)) {
            throw new BaseDaoException("params are null");
        }
        if (WebUtil.isSanEnabled()) {
            Directory dir = getDirectory(directoryid, null);
            if (dir == null) {
                throw new BaseDaoException("getSanBlobDir(), dir == null, directoryid = " + directoryid);
            }
            String filePath = RegexStrUtil.sanFilePath(dir.getValue(DbConstants.DIRPATH), dir.getValue(DbConstants.DIRNAME));
            FileSystemImpl fApi = new FileSystemImpl();
            if (fApi != null) {
                return fApi.readFile(filePath, SanConstants.sanPath, btitle);
            }
        }
        return null;
    }

    /**
     * getSanBlobsDir - get directory blobs
     * @param directoryid - directoryid
     * @param blobs - which need to be retrieved
     * @return List - list of blobs with data
     * @throws BaseDaoException for errors
     */
    public List getSanBlobsDir(String directoryid, List blobs) {
        if (RegexStrUtil.isNull(directoryid) || (blobs == null)) {
            throw new BaseDaoException("params are null");
        }
        if (WebUtil.isSanEnabled()) {
            Directory dir = getDirectory(directoryid, null);
            if (dir == null) {
                throw new BaseDaoException("dir is null, directoryid = " + directoryid);
            }
            String filePath = RegexStrUtil.sanFilePath(dir.getValue(DbConstants.DIRPATH), dir.getValue(DbConstants.DIRNAME));
            FileSystemImpl fApi = new FileSystemImpl();
            if (fApi != null) {
                for (int i = 0; i < blobs.size(); i++) {
                    if (((Photo) blobs.get(i)) != null) {
                        String btitle = ((Photo) blobs.get(i)).getValue(DbConstants.BTITLE);
                        if (!RegexStrUtil.isNull(btitle)) {
                            byte[] blob = fApi.readFile(filePath, SanConstants.sanPath, btitle);
                            if (blob != null && blob.length > 0) {
                                ((Photo) blobs.get(i)).setBlob(blob);
                            }
                        } //btitle
                    } // if
                } //for
            } else {
                throw new BaseDaoException("fApi is null, getSanBlobsDir(), directoryid = " + directoryid);
            }
        }
        // return the same list, now populated with blob data where available
        return blobs;
    }

    /**
     * addSanDir - add san directory
     * @param dirpath - directory path
     * @param dirname - directory name
     * @return none
     * @throws BaseDaoException for errors
     */
    private void addSanDir(String dirpath, String dirname) {
        if (WebUtil.isSanEnabled()) {
            getSanUtils();
            if (sanUtils != null) {
                try {
                    sanUtils.addSanDir(dirpath, SanConstants.sanPath, dirname);
                } catch (SanException e) {
                    throw new BaseDaoException("DirectoryDaoDb: addDirectory(), addSanDir() error" + e.getMessage(), e);
                }
            } else {
                throw new BaseDaoException("DirectoryDaoDb: addDirectory(), addSanDir()");
            }
        }
    }

    /** *
getDirHardQuota - gets the directory hard quota * @param dirpath - directory path * @param dirname - directory name * @param prefixPath - prefix path * @return long hardQuota * @throws BaseDaoException for errors */ public long getDirHardQuota(String dirPath, String dirName, String prefixPath) { long totalSpace = 0; long totalUsableSpace = 0; long freeSpace = 0; if (WebUtil.isSanEnabled()) { getSanUtils(); if (sanUtils != null) { try { totalUsableSpace = sanUtils.getUsableSpace(dirPath, prefixPath, dirName); /* long totalSpace = sanUtils.getTotalSpace(dirPath, prefixPath, dirName); logger.info("totalSpace = " + totalSpace); long freeSpace = sanUtils.getFreeSpace(dirPath, prefixPath, dirName); logger.info("freeSpace = " + freeSpace); */ } catch (SanException e) { throw new BaseDaoException("getHardQuota() error" + e.getMessage(), e); } } else { throw new BaseDaoException("sanUtils is null in getHardQuota()"); } } return totalUsableSpace; } /** * setQuotaSize - sets the user hard quota * @param memberId - members whose disk quota size is set * @param login - admin's login * @param quotaSize - quota size * @return none * @throws BaseDaoException for errors */ public void setQuotaSize(String memberId, String login, String quotaSize) { if (RegexStrUtil.isNull(memberId) || RegexStrUtil.isNull(login) || RegexStrUtil.isNull(quotaSize)) { throw new BaseDaoException("params are null"); } if (!isAdmin(login)) { throw new BaseDaoException("Not an admin, cannot set quota size for the users, " + login); } String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, setQuotaSize() " + sourceName + " memberId = " + memberId); } /** * Get scalability datasource - not partitioned * accessFlag - the access flag */ Connection conn = null; String queryName = scalabilityManager.getWriteZeroScalability("setquotaquery"); setQuotaQuery = getQueryMapper().getCommonQuery(queryName); if (setQuotaQuery == null) { throw new BaseDaoException("setQuotaQuery is null, login= " + login + " memberId = " + memberId); } queryName = scalabilityManager.getWriteZeroScalability("updatequotaquery"); updateQuotaQuery = getQueryMapper().getCommonQuery(queryName); if (updateQuotaQuery == null) { throw new BaseDaoException("updateQuotaQuery is null, login= " + login + " memberId = " + memberId); } /* queryName = scalabilityManager.getWriteZeroScalability("quotaexistsquery"); quotaExistsQuery = getQueryMapper().getQuery(queryName); List result = quotaExistsQuery.execute(); */ List result = getQuotaSize(memberId, DbConstants.READ_FROM_MASTER); try { conn = ds.getConnection(); conn.setAutoCommit(false); String params[] = { memberId, quotaSize }; if (result != null && result.size() > 0) { updateQuotaQuery.run(conn, params); } else { setQuotaQuery.run(conn, params); } } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException("conn.close() exception for rollback(), for setQuotaQuery() login = " + login + " memberId = " + memberId, e2); } throw new BaseDaoException( "rollback() exception, for SetQuotaQuery() login =" + login + " memberId = " + memberId, e1); } throw new BaseDaoException( "error in executing SetQuotaQuery, login=" + login + " memberId = " + memberId, e); } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for 
SetQuotaQuery/UpdateQuotaQuery", e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException("conn.close() exception for setAutoCommit() SetQuotaQuery/UpdateQuotaQuery", e4); } Fqn fqn = cacheUtil.fqn(DbConstants.QSIZE); if (treeCache.exists(fqn, memberId)) { treeCache.remove(fqn, memberId); } } /** * getQuotaSize - get quota size * @param loginId - loginId of the user whose quota size is required * @return String - quotaSize for this user */ public String getQuotaSize(String loginId) { List quotaList = getQuotaSize(loginId, DbConstants.READ_FROM_SLAVE); if (quotaList != null && quotaList.size() > 0) { if ((Hdlogin) quotaList.get(0) != null) { return (String) ((Hdlogin) quotaList.get(0)).getValue(DbConstants.QSIZE); } else { logger.info("quotasize does not exist for userid = " + loginId); return "0"; } } logger.info("return zero(0), quotasize does not exist for userid = " + loginId); return "0"; } /** * getQuotaSize - get quota size * @param loginId - loginId of the user whose quota size is required * @param accessFlag - accessFlag * @return List - quotaSize */ public List getQuotaSize(String loginId, int accessFlag) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } /** Jboss methods * fqn - full qualified name * If it exists, return the quotasize from the cache. */ Fqn fqn = cacheUtil.fqn(DbConstants.QSIZE); if (accessFlag == DbConstants.READ_FROM_SLAVE) { Object obj = treeCache.get(fqn, loginId); if (obj != null) { return (List) obj; } } /** * Get scalability datasource - not partitioned * accessFlag - the access flag */ String queryName = scalabilityManager.getWriteZeroScalability("showquotaquery"); showQuotaQuery = getQueryMapper().getQuery(queryName); if (showQuotaQuery == null) { throw new BaseDaoException("showQuotaQuery is null "); } /** * get quota size */ try { Object[] params = { loginId }; List result = showQuotaQuery.execute(params); if (result != null && result.size() > 0) { treeCache.put(fqn, loginId, result); } return result; } catch (Exception e) { throw new BaseDaoException( "error ShowQuotaQuery() , " + showQuotaQuery.getSql() + " loginId = " + loginId, e); } } /** * getGlobalQuotas - get quota size * @param loginId - loginId of the user whose quota size is required * @param accessFlag - accessFlag * @return List - quotaSize */ public List getGlobalQuotas(String login, int accessFlag) { if (RegexStrUtil.isNull(login)) { throw new BaseDaoException("params is null"); } if (!isAdmin(login)) { throw new BaseDaoException("not an admin, cannot access global quotas"); } /** Jboss methods * fqn - full qualified name * If it exists, return the quotasize from the cache. 
*/ Fqn fqn = cacheUtil.fqn(DbConstants.GLOBAL_QUOTAS); if (accessFlag == DbConstants.READ_FROM_SLAVE) { Object obj = treeCache.get(fqn, DbConstants.GLOBAL_QUOTAS); if (obj != null) { return (List) obj; } } /** * Get scalability datasource - not partitioned * accessFlag - the access flag */ String queryName = scalabilityManager.getWriteZeroScalability("showglobalquotaquery"); showGlobalQuotaQuery = getQueryMapper().getQuery(queryName); if (showGlobalQuotaQuery == null) { throw new BaseDaoException("showGlobalQuotaQuery is null"); } /** * get quota size */ try { List result = showGlobalQuotaQuery.execute(); if (result != null && result.size() > 0) { treeCache.put(fqn, DbConstants.GLOBAL_QUOTAS, result); } return result; } catch (Exception e) { throw new BaseDaoException( "error ShowGlobalQuotaQuery() , " + showGlobalQuotaQuery.getSql() + " login = " + login, e); } } /** * getGlobalMatch - gets areas/sections/divisions/groups matches and returns * the complete list * @param areas - area list * @param groups - groups list * @param sections - sections list * @param divisions - divisions list * @param gquotas - global quotas against which the other list are matched * @return List - quotaSize complete list of matches and not matched */ public List getGlobalMatch(List areas, List groups, List sections, List divisions, List gquotas, List organizations) { if (gquotas == null) { return null; } if (areas != null && areas.size() > 0) { gquotas = getMatches(gquotas, areas, LdapConstants.ldapArea); } if (groups != null && groups.size() > 0) { gquotas = getMatches(gquotas, groups, LdapConstants.ldapGroup); } if (sections != null && sections.size() > 0) { gquotas = getMatches(gquotas, sections, LdapConstants.ldapSection); } if (divisions != null && divisions.size() > 0) { gquotas = getMatches(gquotas, divisions, LdapConstants.ldapDivision); } if (organizations != null && organizations.size() > 0) { gquotas = getMatches(gquotas, organizations, LdapConstants.ldapOrganization); } return gquotas; } /** * getGlobalMatch - gets areas/sections/divisions/groups/organization * matches and returns * the complete list * @param list - list to match * @param gquotas - global quotas against which the other list are matched * @return List - quotaSize integrated with the list of matches * and not matched */ private List getMatches(List gquotas, List list, String type) throws BaseDaoException { if (gquotas == null || list == null) { return gquotas; } for (int i = 0; i < list.size(); i++) { if (list.get(i) == null || (String) list.get(i) == null) { continue; } String qname = (String) list.get(i); boolean matches = false; for (int j = 0; j < gquotas.size(); j++) { if (gquotas.get(j) == null || RegexStrUtil.isNull((String) ((Directory) gquotas.get(j)).getValue(DbConstants.QNAME))) { continue; } else { if (qname.equalsIgnoreCase((String) ((Directory) gquotas.get(j)).getValue(DbConstants.QNAME)) && (type.equalsIgnoreCase( (String) ((Directory) gquotas.get(j)).getValue(DbConstants.QTYPE)))) { matches = true; break; } } } if (!matches) { Directory dir = new Directory(); if (dir != null) { dir.setValue(DbConstants.QTYPE, type); dir.setValue(DbConstants.QNAME, qname); dir.setValue(DbConstants.QSIZE, ""); gquotas.add(dir); } else { throw new BaseDaoException("new Directory() is null"); } } } return gquotas; } /** * saveGlobalQuotas - updates/saves global quotas * @param login - admin's login * @param entryList - list of entryid that need to be update with quotas * @param entryListVals - list of vals that need to be update with 
quotas * @param qNameList - list of names for whose quota has to be updated * (these are new entries) * @param qNameListVals - list of vals for whose quota has to be updated * @param qtype - quota type (section/area/division/organization/group) * @return none * @throws BaseDaoException for errors */ public void saveGlobalQuotas(String login, List entryList, List entryListVals, List qNameList, List qNameListVals, List qTypeList) { if (RegexStrUtil.isNull(login)) { throw new BaseDaoException("params are null"); } if (!isAdmin(login)) { throw new BaseDaoException("Not an admin, cannot set quota size for the users, " + login); } String sourceName = scalabilityManager.getWriteZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, saveGlobalQuotas() " + sourceName + " login = " + login); } /** * Get scalability datasource - not partitioned * accessFlag - the access flag */ if (entryList != null && entryList.size() > 0 && entryListVals != null && entryListVals.size() > 0) { String queryName = scalabilityManager.getWriteZeroScalability("updateglobalquotaquery"); updateGlobalQuotaQuery = getQueryMapper().getCommonQuery(queryName); if (updateGlobalQuotaQuery == null) { throw new BaseDaoException("updateGlobalQuotaQuery is null, login= " + login); } } if ((qNameList != null && qNameList.size() > 0) && (qNameListVals != null && qNameListVals.size() > 0) && (qTypeList != null && qTypeList.size() > 0)) { String queryName = scalabilityManager.getWriteZeroScalability("addglobalquotaquery"); addGlobalQuotaQuery = getQueryMapper().getCommonQuery(queryName); if (addGlobalQuotaQuery == null) { throw new BaseDaoException("addGlobalQuotaQuery is null, login= " + login); } } /** * update global quotas for existing ones */ Connection conn = null; try { conn = ds.getConnection(); conn.setAutoCommit(false); if (updateGlobalQuotaQuery != null) { for (int i = 0; i < entryList.size(); i++) { if (RegexStrUtil.isNull((String) entryList.get(i))) { continue; } else { if (RegexStrUtil.isNull((String) entryListVals.get(i))) { entryListVals.set(i, (Object) "0"); } String params[] = { (String) entryList.get(i), (String) entryListVals.get(i) }; updateGlobalQuotaQuery.run(conn, params); } } } /** * add global quotas for new ones */ if (addGlobalQuotaQuery != null) { if (qTypeList.size() != qNameList.size()) { throw new BaseDaoException("qType missing for some quota name categories, qNameList.size() = " + qNameList.size() + " qTypeList.size() = " + qTypeList.size()); } if (qNameList.size() != qNameListVals.size()) { throw new BaseDaoException( "quotaValue missing for some quota name categories qNameList.size()= " + qNameList.size() + " qNameListVals.size() = " + qNameListVals.size()); } for (int i = 0; i < qNameList.size(); i++) { if (RegexStrUtil.isNull((String) qNameList.get(i))) { continue; } else { if (RegexStrUtil.isNull((String) qTypeList.get(i))) { throw new BaseDaoException("quota type is missing for quotName =" + qNameList.get(i)); } if (RegexStrUtil.isNull((String) qNameListVals.get(i))) { qNameListVals.set(i, (Object) "0"); } String params[] = { (String) qTypeList.get(i), (String) qNameList.get(i), (String) qNameListVals.get(i) }; addGlobalQuotaQuery.run(conn, params); } } } } catch (Exception e) { try { conn.rollback(); } catch (Exception e1) { try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e2) { throw new BaseDaoException( "conn.close() exception for rollback(), for updateGlobalQuotaQuery()/addGlobalQuotaQuery 
login = " + login, e2); } throw new BaseDaoException( "rollback() exception, for UpdateGlobalQuotaQuery()/addGlobalQuotaQuery login =" + login, e1); } throw new BaseDaoException( "error in executing UpdateGlobalQuotaQuery/AddGlobalQuotaQuery, login=" + login, e); } // connection commit try { conn.commit(); } catch (Exception e3) { throw new BaseDaoException("commit() exception, for AddGlobalQuotaQuery/UpdateGlobalQuotaQuery", e3); } try { if (conn != null) { conn.setAutoCommit(true); conn.close(); } } catch (Exception e4) { throw new BaseDaoException( "conn.close() exception for setAutoCommit() addGlobalQuotaQuery/UpdateGlobalQuotaQuery", e4); } } /** * getUserQuotas - get quota size in all qtypes for this user * @param loginId - loginId of the user whose quota size is required * @param qTypes - String[] of quota types for this user * @param qNames - list of quota names for this user * @param accessFlag - accessFlag * @ List - quotaSize */ public List getUserQuotas(String loginId, String[] qTypes, List qNames, int accessFlag) { if (RegexStrUtil.isNull(loginId) || qTypes == null || qNames == null) { throw new BaseDaoException("params is null"); } if (qNames.size() < 5 && qTypes.length < 5) { throw new BaseDaoException( "qNames and qTypes are less than 5" + qNames.size() + " qTypes = " + qTypes.length); } /** Jboss methods * fqn - full qualified name * If it exists, return the quotas from cache */ Fqn fqn = cacheUtil.fqn(DbConstants.USER_QUOTAS); if (accessFlag == DbConstants.READ_FROM_SLAVE) { Object obj = treeCache.get(fqn, loginId); if (obj != null) { return (List) obj; } } /** * Get scalability datasource - not partitioned * accessFlag - the access flag */ String queryName = scalabilityManager.getWriteZeroScalability("getuserquotaquery"); getUserQuotaQuery = getQueryMapper().getQuery(queryName); if (getUserQuotaQuery == null) { throw new BaseDaoException("getUserQuotaQuery is null"); } /** * get quota size, area/section/division/group/organization */ /* for (int i = 0; i < qNames.size(); i++) { logger.info("i = " + i + " qNames=" + qNames.get(i)); } for (int j = 0; j < qTypes.length; j++) { logger.info("j = " + j + " qTypes=" + qTypes[j]); } */ try { Object[] params = { (Object) qNames.get(0), (Object) qTypes[0], (Object) qNames.get(1), (Object) qTypes[1], (Object) qNames.get(2), (Object) qTypes[2], (Object) qNames.get(3), (Object) qTypes[3], (Object) qNames.get(4), (Object) qTypes[4] }; List result = getUserQuotaQuery.execute(params); //logger.info("getUserQuotaQuery " + result.toString()); if (result != null && result.size() > 0) { //logger.info("result is not null" + result.size() + result.toString()); treeCache.put(fqn, DbConstants.USER_QUOTAS, result); } return result; } catch (Exception e) { throw new BaseDaoException( "error getUserQuotaQuery() , " + getUserQuotaQuery.getSql() + " loginId = " + loginId, e); } } /** * getListOfSubDirs - gets the list of sub directories given parent directory * @param - directoryId of the parent * @List - list of sub directories or children of this directory * @throws exception */ public List getListOfSubDirs(String directoryId, int accessFlag) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null"); } /** Jboss methods * fqn - full qualified name * If it exists, return sub directories or subdirs of the dirtree */ Fqn fqn = cacheUtil.fqn(DbConstants.SUB_DIRS); if (accessFlag == DbConstants.READ_FROM_SLAVE) { Object obj = treeCache.get(fqn, directoryId); if (obj != null) { return (List) obj; } } 
/** * Get scalability datasource for dirtree, dirscope, directory - * not partitioned */ String queryName = scalabilityManager.getReadZeroScalability("getsubdirsquery"); getSubDirsQuery = getQueryMapper().getQuery(queryName); if (getSubDirsQuery == null) { throw new BaseDaoException("getSubDirsQuery is null"); } /** * get subdirectories that belong to this directory */ try { Object[] params = { (Object) directoryId }; List result = getSubDirsQuery.execute(params); if (result != null && result.size() > 0) { treeCache.put(fqn, directoryId, result); // cache under directoryId so the lookup above can find it } return result; } catch (BaseDaoException e) { throw new BaseDaoException("error in getSubDirsQuery()" + getSubDirsQuery.getSql(), e); } } /** * To see if the directory that is being moved is in the parent path * @param destDirName - directory name of the destination directory * @param destDirectoryId - directoryid of the destination directory * @param srcDirPath - existing dirpath * eg: directory E's path: A|1::B|2::C|2::D|4 * E is moved up to the level of B * So we need to check if B is part of the parent tree of E. * If it is, the replacePath looks different for a directory that is moving * up the parent tree. */ public boolean isDirInPath(String destDirName, String destDirectoryId, String srcDirPath) { if (RegexStrUtil.isNull(destDirName) || RegexStrUtil.isNull(destDirectoryId)) { throw new BaseDaoException("params are null, isDirInPath()"); } /** * path can be null for toplevel directories */ if (RegexStrUtil.isNull(srcDirPath)) { return false; } StringBuffer dirPattern = new StringBuffer(destDirName); dirPattern.append("|"); dirPattern.append(destDirectoryId); /** * this directoryid is not part of the parent tree */ if (srcDirPath.indexOf(dirPattern.toString()) == -1) { return false; } /** * this directoryid is part of the parent tree */ return true; } /** * Return the child's new dirpath. * @param oldPath - old parent path * @param newParentPath - new parent path * @param path - existing path that needs to be changed. * For example: * childPath = "A :: B :: C :: D"; * newParentPath = "A :: E :: F"; * return value = "A :: E :: F :: D"; */ public String replacePath(String newParentPath, String oldPath, String path) { if (RegexStrUtil.isNull(path)) { return null; } if (RegexStrUtil.isNull(oldPath) && RegexStrUtil.isNull(newParentPath)) { return path; } //logger.info("path = " + path + " oldPath=" + oldPath + " newParentPath = " + newParentPath); if (!RegexStrUtil.isNull(oldPath)) { oldPath = oldPath.replace(DbConstants.DIRPATH_PIPE, DbConstants.DIRPATH_PIPE_REGEXP); } if (!RegexStrUtil.isNull(newParentPath)) { newParentPath = newParentPath.replace(DbConstants.DIRPATH_PIPE, DbConstants.DIRPATH_PIPE_REGEXP); } path = path.replaceAll(oldPath, newParentPath); // fix the paths from [|] to | return path.replaceAll("\\[", "").replaceAll("\\]", ""); } /** * Get a clone using object serialization.
* @param did the directory to be moved * @param newPath new path (newparent directoryname) * @param oldPath old path (oldparent directoryname) * @param conn connection for updating dirpath queries * @return Object return the cloned object * @exception ObjException run time exception if stuff screws */ public Object updatePath(String childId, String newPath, String oldPath, Connection conn, boolean isMainDir) throws BaseDaoException { /** * childid is null indicates no children for this directory */ if (RegexStrUtil.isNull(childId)) { return null; } //logger.info("updatePath(), childId=" + childId + " newPath=" + newPath + " oldPath=" + oldPath + " isMainDir=" + isMainDir); try { Directory child = viewDirectory(childId); if (child == null) { throw new BaseDaoException("child directory is null" + childId); } if (!isMainDir) { String dirpath = replacePath(newPath, oldPath, child.getValue(DbConstants.DIRPATH)); //logger.info("new dirpath=" + dirpath); dirPasteQuery.run(conn, childId, dirpath); //logger.info("completed dirpastequery for childid = " + childId + " dirpath = " + dirpath); Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, childId)) { treeCache.remove(fqn, childId); } } List children = getListOfSubDirs(childId, DbConstants.READ_FROM_MASTER); if (children != null) { for (int i = 0; i < children.size(); i++) { Directory c = (Directory) children.get(i); // recursion if (c != null) { //logger.info("oldPath = " + oldPath + " newPath=" + newPath); //logger.info("updating children directoryid=" + c.getValue(DbConstants.DIRECTORY_ID)); updatePath(c.getValue(DbConstants.DIRECTORY_ID), newPath, oldPath, conn, false); } else { continue; } } } } catch (Exception e) { throw new BaseDaoException("Could not update dirpath, childId = " + childId, e); } return null; } /** * deleteNode deletes all nodes of the directory underneath it * @param dirId - (dirId) * @param conn - connection * @throws BaseDaoException when error occurs */ public void deleteNode(String dirId, Connection conn) throws BaseDaoException { if (RegexStrUtil.isNull(dirId) || conn == null) { throw new BaseDaoException("params are null"); } try { /** * no children, delete the directory */ String numChild = getChildCount(dirId, DbConstants.READ_FROM_MASTER); if (!RegexStrUtil.isNull(numChild)) { if (numChild.equals("0")) { deleteAllDirectory(dirId, conn); } else { List children = getListOfSubDirs(dirId, DbConstants.READ_FROM_MASTER); Iterator iter = children.iterator(); while (iter.hasNext()) { Directory child = (Directory) iter.next(); if (child == null) { continue; } String childId = child.getValue(DbConstants.DIRECTORY_ID); if (RegexStrUtil.isNull(childId)) { continue; } /** * calling myself, recursion, if it has children */ if (getChildCount(childId, DbConstants.READ_FROM_MASTER) != null) { deleteNode(childId, conn); // calling myself, recursion } else { deleteAllDirectory(childId, conn); } } // while deleteAllDirectory(dirId, conn); } // else // delete the parent as it does not have children anymore } else { // no children delete the directory deleteAllDirectory(dirId, conn); } } catch (Exception e) { throw new BaseDaoException("Could not delete children of dir" + dirId + e.getMessage(), e); } } /** * deleteAllDirectory - delete all directory * @param directoryId - directoryId * @param conn - conn */ public void deleteAllDirectory(String directoryId, Connection conn) { if (RegexStrUtil.isNull(directoryId) || (conn == null)) { throw new BaseDaoException("params are null"); } try { /** * This directory has 
no children directories * delete its files, collabrums, * tables to be cleaned up: * collabrum **************************************** * dircoll - collabrumid/directoryid (DirCollDeleteQuery) * directory can have multiple collabrums ****************************************/ //deleteCollabrums(directoryId, conn); /**************************************** * defdirblob - directoryid * dirblob - directoryid * dirblob_ind ? needs to be checked * dirblobtags - directoryid * dircobrand - directoryid * dirimages - directoryid * dirmove - directoryid * dirtree - directoryid * dirwebsites - directoryid ****************************************/ //logger.info("calling scopeDeleteQuery " + directoryId); scopeDeleteQuery.run(conn, directoryId); // dirscope //logger.info("calling deleteAdminQuery " + directoryId); deleteAdminQuery.run(conn, directoryId); // diradmins //logger.info("calling deleteDirCopyQuery " + directoryId); deleteDirCopyQuery.run(conn, directoryId); // dircopy //logger.info("calling deleteDirBlockAllQuery " + directoryId); deleteDirBlockAllQuery.run(conn, directoryId); // dirblock //logger.info("calling deleteDirAllowUsersAllQuery " + directoryId); deleteDirAllowUsersAllQuery.run(conn, directoryId); // dirallow //logger.info("calling deleteDefDirBlobAllQuery " + directoryId); deleteDefDirBlobAllQuery.run(conn, directoryId); // defdirblob //logger.info("calling deleteDirBlobAllQuery " + directoryId); deleteDirBlobAllQuery.run(conn, directoryId); // dirblob //logger.info("calling deleteDirBlobTagsAllQuery " + directoryId); deleteDirBlobTagsAllQuery.run(conn, directoryId); //dirblobtags //logger.info("calling deleteDirCobrandAllQuery " + directoryId); deleteDirCobrandAllQuery.run(conn, directoryId); // dircobrand //logger.info("calling deleteDirImagesAllQuery " + directoryId); deleteDirImagesAllQuery.run(conn, directoryId); //dirimages //logger.info("calling deleteDirMoveAllQuery " + directoryId); deleteDirMoveAllQuery.run(conn, directoryId); //dirmove //logger.info("calling deleteChildQuery " + directoryId); deleteChildQuery.run(conn, directoryId); //dirtree //logger.info("calling directoryDeleteQuery " + directoryId); directoryDeleteQuery.run(conn, directoryId); // directory //logger.info("deleteCollabrum calling, directoryId=" + directoryId); Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); if (treeCache.exists(fqn, directoryId)) { treeCache.remove(fqn, directoryId); } //logger.info("directoryDeleteQuery, directoryId " + directoryId); } catch (Exception e) { throw new BaseDaoException( "deleting directory information " + directoryId + " Error = " + e.getMessage(), e); } } /** * delete collabrums given a directoryId * @param dirId - directoryid * @param conn - connection * @throws BaseDaoException for errors */ public void deleteCollabrums(String dirId, Connection conn) throws BaseDaoException { if (RegexStrUtil.isNull(dirId) || conn == null) { throw new BaseDaoException("params are null"); } try { List children = getCollabrums(dirId); if (children == null) { return; } Iterator iter = children.iterator(); while (iter.hasNext()) { Collabrum child = (Collabrum) iter.next(); if (child == null) { continue; } String collabrumId = child.getValue(DbConstants.COLLABRUM_ID); if (RegexStrUtil.isNull(collabrumId)) { throw new BaseDaoException("collabrumId is null for directory collabrum " + dirId); } /* * get collabrumid from the above delete the following * defcollblob - collabrumid (done) CollabrumDefaultDeleteQuery * collabrum - collabrumid (done) CollabrumDeleteQuery * colladmin - collabrumid (done) 
CollabrumDeleteAdminQuery * collblob - collabrumid - (done) ColStreamBlobDeleteAllQuery * collblobtags - collabrumid DeleteColBlobTagsQuery * collblock - collabrumid CollBlockDeleteAllQuery * collcobrand - collabrumid (done) DeleteColCobrandPerCollabrumQuery * collmembers - collabrumid (done) CollAllMembersDelete ****************************************/ deleteDefQuery.run(conn, collabrumId); deleteQuery.run(conn, collabrumId); deleteCollAdminQuery.run(conn, collabrumId); deleteAllCollBlobsQuery.run(conn, collabrumId); deleteColBlobTagsQuery.run(conn, collabrumId); deleteCollBlockQuery.run(conn, collabrumId); deleteColCobrandQuery.run(conn, collabrumId); deleteAllMembersQuery.run(conn, collabrumId); /* * get tid from the collabrumid **************************************** * colltopicattr - tid (done) DeleteColTopicAttrPerCollabrumQuery * collmessages - tid (DeleteColMessagePerTidQuery) * collmsgattr - tid (DeleteColMessageAttrPerTidQuery) * colltopics - collabrumid/tid DeleteColTopicsPerCollabrumQuery ****************************************/ List tidList = getTidList(collabrumId); if (tidList != null) { // get tid list query for (int i = 0; i < tidList.size(); i++) { String tid = (String) ((ColTopic) tidList.get(i)).getValue(DbConstants.TID); if (RegexStrUtil.isNull(tid)) { continue; } deleteColTopicsAttrQuery.run(conn, tid); deleteColMessagesQuery.run(conn, tid); deleteColMsgAttrQuery.run(conn, tid); } } deleteColTopicsQuery.run(conn, collabrumId); } // while } catch (Exception e) { throw new BaseDaoException("error deleting collabrums, dirId " + dirId + " errormsg= " + e.getMessage(), e); } } /** * clones the node information recursively * @param cloneId - directoryid that needs to be cloned * @param parentId - parentId * @param userId - userId * @param userLogin - userLogin */ public void cloneNode(String cloneId, String parentId, String userId, String userLogin) { if (RegexStrUtil.isNull(cloneId) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(parentId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params are null"); } logger.info("cloneId =" + cloneId + " parentId = " + parentId + " userId =" + userId + " userLogin = " + userLogin); try { List children = getListOfSubDirs(cloneId, DbConstants.READ_FROM_MASTER); if (children != null) { for (int i = 0; i < children.size(); i++) { Directory c = (Directory) children.get(i); if (c != null) { logger.info("directoryid = " + i + "=" + c.getValue(DbConstants.DIRECTORY_ID)); String directoryId = c.getValue(DbConstants.DIRECTORY_ID); // dont copy the parent recursively if (directoryId.equals(parentId)) { continue; } else { /** get the child directory */ Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, directoryId); Directory dir = null; String dirname = null; String keywords = null; String scopeid = null; String operations = null; String desc = null; if (obj != null) { dir = (Directory) obj; dirname = dir.getValue(DbConstants.DIRNAME); keywords = dir.getValue(DbConstants.KEYWORDS); scopeid = dir.getValue(DbConstants.SCOPE_ID); operations = dir.getValue(DbConstants.OPERATIONS); desc = dir.getValue(DbConstants.DIRDESC); } else { DirScope dirscope = getDirectoryScope(directoryId); scopeid = dirscope.getValue(DbConstants.SCOPE_ID); operations = dirscope.getValue(DbConstants.OPERATIONS); try { Object[] params = { (Object) directoryId }; List result = directoryQuery.execute(params); if ((result != null) && (result.size() > 0)) { dir = (Directory) result.get(0); dirname = 
dir.getValue(DbConstants.DIRNAME); keywords = dir.getValue(DbConstants.KEYWORDS); desc = dir.getValue(DbConstants.DIRDESC); } else { throw new BaseDaoException("result is null in " + directoryQuery.getSql()); } } catch (BaseDaoException e) { throw new BaseDaoException("erorr in DirectoryQuery " + directoryQuery.getSql(), e); } } boolean addSanFlag = false; addDirectory(dirname, keywords, parentId, scopeid, desc, userId, userLogin, operations, addSanFlag); List files = getBlobsByCategory(directoryId, DbConstants.FILE_CATEGORY, DbConstants.READ_FROM_MASTER); /** * This method adds entries into blobs,tags, dirimages tables * add only empty tags for the time being * Future add list of websites, collabrums (not added websites, collabrums) */ String newDirId = getChildDirId(parentId, dirname); if (RegexStrUtil.isNull(newDirId)) { throw new BaseDaoException( "getChildDirId(), parentId=" + parentId + " dirname = " + dirname); } if (files != null && files.size() > 0) { addStreamBlobs(files, newDirId, userLogin, userId); } logger.info(" CloneNode is being invoked here with newDirId"); logger.info("newDirId = " + newDirId); logger.info("parentid = " + parentId); logger.info("dirname = " + dirname); cloneNode(c.getValue(DbConstants.DIRECTORY_ID), newDirId, userId, userLogin); } } else { continue; /* (c == null) */ } } // for } } catch (Exception e1) { throw new BaseDaoException("error in cloneNode(), getListOfSubDirs()" + e1.getMessage(), e1); } } /** * getChildDirID - get the directoryid of the child from parentid and dirname * @param - parentId of the parent * @param - dirname of the child * @return - directoryId of the child * @throws exception */ public String getChildDirId(String parentId, String dirname) throws BaseDaoException { if (RegexStrUtil.isNull(parentId) || RegexStrUtil.isNull(dirname)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for dirtree, dirscope, directory - * not partitioned */ String queryName = scalabilityManager.getReadZeroScalability("directorychildquery"); directoryChildQuery = getQueryMapper().getQuery(queryName); if (directoryChildQuery == null) { throw new BaseDaoException("directoryChildQuery is null"); } /** * get directoryid */ try { Object[] params = { (Object) parentId, (Object) dirname }; List result = directoryChildQuery.execute(params); if (result != null && result.size() > 0) { if (result.get(0) != null) { return ((Directory) result.get(0)).getValue(DbConstants.DIRECTORY_ID); } } } catch (BaseDaoException e) { throw new BaseDaoException("error in directoryChildQuery()" + directoryChildQuery.getSql(), e); } return null; } /* * Gets collabrums * This method is called deleteNode() * @param directoryId * @return List of collabrums */ private List getCollabrums(String directoryId) throws BaseDaoException { if (RegexStrUtil.isNull(directoryId)) { throw new BaseDaoException("params are null in getCollabrums()"); } /** Jboss methods * fqn - full qualified name */ Fqn fqn = cacheUtil.fqn(DbConstants.COLLABRUM_LIST); Object obj = treeCache.get(fqn, directoryId); if (obj != null) { return (List) obj; } /** * Did not find in cache, get it from DB * Get scalability datasource for dircoll, collabrum, colladmin - no partition */ String sourceName = scalabilityManager.getReadZeroScalability(); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getCollabrums() " + sourceName + " directoryId = " + directoryId); } try { Object[] params = { (Object) directoryId }; List dResult = 
collabrumsQuery.execute(params); if ((dResult != null) && (dResult.size() > 0)) { treeCache.put(fqn, directoryId, dResult); return dResult; } } catch (BaseDaoException e) { throw new BaseDaoException( "collabrumsQuery exception for directoryId = " + directoryId + collabrumsQuery.getSql(), e); } return null; } /** * This lists all tids belonging to a collabrum * @return List - the list of tids * @throws BaseDaoException If we have a problem interpreting the data */ private List getTidList(String collabrumId) throws BaseDaoException { if (RegexStrUtil.isNull(collabrumId)) { throw new BaseDaoException("params are null"); } /** * Get scalability ds, topics partitioned on collabrumid * colltopics */ String sourceName = scalabilityManager.getReadScalability(collabrumId); ds = scalabilityManager.getSource(sourceName); if (ds == null) { throw new BaseDaoException("ds null, getTidList(), " + sourceName + " collabrumId = " + collabrumId); } try { Object[] params = { (Object) collabrumId }; List tidList = getTidsQuery.execute(params); if (tidList == null) { throw new BaseDaoException("collabrum tids null getTidList(), collabrumId " + collabrumId); } return tidList; } catch (Exception e) { throw new BaseDaoException("error occurred in getTidList()", e); } } /** * paste the node's dirblobs information recursively * @param pasteId - directoryid whose blobs (and those of its subdirs) should * be copied. collabrums/websites are not yet supported. * @param parentId - parentId * @param userId - userId * @param userLogin - userLogin */ public void pasteNode(String pasteId, String parentId, String userId, String userLogin) { if (RegexStrUtil.isNull(pasteId) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) { throw new BaseDaoException("params are null"); } //logger.info("pasteNode() " + pasteId + " parentId = " + parentId); try { List children = getListOfSubDirs(pasteId, DbConstants.READ_FROM_MASTER); if (children != null) { for (int i = 0; i < children.size(); i++) { Directory c = (Directory) children.get(i); if (c == null) { continue; } //logger.info("directoryid = " + i + "=" + c.getValue(DbConstants.DIRECTORY_ID)); String directoryId = c.getValue(DbConstants.DIRECTORY_ID); Fqn fqn = cacheUtil.fqn(DbConstants.DIRECTORY); Object obj = treeCache.get(fqn, directoryId); Directory dir = null; String dirname = null; if (obj != null) { dir = (Directory) obj; dirname = dir.getValue(DbConstants.DIRNAME); } else { try { Object[] params = { (Object) directoryId }; List result = directoryQuery.execute(params); if ((result != null) && (result.size() > 0)) { dir = (Directory) result.get(0); if (dir != null) { dirname = dir.getValue(DbConstants.DIRNAME); } } else { throw new BaseDaoException("dir is null in " + directoryQuery.getSql() + " directoryId=" + directoryId); } } catch (BaseDaoException e) { throw new BaseDaoException("error in directoryQuery " + directoryQuery.getSql() + " Error Msg=" + e.getMessage(), e); } } boolean addSanFlag = false; List files = getBlobsByCategory(directoryId, DbConstants.FILE_CATEGORY, DbConstants.READ_FROM_MASTER); /** * This method adds entries into blobs,tags, dirimages tables * add only empty tags for the time being * Future add list of websites, collabrums * (not added websites, collabrums) */ if (files != null && files.size() > 0) { addStreamBlobs(files, directoryId, userLogin, userId); } //logger.info("directoryid = " + directoryId); //logger.info("parentid = " + parentId); pasteNode(c.getValue(DbConstants.DIRECTORY_ID), getChildDirId(parentId, dirname), userId, userLogin); 
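                    // The call above recurses into this child's subtree, using the
                    // directory with the same name under parentId (looked up via
                    // getChildDirId) as the new parent for the next level of the paste.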
} } else { /** * no children, then just add the blobstreams for the pasteId */ //logger.info("no children add the blobstream"); boolean addSanFlag = false; List files = getBlobsByCategory(pasteId, DbConstants.FILE_CATEGORY, DbConstants.READ_FROM_MASTER); if (files != null && files.size() > 0) { //logger.info("files.size()=" + files.size() + " pasteId = " + pasteId); addStreamBlobs(files, pasteId, userLogin, userId); } } } catch (Exception e1) { throw new BaseDaoException( "error in pasteNode()/getBlobsByCategory()/addStreamBlobs(), Error Msg=" + e1.getMessage(), e1); } } /** * getListOfDirsAsAuthor - get list of directories * for this user where this user is the author * @param login - login * @param loginId - loginId of the user * @return List - directories list */ public List getListOfDirsAsAuthor(String login, String loginId) { if (RegexStrUtil.isNull(login) || RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for dirtree, dirscope, directory - * not partitioned */ String queryName = scalabilityManager.getReadZeroScalability("listdiradminforauthorquery"); listDirAdminForAuthorQuery = getQueryMapper().getQuery(queryName); if (listDirAdminForAuthorQuery == null) { throw new BaseDaoException("listDirAdminForAuthorQuery is null"); } /** * get directory */ try { Object[] params = { (Object) loginId, (Object) loginId }; List result = listDirAdminForAuthorQuery.execute(params); if (result != null && result.size() > 0) { return result; } else { return null; } } catch (BaseDaoException e) { throw new BaseDaoException("error in getListOfDirsAsAuthor() " + listDirAdminForAuthorQuery.getSql(), e); } } /** * getListOfDirsAsUser - get list of directories where this user is allowed to access * directories of others as diruser * @param loginId - loginId of the user * @return List - directories list */ public List getListOfDirsAsUser(String loginId) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource for dirtree, dirscope, directory - * not partitioned */ String queryName = scalabilityManager.getReadZeroScalability("listdirallowusersquery"); listDirAllowUsersQuery = getQueryMapper().getQuery(queryName); if (listDirAllowUsersQuery == null) { throw new BaseDaoException("listDirAllowUsersQuery is null"); } /** * get directories for this user */ try { Object[] params = { (Object) loginId }; List result = listDirAllowUsersQuery.execute(params); if (result != null && result.size() > 0) { return result; } else { return null; } } catch (BaseDaoException e) { throw new BaseDaoException("error in getListOfDirsAsUser() " + listDirAllowUsersQuery.getSql(), e); } } /** * getDirUsedQuota - get directory used quota * @param loginId - loginId of the user * @return String - total file size for this user */ public String getDirUsedQuota(String loginId) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params are null"); } /** * Get scalability datasource partitioned */ String queryName = scalabilityManager.getReadBlobScalability(loginId, "dirblobsizequery"); dirBlobSizeQuery = getQueryMapper().getQuery(queryName); if (dirBlobSizeQuery == null) { throw new BaseDaoException("dirBlobSizeQuery is null"); } /** * get totalsize for this user */ try { Object[] params = { (Object) loginId }; List result = dirBlobSizeQuery.execute(params); if (result != null && result.size() > 0) { logger.info("bsize= " + ((Directory) 
result.get(0)).getValue(DbConstants.BSIZE)); return ((Directory) result.get(0)).getValue(DbConstants.BSIZE); } else { return null; } } catch (BaseDaoException e) { throw new BaseDaoException("error in getDirUsedQuota()" + dirBlobSizeQuery.getSql(), e); } } /** * getListOfSharedDirsAsAuthor - get list of shared directories * for this user where this user is the author * @param loginId - loginId of the user * @param accessFlag - accessFlag * @ List - shared directories list */ /* public List getListOfSharedDirsAsAuthor(String loginId) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params is null"); } /** * getListOfSharedDirsAsAuthor - get list of shared directories * for this user where this user is the author * @param loginId - loginId of the user * @param accessFlag - accessFlag * @ List - shared directories list */ /* public List getListOfSharedDirsAsAuthor(String loginId) { if (RegexStrUtil.isNull(loginId)) { throw new BaseDaoException("params is null"); } String queryName = scalabilityManager.getWriteZeroScalability("dirauthorsforothersdirsquery"); dirAuthorsForOthersDirsQuery = getQueryMapper().getQuery(queryName); if (dirAuthorsForOthersDirsQuery == null) { throw new BaseDaoException("dirAuthorsForOthersDirsQuery is null"); } try { Object[] params = {(Object)loginid}; List result = dirAuthorsForOthersDirsQuery.execute(params); if (result != null && result.size() > 0) { logger.info("dirAuthorsForOthersDirsQuery " + " size = " + result.size() + " result = " + result.toString()); } return result; } catch (Exception e) { throw new BaseDaoException("error dirAuthorsForOthersDirsQuery() , " + dirAuthorsForOthersDirsQuery.getSql() + " loginId = " + loginId, e); } } */ /** * isLdapListVisible - returns 0 if the ldap list should not be shown * otherwise return 1 based on the scopeid * @param directory - directory * @return String - "1" - display ldap list, "0" - donot display ldap list * */ public String isLdapListVisible(Directory directory) throws BaseDaoException { if (WebUtil.isLdapActive()) { if (directory == null) { throw new BaseDaoException("directory is null"); } String scopeid = directory.getValue(DbConstants.SCOPE_ID); if (RegexStrUtil.isNull(scopeid)) { throw new BaseDaoException("scopeid is null"); } /* donot show the ldap list as the authors are already added */ if (scopeid.equals(dirScope.getUsergroupspecificscopeid())) { return "0"; } else { /* show the ldap list as the authors have to be added */ return "1"; } } return null; } public void setdirhitsQuery(DirHitsQuery daq) { this.dirHitsQuery = daq; } public void setdirscopeconstants(DirectoryScopeConstants dirscope) { this.dirScope = dirscope; } public void setdirectoryaddQuery(DirectoryAddQuery daq) { this.directoryAddQuery = daq; } public void setdirectorydeleteQuery(DirectoryDeleteQuery daq) { this.directoryDeleteQuery = daq; } public void setdirectoryupdateQuery(DirectoryUpdateQuery daq) { this.directoryupdateQuery = daq; } public void setdirectoryrenameQuery(DirectoryRenameQuery daq) { this.directoryrenameQuery = daq; } public void setdirectoryQuery(DirectoryQuery daq) { this.directoryQuery = daq; } public void setdirectoryAllQuery(DirectoryAllQuery daq) { this.directoryallQuery = daq; } public void setdirectoryparentQuery(DirectoryParentQuery daq) { this.directoryParentQuery = daq; } // scopes directory public void setdirectoryscopeupdateQuery(DirectoryScopeUpdateQuery daq) { this.scopeupdateQuery = daq; } public void setdirectoryscopeaddQuery(DirectoryScopeAddQuery daq) { this.scopeAddQuery = daq; } 
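    /*
     * Illustrative sketch only (not part of the original source): these lower-case
     * setters are written to be populated by the Spring container. A hypothetical
     * XML wiring entry for a couple of them might look like the following; the bean
     * ids shown here are assumptions for illustration.
     *
     *   <bean id="directoryDaoDb" class="dao.DirectoryDaoDb">
     *       <property name="directoryscopeaddQuery" ref="directoryScopeAddQuery"/>
     *       <property name="directoryscopeQuery" ref="directoryScopeQuery"/>
     *   </bean>
     */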
public void setdirectoryscopeQuery(DirectoryScopeQuery daq) { this.scopeQuery = daq; } public void setdirectoryscopedeleteQuery(DirectoryScopeDeleteQuery daq) { this.scopeDeleteQuery = daq; } // directory websites public void setdirectorywebsiteQuery(DirectoryWebsiteQuery daq) { this.websiteQuery = daq; } // streamblobs /* public void setstreamblobQuery(DirectoryStreamBlobQuery daq) { this.streamblobQuery = daq; } */ public void setonestreamblobQuery(DirectoryOneStreamBlobQuery daq) { this.onestreamblobQuery = daq; } // collabrums public void setcollabrumsQuery(DirectoryCollabrumQuery daq) { this.collabrumsQuery = daq; } // collabrum adminquery /* public void setcollabrumadminexistsQuery(CollabrumAdminExistsQuery daq) { this.colladminexistsQuery = daq; } */ public void setcollabrumorganizersQuery(CollabrumOrganizersQuery daq) { this.getOrganizersQuery = daq; } // collabrum topic public void setcolltopicexistsQuery(ColTopicExistsQuery daq) { this.topicExistsQuery = daq; } // directory ismovable // directory adminquery /* public void setdiradminexistsQuery(DirAdminExistsQuery daq ) { this.diradminexistsQuery = daq; } */ public void setdiraddadminQuery(DirAdminAddQuery daq) { this.addAdminQuery = daq; } public void setdirdeleteadminQuery(DirAdminDeleteQuery daq) { this.deleteAdminQuery = daq; } // directory addchild public void setaddchildQuery(DirAddChildQuery daq) { this.addChildQuery = daq; } public void setdeletechildQuery(DirDeleteChildQuery daq) { this.deleteChildQuery = daq; } /* public void setdirectorychildrenexistQuery(DirectoryChildrenExistQuery daq) { this.directorychildrenexistQuery = daq; } */ public void setdirchildrenQuery(DirectoryChildrenQuery daq) { this.dirchildrenQuery = daq; } public void setdirectorynameQuery(DirectoryNameQuery daq) { this.dirnameQuery = daq; } public void setdirectoryblobupdateQuery(DirectoryBlobUpdateQuery daq) { this.blobUpdateQuery = daq; } public void setdircanmoveQuery(DirMoveQuery daq) { this.dircanmoveQuery = daq; } public void setdirmoveaddQuery(DirMoveAddQuery daq) { this.dirMoveAddQuery = daq; } public void setdirmovedeleteQuery(DirMoveDeleteQuery daq) { this.dirMoveDeleteQuery = daq; } public void setdirmoveupdateQuery(DirMoveUpdateQuery daq) { this.dirMoveUpdateQuery = daq; } public void setdirupdatechildQuery(DirUpdateChildQuery daq) { this.dirChildUpdateQuery = daq; } public void setdirupdatepathQuery(DirPasteQuery daq) { this.dirPasteQuery = daq; } public void setdircancopyQuery(DirCopyQuery daq) { this.dircancopyQuery = daq; } public void setdircopyaddQuery(DirCopyAddQuery daq) { this.dirCopyAddQuery = daq; } public void setdircopyupdateQuery(DirCopyUpdateQuery daq) { this.dirCopyUpdateQuery = daq; } public void setdircopydeleteQuery(DirCopyDeleteQuery daq) { this.dirCopyDeleteQuery = daq; } public void setdircobrandQuery(DirectoryCobrandQuery daq) { this.cobrandQuery = daq; } public void setdirlistauthorQuery(DirectoryListAuthorQuery daq) { this.listAuthorQuery = daq; } public void setdirectoryblockQuery(DirectoryBlockQuery daq) { this.blockDirectoryQuery = daq; } public void setcollmemberblockQuery(CollMemberBlockQuery daq) { this.listBlockedCollabrumsQuery = daq; } public void setdirectoryexistsQuery(DirectoryExistsQuery daq) { this.directoryExistsQuery = daq; } public void setdirectoryidQuery(DirectoryIdQuery daq) { this.directoryIdQuery = daq; } public void setdirectorytopQuery(DirectoryTopQuery daq) { this.directoryTopQuery = daq; } public void setdirmoveexistsQuery(DirMoveExistsQuery daq) { this.dirMoveExistsQuery = daq; } public 
void setdirectorysubdirexistsQuery(DirectorySubDirExistsQuery daq) { this.directorySubDirExistsQuery = daq; } public void setdirectorystreamdataQuery(DirectoryStreamDataQuery daq) { this.streamDataQuery = daq; } public void setdeletedefaultdirectoryQuery(DefaultDirectoryBlobDeleteQuery daq) { this.deleteDefaultQuery = daq; } public void setadddefaultdirectoryQuery(DefaultDirectoryBlobAddQuery daq) { this.addDefaultQuery = daq; } public void setblobdirectoryQuery(DirectoryBlobQuery daq) { this.blobQuery = daq; } public void setdirectoryphotosQuery(DirectoryPhotosQuery daq) { this.photosQuery = daq; } public void setdefaultdirblobQuery(DefaultDirectoryBlobQuery daq) { this.defaultDirBlobQuery = daq; } public void setdefaultdirQuery(DefaultDirectoryQuery daq) { this.defaultQuery = daq; } public void setdeletedircopyQuery(DeleteDirCopyQuery daq) { this.deleteDirCopyQuery = daq; } public void setdirsearchQuery(DirSearchQuery daq) { this.dirSearchQuery = daq; } public void setusersearchQuery(UserSearchQuery daq) { this.userSearchQuery = daq; } public void setpblogsearchQuery(PblogSearchQuery daq) { this.pblogSearchQuery = daq; } public void setcarryonsearchQuery(CarryonSearchQuery daq) { this.carryonSearchQuery = daq; } public void setdirimageupdateQuery(DirImageUpdateQuery daq) { this.dirImageUpdateQuery = daq; } public void setdeleteblockalldirQuery(DirectoryBlockDeleteAllQuery daq) { this.deleteDirBlockAllQuery = daq; } public void setdeleteallowalldirQuery(DirectoryAllowDeleteAllQuery daq) { this.deleteDirAllowUsersAllQuery = daq; } /** delete collabrums **/ public void setcolldefaultdeleteQuery(CollabrumDefaultDeleteQuery daq) { this.deleteDefQuery = daq; } public void setcollabrumdeleteQuery(CollabrumDeleteQuery daq) { this.deleteQuery = daq; } public void setcolstreamblobdeleteallQuery(ColStreamBlobDeleteAllQuery daq) { this.deleteAllCollBlobsQuery = daq; } public void setcollblobdeletetagsQuery(CollBlobTagsDeleteQuery daq) { this.deleteTagsQuery = daq; } public void setdeletecolblobtagsQuery(DeleteColBlobTagsQuery daq) { this.deleteColBlobTagsQuery = daq; } public void setcollabrumdeleteadminQuery(CollabrumDeleteAdminQuery daq) { this.deleteCollAdminQuery = daq; } public void setcollblockdeleteallQuery(CollBlockDeleteAllQuery daq) { this.deleteCollBlockQuery = daq; } public void setdelcolcobrandpercollabrumQuery(DeleteColCobrandPerCollabrumQuery daq) { this.deleteColCobrandQuery = daq; } public void setdeletecoltopicsQuery(DeleteColTopicsPerCollabrumQuery daq) { this.deleteColTopicsQuery = daq; } public void setdelcoltopicattrpercollabrumQuery(DeleteColTopicAttrPerCollabrumQuery daq) { this.deleteColTopicsAttrQuery = daq; } public void setdeletecolmsgattrpertidQuery(DeleteColMessageAttrPerTidQuery daq) { this.deleteColMsgAttrQuery = daq; } public void setdeletecolmessagepertidQuery(DeleteColMessagePerTidQuery daq) { this.deleteColMessagesQuery = daq; } public void setdirdefdirblobdeleteallQuery(DirDefDirBlobDeleteAllQuery daq) { this.deleteDefDirBlobAllQuery = daq; } public void setdeletedirbloballQuery(DirBlobDeleteAllQuery daq) { this.deleteDirBlobAllQuery = daq; } public void setdirblobtagsdeleteallQuery(DirBlobTagsDeleteAllQuery daq) { this.deleteDirBlobTagsAllQuery = daq; } public void setdircobranddeleteallQuery(DirCobrandDeleteAllQuery daq) { this.deleteDirCobrandAllQuery = daq; } public void setdirimagesdeleteallQuery(DirImagesDeleteAllQuery daq) { this.deleteDirImagesAllQuery = daq; } public void setdirmovedeleteallQuery(DirMoveDeleteAllQuery daq) { this.deleteDirMoveAllQuery = 
daq; } public void setlistdiradminforauthorquery(ListDirAdminForAuthorQuery daq) { this.listDirAdminForAuthorQuery = daq; } public void setlistdirallowusersquery(ListDirAllowUsersQuery daq) { this.listDirAllowUsersQuery = daq; } public void setdirblobsizequery(DirBlobSizeQuery daq) { this.dirBlobSizeQuery = daq; } public void setdirchildrenalphabetquery(DirectoryChildrenAlphabetQuery daq) { this.dirChildrenAlphabetQuery = daq; } }
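/*
 * Usage sketch (hypothetical, not part of the original source). Once the DAO has
 * been wired, a caller might combine the quota and path helpers roughly as below;
 * the variable names and input lists are assumptions for illustration.
 *
 *   List gquotas = ...;   // result of the global quota lookup
 *   List merged = directoryDao.getGlobalMatch(areas, groups, sections,
 *                                             divisions, gquotas, organizations);
 *   // merged contains the original quota rows plus a new Directory (empty QSIZE)
 *   // for each area/group/section/division/organization name with no quota row.
 *
 *   // replacePath(), following the example in its own javadoc and assuming the
 *   // old parent path is "A :: B :: C":
 *   //   replacePath("A :: E :: F", "A :: B :: C", "A :: B :: C :: D")
 *   //   is expected to return "A :: E :: F :: D".
 */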