List of usage examples for java.sql PreparedStatement setBoolean

void setBoolean(int parameterIndex, boolean x) throws SQLException;

Sets the designated parameter to the given Java boolean value.
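Before the project examples below, here is a minimal self-contained sketch of the basic call. The table and column names (users, active, id) are hypothetical and only show where setBoolean fits; parameter indexes are 1-based.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class SetBooleanBasics {
    public void updateActiveFlag(Connection connection, long userId, boolean active) throws SQLException {
        String sql = "UPDATE users SET active = ? WHERE id = ?";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setBoolean(1, active); // binds the boolean to the first placeholder
            ps.setLong(2, userId);
            ps.executeUpdate();
        }
    }
}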
From source file: com.mirth.connect.donkey.server.data.jdbc.JdbcDao.java
@Override
public void insertMetaData(ConnectorMessage connectorMessage, List<MetaDataColumn> metaDataColumns) {
    logger.debug(connectorMessage.getChannelId() + "/" + connectorMessage.getMessageId() + "/"
            + connectorMessage.getMetaDataId() + ": inserting custom meta data");

    PreparedStatement statement = null;

    try {
        List<String> metaDataColumnNames = new ArrayList<String>();
        Map<String, Object> metaDataMap = connectorMessage.getMetaDataMap();

        for (MetaDataColumn metaDataColumn : metaDataColumns) {
            Object value = metaDataMap.get(metaDataColumn.getName());
            if (value != null) {
                metaDataColumnNames.add(metaDataColumn.getName());
            }
        }

        // Don't do anything if all values were null
        if (!metaDataColumnNames.isEmpty()) {
            Map<String, Object> values = new HashMap<String, Object>();
            values.put("localChannelId", getLocalChannelId(connectorMessage.getChannelId()));
            values.put("metaDataColumnNames", quoteChar + StringUtils.join(metaDataColumnNames, quoteChar + "," + quoteChar) + quoteChar);
            values.put("metaDataColumnPlaceholders", "?" + StringUtils.repeat(", ?", metaDataColumnNames.size() - 1));

            statement = connection.prepareStatement(querySource.getQuery("insertMetaData", values));
            statement.setInt(1, connectorMessage.getMetaDataId());
            statement.setLong(2, connectorMessage.getMessageId());

            int n = 3;

            for (MetaDataColumn metaDataColumn : metaDataColumns) {
                Object value = metaDataMap.get(metaDataColumn.getName());
                if (value != null) {
                    // @formatter:off
                    switch (metaDataColumn.getType()) {
                        case STRING: statement.setString(n, (String) value); break;
                        case NUMBER: statement.setBigDecimal(n, (BigDecimal) value); break;
                        case BOOLEAN: statement.setBoolean(n, (Boolean) value); break;
                        case TIMESTAMP: statement.setTimestamp(n, new Timestamp(((Calendar) value).getTimeInMillis())); break;
                    }
                    // @formatter:on

                    n++;
                }
            }

            statement.executeUpdate();
        }
    } catch (Exception e) {
        throw new DonkeyDaoException("Failed to insert connector message meta data", e);
    } finally {
        close(statement);
    }
}
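The Mirth example above sidesteps null values by only binding metadata columns whose value is non-null. When the column is always part of the statement, a nullable Boolean is usually bound with setNull instead, because setBoolean takes a primitive. A minimal sketch, assuming a hypothetical channel_flags table with a nullable BOOLEAN column named enabled:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

public class NullableBooleanBinder {
    public void insertFlag(Connection connection, long channelId, Boolean enabled) throws SQLException {
        String sql = "INSERT INTO channel_flags (channel_id, enabled) VALUES (?, ?)";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setLong(1, channelId);
            if (enabled != null) {
                ps.setBoolean(2, enabled);    // auto-unboxes the non-null Boolean
            } else {
                ps.setNull(2, Types.BOOLEAN); // explicit SQL NULL for a missing value
            }
            ps.executeUpdate();
        }
    }
}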
From source file: org.exoplatform.social.core.mysql.storage.ActivityMysqlStorageImpl.java
@Override public void saveComment(ExoSocialActivity activity, ExoSocialActivity comment) throws ActivityStorageException { LOG.debug("begin to create comment"); // insert to mysql comment table Connection dbConnection = null; PreparedStatement preparedStatement = null; StringBuilder insertTableSQL = new StringBuilder(); insertTableSQL.append("INSERT INTO comment").append( "(_id, activityId, title, titleId, body, bodyId, postedTime, lastUpdated, posterId, mentioners, ") .append("hidable, lockable, templateParams)").append("VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)"); StringBuilder updateActivitySQL = new StringBuilder(); updateActivitySQL.append(/*from ww w . j ava 2s . c o m*/ "update activity set lastUpdated = ?, mentioners = ?, commenters = ?, commentIds = ? where _id = ?"); long currentMillis = System.currentTimeMillis(); long commentMillis = (comment.getPostedTime() != null ? comment.getPostedTime() : currentMillis); List<String> mentioners = new ArrayList<String>(); activity.setMentionedIds(processMentions(activity.getMentionedIds(), comment.getTitle(), mentioners, true)); List<String> commenters = new ArrayList<String>(Arrays.asList(activity.getCommentedIds())); if (comment.getUserId() != null && !commenters.contains(comment.getUserId())) { commenters.add(comment.getUserId()); } activity.setCommentedIds(commenters.toArray(new String[0])); comment.setMentionedIds(processMentions(comment.getTitle())); try { dbConnection = dbConnect.getDBConnection(); // insert comment preparedStatement = dbConnection.prepareStatement(insertTableSQL.toString()); comment.setId(UUID.randomUUID().toString()); preparedStatement.setString(1, comment.getId()); preparedStatement.setString(2, activity.getId()); preparedStatement.setString(3, comment.getTitle()); preparedStatement.setString(4, comment.getTitleId()); preparedStatement.setString(5, comment.getBody()); preparedStatement.setString(6, comment.getBodyId()); preparedStatement.setLong(7, commentMillis); preparedStatement.setLong(8, commentMillis); preparedStatement.setString(9, comment.getUserId()); preparedStatement.setString(10, StringUtils.join(comment.getMentionedIds(), ",")); preparedStatement.setBoolean(11, activity.isHidden()); preparedStatement.setBoolean(12, activity.isLocked()); // if (comment.getTemplateParams() != null) { try { ByteArrayOutputStream b = new ByteArrayOutputStream(); ObjectOutputStream output = new ObjectOutputStream(b); output.writeObject(comment.getTemplateParams()); preparedStatement.setBinaryStream(13, new ByteArrayInputStream(b.toByteArray())); } catch (IOException e) { LOG.debug("Failed to save templateParams of activity into database"); } } else { preparedStatement.setNull(13, Types.BLOB); } preparedStatement.executeUpdate(); LOG.debug("new comment created"); // List<String> commentIds = new ArrayList(Arrays.asList(activity.getReplyToId())); commentIds.add(comment.getId()); // update activity preparedStatement = dbConnection.prepareStatement(updateActivitySQL.toString()); preparedStatement.setLong(1, commentMillis); preparedStatement.setString(2, StringUtils.join(activity.getMentionedIds(), ",")); preparedStatement.setString(3, StringUtils.join(activity.getCommentedIds(), ",")); preparedStatement.setString(4, StringUtils.join(commentIds, ",")); preparedStatement.setString(5, activity.getId()); preparedStatement.executeUpdate(); LOG.debug("activity updated"); activity.setReplyToId(commentIds.toArray(new String[commentIds.size()])); activity.setUpdated(currentMillis); } catch (SQLException e) { LOG.error("error in comment 
creation:", e.getMessage()); } finally { try { if (preparedStatement != null) { preparedStatement.close(); } if (dbConnection != null) { dbConnection.close(); } } catch (SQLException e) { LOG.error("Cannot close statement or connection:", e.getMessage()); } } comment.setUpdated(commentMillis); Identity poster = new Identity(activity.getPosterId()); poster.setRemoteId(activity.getStreamOwner()); commenter(poster, activity, comment); updateMentioner(poster, activity, comment); }
From source file: com.flexive.core.storage.genericSQL.GenericHierarchicalStorage.java
/**
 * Insert property detail data into the database
 *
 * @param prop                      the property
 * @param allData                   List of all data belonging to this property (for cascaded updates like binaries to avoid duplicates)
 * @param con                       an open and valid connection
 * @param ps                        batch prepared statement for detail inserts
 * @param ft                        fulltext indexer
 * @param pk                        primary key of the content
 * @param data                      the value
 * @param isMaxVer                  is this content in the max. version?
 * @param isLiveVer                 is this content in the live version?
 * @param disregardFlatStorageEntry true = do not check if the data is in the flatstorage (contentTypeConversion)
 * @throws FxDbException       on errors
 * @throws FxUpdateException   on errors
 * @throws FxNoAccessException for FxNoAccess values
 * @throws SQLException        on SQL errors
 */
private void insertPropertyData(FxProperty prop, List<FxData> allData, Connection con, PreparedStatement ps,
        FulltextIndexer ft, FxPK pk, FxPropertyData data, boolean isMaxVer, boolean isLiveVer,
        boolean disregardFlatStorageEntry) throws SQLException, FxDbException, FxUpdateException, FxNoAccessException {
    if (data == null || data.isEmpty())
        return;
    if (!disregardFlatStorageEntry) {
        if (data.getPropertyAssignment().isFlatStorageEntry()) {
            if (ft != null && prop.isFulltextIndexed())
                ft.index(data);
            return;
        }
    }
    clearPreparedStatement(ps, INSERT_VALUE_POS, INSERT_END_POS);
    ps.setLong(14, 0); //FSELECT has to be set to 0 and not null

    ps.setLong(1, pk.getId());
    ps.setInt(2, pk.getVersion());
    ps.setInt(3, data.getPos());
    ps.setLong(5, data.getAssignmentId());
    // ps.setString(6, "dummy"/*XPathElement.stripType(data.getXPath())*/);
    // ps.setString(7, "dummyfull"/*XPathElement.stripType(data.getXPathFull())*/);
    String xmult = FxArrayUtils.toStringArray(data.getIndices(), ',');
    ps.setString(6, xmult);
    ps.setInt(7, data.getIndex());
    ps.setString(8, getParentGroupXMult(xmult));
    ps.setBoolean(9, isMaxVer);
    ps.setBoolean(10, isLiveVer);
    ps.setLong(12, prop.getId());
    // ps.setString(16, "dummyParent"/*XPathElement.stripType(data.getParent().getXPathFull())*/);
    ps.setInt(13, data.getIndices().length);
    if (!data.getValue().isMultiLanguage()) {
        ps.setBoolean(INSERT_LANG_POS, true);
    } else
        ps.setBoolean(INSERT_ISDEF_LANG_POS, false);
    setPropertyData(true, prop, allData, con, data, ps, ft, getUppercaseColumnPos(prop, true), true);
}
From source file: com.flexive.ejb.beans.AccountEngineBean.java
private long create(String userName, String loginName, String password, String email, long lang, long mandatorId, boolean isActive, boolean isConfirmed, Date validFrom, Date validTo, long defaultNode, String description, boolean allowMultiLogin, boolean checkUserRoles, boolean hashPassword) throws FxApplicationException { final UserTicket ticket = FxContext.getUserTicket(); // Security/*from ww w . j a v a2 s. c o m*/ if (checkUserRoles && !ticket.isGlobalSupervisor()) { if (!ticket.isInRole(Role.MandatorSupervisor)) FxPermissionUtils.checkRole(ticket, Role.AccountManagement); if (ticket.getMandatorId() != mandatorId) throw new FxNoAccessException(LOG, "ex.account.create.wrongMandator"); } // Parameter checks try { CacheAdmin.getEnvironment().getMandator(mandatorId); loginName = checkLoginName(loginName); userName = checkUserName(userName); checkDates(validFrom, validTo); if (description == null) description = ""; email = FxFormatUtils.checkEmail(email); if (!language.isValid(lang)) throw new FxInvalidParameterException("LANGUAGE", "ex.account.languageInvalid", lang); } catch (FxInvalidParameterException pe) { if (LOG.isInfoEnabled()) LOG.info(pe); throw pe; } Connection con = null; PreparedStatement stmt = null; String curSql; FxPK contactDataPK; try { final FxContext ri = FxContext.get(); final FxContent contactData; try { if (!checkUserRoles) { // we're probably running in unprivileged code, but application logic // already determined that a user account should be created ri.runAsSystem(); } contactData = co.initialize(CacheAdmin.getEnvironment().getType(FxType.CONTACTDATA).getId()); } finally { if (!checkUserRoles) { ri.stopRunAsSystem(); } } contactData.setAclId(ACL.ACL_CONTACTDATA); contactData.setValue("/SURNAME", new FxString(false, userName)); contactData.setValue("/EMAIL", new FxString(false, email)); try { FxContext.get().runAsSystem(); contactDataPK = co.save(contactData); } finally { FxContext.get().stopRunAsSystem(); } // Obtain a database connection con = Database.getDbConnection(); // Get a new Id long newId = seq.getId(FxSystemSequencer.ACCOUNT); if (hashPassword) { password = password == null ? "" : FxFormatUtils.encodePassword(newId, userName, password); } curSql = "INSERT INTO " + TBL_ACCOUNTS + "(" + //1 2 3 4 5 6 7 8 9 "ID,MANDATOR,USERNAME,LOGIN_NAME,PASSWORD,EMAIL,CONTACT_ID,LANG,VALID_FROM," + //10 11 12 13 14 "VALID_TO,DESCRIPTION,CREATED_BY,CREATED_AT,MODIFIED_BY," + //15 16 17 18 19 20 "MODIFIED_AT,IS_ACTIVE,IS_VALIDATED,DEFAULT_NODE,ALLOW_MULTILOGIN,UPDATETOKEN) values " + //1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 "(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; final long NOW = System.currentTimeMillis(); stmt = con.prepareStatement(curSql); stmt.setLong(1, newId); stmt.setLong(2, mandatorId); stmt.setString(3, userName); stmt.setString(4, loginName); stmt.setString(5, password); stmt.setString(6, email); stmt.setLong(7, contactDataPK.getId()); stmt.setInt(8, (int) lang); //subtract a second to prevent login-after-immediatly-create problems with databases truncating milliseconds stmt.setLong(9, validFrom.getTime() - 1000); stmt.setLong(10, validTo.getTime()); stmt.setString(11, description); stmt.setLong(12, ticket.getUserId()); stmt.setLong(13, NOW); stmt.setLong(14, ticket.getUserId()); stmt.setLong(15, NOW); stmt.setBoolean(16, isActive); stmt.setBoolean(17, isConfirmed); stmt.setLong(18, (defaultNode < 0) ? 
0 : defaultNode); stmt.setBoolean(19, allowMultiLogin); stmt.setString(20, RandomStringUtils.randomAlphanumeric(64)); stmt.executeUpdate(); //make sure the user is the owner of his contact data stmt.close(); stmt = con.prepareStatement("UPDATE " + TBL_CONTENT + " SET CREATED_BY=?, MANDATOR=? WHERE ID=?"); stmt.setLong(1, newId); stmt.setLong(2, mandatorId); stmt.setLong(3, contactDataPK.getId()); stmt.executeUpdate(); CacheAdmin.expireCachedContent(contactDataPK.getId()); // call scripts final List<Long> scriptIds = scripting.getByScriptEvent(FxScriptEvent.AfterAccountCreate); final FxScriptBinding binding = new FxScriptBinding(); binding.setVariable("accountId", newId); binding.setVariable("pk", contactDataPK); for (long scriptId : scriptIds) scripting.runScript(scriptId, binding); StringBuilder sbHistory = new StringBuilder(1000); sbHistory.append("<account action=\"create\">\n").append(" <id>").append(newId).append("</id>\n") .append(" <mandator>").append(CacheAdmin.getEnvironment().getMandator(mandatorId).getName()) .append("</mandator>\n").append(" <username>").append(userName).append("</username>\n") .append(" <loginname>").append(loginName).append("</loginname>\n").append(" <email>") .append(email).append("</email>\n").append(" <validfrom>") .append(FxFormatUtils.toString(validFrom)).append("</validfrom>\n").append(" <validto>") .append(FxFormatUtils.toString(validTo)).append("</validto>\n") .append(" <description><![CDATA[").append(description).append("]]></description>\n") .append(" <active>").append(isActive).append("</active>\n").append(" <confirmed>") .append(isConfirmed).append("</confirmed>\n").append(" <multilogin>").append(allowMultiLogin) .append("</multilogin>\n").append("</account>"); EJBLookup.getHistoryTrackerEngine().trackData(sbHistory.toString(), "history.account.create", loginName); StructureLoader.updateUserGroups(FxContext.get().getDivisionId(), group.loadAll(-1)); // EVERY users is a member of group EVERYONE and his mandator final long[] groups = { UserGroup.GROUP_EVERYONE, group.loadMandatorGroup(mandatorId).getId() }; ri.runAsSystem(); try { setGroups(newId, groups); } catch (Exception exc) { throw new FxCreateException(exc, "ex.account.create.everyoneAssignFailed", exc.getMessage()); } finally { ri.stopRunAsSystem(); } // Return the id return newId; } catch (SQLException exc) { final boolean uniqueConstraintViolation = StorageManager.isUniqueConstraintViolation(exc); EJBUtils.rollback(ctx); if (uniqueConstraintViolation) { throw new FxEntryExistsException(LOG, exc, "ex.account.userExists", loginName, userName); } else { throw new FxCreateException(LOG, exc, "ex.account.createFailed.sql", loginName, exc.getMessage()); } } finally { Database.closeObjects(AccountEngineBean.class, con, stmt); } }
From source file: edu.ku.brc.specify.conversion.AgentConverter.java
/** * Specify 5.x points at AgentAdress instead of an Agent. The idea was that to point at an Agent * and possibly a differnt address that represents what that person does. This was really * confusing so we are changing it to point at an Agent instead. * //www .j a v a2s.c om * So that means we need to pull apart these relationships and have all foreign keys that point * to an AgentAddress now point at an Agent and we then need to add in the Agents and then add * the Address to the Agents. * * The AgentAdress, Agent and Address (triple) can have a NULL Address but it cannot have a NULL * Agent. If there is a NULL Agent then this method will throw a RuntimeException. */ public boolean convertAgents(final boolean doFixAgents) { boolean debugAgents = false; log.debug("convert Agents"); BasicSQLUtils.removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType); // Create the mappers here, but fill them in during the AgentAddress Process IdTableMapper agentIDMapper = idMapperMgr.addTableMapper("agent", "AgentID"); IdTableMapper agentAddrIDMapper = idMapperMgr.addTableMapper("agentaddress", "AgentAddressID"); agentIDMapper.setInitialIndex(4); if (shouldCreateMapTables) { log.info("Mapping Agent Ids"); agentIDMapper.mapAllIds("SELECT AgentID FROM agent ORDER BY AgentID"); } // Just like in the conversion of the CollectionObjects we // need to build up our own SELECT clause because the MetaData of columns names returned // FROM // a query doesn't include the table names for all columns, this is far more predictable List<String> oldFieldNames = new ArrayList<String>(); StringBuilder agtAdrSQL = new StringBuilder("SELECT "); List<String> agentAddrFieldNames = getFieldNamesFromSchema(oldDBConn, "agentaddress"); agtAdrSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress")); agtAdrSQL.append(", "); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress"); List<String> agentFieldNames = getFieldNamesFromSchema(oldDBConn, "agent"); agtAdrSQL.append(buildSelectFieldList(agentFieldNames, "agent")); log.debug("MAIN: " + agtAdrSQL); agtAdrSQL.append(", "); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent"); List<String> addrFieldNames = getFieldNamesFromSchema(oldDBConn, "address"); log.debug(agtAdrSQL); agtAdrSQL.append(buildSelectFieldList(addrFieldNames, "address")); GenericDBConversion.addNamesWithTableName(oldFieldNames, addrFieldNames, "address"); // Create a Map FROM the full table/fieldname to the index in the resultset (start at 1 not zero) HashMap<String, Integer> indexFromNameMap = new HashMap<String, Integer>(); agtAdrSQL.append( " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID INNER JOIN address ON agentaddress.AddressID = address.AddressID Order By agentaddress.AgentAddressID Asc"); // These represent the New columns of Agent Table // So the order of the names are for the new table // the names reference the old table String[] agentColumns = { "agent.AgentID", "agent.TimestampModified", "agent.AgentType", "agentaddress.JobTitle", "agent.FirstName", "agent.LastName", "agent.MiddleInitial", "agent.Title", "agent.Interests", "agent.Abbreviation", "agentaddress.Email", "agentaddress.URL", "agent.Remarks", "agent.TimestampCreated", // User/Security changes "agent.ParentOrganizationID" }; HashMap<Integer, AddressInfo> addressHash = new HashMap<Integer, AddressInfo>(); // Create a HashMap to track which IDs have been handled during the conversion process try { 
log.info("Hashing Address Ids"); Integer agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AddressID) FROM address ORDER BY AddressID"); // So first we hash each AddressID and the value is set to 0 (false) Statement stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rsX = stmtX .executeQuery("SELECT AgentAddressID, AddressID FROM agentaddress ORDER BY AgentAddressID"); conv.setProcess(0, agentCnt); int cnt = 0; // Needed to add in case AgentAddress table wasn't used. while (rsX.next()) { int agentAddrId = rsX.getInt(1); int addrId = rsX.getInt(2); addressHash.put(addrId, new AddressInfo(agentAddrId, addrId)); if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); conv.setProcess(0, 0); // Next we hash all the Agents and set their values to 0 (false) log.info("Hashing Agent Ids"); stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(*) FROM agent ORDER BY AgentID"); rsX = stmtX.executeQuery( "SELECT AgentID, AgentType, LastName, Name, FirstName FROM agent ORDER BY AgentID"); conv.setProcess(0, agentCnt); cnt = 0; while (rsX.next()) { int agentId = rsX.getInt(1); agentHash.put(agentId, new AgentInfo(agentId, agentIDMapper.get(agentId), rsX.getByte(2), rsX.getString(3), rsX.getString(4), rsX.getString(5))); if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); conv.setProcess(0, 0); // Now we map all the Agents to their Addresses AND // All the Addresses to their Agents. // // NOTE: A single Address Record May be used by more than one Agent so // we will need to Duplicate the Address records later // log.info("Cross Mapping Agents and Addresses"); String post = " FROM agentaddress WHERE AddressID IS NOT NULL and AgentID IS NOT NULL"; agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AgentAddressID)" + post); stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); String asSQL = "SELECT AgentAddressID, AgentID" + post; log.debug(asSQL); rsX = stmtX.executeQuery(asSQL); conv.setProcess(0, agentCnt); cnt = 0; // Needed to add in case AgentAddress table wasn't used. while (rsX.next()) { int agentAddrId = rsX.getInt(1); int agentId = rsX.getInt(2); // /////////////////////// // Add Address to Agent // /////////////////////// AgentInfo agentInfo = agentHash.get(agentId); if (agentInfo == null) { String msg = "The AgentID [" + agentId + "] in AgentAddress table id[" + agentAddrId + "] desn't exist"; log.error(msg); tblWriter.logError(msg); } else { agentInfo.add(agentAddrId, agentAddrId); } if (cnt % 100 == 0) { conv.setProcess(0, cnt); } cnt++; } rsX.close(); stmtX.close(); //dumpInfo("beforeInfo.txt", addressHash); conv.setProcess(0, 0); // It OK if the address is NULL, but the Agent CANNOT be NULL log.info("Checking for null Agents"); agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AgentAddressID) FROM agentaddress a where AddressID IS NOT NULL and AgentID is null"); // If there is a Single Record With a NULL Agent this would be BAD! 
if (agentCnt != null && agentCnt > 0) { showError("There are " + agentCnt + " AgentAddress Records where the AgentID is null and the AddressId IS NOT NULL!"); } // //////////////////////////////////////////////////////////////////////////////// // This does the part of AgentAddress where it has both an Address AND an Agent // //////////////////////////////////////////////////////////////////////////////// log.info(agtAdrSQL.toString()); Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); log.debug("AgentAddress: " + agtAdrSQL.toString()); // Create Map of column name to column index number int inx = 1; for (String fldName : oldFieldNames) { // log.info("["+fldName+"] "+inx+" ["+rsmd.getColumnName(inx)+"]"); indexFromNameMap.put(fldName, inx++); } Statement updateStatement = newDBConn.createStatement(); // Figure out certain column indexes we will need alter int agentIdInx = indexFromNameMap.get("agent.AgentID"); int agentTypeInx = indexFromNameMap.get("agent.AgentType"); int lastEditInx = indexFromNameMap.get("agent.LastEditedBy"); int nameInx = indexFromNameMap.get("agent.Name"); int lastNameInx = indexFromNameMap.get("agent.LastName"); int firstNameInx = indexFromNameMap.get("agent.FirstName"); int recordCnt = 0; ResultSet rs = stmt.executeQuery(agtAdrSQL.toString()); while (rs.next()) { int agentAddressId = rs.getInt(1); int agentId = rs.getInt(agentIdInx); String lastEditedBy = rs.getString(lastEditInx); AgentInfo agentInfo = agentHash.get(agentId); // Deal with Agent FirstName, LastName and Name] String lastName = rs.getString(lastNameInx); String name = rs.getString(nameInx); namePair.second = StringUtils.isNotEmpty(name) && StringUtils.isEmpty(lastName) ? name : lastName; namePair.first = rs.getString(firstNameInx); // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID if (shouldCreateMapTables) { agentAddrIDMapper.setShowLogErrors(false); if (debugAgents) log.info(String.format("Map - agentAddressId (Old) %d to Agent -> New ID: %d", agentAddressId, agentInfo.getNewAgentId())); if (agentAddrIDMapper.get(agentAddressId) == null) { agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId()); } else { log.debug(String.format("ERROR - agentAddressId %d Already mapped to New ID: %d", agentAddressId, agentInfo.getNewAgentId())); } agentAddrIDMapper.setShowLogErrors(true); } // Because of the old DB relationships we want to make sure we only add each agent // in one time // So start by checking the HashMap to see if it has already been added if (!agentInfo.wasAdded()) { agentInfo.setWasAdded(true); //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId()); BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); // It has not been added yet so Add it StringBuilder sqlStr = new StringBuilder(); sqlStr.append("INSERT INTO agent "); sqlStr.append( "(AgentID, DivisionId, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, "); sqlStr.append("Title, Interests, Abbreviation, Email, URL, Remarks, TimestampCreated, "); sqlStr.append("ParentOrganizationID, CreatedByAgentID, ModifiedByAgentID, Version)"); sqlStr.append(" VALUES ("); for (int i = 0; i < agentColumns.length; i++) { if (i > 0) { sqlStr.append(","); } if (i == 0) { if (debugAgents) log.info("Adding: " + agentColumns[i] + " New ID: " + agentInfo.getNewAgentId()); sqlStr.append(agentInfo.getNewAgentId()); sqlStr.append(","); sqlStr.append(conv.getCurDivisionID()); } else if 
(agentColumns[i].equals("agent.ParentOrganizationID")) { Object obj = rs.getObject(indexFromNameMap.get(agentColumns[i])); if (obj != null) { int oldId = rs.getInt(agentColumns[i]); Integer newID = agentIDMapper.get(oldId); if (newID == null) { log.error("Couldn't map ParentOrganizationID [" + oldId + "]"); } sqlStr.append(BasicSQLUtils.getStrValue(newID)); } else { sqlStr.append("NULL"); } } else if (agentColumns[i].equals("agent.LastName") || agentColumns[i].equals("LastName")) { int lastNameLen = 120; String lstName = namePair.second; lstName = lstName == null ? null : lstName.length() <= lastNameLen ? lstName : lstName.substring(0, lastNameLen); sqlStr.append(BasicSQLUtils.getStrValue(lstName)); } else if (agentColumns[i].equals("agent.FirstName") || agentColumns[i].equals("FirstName")) { sqlStr.append(BasicSQLUtils.getStrValue(namePair.first)); } else { inx = indexFromNameMap.get(agentColumns[i]); sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx))); } } sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + "," + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ",0"); sqlStr.append(")"); try { if (debugAgents) { log.info(sqlStr.toString()); } updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS); Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement); if (newAgentId == null) { throw new RuntimeException("Couldn't get the Agent's inserted ID"); } //conv.addAgentDisciplineJoin(newAgentId, conv.getDisciplineId()); } catch (SQLException e) { log.error(sqlStr.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); System.exit(0); throw new RuntimeException(e); } } BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); if (recordCnt % 250 == 0) { log.info("AgentAddress Records: " + recordCnt); } recordCnt++; } // while BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); log.info("AgentAddress Records: " + recordCnt); rs.close(); stmt.close(); // //////////////////////////////////////////////////////////////////////////////// // This does the part of AgentAddress where it has JUST Agent // //////////////////////////////////////////////////////////////////////////////// log.info("******** Doing AgentAddress JUST Agent"); int newRecordsAdded = 0; StringBuilder justAgentSQL = new StringBuilder(); justAgentSQL.setLength(0); justAgentSQL.append("SELECT "); justAgentSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress")); justAgentSQL.append(", "); getFieldNamesFromSchema(oldDBConn, "agent", agentFieldNames); justAgentSQL.append(buildSelectFieldList(agentFieldNames, "agent")); justAgentSQL.append( " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID WHERE agentaddress.AddressID IS NULL ORDER BY agentaddress.AgentAddressID ASC"); log.info(justAgentSQL.toString()); stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery(justAgentSQL.toString()); oldFieldNames.clear(); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress"); GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent"); indexFromNameMap.clear(); inx = 1; for (String fldName : oldFieldNames) { indexFromNameMap.put(fldName, inx++); } agentIdInx = indexFromNameMap.get("agent.AgentID"); lastEditInx = indexFromNameMap.get("agent.LastEditedBy"); agentTypeInx = 
indexFromNameMap.get("agent.AgentType"); recordCnt = 0; while (rs.next()) { byte agentType = rs.getByte(agentTypeInx); int agentAddressId = rs.getInt(1); int agentId = rs.getInt(agentIdInx); String lastEditedBy = rs.getString(lastEditInx); AgentInfo agentInfo = agentHash.get(agentId); // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID if (shouldCreateMapTables) { agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId()); } recordCnt++; if (!agentInfo.wasAdded()) { agentInfo.setWasAdded(true); BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); // Create Agent StringBuilder sqlStr = new StringBuilder("INSERT INTO agent "); sqlStr.append( "(AgentID, DivisionID, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, Title, Interests, "); sqlStr.append("Abbreviation, Email, URL, Remarks, TimestampCreated, ParentOrganizationID, "); sqlStr.append("CreatedByAgentID, ModifiedByAgentID, Version)"); sqlStr.append(" VALUES ("); for (int i = 0; i < agentColumns.length; i++) { if (i > 0) sqlStr.append(","); if (i == 0) { if (debugAgents) log.info(agentColumns[i]); sqlStr.append(agentInfo.getNewAgentId()); sqlStr.append(","); sqlStr.append(conv.getCurDivisionID()); } else if (i == lastEditInx) { // Skip the field } else if (agentColumns[i].equals("agent.LastName")) { if (debugAgents) log.info(agentColumns[i]); int srcColInx = agentType != 1 ? nameInx : lastNameInx; String lName = BasicSQLUtils.getStrValue(rs.getObject(srcColInx)); sqlStr.append(lName); } else { if (debugAgents) log.info(agentColumns[i]); inx = indexFromNameMap.get(agentColumns[i]); sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx))); } } sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + "," + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ", 0"); // '0' is Version sqlStr.append(")"); try { if (debugAgents) { log.info(sqlStr.toString()); } updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS); Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement); if (newAgentId == null) { throw new RuntimeException("Couldn't get the Agent's inserted ID"); } newRecordsAdded++; } catch (SQLException e) { log.error(sqlStr.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } if (recordCnt % 250 == 0) { log.info("AgentAddress (Agent Only) Records: " + recordCnt); } } // while log.info("AgentAddress (Agent Only) Records: " + recordCnt + " newRecordsAdded " + newRecordsAdded); rs.close(); updateStatement.close(); conv.setProcess(0, BasicSQLUtils.getNumRecords(oldDBConn, "agent")); conv.setDesc("Adding Agents"); // Now Copy all the Agents that where part of an Agent Address Conversions stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery("SELECT AgentID FROM agent"); recordCnt = 0; while (rs.next()) { Integer agentId = rs.getInt(1); AgentInfo agentInfo = agentHash.get(agentId); if (agentInfo == null || !agentInfo.wasAdded()) { copyAgentFromOldToNew(agentId, agentIDMapper); } recordCnt++; if (recordCnt % 50 == 0) { conv.setProcess(recordCnt); } } conv.setProcess(recordCnt); BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); //------------------------------------------------------------ // Now Copy all the Agents that where missed //------------------------------------------------------------ 
conv.setProcess(0); stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); rs = stmt.executeQuery("SELECT AgentID FROM agent"); recordCnt = 0; while (rs.next()) { Integer agentId = rs.getInt(1); Integer newId = agentIDMapper.get(agentId); if (newId != null) { Integer isThere = BasicSQLUtils.getCount(newDBConn, "SELECT COUNT(*) FROM agent WHERE AgentID = " + newId); if (isThere == null || isThere == 0) { copyAgentFromOldToNew(agentId, agentIDMapper); } } else { tblWriter.logError("Mapping missing for old Agent id[" + agentId + "]"); } recordCnt++; if (recordCnt % 50 == 0) { conv.setProcess(recordCnt); } } conv.setProcess(recordCnt); if (doFixAgents) { fixAgentsLFirstLastName(); } //---------------------------------------------------------------------------------------------------------------------------------- // Now loop through the Agents hash and write the addresses. If the address has already been written then it will need to be // duplicate in the second step. //---------------------------------------------------------------------------------------------------------------------------------- StringBuilder sqlStr1 = new StringBuilder("INSERT INTO address "); sqlStr1.append( "(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, "); sqlStr1.append( "IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, PositionHeld, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)"); sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); PreparedStatement pStmt = newDBConn.prepareStatement(sqlStr1.toString(), Statement.RETURN_GENERATED_KEYS); // 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 String addrSQL = "SELECT a.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, a.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.JobTitle " + "FROM address AS a " + "INNER JOIN agentaddress AS aa ON a.AddressID = aa.AddressID WHERE aa.AgentAddressID = %d"; BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); int fixCnt = 0; for (AgentInfo agentInfo : agentHash.values()) { HashMap<Integer, Integer> addrs = agentInfo.getAddrs(); for (Integer oldAgentAddrId : addrs.keySet()) { String adrSQL = String.format(addrSQL, oldAgentAddrId); rs = stmt.executeQuery(adrSQL); if (!rs.next()) { rs.close(); continue; } String lastEditedBy = rs.getString(14); String posHeld = rs.getString(15); if (posHeld != null && posHeld.length() > 32) { posHeld = posHeld.substring(0, 32); } String addr1 = rs.getString(2); String addr2 = null; if (addr1 != null && addr1.length() > 255) { addr1 = addr1.substring(0, 255); addr2 = addr1.substring(255); } pStmt.setTimestamp(1, rs.getTimestamp(1)); pStmt.setString(2, addr1); pStmt.setString(3, addr2); // Address 2 pStmt.setString(4, rs.getString(3)); pStmt.setString(5, rs.getString(4)); pStmt.setString(6, rs.getString(5)); pStmt.setString(7, rs.getString(6)); pStmt.setString(8, rs.getString(7)); pStmt.setTimestamp(9, rs.getTimestamp(8)); pStmt.setBoolean(10, rs.getByte(13) != 0); pStmt.setBoolean(11, rs.getByte(13) != 0); pStmt.setString(12, rs.getString(9)); pStmt.setString(13, rs.getString(10)); pStmt.setString(14, rs.getString(11)); pStmt.setString(15, rs.getString(12)); pStmt.setString(16, posHeld); pStmt.setInt(17, agentInfo.getNewAgentId()); pStmt.setInt(18, conv.getCreatorAgentIdForAgent(lastEditedBy)); pStmt.setInt(19, 
conv.getModifiedByAgentIdForAgent(lastEditedBy)); pStmt.setInt(20, 0); pStmt.setInt(21, agentInfo.addrOrd); Integer newID = BasicSQLUtils.getInsertedId(pStmt); log.debug(String.format("Saved New Id %d", newID)); //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId()); agentInfo.addrOrd++; rs.close(); try { if (debugAgents) { log.info(sqlStr1.toString()); } if (pStmt.executeUpdate() != 1) { log.error("Error inserting address.)"); } //addrInfo.setWasAdded(true); } catch (SQLException e) { log.error(sqlStr1.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } } log.info(String.format("Added %d new Addresses", fixCnt)); pStmt.close(); //------------------------------------------------------------------ // Step #2 - Now duplicate the addresses for the agents that had // already been written to the database //------------------------------------------------------------------ /*fixCnt = 0; for (AgentInfo agentInfo : agentHash.values()) { for (Integer oldAgentAddrId : agentInfo.getUnwrittenOldAddrIds()) { Integer oldAddrId = agentInfo.getAddrs().get(oldAgentAddrId); //AddressInfo addrInfo = addressHash.get(oldAddrId); System.out.println(String.format("%d %d", oldAgentAddrId, oldAddrId)); //duplicateAddress(newDBConn, addrInfo.getOldAddrId(), addrInfo.getNewAddrId(), agentInfo.getNewAgentId()); } } log.info(String.format("Duplicated %d new Addresses", fixCnt)); */ //---------------------------------------------------------------------------------------------------------------------------------- // Now loop through the Agents hash and write the addresses. If the address has already been written then it will need to be // duplicate in the second step. //---------------------------------------------------------------------------------------------------------------------------------- /*BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType); sqlStr1 = new StringBuilder("INSERT INTO address "); sqlStr1.append("(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, "); sqlStr1.append("IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)"); sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); pStmt = newDBConn.prepareStatement(sqlStr1.toString()); // 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 String addrOnlySQL = "SELECT aa.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, aa.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.AgentID " + "FROM agentaddress AS aa " + "LEFT JOIN address AS a ON a.AddressID = aa.AddressID " + "WHERE a.addressID IS NULL AND aa.AgentID IS NOT NULL"; fixCnt = 0; rs = stmt.executeQuery(addrOnlySQL); while (rs.next()) { int agentId = rs.getInt(15); int newAgentId = agentIDMapper.get(agentId); String lastEditedBy = rs.getString(14); pStmt.setTimestamp(1, rs.getTimestamp(1)); pStmt.setString(2, rs.getString(2)); pStmt.setString(3, null); // Address 2 pStmt.setString(4, rs.getString(3)); pStmt.setString(5, rs.getString(4)); pStmt.setString(6, rs.getString(5)); pStmt.setString(7, rs.getString(6)); pStmt.setString(8, rs.getString(7)); pStmt.setTimestamp(9, rs.getTimestamp(8)); pStmt.setBoolean(10, rs.getByte(13) != 0); pStmt.setBoolean(11, rs.getByte(13) != 0); pStmt.setString(12, rs.getString(9)); pStmt.setString(13, rs.getString(10)); pStmt.setString(14, 
rs.getString(11)); pStmt.setString(15, rs.getString(12)); pStmt.setInt(16, newAgentId); pStmt.setInt(17, conv.getCreatorAgentIdForAgent(lastEditedBy)); pStmt.setInt(18, conv.getModifiedByAgentIdForAgent(lastEditedBy)); pStmt.setInt(19, 0); pStmt.setInt(20, 1); try { if (debugAgents) { log.info(sqlStr1.toString()); } if (pStmt.executeUpdate() != 1) { log.error("Error inserting address.)"); } else { fixCnt++; } } catch (SQLException e) { log.error(sqlStr1.toString()); log.error("Count: " + recordCnt); e.printStackTrace(); log.error(e); throw new RuntimeException(e); } } rs.close(); log.info(String.format("Added %d new Addresses", fixCnt)); pStmt.close();*/ stmt.close(); //dumpInfo("afterInfo.txt", addressHash); BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent", BasicSQLUtils.myDestinationServerType); return true; } catch (SQLException ex) { log.error(ex); ex.printStackTrace(); System.exit(0); throw new RuntimeException(ex); } }
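The Specify converter above derives its boolean parameters from a legacy TINYINT column, passing rs.getByte(13) != 0 to setBoolean for the IsPrimary and IsCurrent flags. A condensed sketch of that byte-to-boolean migration pattern, using hypothetical address_old and address_new tables:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class FlagMigrationSketch {
    // Copies a legacy TINYINT flag column into a BOOLEAN column of a new table.
    public void migrateCurrentFlags(Connection oldDb, Connection newDb) throws SQLException {
        String insertSql = "INSERT INTO address_new (address_id, is_current) VALUES (?, ?)";
        try (Statement stmt = oldDb.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT AddressID, IsCurrent FROM address_old");
             PreparedStatement ps = newDb.prepareStatement(insertSql)) {
            while (rs.next()) {
                ps.setLong(1, rs.getLong(1));
                ps.setBoolean(2, rs.getByte(2) != 0); // non-zero byte means "true"
                ps.executeUpdate();
            }
        }
    }
}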
From source file: HSqlManager.java
public static void primerAnalysis(Connection connection, int bps) throws SQLException, IOException { long time = System.currentTimeMillis(); DpalLoad.main(new String[1]); HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64; String base = new File("").getAbsolutePath(); CSV.makeDirectory(new File(base + "/PhageData")); INSTANCE = ImportPhagelist.getInstance(); INSTANCE.parseAllPhages(bps);// w w w. j a v a 2 s .com System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60); time = System.currentTimeMillis(); written = true; Connection db = connection; db.setAutoCommit(false); Statement stat = db.createStatement(); stat.execute("SET FILES LOG FALSE\n"); // PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers" + // "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + // " Values(?,?,true,false,false,?,?)"); PreparedStatement st = db.prepareStatement( "INSERT INTO Primerdb.Primers" + "(Bp,Sequence,Strain,Cluster,Tm,GC,UniqueP,CommonP,Hairpin) " + "VALUES(?,?,?,?,?,?,true,true,?)"); ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;"); List<String[]> phages = new ArrayList<>(); while (call.next()) { String[] r = new String[3]; r[0] = call.getString("Strain"); r[1] = call.getString("Cluster"); r[2] = call.getString("Name"); phages.add(r); // if(strain.equals("-myco")) { // if (r[2].equals("xkcd")) { // strain = r[0]; // } // }else if(strain.equals("-arthro")){ // if (r[2].equals("ArV1")) { // strain = r[0]; // } // } } call.close(); Set<String> strains = phages.stream().map(y -> y[0]).collect(Collectors.toSet()); for (String x : strains) { Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]) .collect(Collectors.toSet()); Map<String, Integer> clustersNum = new HashMap<>(); Map<Integer, String> clustersName = new HashMap<>(); Map<Integer, List<String>> clusters = new HashMap<>(); Map<Bytes, Primer> primers = new HashMap<>(); int i = 0; for (String cluster : clust) { clustersName.put(i, cluster); clustersNum.put(cluster, i); i++; } clust.parallelStream() .forEach(cluster -> clusters.put(clustersNum.get(cluster), phages.stream().filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2]) .collect(Collectors.toList()))); for (int z : clusters.keySet()) { // try { List<String> clustphages = clusters.get(z); for (String phage : clustphages) { Set<Bytes> phagprimers = //Read from CSV file here //Premade CSV files of all possible //primers in a phage genome CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv").stream() .map(l -> new Bytes(l.getBytes())).collect(Collectors.toSet()); for (Bytes primer : phagprimers) { if (!primers.containsKey(primer)) { primers.put(primer, new Primer(z)); } else { Primer select = primers.get(primer); select.phageCount++; if (!select.containsCluster(z)) { select.addCluster(z); } } } } System.out.println(clustersName.get(z)); } int count = 0; Iterator<Map.Entry<Bytes, Primer>> primersSet = primers.entrySet().iterator(); while (primersSet.hasNext()) { Map.Entry<Bytes, Primer> primer = primersSet.next(); Primer primerInf = primer.getValue(); if (primerInf.clusters.length != 1) { primer.setValue(null); } else { int primerClust = -1; for (int cluster : primerInf.clusters) { primerClust = cluster; } if (primerInf.phageCount != clusters.get(primerClust).size()) { primer.setValue(null); } else { count++; } } } System.out.print("Unique Count: "); System.out.println(count); System.out.print("Primer Count: "); System.out.println(primers.size()); i = 0; for (Bytes a : 
primers.keySet()) { Primer primerInf = primers.get(a); if (primerInf != null) { String primerClust = ""; for (int cluster : primerInf.clusters) { primerClust = clustersName.get(cluster); } String str = new String(a.bytes); try { st.setInt(1, bps); st.setString(2, str); st.setString(3, x); st.setString(4, primerClust); // st.setDouble(5, HSqlPrimerDesign.primerTm(str, 0, 800, 1.5, 0.2)); st.setDouble(5, HSqlPrimerDesign.easytm(str)); st.setDouble(6, HSqlPrimerDesign.gcContent(str)); st.setBoolean(7, HSqlPrimerDesign.calcHairpin(str, 4)); st.addBatch(); } catch (SQLException e) { e.printStackTrace(); System.out.println("Error occurred at " + x + " " + primerClust); } i++; if (i == 1000) { i = 0; st.executeBatch(); db.commit(); } } } if (i > 0) { st.executeBatch(); db.commit(); } // } System.out.println("Unique Updated"); System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60); } stat.execute("SET FILES LOG TRUE;"); st.close(); stat.close(); }
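The HSqlManager example binds its hairpin flag inside a JDBC batch, calling addBatch after each row and flushing with executeBatch plus a manual commit every 1000 rows. A minimal sketch of that batching pattern, assuming a hypothetical primers table with a sequence column and a BOOLEAN hairpin column (and non-null map values):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

public class PrimerBatchSketch {
    // Batches rows and flushes every 1000 inserts, mirroring the commit cadence above.
    public void insertPrimers(Connection db, Map<String, Boolean> hairpinBySequence) throws SQLException {
        db.setAutoCommit(false);
        String sql = "INSERT INTO primers (sequence, hairpin) VALUES (?, ?)";
        try (PreparedStatement ps = db.prepareStatement(sql)) {
            int pending = 0;
            for (Map.Entry<String, Boolean> entry : hairpinBySequence.entrySet()) {
                ps.setString(1, entry.getKey());
                ps.setBoolean(2, entry.getValue());
                ps.addBatch();
                if (++pending == 1000) {
                    ps.executeBatch();
                    db.commit();
                    pending = 0;
                }
            }
            if (pending > 0) {
                ps.executeBatch();
                db.commit();
            }
        }
    }
}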
From source file: edu.ku.brc.specify.toycode.mexconabio.BuildTags.java
/** * //from w ww . j a v a 2 s . c om */ public void process() throws SQLException { int dupAgents = 0; int dupLocality = 0; int unknown = 0; boolean doAll = false; BasicSQLUtils.setDBConnection(dbConn); boolean doTrim = false; if (doTrim || doAll) { String trimNamesSQL = "UPDATE tagger SET first=TRIM(first),last=TRIM(last),company=TRIM(company),address1=TRIM(address1),address2=TRIM(address2),city=TRIM(city),state=TRIM(state)"; BasicSQLUtils.update(srcDBConn2, trimNamesSQL); String removeQuote = "UPDATE tagger SET first=SUBSTRING_INDEX(first, '\"', -1),last=SUBSTRING_INDEX(last, '\"', -1),company=SUBSTRING_INDEX(company, '\"', -1),address1=SUBSTRING_INDEX(address1, '\"', -1)," + "address2=SUBSTRING_INDEX(address2, '\"', -1),city=SUBSTRING_INDEX(city, '\"', -1), state=SUBSTRING_INDEX(state, '\"', -1)"; BasicSQLUtils.update(srcDBConn2, removeQuote); String trimNamesSQL2 = "UPDATE tag SET city=TRIM(city),county=TRIM(county),state=TRIM(state)"; BasicSQLUtils.update(srcDBConn2, trimNamesSQL2); String removeQuote2 = "UPDATE tag SET city=SUBSTRING_INDEX(city, '\"', -1), county=SUBSTRING_INDEX(county, '\"', -1), state=SUBSTRING_INDEX(state, '\"', -1)"; BasicSQLUtils.update(srcDBConn2, removeQuote2); } IdMapperMgr idMapperMgr = IdMapperMgr.getInstance(); idMapperMgr.setDBs(srcDBConn2, dbConn); IdHashMapper agentMapper; Division division = (Division) session.get(Division.class, 2); initialPrepareStatements(); BasicSQLUtils.update(srcDBConn, "UPDATE tag SET `Date` = null WHERE Date = '0000-00-00'"); //IdMapperMgr.setSkippingOldTableCheck(true); boolean doAgents = false; if (doAgents || doAll) { agentMapper = new IdTableMapper("agent", "AgentID", false, false); String sql = "SELECT first, last, company, address1, address2, city, state, country, zip, phone, fax, enail, tnum FROM tagger ORDER BY tnum"; Statement stmt = srcDBConn.createStatement(); stmt.setFetchSize(Integer.MIN_VALUE); log.debug("Querying for Agents..."); ResultSet rs = stmt.executeQuery(sql); int cnt = 0; while (rs.next()) { String first = rs.getString(1); String last = rs.getString(2); String company = rs.getString(3); String addr1 = rs.getString(4); String addr2 = rs.getString(5); String city = rs.getString(6); String state = rs.getString(7); String country = rs.getString(8); String zip = rs.getString(9); String phone = rs.getString(10); String fax = rs.getString(11); String email = rs.getString(12); Integer oldId = rs.getInt(13); if (oldId == null) { log.error("Null primary Id: " + last + " " + first); continue; } Agent agent = getAgent(first, last, city, state); Integer agentId = null; if (agent == null) { agent = new Agent(); agent.initialize(); agent.setFirstName(first); agent.setLastName(last); agent.setEmail(email); agent.setRemarks(company); agent.setDivision(division); Address addr = new Address(); addr.initialize(); addr.setAddress(addr1); addr.setAddress2(addr2); addr.setCity(city); addr.setState(state); addr.setCountry(country); addr.setPostalCode(zip); addr.setPhone1(phone); addr.setFax(fax); agent.getAddresses().add(addr); addr.setAgent(agent); Transaction trans = null; try { trans = session.beginTransaction(); session.saveOrUpdate(agent); session.saveOrUpdate(addr); trans.commit(); agentId = agent.getId(); } catch (Exception ex) { ex.printStackTrace(); try { if (trans != null) trans.rollback(); } catch (Exception ex2) { ex2.printStackTrace(); } } } else { agentId = agent.getId(); dupAgents++; //System.out.println("Found Agent: "+first+", "+last); } agentMapper.put(oldId, agentId); cnt++; if (cnt % 500 == 0) { 
System.out.println("Agents: " + cnt); } if (cnt % 400 == 0) { HibernateUtil.closeSession(); session = HibernateUtil.getCurrentSession(); hibSession = new HibernateDataProviderSession(session); } } rs.close(); stmt.close(); division = (Division) session.get(Division.class, 2); } else { //agentMapper = idMapperMgr.addTableMapper("agent", "AgentID", false); IdHashMapper.setEnableDelete(false); agentMapper = new IdTableMapper("agent", "AgentID", null, false, false); } System.out.println("Duplicated Agent: " + dupAgents); boolean doTags = true; if (doTags || doAll) { HashMap<String, Integer> localityHash = new HashMap<String, Integer>(); HashMap<Integer, String> geoFullNameHash = new HashMap<Integer, String>(); int divId = 2; int dspId = 3; int colId = 4; String sql = "SELECT t.tagid, t.`date`, t.wild, t.gender, t.city, t.county, t.state, t.country, t.zip, t.observations, t.lat, t.lon, t.sunangle, p.tnum " + "FROM tag AS t Inner Join page AS p ON t.page = p.page "; Statement stmt = srcDBConn.createStatement(); stmt.setFetchSize(Integer.MIN_VALUE); log.debug("Querying for Tags..."); ResultSet rs = stmt.executeQuery(sql); int cnt = 0; log.debug("Done querying for Tags..."); Calendar cal = Calendar.getInstance(); Timestamp ts = new Timestamp(cal.getTime().getTime()); String common = "TimestampCreated, Version, CreatedByAgentID"; String coStr = String.format( "INSERT INTO collectionobject (CatalogNumber, FieldNumber, Text1, Remarks, CollectionID, CollectionMemberId, CollectingEventID, %s) VALUES(?,?,?,?,?,?,?,?,?,?)", common); String ceStr = String.format( "INSERT INTO collectingevent (StartDate, Method, DisciplineID, LocalityID, %s) VALUES(?,?,?,?,?,?,?)", common); String lcStr = String.format( "INSERT INTO locality (Latitude1, Longitude1, SrcLatLongUnit, Lat1text, Long1text, LatLongType, DisciplineID, MaxElevation, LocalityName, GeographyID, %s) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?)", common); String clStr = String.format( "INSERT INTO collector (OrderNumber, IsPrimary, CollectingEventID, DivisionID, AgentID, %s) VALUES(?,?,?,?,?,?,?,?)", common); PreparedStatement coStmt = dbConn.prepareStatement(coStr); PreparedStatement ceStmt = dbConn.prepareStatement(ceStr); PreparedStatement lcStmt = dbConn.prepareStatement(lcStr); PreparedStatement clStmt = dbConn.prepareStatement(clStr); int recNum = 1; while (rs.next()) { String tag = rs.getString(1); if (tag != null && tag.startsWith("ERR")) continue; Date date = rs.getDate(2); String wild = rs.getString(3); String gender = rs.getString(4); String city = rs.getString(5); String county = rs.getString(6); String state = rs.getString(7); String country = rs.getString(8); //String zip = rs.getString(9); String obs = rs.getString(10); double lat = rs.getDouble(11); double lon = rs.getDouble(12); double angle = rs.getDouble(13); Integer taggerId = rs.getInt(14); String locName = null; String fullName = null; Integer locId = null; Integer geoId = getGeography(country, state, county); if (geoId != null) { //locName = localityHash.get(geoId); fullName = geoFullNameHash.get(geoId); if (fullName == null) { fullName = BasicSQLUtils .querySingleObj("SELECT FullName FROM geography WHERE GeographyID = " + geoId); geoFullNameHash.put(geoId, fullName); } if (StringUtils.isNotEmpty(city)) { locName = city + ", " + fullName; } else { locName = fullName; } locId = localityHash.get(locName); } else { unknown++; fullName = "Unknown"; locName = buildLocalityName(city, fullName); geoId = 27507; // Unknown locId = localityHash.get(locName); //log.error("Couldn't find matching 
geography["+country+", "+state+", "+county+"]"); } if (locId == null) { lcStmt.setDouble(1, lat); lcStmt.setDouble(2, lon); lcStmt.setByte(3, (byte) 0); lcStmt.setString(4, Double.toString(lat)); lcStmt.setString(5, Double.toString(lon)); lcStmt.setString(6, "Point"); lcStmt.setInt(7, dspId); lcStmt.setDouble(8, angle); lcStmt.setString(9, locName); lcStmt.setObject(10, geoId); lcStmt.setTimestamp(11, ts); lcStmt.setInt(12, 1); lcStmt.setInt(13, 1); lcStmt.executeUpdate(); locId = BasicSQLUtils.getInsertedId(lcStmt); localityHash.put(locName, locId); } else { dupLocality++; } // (StartDate, Method, DisciplineID, LocalityID ceStmt.setDate(1, date); ceStmt.setString(2, wild); ceStmt.setInt(3, dspId); ceStmt.setInt(4, locId); ceStmt.setTimestamp(5, ts); ceStmt.setInt(6, 1); ceStmt.setInt(7, 1); ceStmt.executeUpdate(); Integer ceId = BasicSQLUtils.getInsertedId(ceStmt); //(CatalogNumber, FieldNumber, Text1, Remarks, CollectionID, CollectionMemberId coStmt.setString(1, String.format("%09d", recNum++)); coStmt.setString(2, tag); coStmt.setString(3, gender); coStmt.setString(4, obs); coStmt.setInt(5, colId); coStmt.setInt(6, colId); coStmt.setInt(7, ceId); coStmt.setTimestamp(8, ts); coStmt.setInt(9, 1); coStmt.setInt(10, 1); coStmt.executeUpdate(); //Integer coId = BasicSQLUtils.getInsertedId(coStmt); //Integer coltrId = null; if (taggerId != null) { Integer agentId = agentMapper.get(taggerId); //System.out.println(agentId); if (agentId != null) { // OrderIndex, IsPrimary, CollectingEventID, DivisionID, AgentID clStmt.setInt(1, 0); clStmt.setBoolean(2, true); clStmt.setInt(3, ceId); clStmt.setInt(4, divId); clStmt.setInt(5, agentId); clStmt.setTimestamp(6, ts); clStmt.setInt(7, 1); clStmt.setInt(8, 1); clStmt.executeUpdate(); //coltrId = BasicSQLUtils.getInsertedId(clStmt); //BasicSQLUtils.getInsertedId(clStmt); } else { log.debug("Couldn't find Agent in DB for tagger id (tnum): " + taggerId + " AgentID:: " + agentId); } } else { log.debug("Couldn't find Mapped Id for tagger id (tnum): " + taggerId); } cnt++; if (cnt % 1000 == 0) { System.out.println("Col Obj: " + cnt); } } coStmt.close(); ceStmt.close(); lcStmt.close(); clStmt.close(); System.out.println("Duplicated Agent: " + dupAgents); System.out.println("Duplicated Localities: " + dupLocality); System.out.println("Unknown Localities: " + unknown); System.out.println("Localities: " + BasicSQLUtils.getCountAsInt(dbConn, "SELECT COUNT(*) FROM locality")); } }
From source file: com.flexive.core.storage.genericSQL.GenericHierarchicalStorage.java
    /**
     * Set a property's data for inserts or updates
     *
     * @param insert          perform insert or update?
     * @param prop            current property
     * @param allData         all data of the instance (might be needed to build references, etc.)
     * @param con             an open and valid connection
     * @param data            current property data
     * @param ps              prepared statement for the data table
     * @param ft              fulltext indexer
     * @param upperColumnPos  position of the uppercase column (if present, else <code>-1</code>)
     * @param includeFullText add fulltext entries? Will be skipped for position-only changes
     * @throws SQLException        on errors
     * @throws FxUpdateException   on errors
     * @throws FxDbException       on errors
     * @throws FxNoAccessException for FxNoAccess values
     */
    private void setPropertyData(boolean insert, FxProperty prop, List<FxData> allData, Connection con,
            FxPropertyData data, PreparedStatement ps, FulltextIndexer ft, int upperColumnPos,
            boolean includeFullText) throws SQLException, FxUpdateException, FxDbException, FxNoAccessException {
        FxValue value = data.getValue();
        if (value instanceof FxNoAccess)
            throw new FxNoAccessException("ex.content.value.noaccess");
        if (value.isMultiLanguage() != ((FxPropertyAssignment) data.getAssignment()).isMultiLang()) {
            if (((FxPropertyAssignment) data.getAssignment()).isMultiLang())
                throw new FxUpdateException("ex.content.value.invalid.multilanguage.ass.multi", data.getXPathFull());
            else
                throw new FxUpdateException("ex.content.value.invalid.multilanguage.ass.single", data.getXPathFull());
        }
        int pos_lang = insert ? INSERT_LANG_POS : UPDATE_ID_POS + 2;
        int pos_isdef_lang = insert ? INSERT_ISDEF_LANG_POS : UPDATE_MLDEF_POS;
        final FxEnvironment env = CacheAdmin.getEnvironment();
        if (prop.getDataType().isSingleRowStorage()) {
            //Data types that just use one db row can be handled in a very similar way
            Object translatedValue;
            GregorianCalendar gc = null;
            final long[] translatedLanguages = value.getTranslatedLanguages();
            for (long translatedLanguage : translatedLanguages) {
                translatedValue = value.getTranslation(translatedLanguage);
                if (translatedValue == null) {
                    LOG.warn("Translation for " + data.getXPath() + " is null!");
                }
                ps.setLong(pos_lang, translatedLanguage);
                if (!value.isMultiLanguage())
                    ps.setBoolean(pos_isdef_lang, true);
                else
                    ps.setBoolean(pos_isdef_lang, value.isDefaultLanguage(translatedLanguage));
                if (upperColumnPos != -1) {
                    final Locale locale = value.isMultiLanguage()
                            ? env.getLanguage(translatedLanguage).getLocale() : Locale.getDefault();
                    ps.setString(upperColumnPos, translatedValue.toString().toUpperCase(locale));
                }
                int[] pos = insert ? getColumnPosInsert(prop) : getColumnPosUpdate(prop);
                switch (prop.getDataType()) {
                    case Double:
                        checkDataType(FxDouble.class, value, data.getXPathFull());
                        ps.setDouble(pos[0], (Double) translatedValue);
                        break;
                    case Float:
                        checkDataType(FxFloat.class, value, data.getXPathFull());
                        ps.setFloat(pos[0], (Float) translatedValue);
                        break;
                    case LargeNumber:
                        checkDataType(FxLargeNumber.class, value, data.getXPathFull());
                        ps.setLong(pos[0], (Long) translatedValue);
                        break;
                    case Number:
                        checkDataType(FxNumber.class, value, data.getXPathFull());
                        ps.setInt(pos[0], (Integer) translatedValue);
                        break;
                    case HTML:
                        checkDataType(FxHTML.class, value, data.getXPathFull());
                        boolean useTidy = ((FxHTML) value).isTidyHTML();
                        ps.setBoolean(pos[1], useTidy);
                        final String extractorInput = doTidy(data.getXPathFull(), (String) translatedValue);
                        if (useTidy) {
                            translatedValue = extractorInput;
                        }
                        final HtmlExtractor result = new HtmlExtractor(extractorInput, true);
                        setBigString(ps, pos[2], result.getText());
                        setBigString(ps, pos[0], (String) translatedValue);
                        break;
                    case String1024:
                    case Text:
                        checkDataType(FxString.class, value, data.getXPathFull());
                        setBigString(ps, pos[0], (String) translatedValue);
                        break;
                    case Boolean:
                        checkDataType(FxBoolean.class, value, data.getXPathFull());
                        ps.setBoolean(pos[0], (Boolean) translatedValue);
                        break;
                    case Date:
                        checkDataType(FxDate.class, value, data.getXPathFull());
                        if (gc == null) gc = new GregorianCalendar();
                        gc.setTime((Date) translatedValue);
                        //strip all time information, this might not be necessary since ps.setDate() strips them
                        //for most databases but won't hurt either ;)
                        gc.set(GregorianCalendar.HOUR, 0);
                        gc.set(GregorianCalendar.MINUTE, 0);
                        gc.set(GregorianCalendar.SECOND, 0);
                        gc.set(GregorianCalendar.MILLISECOND, 0);
                        ps.setDate(pos[0], new java.sql.Date(gc.getTimeInMillis()));
                        break;
                    case DateTime:
                        checkDataType(FxDateTime.class, value, data.getXPathFull());
                        if (gc == null) gc = new GregorianCalendar();
                        gc.setTime((Date) translatedValue);
                        ps.setTimestamp(pos[0], new Timestamp(gc.getTimeInMillis()));
                        break;
                    case DateRange:
                        checkDataType(FxDateRange.class, value, data.getXPathFull());
                        if (gc == null) gc = new GregorianCalendar();
                        gc.setTime(((DateRange) translatedValue).getLower());
                        gc.set(GregorianCalendar.HOUR, 0);
                        gc.set(GregorianCalendar.MINUTE, 0);
                        gc.set(GregorianCalendar.SECOND, 0);
                        gc.set(GregorianCalendar.MILLISECOND, 0);
                        ps.setDate(pos[0], new java.sql.Date(gc.getTimeInMillis()));
                        gc.setTime(((DateRange) translatedValue).getUpper());
                        gc.set(GregorianCalendar.HOUR, 0);
                        gc.set(GregorianCalendar.MINUTE, 0);
                        gc.set(GregorianCalendar.SECOND, 0);
                        gc.set(GregorianCalendar.MILLISECOND, 0);
                        ps.setDate(pos[1], new java.sql.Date(gc.getTimeInMillis()));
                        break;
                    case DateTimeRange:
                        checkDataType(FxDateTimeRange.class, value, data.getXPathFull());
                        if (gc == null) gc = new GregorianCalendar();
                        gc.setTime(((DateRange) translatedValue).getLower());
                        ps.setTimestamp(pos[0], new Timestamp(gc.getTimeInMillis()));
                        gc.setTime(((DateRange) translatedValue).getUpper());
                        ps.setTimestamp(pos[1], new Timestamp(gc.getTimeInMillis()));
                        break;
                    case Binary:
                        checkDataType(FxBinary.class, value, data.getXPathFull());
                        BinaryDescriptor binary = (BinaryDescriptor) translatedValue;
                        if (!binary.isNewBinary()) {
                            ps.setLong(pos[0], binary.getId());
                        } else {
                            try {
                                //transfer the binary from the transit table to the binary table
                                BinaryDescriptor created = binaryStorage.binaryTransit(con, binary);
                                ps.setLong(pos[0], created.getId());
                                //check all other properties if they contain the same handle
                                //and replace with the data of the new binary
                                for (FxData _curr : allData) {
                                    if (_curr instanceof FxPropertyData && !_curr.isEmpty()
                                            && ((FxPropertyData) _curr).getValue() instanceof FxBinary) {
                                        FxBinary _val = (FxBinary) ((FxPropertyData) _curr).getValue();
                                        _val._replaceHandle(binary.getHandle(), created);
                                    }
                                }
                            } catch (FxApplicationException e) {
                                throw new FxDbException(e);
                            }
                        }
                        break;
                    case SelectOne:
                        checkDataType(FxSelectOne.class, value, data.getXPathFull());
                        ps.setLong(pos[0], ((FxSelectListItem) translatedValue).getId());
                        break;
                    case SelectMany:
                        checkDataType(FxSelectMany.class, value, data.getXPathFull());
                        SelectMany sm = (SelectMany) translatedValue;
                        for (int i1 = 0; i1 < sm.getSelected().size(); i1++) {
                            FxSelectListItem item = sm.getSelected().get(i1);
                            if (i1 > 0) {
                                if (batchContentDataChanges())
                                    ps.addBatch();
                                else
                                    ps.executeUpdate();
                            }
                            ps.setLong(pos[0], item.getId());
                            ps.setString(pos[1], sm.getSelectedIdsList());
                            ps.setLong(pos[2], sm.getSelectedIds().size());
                        }
                        if (sm.getSelected().size() == 0)
                            ps.setLong(pos[0], 0); //write the virtual item as a marker to have a valid row
                        break;
                    case Reference:
                        //reference integrity check is done prior to saving
                        ps.setLong(pos[0], ((FxPK) translatedValue).getId());
                        break;
                    case InlineReference:
                    default:
                        throw new FxDbException(LOG, "ex.db.notImplemented.store", prop.getDataType().getName());
                }
                int valueDataPos = insert ? getValueDataInsertPos(prop.getDataType())
                        : getValueDataUpdatePos(prop.getDataType());
                if (value.hasValueData(translatedLanguage)) {
                    ps.setInt(valueDataPos, value.getValueDataRaw(translatedLanguage));
                } else
                    ps.setNull(valueDataPos, Types.NUMERIC);
                if (batchContentDataChanges())
                    ps.addBatch();
                else {
                    try {
                        ps.executeUpdate();
                    } catch (SQLException e) {
                        LOG.error(prop.getName(), e);
                        throw e;
                    }
                }
            }
        } else {
            switch (prop.getDataType()) {
                //TODO: implement datatype specific insert
                default:
                    throw new FxDbException(LOG, "ex.db.notImplemented.store", prop.getDataType().getName());
            }
        }
        if (ft != null && prop.isFulltextIndexed() && includeFullText)
            ft.index(data);
    }
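Two setBoolean calls in the method above are worth isolating: the multi-language default flag written at pos_isdef_lang, and the Boolean branch that stores the translated value itself. The sketch below shows just that pattern against a deliberately simplified table; the table name, column names, and Map-based translation input are assumptions for illustration, not flexive's real content-data schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

public class BooleanPropertySketch {

    // One row per translation: the default-language flag and the boolean payload are both
    // bound with setBoolean, echoing the Boolean branch above. Schema is illustrative only.
    static void storeBooleanProperty(Connection con, long instanceId,
            Map<Long, Boolean> translations, long defaultLanguage) throws SQLException {
        String sql = "INSERT INTO content_data (ID, LANG, ISMLDEF, FBOOL) VALUES (?, ?, ?, ?)";
        try (PreparedStatement ps = con.prepareStatement(sql)) {
            for (Map.Entry<Long, Boolean> t : translations.entrySet()) {
                ps.setLong(1, instanceId);
                ps.setLong(2, t.getKey());
                ps.setBoolean(3, t.getKey().longValue() == defaultLanguage); // is this the default language?
                ps.setBoolean(4, t.getValue());                              // the boolean translation itself
                ps.addBatch();
            }
            ps.executeBatch();
        }
    }
}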
From source file:org.apache.ctakes.ytex.uima.mapper.DocumentMapperServiceImpl.java
    /**
     * bind the variables to the prepared statement
     *
     * @param type
     * @param mapInfo
     * @param ps
     * @param annoId
     * @param anno
     * @throws SQLException
     */
    private void saveAnnoBindVariables(final Type type, final AnnoMappingInfo mapInfo, PreparedStatement ps,
            int annoId, FeatureStructure anno, final BiMap<Annotation, Integer> mapAnnoToId) throws SQLException {
        // set anno_base_id
        int argIdx = 1;
        ps.setInt(argIdx++, annoId);
        if (mapInfo.getCoveredTextColumn() != null) {
            String trunc = null;
            if (anno instanceof Annotation) {
                trunc = truncateString(((Annotation) anno).getCoveredText(),
                        mapInfo.getCoveredTextColumn().getSize());
            }
            ps.setString(argIdx++, trunc);
        }
        if (!Strings.isNullOrEmpty(mapInfo.getUimaTypeIdColumnName())) {
            ps.setInt(argIdx++, mapInfo.getUimaTypeId());
        }
        // iterate over fields
        for (Map.Entry<String, ColumnMappingInfo> fieldEntry : mapInfo.getMapField().entrySet()) {
            ColumnMappingInfo fieldMapInfo = fieldEntry.getValue();
            String fieldName = fieldMapInfo.getAnnoFieldName();
            Feature feat = type.getFeatureByBaseName(fieldName);
            if (fieldMapInfo.getConverter() != null) {
                try {
                    String prop = anno.getFeatureValueAsString(feat);
                    ps.setObject(argIdx, fieldMapInfo.getConverter().convert(fieldMapInfo.getTargetType(), prop));
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            } else if (!feat.getRange().isPrimitive()) {
                // feature is a structure/annotation
                FeatureStructure fs = anno.getFeatureValue(feat);
                if (fs == null) {
                    // feature is null - set the column to null
                    ps.setNull(argIdx, fieldMapInfo.getSqlType());
                } else {
                    if (fieldMapInfo.getJxpath() != null) {
                        // jxpath to pull out feature attribute
                        Object o = this.extractFeature(fieldMapInfo.getJxpath(), fs);
                        if (o == null) {
                            // extracted value null - set column to null
                            ps.setNull(argIdx, fieldMapInfo.getSqlType());
                        } else if (o instanceof String) {
                            // string - truncate as needed
                            String trunc = truncateString((String) o, fieldMapInfo.getSize());
                            ps.setString(argIdx, trunc);
                        } else {
                            // set value
                            ps.setObject(argIdx, o);
                        }
                    } else {
                        // reference to another annotation - get the other anno's id
                        Integer refAnnoId = null;
                        if (fs instanceof Annotation) {
                            refAnnoId = mapAnnoToId.get(fs);
                        }
                        if (refAnnoId != null) {
                            ps.setInt(argIdx, refAnnoId);
                        } else {
                            ps.setNull(argIdx, Types.INTEGER);
                        }
                    }
                }
            } else {
                if ("uima.cas.Integer".equals(feat.getRange().getName())) {
                    ps.setInt(argIdx, anno.getIntValue(feat));
                } else if ("uima.cas.Short".equals(feat.getRange().getName())) {
                    ps.setShort(argIdx, anno.getShortValue(feat));
                } else if ("uima.cas.Long".equals(feat.getRange().getName())) {
                    ps.setLong(argIdx, anno.getLongValue(feat));
                } else if ("uima.cas.Float".equals(feat.getRange().getName())) {
                    ps.setFloat(argIdx, anno.getFloatValue(feat));
                } else if ("uima.cas.Double".equals(feat.getRange().getName())) {
                    ps.setDouble(argIdx, anno.getDoubleValue(feat));
                } else if ("uima.cas.Byte".equals(feat.getRange().getName())) {
                    ps.setByte(argIdx, anno.getByteValue(feat));
                } else if ("uima.cas.Boolean".equals(feat.getRange().getName())) {
                    ps.setBoolean(argIdx, anno.getBooleanValue(feat));
                } else if ("uima.cas.String".equals(feat.getRange().getName())) {
                    String trunc = truncateString(anno.getStringValue(feat), fieldMapInfo.getSize());
                    ps.setString(argIdx, trunc);
                }
            }
            argIdx++;
        }
    }
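The primitive-range branch above dispatches on the UIMA range type name and picks the matching PreparedStatement setter, with uima.cas.Boolean mapping to setBoolean. A stripped-down, standalone version of that dispatch, keyed on the Java runtime type instead of the UIMA type system, might look like this; the TypedBinder class and its bind signature are hypothetical.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

public final class TypedBinder {

    // Binds a value with the setter that matches its runtime type, mirroring the
    // uima.cas.* dispatch above; null falls back to setNull, unknown types to setObject.
    static void bind(PreparedStatement ps, int idx, Object value) throws SQLException {
        if (value == null) {
            ps.setNull(idx, Types.NULL);
        } else if (value instanceof Boolean) {
            ps.setBoolean(idx, (Boolean) value);
        } else if (value instanceof Integer) {
            ps.setInt(idx, (Integer) value);
        } else if (value instanceof Long) {
            ps.setLong(idx, (Long) value);
        } else if (value instanceof Double) {
            ps.setDouble(idx, (Double) value);
        } else if (value instanceof String) {
            ps.setString(idx, (String) value);
        } else {
            ps.setObject(idx, value);
        }
    }

    private TypedBinder() {
    }
}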
From source file:com.flexive.core.security.FxDBAuthentication.java
    /**
     * Login a user using flexive's database
     *
     * @param loginname name of the user
     * @param password  plaintext password
     * @param callback  callback providing datasource, ejb context and "take over"
     * @return Authenticated UserTicket
     * @throws FxAccountInUseException   on errors
     * @throws FxLoginFailedException    on errors
     * @throws FxAccountExpiredException on errors
     */
    public static UserTicket login(String loginname, String password, FxCallback callback)
            throws FxAccountInUseException, FxLoginFailedException, FxAccountExpiredException {
        final long SYS_UP = CacheAdmin.getInstance().getSystemStartTime();
        FxContext inf = FxContext.get();

        // Avoid null pointer exceptions
        if (password == null) password = "";
        if (loginname == null) loginname = "";

        final String applicationId = StringUtils.defaultString(inf.getApplicationId());
        if (StringUtils.isBlank(applicationId)) {
            LOG.warn("Login: application ID is not set");
        }

        String curSql;
        PreparedStatement ps = null;
        Connection con = null;
        try {
            // Obtain a database connection
            con = callback.getDataSource().getConnection();
            //               1-6 7    8           9             10                 11           12         13    14         15         16
            curSql = "SELECT d.*,a.ID,a.IS_ACTIVE,a.IS_VALIDATED,a.ALLOW_MULTILOGIN,a.VALID_FROM,a.VALID_TO,NOW(),a.PASSWORD,a.MANDATOR,a.LOGIN_NAME "
                    + "FROM " + TBL_ACCOUNTS + " a "
                    + "LEFT JOIN "
                    + " (SELECT ID,ISLOGGEDIN,LAST_LOGIN,LAST_LOGIN_FROM,FAILED_ATTEMPTS,AUTHSRC FROM "
                    + TBL_ACCOUNT_DETAILS + " WHERE APPLICATION=? ORDER BY LAST_LOGIN DESC) d "
                    + "ON a.ID=d.ID WHERE UPPER(a.LOGIN_NAME)=UPPER(?)";
            ps = con.prepareStatement(curSql);
            ps.setString(1, applicationId);
            ps.setString(2, loginname);
            final ResultSet rs = ps.executeQuery();

            // Anything found?
            if (rs == null || !rs.next())
                throw new FxLoginFailedException("Login failed (invalid user or password)",
                        FxLoginFailedException.TYPE_USER_OR_PASSWORD_NOT_DEFINED);

            // check if the hashed password matches the hash stored in the database
            final long id = rs.getLong(7);
            final String dbLoginName = rs.getString(16); // use DB login name for non-lowercase login names
            final String dbPassword = rs.getString(14);
            boolean passwordMatches = FxSharedUtils.hashPassword(id, dbLoginName, password).equals(dbPassword);
            if (!passwordMatches && "supervisor".equalsIgnoreCase(loginname)) {
                // before 3.2.0 the default supervisor password was incorrectly hashed against the lower-cased login name
                passwordMatches = FxSharedUtils.hashPassword(id, "supervisor", password).equals(dbPassword);
            }
            if (!passwordMatches && !callback.isCalledAsGlobalSupervisor()) {
                increaseFailedLoginAttempts(con, id);
                throw new FxLoginFailedException("Login failed (invalid user or password)",
                        FxLoginFailedException.TYPE_USER_OR_PASSWORD_NOT_DEFINED);
            }

            // Read data
            final boolean loggedIn = rs.getBoolean(2);
            final Date lastLogin = new Date(rs.getLong(3));
            final String lastLoginFrom = rs.getString(4);
            final long failedAttempts = rs.getLong(5);
            final boolean active = rs.getBoolean(8);
            final boolean validated = rs.getBoolean(9);
            final boolean allowMultiLogin = rs.getBoolean(10);
            final Date validFrom = new Date(rs.getLong(11));
            final Date validTo = new Date(rs.getLong(12));
            final Date dbNow = rs.getTimestamp(13);
            final long mandator = rs.getLong(15);

            // Account active?
            if (!active || !validated
                    || (CacheAdmin.isEnvironmentLoaded() && !CacheAdmin.getEnvironment().getMandator(mandator).isActive())) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Login for user [" + loginname + "] failed, account is inactive. Active=" + active
                            + ", Validated=" + validated
                            + ", Mandator active: " + CacheAdmin.getEnvironment().getMandator(mandator).isActive());
                increaseFailedLoginAttempts(con, id);
                throw new FxLoginFailedException("Login failed, account is inactive.",
                        FxLoginFailedException.TYPE_INACTIVE_ACCOUNT);
            }

            // Account date from-to valid?
            // Compute the day AFTER the dValidTo
            Calendar endDate = Calendar.getInstance();
            endDate.setTime(validTo);
            endDate.add(Calendar.DAY_OF_MONTH, 1);
            if (validFrom.getTime() > dbNow.getTime() || endDate.getTimeInMillis() < dbNow.getTime()) {
                SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy");
                if (LOG.isDebugEnabled())
                    LOG.debug("Login for user [" + loginname + "] failed, from/to date not valid. from='"
                            + sdf.format(validFrom) + "' to='" + validTo + "'");
                increaseFailedLoginAttempts(con, id);
                throw new FxAccountExpiredException(loginname, dbNow);
            }

            // Check 'Account in use and takeOver false'
            if (!allowMultiLogin && !callback.getTakeOverSession() && loggedIn && lastLogin != null) {
                // Only if the last login time was AFTER the system started
                if (lastLogin.getTime() >= SYS_UP) {
                    FxAccountInUseException aiu = new FxAccountInUseException(loginname, lastLoginFrom, lastLogin);
                    if (LOG.isInfoEnabled())
                        LOG.info(aiu);
                    // don't log this as an invalid login attempt - this happens routinely when a session times
                    // out and the cached session data has not been evicted by the maintenance task yet
                    //increaseFailedLoginAttempts(con, id);
                    throw aiu;
                }
            }

            // Clear any old data
            curSql = "DELETE FROM " + TBL_ACCOUNT_DETAILS + " WHERE ID=? AND APPLICATION=?";
            ps.close();
            ps = con.prepareStatement(curSql);
            ps.setLong(1, id);
            ps.setString(2, applicationId);
            ps.executeUpdate();

            // Mark user as active in the database
            // This can lead to duplicate rows for a user/application for concurrent logins (e.g. WebDAV clients),
            // but we prefer this to actually locking the complete table before updates. (FX-868)
            curSql = "INSERT INTO " + TBL_ACCOUNT_DETAILS
                    + " (ID,APPLICATION,ISLOGGEDIN,LAST_LOGIN,LAST_LOGIN_FROM,FAILED_ATTEMPTS,AUTHSRC) "
                    + "VALUES (?,?,?,?,?,?,?)";
            ps.close();
            ps = con.prepareStatement(curSql);
            ps.setLong(1, id);
            ps.setString(2, applicationId);
            ps.setBoolean(3, true);
            ps.setLong(4, System.currentTimeMillis());
            ps.setString(5, inf.getRemoteHost());
            ps.setLong(6, 0); //reset failed attempts
            ps.setString(7, AuthenticationSource.Database.name());
            ps.executeUpdate();

            // Load the user and construct a user ticket
            try {
                final UserTicketImpl ticket = (UserTicketImpl) UserTicketStore.getUserTicket(loginname);
                ticket.setFailedLoginAttempts(failedAttempts);
                ticket.setAuthenticationSource(AuthenticationSource.Database);
                return ticket;
            } catch (FxApplicationException e) {
                if (callback.getSessionContext() != null)
                    callback.getSessionContext().setRollbackOnly();
                throw new FxLoginFailedException(
                        e.getExceptionMessage().getLocalizedMessage(
                                CacheAdmin.getEnvironment().getLanguage(FxLanguage.DEFAULT_ID)),
                        FxLoginFailedException.TYPE_UNKNOWN_ERROR);
            }
        } catch (SQLException exc) {
            if (callback.getSessionContext() != null)
                callback.getSessionContext().setRollbackOnly();
            throw new FxLoginFailedException("Database error: " + exc.getMessage(),
                    FxLoginFailedException.TYPE_SQL_ERROR);
        } finally {
            Database.closeObjects(FxDBAuthentication.class, con, ps);
        }
    }