List of usage examples for java.sql Statement addBatch
void addBatch(String sql) throws SQLException;
Adds the given SQL command to the current list of commands for this Statement object. The commands in this list can be executed as a batch by calling executeBatch().
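Before the per-project examples below, here is a minimal, self-contained sketch of the basic pattern: queue several commands with addBatch(String), then submit them with executeBatch(). The JDBC URL, credentials, and the demo_log table are placeholders invented for this sketch, not taken from any of the source files.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class AddBatchSketch {
    // jdbcUrl, user and password are placeholders for a real data source; demo_log is a made-up table.
    static int[] runBatch(String jdbcUrl, String user, String password) throws SQLException {
        try (Connection con = DriverManager.getConnection(jdbcUrl, user, password);
                Statement stmt = con.createStatement()) {
            con.setAutoCommit(false);               // group the whole batch into one transaction
            stmt.addBatch("INSERT INTO demo_log(msg) VALUES ('started')");
            stmt.addBatch("UPDATE demo_log SET msg = 'done' WHERE msg = 'started'");
            stmt.addBatch("DELETE FROM demo_log WHERE msg = 'done'");
            int[] counts = stmt.executeBatch();     // one update count per queued command
            con.commit();
            return counts;
        }
    }
}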
From source file:com.flexive.core.storage.genericSQL.GenericTreeStorageSimple.java
/**
 * {@inheritDoc}
 */
@Override
public void move(Connection con, SequencerEngine seq, FxTreeMode mode, long nodeId, long newParentId,
        int newPosition) throws FxApplicationException {
    FxTreeNodeInfo node = getTreeNodeInfo(con, mode, nodeId);
    final FxTreeNodeInfo parentNode = getTreeNodeInfo(con, mode, node.getParentId());
    if (newPosition < 0)
        newPosition = 0;
    else if (newPosition > parentNode.getDirectChildCount())
        newPosition = parentNode.getDirectChildCount() - 1;
    final boolean sameParent = parentNode.getId() == newParentId;
    if (sameParent && node.getPosition() == newPosition)
        return; //nothing to do
    int depthDelta = 0;
    FxTreeNodeInfo newParentNode;
    if (!sameParent) {
        newParentNode = getTreeNodeInfo(con, mode, newParentId);
        depthDelta = (newParentNode.getDepth() + 1) - node.getDepth();
    } else
        newParentNode = parentNode;
    //make room for the node at destination position
    long destLeft, destRight;
    if (!newParentNode.hasChildren() || newPosition == 0) {
        //first
        destLeft = newParentNode.getLeft().longValue() + 1;
    } else if (newParentNode.getDirectChildCount() - 1 == newPosition) {
        //last
        destLeft = newParentNode.getRight().longValue()/* - 2*/;
    } else {
        //middle
        destLeft = getTreeNodeInfoAt(con, mode, newParentNode, newPosition).getLeft().longValue();
    }
    //dest right = dest left + "width" of node
    final long nodeWidth = node.getRight().longValue() - node.getLeft().longValue() + 1;
    destRight = destLeft + nodeWidth - 2;
    PreparedStatement ps = null;
    Statement stmt = null;
    try {
        //open enough space to hold the node
        ps = con.prepareStatement("UPDATE " + getTable(mode) + " SET LFT=LFT+? WHERE LFT>=?");
        ps.setLong(1, nodeWidth);
        ps.setLong(2, destLeft);
        ps.executeUpdate();
        ps.close();
        ps = con.prepareStatement("UPDATE " + getTable(mode) + " SET RGT=RGT+? WHERE RGT>=?");
        ps.setLong(1, nodeWidth);
        ps.setLong(2, destRight);
        ps.executeUpdate();
        ps.close();
        //node may have been moved as well, refetch it
        FxTreeNodeInfo movedNode = getTreeNodeInfo(con, mode, nodeId);
        //move the node into the created gap
        final long delta = movedNode.getLeft().longValue() - destLeft;
        ps = con.prepareStatement(
                "UPDATE " + getTable(mode) + " SET LFT=LFT-(?), RGT=RGT-(?) WHERE LFT>=? AND RGT<=?");
        ps.setLong(1, delta);
        ps.setLong(2, delta);
        ps.setLong(3, movedNode.getLeft().longValue());
        ps.setLong(4, movedNode.getRight().longValue());
        ps.executeUpdate();
        //close the gap from the original node
        ps.close();
        ps = con.prepareStatement("UPDATE " + getTable(mode) + " SET RGT=RGT-? WHERE RGT>?");
        ps.setLong(1, nodeWidth);
        ps.setLong(2, movedNode.getRight().longValue());
        ps.executeUpdate();
        ps.close();
        ps = con.prepareStatement("UPDATE " + getTable(mode) + " SET LFT=LFT-? WHERE LFT>?");
        ps.setLong(1, nodeWidth);
        ps.setLong(2, movedNode.getRight().longValue());
        ps.executeUpdate();
        // Update the parent of the node
        stmt = con.createStatement();
        stmt.addBatch("UPDATE " + getTable(mode) + " SET PARENT=" + newParentId + " WHERE ID=" + nodeId);
        if (mode != FxTreeMode.Live)
            stmt.addBatch("UPDATE " + getTable(mode) + " SET DIRTY=TRUE WHERE ID=" + nodeId);
        stmt.executeBatch();
        stmt.close();
        // Update the childcount of the new and old parent if needed + set dirty flag
        if (!sameParent) {
            FxTreeNodeInfo nodeOldParent = getTreeNodeInfo(con, mode, node.getParentId());
            node = getTreeNodeInfo(con, mode, nodeId);
            stmt = con.createStatement();
            stmt.addBatch("UPDATE " + getTable(mode) + " SET CHILDCOUNT=CHILDCOUNT+1 WHERE ID=" + newParentId);
            stmt.addBatch("UPDATE " + getTable(mode) + " SET CHILDCOUNT=CHILDCOUNT-1 WHERE ID="
                    + nodeOldParent.getId());
            if (mode != FxTreeMode.Live) {
                stmt.addBatch("UPDATE " + getTable(mode) + " SET DIRTY=TRUE, DEPTH=DEPTH+(" + depthDelta
                        + ") WHERE LFT>=" + node.getLeft().longValue() + " AND " + "RGT<="
                        + node.getRight().longValue());
                stmt.addBatch("UPDATE " + getTable(mode) + " SET DIRTY=TRUE WHERE ID IN(" + node.getParentId()
                        + "," + newParentId + ")");
            }
            stmt.executeBatch();
            stmt.close();
        }
    } catch (SQLException e) {
        throw new FxTreeException(e, "ex.tree.move.failed", node.getId(), newParentId, newPosition,
                e.getMessage());
    } finally {
        try {
            if (ps != null)
                ps.close();
        } catch (SQLException e) {
            //ignore
        }
        try {
            if (stmt != null)
                stmt.close();
        } catch (SQLException e) {
            //ignore
        }
    }
}
From source file:com.edgenius.wiki.installation.DBLoader.java
public void resetTable(String dbType, ConnectionProxy con) throws SQLException, IOException {
    Statement dropStat = null;
    Statement stat = null;
    try {
        log.info("Creating tables...");
        dropStat = con.createStatement();
        stat = con.createStatement();
        List<String> lines = loadSQLFile(dbType, dbType + ".ddl");
        for (String sql : lines) {
            //need know if table already exist, if exist need run alter table ... drop index..., otherwise, skip that
            sql = sql.replaceAll("\n", " ").trim();
            // Here is really a special patch for MYSQL 4 as I don't want to waste much time on this special issue
            // key size is over 1024 issue on MySQL 4 http://bugs.mysql.com/bug.php?id=4541
            if (sql.toLowerCase().startsWith(
                    "create index page_link_index on " + Constants.TABLE_PREFIX.toLowerCase() + "page_links")) {
                try {
                    //only mysql4 may throw exception, ignore it.
                    dropStat.execute(sql);
                } catch (Exception e) {
                    log.error("Create page link index operation failed....");
                }
                continue;
            }
            //1. don't detect table exist because it only check if role table has data, if partial create, isTableExist() won't work
            //2. put drop independent and try..catch b/c some DBs will report error if table not exist while drop
            if (sql.toLowerCase().startsWith("alter table ") && sql.toLowerCase().indexOf(" drop ") != -1
                    || sql.toLowerCase().startsWith("drop ")) {
                try {
                    dropStat.execute(sql);
                } catch (Exception e) {
                    log.error("Drop operation failed. It is OK for initial installation.");
                }
                continue;
            }
            stat.addBatch(sql);
        }
        stat.executeBatch();
        log.info("Initialize data for system...");
        lines = loadSQLFile(dbType, dbType + "-init-tables.sql");
        for (String sql : lines) {
            sql = sql.replaceAll("\n", " ").trim();
            stat.addBatch(sql);
        }
        stat.executeBatch();
        log.info("Initialize quartz tables for system...");
        lines = loadSQLFile(dbType, dbType + "-quartz.sql");
        for (String sql : lines) {
            sql = sql.replaceAll("\n", " ").trim();
            if (sql.toLowerCase().startsWith("drop ")) {
                try {
                    dropStat.execute(sql);
                } catch (Exception e) {
                    log.error("Drop operation failed...." + sql);
                }
                continue;
            }
            stat.addBatch(sql);
        }
        stat.executeBatch();
        log.info("System all tables and initial data are ready");
    } finally {
        if (stat != null)
            stat.close();
        if (dropStat != null)
            dropStat.close();
    }
}
From source file:org.kuali.rice.test.ClearDatabaseLifecycle.java
protected void clearTables(final PlatformTransactionManager transactionManager, final DataSource dataSource) {
    Assert.assertNotNull("DataSource could not be located.", dataSource);
    try {
        StopWatch s = new StopWatch();
        s.start();
        new TransactionTemplate(transactionManager).execute(new TransactionCallback() {
            public Object doInTransaction(final TransactionStatus status) {
                verifyTestEnvironment(dataSource);
                return new JdbcTemplate(dataSource).execute(new StatementCallback() {
                    public Object doInStatement(Statement statement) throws SQLException {
                        String schemaName = statement.getConnection().getMetaData().getUserName().toUpperCase();
                        LOG.info("Clearing tables for schema " + schemaName);
                        if (StringUtils.isBlank(schemaName)) {
                            Assert.fail("Empty schema name given");
                        }
                        final List<String> reEnableConstraints = new ArrayList<String>();
                        DatabaseMetaData metaData = statement.getConnection().getMetaData();
                        Map<String, List<String[]>> exportedKeys = indexExportedKeys(metaData, schemaName);
                        final ResultSet resultSet = metaData.getTables(null, schemaName, null,
                                new String[] { "TABLE" });
                        final StringBuilder logStatements = new StringBuilder();
                        while (resultSet.next()) {
                            String tableName = resultSet.getString("TABLE_NAME");
                            if (shouldTableBeCleared(tableName)) {
                                if (!isUsingDerby(metaData) && isUsingOracle(metaData)) {
                                    List<String[]> exportedKeyNames = exportedKeys.get(tableName);
                                    if (exportedKeyNames != null) {
                                        for (String[] exportedKeyName : exportedKeyNames) {
                                            final String fkName = exportedKeyName[0];
                                            final String fkTableName = exportedKeyName[1];
                                            final String disableConstraint = "ALTER TABLE " + fkTableName
                                                    + " DISABLE CONSTRAINT " + fkName;
                                            logStatements.append("Disabling constraints using statement ->"
                                                    + disableConstraint + "<-\n");
                                            statement.addBatch(disableConstraint);
                                            reEnableConstraints.add("ALTER TABLE " + fkTableName
                                                    + " ENABLE CONSTRAINT " + fkName);
                                        }
                                    }
                                } else if (isUsingMySQL(metaData)) {
                                    statement.addBatch("SET FOREIGN_KEY_CHECKS = 0");
                                }
                                String deleteStatement = "DELETE FROM " + tableName;
                                logStatements.append(
                                        "Clearing contents using statement ->" + deleteStatement + "<-\n");
                                statement.addBatch(deleteStatement);
                            }
                        }
                        for (final String constraint : reEnableConstraints) {
                            logStatements.append("Enabling constraints using statement ->" + constraint + "<-\n");
                            statement.addBatch(constraint);
                        }
                        if (isUsingMySQL(metaData)) {
                            statement.addBatch("SET FOREIGN_KEY_CHECKS = 1");
                        }
                        LOG.info(logStatements);
                        int[] results = statement.executeBatch();
                        for (int index = 0; index < results.length; index++) {
                            if (results[index] == Statement.EXECUTE_FAILED) {
                                Assert.fail("Execution of database clear statement failed.");
                            }
                        }
                        resultSet.close();
                        LOG.info("Tables successfully cleared for schema " + schemaName);
                        return null;
                    }
                });
            }
        });
        s.stop();
        LOG.info("Time to clear tables: " + DurationFormatUtils.formatDurationHMS(s.getTime()));
    } catch (Exception e) {
        LOG.error(e);
        throw new RuntimeException(e);
    }
}
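The example above checks each entry of the int[] returned by executeBatch() against Statement.EXECUTE_FAILED. A stripped-down hedged sketch of just that result-checking step (the helper class and the error message are illustrative, not part of the Kuali code):

import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

final class BatchResultCheck {
    // Hypothetical helper: queue each command, run the batch, and fail fast on EXECUTE_FAILED.
    static void runAndVerify(Statement statement, List<String> sqlCommands) throws SQLException {
        for (String sql : sqlCommands) {
            statement.addBatch(sql);
        }
        int[] results = statement.executeBatch();
        for (int i = 0; i < results.length; i++) {
            // EXECUTE_FAILED marks a command the driver could not execute;
            // SUCCESS_NO_INFO means it ran but the affected row count is unknown.
            if (results[i] == Statement.EXECUTE_FAILED) {
                throw new SQLException("Batch command failed: " + sqlCommands.get(i));
            }
        }
    }
}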
From source file:com.autentia.tnt.manager.data.MigrationManager.java
/**
 * @return the original database version
 * @throws com.autentia.tnt.manager.data.exception.DataException
 */
public Version upgradeDatabase() throws DataException {
    Version ret = null;
    Version db = null;
    Version code = Version.getApplicationVersion();
    Session ses = HibernateUtil.getSessionFactory().openSession();
    Connection con = ses.connection();
    Statement stmt = null;
    LineNumberReader file = null;
    String delimiter = ";";
    try {
        db = Version.getDatabaseVersion();
        ret = db.clone();
        log.info("upgradeDatabase - >>>> STARTING MIGRATION FROM " + db + " TO " + code + " <<<<");
        // Disable auto-commit (just in case...)
        log.info("upgradeDatabase - disabling auto commit");
        con.setAutoCommit(false);
        // Create statement
        stmt = con.createStatement();
        // While necessary, upgrade database
        while (db.compareTo(code, Version.MINOR) < 0) {
            log.info("upgradeDatabase - " + db);
            // Compose script name and open it
            String script = SCRIPT_PREFIX + db.toString(Version.MINOR) + SCRIPT_SUFFIX;
            log.info("upgradeDatabase - loading script " + script);
            InputStream sqlScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(script);
            if (sqlScript == null) {
                throw new FileNotFoundException(script);
            }
            file = new LineNumberReader(new InputStreamReader(new BufferedInputStream(sqlScript), "UTF-8"));
            // Add batched SQL sentences to statement
            StringBuilder sentence = new StringBuilder();
            String line;
            while ((line = file.readLine()) != null) {
                line = line.trim();
                if (!line.startsWith("--")) {
                    // Interpret "DELIMITER" sentences
                    if (line.trim().toUpperCase(Locale.ENGLISH).startsWith("DELIMITER")) {
                        delimiter = line.trim().substring("DELIMITER".length()).trim();
                    } else {
                        // Add line to sentence
                        if (line.endsWith(delimiter)) {
                            // Remove delimiter
                            String lastLine = line.substring(0, line.length() - delimiter.length());
                            // Append line to sentence
                            sentence.append(lastLine);
                            // Execute sentence
                            log.info(" " + sentence.toString());
                            stmt.addBatch(sentence.toString());
                            // Prepare new sentence
                            sentence = new StringBuilder();
                        } else {
                            // Append line to sentence
                            sentence.append(line);
                            // Append separator for next line
                            sentence.append(" ");
                        }
                    }
                }
            }
            // Execute batch
            log.info("upgradeDatabase - executing batch of commands");
            stmt.executeBatch();
            // Re-read database version
            Version old = db;
            db = Version.getDatabaseVersion(con);
            if (old.equals(db)) {
                throw new DataException("Script was applied but did not upgrade database: " + script);
            }
            log.info("upgradeDatabase - database upgraded to version " + db);
        }
        // Commit transaction
        log.info("upgradeDatabase - commiting changes to database");
        con.commit();
        // Report end of migration
        log.info("upgradeDatabase - >>>> MIGRATION SUCCESSFULLY FINISHED <<<<");
    } catch (Exception e) {
        log.error("upgradeDatabase - >>>> MIGRATION FAILED: WILL BE ROLLED BACK <<<<", e);
        try {
            con.rollback();
        } catch (SQLException e2) {
            log.error("upgradeDatabase - Error al realizar el rollback");
        }
        throw new DataException("Script was applied but did not upgrade database: ", e);
    } finally {
        if (file != null) {
            try {
                file.close();
            } catch (IOException e2) {
                log.error("upgradeDatabase - Error al cerrar el fichero de script ", e2);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e2) {
                log.error("upgradeDatabase - Error al cerrar el statement");
            }
        }
        ses.close();
    }
    return ret;
}
From source file:mondrian.test.loader.MondrianFoodMartLoader.java
/**
 * If we are outputting to JDBC,
 * Execute the given set of SQL statements
 *
 * Otherwise,
 * output the statements to a file.
 *
 * @param sqls SQL statements to execute
 * @param pauseMillis How many milliseconds to pause between batches
 * @return # SQL statements executed
 */
private int writeBatch(List<String> sqls, long pauseMillis) throws IOException, SQLException {
    if (sqls.size() == 0) {
        // nothing to do
        return sqls.size();
    }
    if (dialect.getDatabaseProduct() == Dialect.DatabaseProduct.INFOBRIGHT) {
        for (String sql : sqls) {
            fileOutput.write(sql);
            fileOutput.write(nl);
        }
    } else if (outputDirectory != null) {
        for (String sql : sqls) {
            fileOutput.write(sql);
            fileOutput.write(";" + nl);
        }
    } else {
        final boolean useTxn;
        if (dialect.getDatabaseProduct() == Dialect.DatabaseProduct.NEOVIEW) {
            // setAutoCommit can not changed to true again, throws
            // "com.hp.t4jdbc.HPT4Exception: SetAutoCommit not possible",
            // since a transaction is active
            useTxn = false;
        } else if (pauseMillis > 0) {
            // No point trickling in data if we don't commit it as we write.
            useTxn = false;
            connection.setAutoCommit(true);
        } else {
            useTxn = connection.getMetaData().supportsTransactions();
        }
        if (useTxn) {
            connection.setAutoCommit(false);
        }
        switch (dialect.getDatabaseProduct()) {
        case LUCIDDB:
        case NEOVIEW:
            // LucidDB doesn't perform well with single-row inserts,
            // and its JDBC driver doesn't support batch writes,
            // so collapse the batch into one big multi-row insert.
            // Similarly Neoview.
            String VALUES_TOKEN = "VALUES";
            StringBuilder sb = new StringBuilder(sqls.get(0));
            for (int i = 1; i < sqls.size(); i++) {
                sb.append(",\n");
                int valuesPos = sqls.get(i).indexOf(VALUES_TOKEN);
                if (valuesPos < 0) {
                    throw new RuntimeException("Malformed INSERT: " + sqls.get(i));
                }
                valuesPos += VALUES_TOKEN.length();
                sb.append(sqls.get(i).substring(valuesPos));
            }
            sqls.clear();
            sqls.add(sb.toString());
        }
        Statement stmt = connection.createStatement();
        if (sqls.size() == 1) {
            // Don't use batching if there's only one item. This allows
            // us to work around bugs in the JDBC driver by setting
            // outputJdbcBatchSize=1.
            stmt.execute(sqls.get(0));
        } else {
            for (String sql : sqls) {
                stmt.addBatch(sql);
            }
            int[] updateCounts;
            try {
                updateCounts = stmt.executeBatch();
            } catch (SQLException e) {
                for (String sql : sqls) {
                    LOGGER.error("Error in SQL batch: " + sql);
                }
                throw e;
            }
            int updates = 0;
            for (int i = 0; i < updateCounts.length; updates += updateCounts[i], i++) {
                if (updateCounts[i] == 0) {
                    LOGGER.error("Error in SQL: " + sqls.get(i));
                }
            }
            if (updates < sqls.size()) {
                throw new RuntimeException("Failed to execute batch: " + sqls.size() + " versus " + updates);
            }
        }
        stmt.close();
        if (useTxn) {
            connection.setAutoCommit(true);
        }
    }
    return sqls.size();
}
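If one command in the batch fails, executeBatch() typically throws BatchUpdateException, whose getUpdateCounts() reports how far the batch progressed; drivers differ on whether they stop at the failing command or continue past it. A short hedged sketch of that handling, using a hypothetical demo_numbers table rather than the loader's real SQL:

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

final class BatchFailureSketch {
    // Hypothetical example: demo_numbers and its rows are placeholders.
    static void insertRows(Connection connection) throws SQLException {
        try (Statement stmt = connection.createStatement()) {
            stmt.addBatch("INSERT INTO demo_numbers(n) VALUES (1)");
            stmt.addBatch("INSERT INTO demo_numbers(n) VALUES (2)");
            try {
                stmt.executeBatch();
            } catch (BatchUpdateException e) {
                // getUpdateCounts() shows which commands were processed before the failure.
                int[] counts = e.getUpdateCounts();
                System.err.println("Batch failed; driver reported " + counts.length + " update counts: "
                        + e.getMessage());
                throw e;
            }
        }
    }
}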
From source file:org.freebxml.omar.server.persistence.rdb.TelephoneNumberDAO.java
/**
 * @param registryObjects is a List of Organizations or Users
 * @throws RegistryException if the RegistryObject is not Organization or User,
 *         or it has SQLException when inserting their PostalAddress
 */
public void insert(List registryObjects) throws RegistryException {
    Statement stmt = null;
    if (registryObjects.size() == 0) {
        return;
    }
    try {
        stmt = context.getConnection().createStatement();
        Iterator rosIter = registryObjects.iterator();
        while (rosIter.hasNext()) {
            Object ro = rosIter.next();
            String parentId = null;
            Iterator telephoneNumbers;
            if (ro instanceof OrganizationType) {
                OrganizationType org = (OrganizationType) ro;
                telephoneNumbers = org.getTelephoneNumber().iterator();
                parentId = org.getId();
            } else if (ro instanceof UserType) {
                UserType user = (UserType) ro;
                telephoneNumbers = user.getTelephoneNumber().iterator();
                parentId = user.getId();
            } else {
                throw new RegistryException(
                        ServerResourceBundle.getInstance().getString("message.incorrectRegistryObject"));
            }
            while (telephoneNumbers.hasNext()) {
                TelephoneNumberType telephoneNumber = (TelephoneNumberType) telephoneNumbers.next();
                String areaCode = telephoneNumber.getAreaCode();
                if (areaCode != null) {
                    areaCode = "'" + areaCode + "'";
                }
                String countryCode = telephoneNumber.getCountryCode();
                if (countryCode != null) {
                    countryCode = "'" + countryCode + "'";
                }
                String extension = telephoneNumber.getExtension();
                if (extension != null) {
                    extension = "'" + extension + "'";
                }
                String number = telephoneNumber.getNumber();
                if (number != null) {
                    number = "'" + number + "'";
                }
                String phoneType = telephoneNumber.getPhoneType();
                if (phoneType != null) {
                    phoneType = "'" + phoneType + "'";
                }
                String str = "INSERT INTO TelephoneNumber " + "VALUES( " + areaCode + ", " + countryCode + ", "
                        + extension + ", " + number + ", " + phoneType + ", " + "'" + parentId + "' )";
                log.trace("SQL = " + str);
                stmt.addBatch(str);
            }
        }
        stmt.executeBatch();
    } catch (SQLException e) {
        log.error(ServerResourceBundle.getInstance().getString("message.CaughtException1"), e);
        throw new RegistryException(e);
    } finally {
        closeStatement(stmt);
    }
}
From source file:it.cnr.icar.eric.server.persistence.rdb.TelephoneNumberDAO.java
/**
 * @param registryObjects is a List of Organizations or Users
 * @throws RegistryException if the RegistryObject is not Organization or User,
 *         or it has SQLException when inserting their PostalAddress
 */
@SuppressWarnings("resource")
public void insert(@SuppressWarnings("rawtypes") List registryObjects) throws RegistryException {
    Statement stmt = null;
    if (registryObjects.size() == 0) {
        return;
    }
    try {
        stmt = context.getConnection().createStatement();
        Iterator<?> rosIter = registryObjects.iterator();
        while (rosIter.hasNext()) {
            Object ro = rosIter.next();
            String parentId = null;
            Iterator<?> telephoneNumbers;
            if (ro instanceof OrganizationType) {
                OrganizationType org = (OrganizationType) ro;
                telephoneNumbers = org.getTelephoneNumber().iterator();
                parentId = org.getId();
            } else if (ro instanceof UserType) {
                UserType user = (UserType) ro;
                telephoneNumbers = user.getTelephoneNumber().iterator();
                parentId = user.getId();
            } else {
                throw new RegistryException(
                        ServerResourceBundle.getInstance().getString("message.incorrectRegistryObject"));
            }
            while (telephoneNumbers.hasNext()) {
                TelephoneNumberType telephoneNumber = (TelephoneNumberType) telephoneNumbers.next();
                String areaCode = telephoneNumber.getAreaCode();
                if (areaCode != null) {
                    areaCode = "'" + areaCode + "'";
                }
                String countryCode = telephoneNumber.getCountryCode();
                if (countryCode != null) {
                    countryCode = "'" + countryCode + "'";
                }
                String extension = telephoneNumber.getExtension();
                if (extension != null) {
                    extension = "'" + extension + "'";
                }
                String number = telephoneNumber.getNumber();
                if (number != null) {
                    number = "'" + number + "'";
                }
                String phoneType = telephoneNumber.getPhoneType();
                if (phoneType != null) {
                    phoneType = "'" + phoneType + "'";
                }
                String str = "INSERT INTO TelephoneNumber " + "VALUES( " + areaCode + ", " + countryCode + ", "
                        + extension + ", " + number + ", " + phoneType + ", " + "'" + parentId + "' )";
                log.trace("stmt = " + str);
                stmt.addBatch(str);
            }
        }
        stmt.executeBatch();
    } catch (SQLException e) {
        log.error(ServerResourceBundle.getInstance().getString("message.CaughtException1"), e);
        throw new RegistryException(e);
    } finally {
        closeStatement(stmt);
    }
}
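Both TelephoneNumberDAO examples above build each INSERT by concatenating values into the SQL string, which is what Statement.addBatch(String) requires. As a hedged aside (not part of the eric/omar code), the parameterized variant PreparedStatement.addBatch() queues bound parameter sets instead; the column list below is purely illustrative:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

final class PreparedBatchSketch {
    // The column names are illustrative; the real TelephoneNumber DDL is not shown in this listing.
    static void insertNumbers(Connection connection, List<String[]> rows) throws SQLException {
        String sql = "INSERT INTO TelephoneNumber (areaCode, countryCode, extension, number, phoneType, parent) "
                + "VALUES (?, ?, ?, ?, ?, ?)";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            for (String[] row : rows) {
                for (int i = 0; i < 6; i++) {
                    ps.setString(i + 1, row[i]);   // bind each column value
                }
                ps.addBatch();                     // no-argument variant: queues the bound parameter set
            }
            ps.executeBatch();
        }
    }
}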
From source file:com.mysql.stresstool.RunnableClusterQueryInsert.java
@Override
public boolean createSchema(StressTool sTool) {
    // Custom schema creation this is the default for the stresstool but can be anything
    String DropTables1 = "Drop table IF EXISTS tbtest";
    String DropTables2 = "Drop table IF EXISTS tbtest_child";
    String TruncateTables1 = "Truncate table tbtest";
    String TruncateTables2 = "Truncate table tbtest_child";
    Connection conn = null;
    Statement stmt = null;
    try {
        if (jdbcUrlMap.get("dbType") != null && !((String) jdbcUrlMap.get("dbType")).equals("MySQL")) {
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("dbType"), "test", "test");
        } else
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("jdbcUrl"));
        conn.setAutoCommit(false);
        stmt = conn.createStatement();
        StringBuffer sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest" + iTable + "(");
            if (this.isUseAutoIncrement()) {
                sb.append("`autoInc` bigint(11) AUTO_INCREMENT NOT NULL,");
            }
            sb.append(" `a` int(11) NOT NULL,");
            sb.append(" `uuid` char(36) NOT NULL,");
            sb.append(" `b` varchar(100) NOT NULL,");
            sb.append(" `c` char(200) NOT NULL,");
            sb.append(" `counter` bigint(20) NULL, ");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            sb.append(" `strrecordtype` char(3) NULL");
            if (this.isUseAutoIncrement()) {
                sb.append(", PRIMARY KEY (`autoInc`), INDEX `IDX_a` (a), INDEX `IDX_uuid` (uuid) ");
            } else {
                if (!this.doSimplePk)
                    sb.append(", PRIMARY KEY (`uuid`), INDEX `IDX_a` (a) ");
                else
                    sb.append(", PRIMARY KEY (`a`), INDEX `IDX_uuid` (uuid) ");
            }
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts1 = sb.toString();
        sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest_child" + iTable);
            sb.append("(`a` int(11) NOT NULL,");
            sb.append("`bb` int(11) AUTO_INCREMENT NOT NULL,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            if (operationShort)
                sb.append(" `stroperation` VARCHAR(254) NULL,");
            else
                sb.append(" `stroperation` TEXT(41845) NULL,");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP");
            sb.append(", PRIMARY KEY (`a`,`bb`), UNIQUE(`bb`)");
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts2 = sb.toString();
        System.out.println(tbts1);
        if (!doSimplePk)
            System.out.println(tbts2);
        if (sTool.droptable) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println(
                        "**** Please wait DROP table tbtest" + iTable + " it could take a LOT of time *******");
                stmt.execute(DropTables1 + iTable);
            }
            for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                System.out.println("**** Please wait DROP table tbtest_child" + iTable
                        + " it could take a LOT of time *******");
                stmt.execute(DropTables2 + iTable);
            }
            stmt.execute("COMMIT");
            System.out.println("**** DROP finished *******");
            System.out.println(
                    "****============================================================================*******");
        }
        if (sTool.createtable)
            stmt.executeBatch();
        if (sTool.truncate) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println("**** Please wait TRUNCATE table tbtest" + iTable
                        + " it could take a LOT of time *******");
                stmt.execute(TruncateTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait TRUNCATE table tbtest_child" + iTable
                            + " it could take a LOT of time *******");
                    stmt.execute(TruncateTables2 + iTable);
                }
            }
            System.out.println("**** TRUNCATE finish *******");
            System.out.println(
                    "****============================================================================*******");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return false;
    } finally {
        try {
            conn.close();
            return true;
        } catch (SQLException ex1) {
            ex1.printStackTrace();
            return false;
        }
    }
}
From source file:com.mysql.stresstool.RunnableQueryInsert.java
@Override
public boolean createSchema(StressTool sTool) {
    // Custom schema creation this is the default for the stresstool but can be anything
    String DropTables1 = "Drop table IF EXISTS tbtest";
    String DropTables2 = "Drop table IF EXISTS tbtest_child";
    String TruncateTables1 = "Truncate table tbtest";
    String TruncateTables2 = "Truncate table tbtest_child";
    Connection conn = null;
    Statement stmt = null;
    try {
        if (jdbcUrlMap.get("dbType") != null && !((String) jdbcUrlMap.get("dbType")).equals("MySQL")) {
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("dbType"), "test", "test");
        } else
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("jdbcUrl"));
        conn.setAutoCommit(false);
        stmt = conn.createStatement();
        StringBuffer sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest" + iTable + "(");
            if (this.isUseAutoIncrement()) {
                sb.append("`autoInc` bigint(11) AUTO_INCREMENT NOT NULL,");
            }
            sb.append(" `a` int(11) NOT NULL,");
            sb.append(" `uuid` char(36) NOT NULL,");
            sb.append(" `b` varchar(100) NOT NULL,");
            sb.append(" `c` char(200) NOT NULL,");
            sb.append(" `counter` bigint(20) NULL, ");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            sb.append(" `strrecordtype` char(3) NULL");
            if (this.isUseAutoIncrement()) {
                sb.append(", PRIMARY KEY (`autoInc`), INDEX `IDX_a` (a), INDEX `IDX_uuid` (uuid) ");
            } else {
                if (!this.doSimplePk)
                    sb.append(", PRIMARY KEY (`uuid`), INDEX `IDX_a` (a) ");
                else
                    sb.append(", PRIMARY KEY (`a`), INDEX `IDX_uuid` (uuid) ");
            }
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts1 = sb.toString();
        sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest_child" + iTable);
            sb.append("(`a` int(11) NOT NULL,");
            sb.append("`bb` int(11) AUTO_INCREMENT NOT NULL,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            if (operationShort)
                sb.append(" `stroperation` VARCHAR(254) NULL,");
            else
                sb.append(" `stroperation` TEXT(41845) NULL,");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP");
            sb.append(", PRIMARY KEY (`a`,`bb`), UNIQUE(`bb`)");
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts2 = sb.toString();
        System.out.println(tbts1);
        if (!doSimplePk)
            System.out.println(tbts2);
        if (sTool.droptable) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println(
                        "**** Please wait DROP table tbtest" + iTable + " it could take a LOT of time *******");
                stmt.execute(DropTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait DROP table tbtest_child" + iTable
                            + " it could take a LOT of time *******");
                    stmt.execute(DropTables2 + iTable);
                }
            }
            stmt.execute("COMMIT");
            System.out.println("**** DROP finished *******");
            System.out.println(
                    "****============================================================================*******");
        }
        if (sTool.createtable)
            stmt.executeBatch();
        if (sTool.truncate) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println("**** Please wait TRUNCATE table tbtest" + iTable
                        + " it could take a LOT of time *******");
                stmt.execute(TruncateTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait TRUNCATE table tbtest_child" + iTable
                            + " it could take a LOT of time *******");
                    stmt.execute(TruncateTables2 + iTable);
                }
            }
            System.out.println("**** TRUNCATE finish *******");
            System.out.println(
                    "****============================================================================*******");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return false;
    } finally {
        try {
            conn.close();
            return true;
        } catch (SQLException ex1) {
            ex1.printStackTrace();
            return false;
        }
    }
}
From source file:com.mysql.stresstool.RunnableQueryInsertPCH.java
@Override
public boolean createSchema(StressTool sTool) {
    // Custom schema creation this is the default for the stresstool but can be anything
    String DropTables1 = "Drop table IF EXISTS tbtest";
    String DropTables2 = "Drop table IF EXISTS tbtest_child";
    String TruncateTables1 = "Truncate table tbtest";
    String TruncateTables2 = "Truncate table tbtest_child";
    Connection conn = null;
    Statement stmt = null;
    try {
        if (jdbcUrlMap.get("dbType") != null && !((String) jdbcUrlMap.get("dbType")).equals("MySQL")) {
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("dbType"), "test", "test");
        } else
            conn = DriverManager.getConnection((String) jdbcUrlMap.get("jdbcUrl"));
        conn.setAutoCommit(false);
        stmt = conn.createStatement();
        StringBuffer sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest" + iTable + "(");
            if (this.isUseAutoIncrement()) {
                sb.append("`autoInc` bigint(11) AUTO_INCREMENT NOT NULL,");
            }
            sb.append(" `a` int(11) NOT NULL,");
            sb.append(" `uuid` char(36) NOT NULL,");
            sb.append(" `serverid` int NOT NULL,");
            sb.append(" `b` varchar(100) NOT NULL,");
            sb.append(" `c` char(200) NOT NULL,");
            sb.append(" `counter` bigint(20) NULL, ");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            sb.append(" `strrecordtype` char(3) NULL");
            if (this.isUseAutoIncrement()) {
                sb.append(", PRIMARY KEY (`autoInc`), INDEX `IDX_a` (a), INDEX `IDX_uuid` (uuid) ");
            } else {
                if (!this.doSimplePk)
                    sb.append(", PRIMARY KEY (`uuid`), INDEX `IDX_a` (a), INDEX `serverid` (serverid) ");
                else
                    sb.append(", PRIMARY KEY (`a`), INDEX `IDX_uuid` (uuid), INDEX `serverid` (serverid) ");
            }
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts1 = sb.toString();
        sb = new StringBuffer();
        for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
            sb.append("CREATE TABLE IF NOT EXISTS tbtest_child" + iTable);
            sb.append("(`a` int(11) NOT NULL,");
            sb.append(" `serverid` int NOT NULL,");
            sb.append("`bb` int(11) AUTO_INCREMENT NOT NULL,");
            sb.append(" `partitionid` int NOT NULL DEFAULT 0,");
            if (operationShort)
                sb.append(" `stroperation` VARCHAR(254) NULL,");
            else
                sb.append(" `stroperation` TEXT(41845) NULL,");
            sb.append(" `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP");
            sb.append(", PRIMARY KEY (`a`,`bb`), UNIQUE(`bb`), INDEX `serverid` (serverid)");
            sb.append(") ENGINE=" + sTool.tableEngine);
            if (!sb.toString().equals(""))
                stmt.addBatch(sb.toString());
            sb.delete(0, sb.length());
        }
        String tbts2 = sb.toString();
        System.out.println(tbts1);
        if (!doSimplePk)
            System.out.println(tbts2);
        if (sTool.droptable) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println(
                        "**** Please wait DROP table tbtest" + iTable + " it could take a LOT of time *******");
                stmt.execute(DropTables1 + iTable);
                stmt.execute("COMMIT");
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait DROP table tbtest_child" + iTable
                            + " it could take a LOT of time *******");
                    stmt.execute(DropTables2 + iTable);
                    stmt.execute("COMMIT");
                }
            }
            stmt.execute("COMMIT");
            System.out.println("**** DROP finished *******");
            System.out.println(
                    "****============================================================================*******");
        }
        if (sTool.createtable)
            stmt.executeBatch();
        if (sTool.truncate) {
            System.out.println(
                    "****============================================================================*******");
            for (int iTable = 1; iTable <= this.getNumberOfprimaryTables(); iTable++) {
                System.out.println("**** Please wait TRUNCATE table tbtest" + iTable
                        + " it could take a LOT of time *******");
                stmt.execute(TruncateTables1 + iTable);
            }
            if (!doSimplePk) {
                for (int iTable = 1; iTable <= this.getNumberOfSecondaryTables(); iTable++) {
                    System.out.println("**** Please wait TRUNCATE table tbtest_child" + iTable
                            + " it could take a LOT of time *******");
                    stmt.execute(TruncateTables2 + iTable);
                }
            }
            System.out.println("**** TRUNCATE finish *******");
            System.out.println(
                    "****============================================================================*******");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        return false;
    } finally {
        try {
            conn.close();
            return true;
        } catch (SQLException ex1) {
            ex1.printStackTrace();
            return false;
        }
    }
}