Example usage for java.sql Statement execute

List of usage examples for java.sql Statement execute

Introduction

On this page you can find example usages of the java.sql Statement.execute method.

Prototype

boolean execute(String sql) throws SQLException;

Source Link

Document

Executes the given SQL statement, which may return multiple results.

Usage

From source file:com.splout.db.engine.MySQLOutputFormat.java

@Override
public void write(ITuple tuple) throws IOException, InterruptedException {
    int partition = (Integer) tuple.get(PARTITION_TUPLE_FIELD);

    try {
        /*
         * Key performance trick: cache PreparedStatements when possible. We keep one
         * PreparedStatement per distinct Tuple schema (i.e. per target table).
         */
        Map<String, PreparedStatement> stMap = stCache.get(partition);

        PreparedStatement pS = stMap.get(tuple.getSchema().getName());
        if (pS == null) {
            Connection conn = connCache.get(partition);
            // Build "INSERT INTO <table> VALUES (?, ..., ?);" for the received Tuple.
            // getFields().size() - 1 skips the trailing "_partition" field, which is
            // routing metadata and not a column of the target table.
            // FIX: use StringBuilder instead of += in a loop (quadratic copying), and
            // append separators correctly so a zero-column schema no longer produces
            // malformed SQL via substring().
            int columns = tuple.getSchema().getFields().size() - 1;
            StringBuilder sql = new StringBuilder("INSERT INTO ")
                    .append(tuple.getSchema().getName()).append(" VALUES (");
            for (int i = 0; i < columns; i++) {
                sql.append(i == 0 ? "?" : ", ?");
            }
            sql.append(");");
            pS = conn.prepareStatement(sql.toString());
            stMap.put(tuple.getSchema().getName(), pS);
        }

        // count = 1-based JDBC parameter index; tupleCount = 0-based Tuple field index.
        int count = 1, tupleCount = 0;
        for (Field field : tuple.getSchema().getFields()) {
            if (field.getName().equals(PARTITION_TUPLE_FIELD)) {
                tupleCount++; // skip the partition field: advance the Tuple index only
                continue;
            }
            if (field.getType().equals(Type.STRING)) {
                boolean autoTrim = autoTrim(field);
                int fieldSize = fixedSizeStringField(field);
                String str = tuple.getString(tupleCount);
                // Truncate over-long strings only when the field declares a fixed
                // size AND auto-trimming was requested for it.
                if (fieldSize > -1 && autoTrim && str != null && str.length() > fieldSize) {
                    str = str.substring(0, fieldSize);
                }
                pS.setObject(count, str);
            } else {
                pS.setObject(count, tuple.get(tupleCount));
            }
            count++;
            tupleCount++;
        }
        pS.execute();

        // Commit in batches: close the current transaction and open a new one
        // every getBatchSize() records to bound transaction size.
        records++;
        if (records == getBatchSize()) {
            Connection conn = connCache.get(partition);
            // FIX: try-with-resources so the Statement is closed even if
            // COMMIT/BEGIN throws (it previously leaked on that path).
            try (Statement st = conn.createStatement()) {
                st.execute("COMMIT");
                st.execute("BEGIN");
            }
            records = 0;
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file:com.ipcglobal.fredimportaws.TsvsToRedshift.java

/**
 * Drop table./* ww w.j a va2s  .  c  o  m*/
 *
 * @param statement the statement
 * @param tableName the table name
 * @throws Exception the exception
 */
/**
 * Drops the given table, treating "table does not exist" as a no-op.
 *
 * @param statement an open JDBC statement to execute the DROP through
 * @param tableName name of the table to drop
 * @throws Exception any database failure other than a missing table
 */
private void dropTable(Statement statement, String tableName) throws Exception {
    // PostgreSQL SQLSTATE 42P01: "undefined_table" (relation does not exist).
    final String UNDEFINED_TABLE = "42P01";
    try {
        statement.execute("DROP TABLE " + tableName);
    } catch (PSQLException e) {
        if (!UNDEFINED_TABLE.equals(e.getSQLState())) {
            throw e;
        }
        // Table was already absent — nothing to drop.
    }
}

From source file:edu.duke.cabig.c3pr.webservice.integration.StudyImportExportWebServiceTest.java

/**
 * Need to do some DELETEs which could not be done via DbUnit.
 * //from   w ww  .  jav  a  2 s .co m
 * @throws SQLException
 * @throws Exception
 */
/**
 * Need to do some DELETEs which could not be done via DbUnit.
 *
 * Best-effort: failures are logged, never propagated, so teardown continues.
 *
 * @throws SQLException declared for interface stability; in practice all
 *         exceptions are caught and logged below
 * @throws Exception see above
 */
private void cleanupDatabaseData() throws SQLException, Exception {
    try {
        Connection conn = getConnection().getConnection();
        // FIX: try-with-resources — the Statement previously leaked whenever
        // execute() threw, since st.close() only ran on the success path.
        // The Connection is deliberately left open, as in the original code
        // (it is owned by getConnection(), not this method).
        try (Statement st = conn.createStatement()) {
            st.execute("DELETE FROM identifiers where stu_id is not null");
        }
    } catch (Exception e) {
        logger.severe("cleanupDatabaseData() failed.");
        logger.severe(ExceptionUtils.getFullStackTrace(e));
        e.printStackTrace();
    }
}

From source file:com.cloudera.sqoop.manager.DB2ManagerImportManualTest.java

@Before
public void setUp() {
    super.setUp();

    SqoopOptions options = new SqoopOptions(CONNECT_STRING, getTableName());
    options.setUsername(DATABASE_USER);
    options.setPassword(DATABASE_PASSWORD);

    manager = new Db2Manager(options);

    // Decomposed: the original method performed two independent steps
    // (drop, then create+populate) with duplicated try/finally boilerplate.
    dropTableIfExists();
    createAndPopulateTable();
}

/** Drops the existing test table, if there is one; a failed drop is only logged. */
private void dropTableIfExists() {
    Connection conn = null;
    Statement stmt = null;
    try {
        conn = manager.getConnection();
        stmt = conn.createStatement();
        stmt.execute("DROP TABLE " + getTableName());
    } catch (SQLException sqlE) {
        // Expected on a clean database — the table simply was not there yet.
        LOG.info("Table was not dropped: " + sqlE.getMessage());
    } finally {
        try {
            if (null != stmt) {
                stmt.close();
            }
        } catch (Exception ex) {
            LOG.warn("Exception while closing stmt", ex);
        }
    }
}

/** Creates the test table and inserts the fixture rows in a single transaction. */
private void createAndPopulateTable() {
    Statement stmt = null;
    try {
        Connection conn = manager.getConnection();
        conn.setAutoCommit(false); // batch the DDL + inserts into one commit
        stmt = conn.createStatement();

        // create the database table and populate it with data.
        stmt.executeUpdate(
                "CREATE TABLE " + getTableName() + " (" + "id INT NOT NULL, " + "name VARCHAR(24) NOT NULL, "
                        + "salary FLOAT, " + "dept VARCHAR(32), " + "PRIMARY KEY (id))");

        stmt.executeUpdate(
                "INSERT INTO " + getTableName() + " VALUES(" + "1,'Aaron', " + "1000000.00,'engineering')");
        stmt.executeUpdate("INSERT INTO " + getTableName() + " VALUES(" + "2,'Bob', " + "400.00,'sales')");
        stmt.executeUpdate("INSERT INTO " + getTableName() + " VALUES(" + "3,'Fred', 15.00," + "'marketing')");
        conn.commit();
    } catch (SQLException sqlE) {
        LOG.error("Encountered SQL Exception: ", sqlE);
        sqlE.printStackTrace();
        fail("SQLException when running test setUp(): " + sqlE);
    } finally {
        try {
            if (null != stmt) {
                stmt.close();
            }
        } catch (Exception ex) {
            LOG.warn("Exception while closing connection/stmt", ex);
        }
    }
}

From source file:it.unibas.spicy.model.algebra.query.operators.sql.ExecuteSQL.java

/**
 * Sets up the source/target (and, for chaining scenarios, intermediate) schemas by
 * running the provided SQL scripts, then executes the main data-translation script
 * and collects primary-key-constraint notices from the statement warnings.
 *
 * @param mappingTask task whose source proxy decides whether this is a chaining scenario
 * @param accessConfiguration connection parameters for the target database
 * @param sqlScript the data-translation SQL executed via a plain Statement
 * @param sourceSQLScriptReader source-schema script; schema setup only runs when this,
 *        the instance script and the target script are all non-null
 * @param sourceInstanceSQLScriptReader source-instance data script
 * @param targetSQLScriptReader target-schema script
 * @param intermediateSQLScriptReader intermediate-schema script (chaining scenarios only)
 * @param scenarioNo suffix appended to the generated schema names
 * @throws DAOException wrapping any failure during script execution
 */
public void executeScript(MappingTask mappingTask, AccessConfiguration accessConfiguration, String sqlScript,
        Reader sourceSQLScriptReader, Reader sourceInstanceSQLScriptReader, Reader targetSQLScriptReader,
        Reader intermediateSQLScriptReader, int scenarioNo) throws DAOException {
    boolean isChainingScenario = (mappingTask.getSourceProxy() instanceof ChainingDataSourceProxy);
    IConnectionFactory connectionFactory = null;
    Connection connection = null;
    try {
        connectionFactory = new SimpleDbConnectionFactory();
        connection = connectionFactory.getConnection(accessConfiguration);
        ScriptRunner scriptRunner = new ScriptRunner(connection, true, true);
        scriptRunner.setLogWriter(null);

        //giannisk
        if (sourceSQLScriptReader != null && sourceInstanceSQLScriptReader != null
                && targetSQLScriptReader != null) {
            StringBuilder createSchemasQuery = new StringBuilder();
            createSchemasQuery
                    .append("create schema " + SpicyEngineConstants.SOURCE_SCHEMA_NAME + scenarioNo + ";\n");
            createSchemasQuery
                    .append("create schema " + SpicyEngineConstants.TARGET_SCHEMA_NAME + scenarioNo + ";\n");

            scriptRunner.runScript(new StringReader(createSchemasQuery.toString()));

            Reader sourceSchemaScript = getSourceSchemaReader();
            Reader targetSchemaScript = getTargetSchemaReader();
            scriptRunner.runScript(sourceSchemaScript);
            scriptRunner.runScript(sourceSQLScriptReader);
            scriptRunner.runScript(sourceInstanceSQLScriptReader);
            scriptRunner.runScript(targetSchemaScript);
            scriptRunner.runScript(targetSQLScriptReader);
        }
        if (isChainingScenario) {
            scriptRunner.runScript(
                    new StringReader("create schema " + GenerateSQL.INTERMEDIATE_SCHEMA_NAME + ";\n"));
            Reader intermediateSchemaScript = getIntermediateSchemaReader();
            scriptRunner.runScript(intermediateSchemaScript);
            scriptRunner.runScript(intermediateSQLScriptReader);
        }

        System.out.println("Starting Data Translation" + new java.util.Date());
        // FIX: the Statement was never closed; try-with-resources guarantees it.
        try (Statement statement = connection.createStatement()) {
            statement.execute(sqlScript);
            System.out.println("Data Translation Ended with " + statement.getUpdateCount() + "\t insertions\t"
                    + new java.util.Date());
            // Harvest primary-key-constraint notices emitted by the server: the
            // affected table name trails the well-known notice prefix.
            SQLWarning warning = statement.getWarnings();
            String notice = SpicyEngineConstants.PRIMARY_KEY_CONSTR_NOTICE;
            while (warning != null) {
                if (warning.getMessage().startsWith(notice)) {
                    String tempTableName = warning.getMessage()
                            .substring(warning.getMessage().lastIndexOf(notice) + notice.length()).trim();
                    pkConstraintsTableNames.add(tempTableName);
                }
                warning = warning.getNextWarning();
            }
        }
    } catch (Exception ex) {
        logger.error(ex);
        throw new DAOException(ex);
    } finally {
        // FIX: guard against NPE when SimpleDbConnectionFactory construction failed.
        if (connectionFactory != null) {
            connectionFactory.close(connection);
        }
        // FIX: close every reader independently. The original closed the three
        // source/target readers only when all were non-null, in one try block
        // (so the first failure leaked the rest), and never closed
        // intermediateSQLScriptReader at all.
        closeReaderQuietly(sourceSQLScriptReader);
        closeReaderQuietly(sourceInstanceSQLScriptReader);
        closeReaderQuietly(targetSQLScriptReader);
        closeReaderQuietly(intermediateSQLScriptReader);
    }
}

/** Closes a reader if non-null, logging (not propagating) any I/O failure. */
private void closeReaderQuietly(Reader reader) {
    if (reader == null) {
        return;
    }
    try {
        reader.close();
    } catch (IOException ex) {
        logger.error("Unable to close readers..." + ex);
    }
}

From source file:com.foundationdb.server.service.security.SecurityServiceIT.java

@Test
public void postgresAuthenticated() throws Exception {
    // Verifies that an authenticated user can both query and drop a table.
    // FIX: try-with-resources — the original leaked the Connection, Statement
    // and ResultSet whenever an assertion failed or a statement threw.
    try (Connection conn = openPostgresConnection("user1", "password");
            Statement stmt = conn.createStatement()) {
        try (ResultSet rs = stmt.executeQuery("SELECT id FROM utable")) {
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
        }
        stmt.execute("DROP TABLE utable");
    }
}

From source file:com.vertica.hadoop.VerticaOutputFormat.java

/**
  * Optionally called at the end of a job to optimize any newly created and
  * loaded tables. Useful for new tables with more than 100k records.
  * // w ww  .  j av  a2s. c o  m
  * @param conf
  * @throws Exception
  */
/**
  * Optionally called at the end of a job to optimize any newly created and
  * loaded tables. Useful for new tables with more than 100k records.
  *
  * Drives Vertica's database designer (dbd_*) through a workspace/design/
  * deployment lifecycle scoped to the job's output table.
  *
  * @param conf Hadoop configuration holding the Vertica connection settings
  * @throws Exception on any SQL failure, or RuntimeException if the output
  *         table has no projections (i.e. no data was loaded)
  */
public static void optimize(Configuration conf) throws Exception {
    VerticaConfiguration vtconfig = new VerticaConfiguration(conf);
    Connection conn = vtconfig.getConnection(true);
    // NOTE(review): the Connection is deliberately not closed, matching the
    // original code — VerticaConfiguration may hand out a shared/cached
    // connection; confirm ownership before closing it here.

    // TODO: consider more tables and skip tables with non-temp projections
    Relation vTable = new Relation(vtconfig.getOutputTableName());
    HashSet<String> tablesWithTemp = new HashSet<String>();

    //for now just add the single output table
    String qualifiedTable = vTable.getQualifiedName().toString();
    tablesWithTemp.add(qualifiedTable);

    // FIX: Statement and ResultSet were never closed; try-with-resources.
    try (Statement stmt = conn.createStatement()) {
        // map from table name to set of projection names
        HashMap<String, Collection<String>> tableProj = new HashMap<String, Collection<String>>();
        try (ResultSet rs = stmt
                .executeQuery("select projection_schema, anchor_table_name, projection_name from projections;")) {
            while (rs.next()) {
                String ptable = rs.getString(1) + "." + rs.getString(2);
                if (!tableProj.containsKey(ptable)) {
                    tableProj.put(ptable, new HashSet<String>());
                }
                tableProj.get(ptable).add(rs.getString(3));
            }
        }

        // A table with no projections has no data to design against.
        for (String table : tablesWithTemp) {
            if (!tableProj.containsKey(table)) {
                throw new RuntimeException("Cannot optimize table with no data: " + table);
            }
        }

        // FIX: 'new Integer(hash).toString()' used a deprecated boxing
        // constructor for no reason; Integer.toString is the direct form.
        String designName = Integer.toString(conn.hashCode());

        // Build the design around the output table.
        stmt.execute("select dbd_create_workspace('" + designName + "')");
        stmt.execute("select dbd_create_design('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_add_design_tables('" + designName + "', '" + qualifiedTable + "')");
        stmt.execute("select dbd_populate_design('" + designName + "', '" + designName + "')");

        //Execute
        stmt.execute("select dbd_create_deployment('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_add_deployment_design('" + designName + "', '" + designName + "', '" + designName
                + "')");
        stmt.execute("select dbd_add_deployment_drop('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_execute_deployment('" + designName + "', '" + designName + "')");

        //Cleanup
        stmt.execute("select dbd_drop_deployment('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_remove_design('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_drop_design('" + designName + "', '" + designName + "')");
        stmt.execute("select dbd_drop_workspace('" + designName + "')");
    }
}

From source file:com.sqewd.open.dal.core.persistence.db.H2DbPersister.java

/**
 * Verifies the database schema against the setup version declared in the XML
 * configuration, bootstrapping the schema on first run.
 *
 * If the schema is absent: creates the DBVersion table, persists the configured
 * version, then creates a table (and indexes) for each entity class listed
 * under the setup-entities configuration node.
 * If the schema exists: reads the stored version and fails on a mismatch.
 *
 * @throws Exception if the version parameter is missing, the stored version
 *         cannot be read, the versions differ, or any DDL execution fails
 */
private void checkSetup() throws Exception {
    XMLConfiguration config = new XMLConfiguration(dbconfig);
    String version = config.getString(_CONFIG_SETUP_VERSION_);
    if (version == null || version.isEmpty())
        throw new Exception(
                "Invalid DB Setup Configuration : Missing parameter [" + _CONFIG_SETUP_VERSION_ + "]");

    if (!checkSchema()) {
        // First run: schema absent — create it from generated DDL.
        SimpleDbQuery dbq = new SimpleDbQuery();
        List<String> createsql = dbq.getCreateTableDDL(DBVersion.class);
        Connection conn = getConnection(true);
        Statement stmnt = conn.createStatement();
        try {
            for (String sql : createsql) {
                log.debug("TABLE SQL [" + sql + "]");
                stmnt.execute(sql);
            }

            // Persist the configured version so later runs can detect mismatches.
            DBVersion dbv = (DBVersion) DataManager.newInstance(DBVersion.class);
            dbv.setVersion(version);
            save(dbv, false);

            // Create a table (plus indexes) for every entity class declared in
            // the setup configuration's entities section.
            NodeList nl = XMLUtils.search(_CONFIG_SETUP_ENTITIES_, config.getDocument().getDocumentElement());
            if (nl != null && nl.getLength() > 0) {
                for (int ii = 0; ii < nl.getLength(); ii++) {
                    Element elm = (Element) nl.item(ii);
                    String eclass = elm.getAttribute("class");
                    if (eclass != null && !eclass.isEmpty()) {
                        Class<?> cls = Class.forName(eclass);
                        createsql = dbq.getCreateTableDDL(cls);
                        for (String sql : createsql) {
                            log.debug("TABLE SQL [" + sql + "]");
                            stmnt.execute(sql);
                        }
                        createIndex(elm, cls, dbq, stmnt);
                    }
                }
            }

        } finally {
            // Close the DDL statement regardless of success or failure.
            if (stmnt != null && !stmnt.isClosed())
                stmnt.close();

        }
    } else {
        // Schema exists: verify the stored version matches the configured one.
        List<AbstractEntity> versions = read("", DBVersion.class, 1);
        if (versions == null || versions.isEmpty()) {
            throw new Exception("Error retrieving Schema Version. Database might be corrupted.");
        }
        for (AbstractEntity ver : versions) {
            if (ver instanceof DBVersion) {
                DBVersion dbv = (DBVersion) ver;
                if (dbv.getVersion().compareTo(version) != 0) {
                    throw new Exception("Database Version missmatch, Expection version [" + version
                            + "], current DB version [" + dbv.getVersion() + "]");
                }
            }
        }
    }
}

From source file:com.thoughtworks.go.server.database.H2DatabaseTest.java

@Test
void shouldBackupDatabase() throws Exception {
    // All collaborators are mocked, so no real H2 instance is touched.
    SystemEnvironment env = mock(SystemEnvironment.class);
    BasicDataSource dataSource = mock(BasicDataSource.class);
    Connection connection = mock(Connection.class);
    Statement statement = mock(Statement.class);

    Database h2Database = new H2Database(env);
    Database spy = spy(h2Database);
    doReturn(dataSource).when(spy).createDataSource();
    when(dataSource.getConnection()).thenReturn(connection);
    when(connection.createStatement()).thenReturn(statement);
    when(statement.execute(anyString())).thenReturn(true);

    File backupTarget = new File(".");
    spy.backup(backupTarget);

    // The backup must be driven through a SQL statement on the pooled connection.
    verify(statement).execute(anyString());
}

From source file:com.frostwire.database.sqlite.SQLiteDatabase.java

/**
 * Shuts down the embedded search database exactly once.
 *
 * The compareAndSet guard makes close() idempotent and safe under concurrent
 * callers: only the first caller flips open true->false and performs the
 * shutdown. Any failure is logged, never propagated.
 */
public void close() {
    if (open.compareAndSet(true, false)) {
        try {
            // FIX: try-with-resources — the Statement was never closed before.
            try (Statement statement = connection.createStatement()) {
                statement.execute("SHUTDOWN");
            }
            connection.close();
        } catch (Throwable e) {
            LOG.warn("Error closing the smart search database", e);
        }
    }
}