Example usage for java.sql Statement execute

List of usage examples for java.sql Statement execute

Introduction

On this page you can find example usages of java.sql Statement.execute.

Prototype

boolean execute(String sql) throws SQLException;

Source Link

Document

Executes the given SQL statement, which may return multiple results.

Usage

From source file:jp.co.tis.gsp.tools.dba.dialect.PostgresqlDialect.java

@Override
public void dropAll(String user, String password, String adminUser, String adminPassword, String schema)
        throws MojoExecutionException {
    // Drops all objects (FK constraints, views, tables) owned by the given
    // schema. If the schema does not exist yet it is created, handed to the
    // target user and made that user's default search_path instead.
    //
    // NOTE: schema/user are identifiers and cannot be bound as JDBC
    // parameters; they are assumed to come from trusted build configuration.
    Connection conn = null;
    Statement stmt = null;
    try {
        conn = DriverManager.getConnection(url, adminUser, adminPassword);
        stmt = conn.createStatement();

        if (!existsSchema(conn, normalizeSchemaName(schema))) {
            // Schema absent: create it, fix ownership/search_path, and return —
            // there is nothing to drop.
            stmt.execute("CREATE SCHEMA " + schema);
            stmt.execute("ALTER SCHEMA " + schema + " OWNER TO " + user);
            stmt.execute("ALTER USER " + user + " Set search_path TO " + schema);
            return;
        } else {
            // Schema already exists: ensure ownership and search_path are
            // correct before dropping its contents.
            stmt.execute("ALTER SCHEMA " + schema + " OWNER TO " + user);
            stmt.execute("ALTER USER " + user + " Set search_path TO " + schema);
        }

        // Drop dependent objects first: FK constraints, then views, then tables.
        String nmzschema = normalizeSchemaName(schema);
        String dropListSql = "SELECT TABLE_NAME, CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE TABLE_SCHEMA='"
                + nmzschema + "' AND CONSTRAINT_TYPE='FOREIGN KEY'";
        dropObjectsInSchema(conn, dropListSql, nmzschema, OBJECT_TYPE.FK);

        dropListSql = "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA='" + nmzschema + "'";
        dropObjectsInSchema(conn, dropListSql, nmzschema, OBJECT_TYPE.VIEW);

        dropListSql = "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE' AND TABLE_SCHEMA='"
                + nmzschema + "'";
        dropObjectsInSchema(conn, dropListSql, nmzschema, OBJECT_TYPE.TABLE);

    } catch (SQLException e) {
        // Original message was mojibake ("?"); report something actionable.
        throw new MojoExecutionException("Failed to drop all objects in schema " + schema, e);
    } finally {
        // Close the statement BEFORE its connection (the original closed the
        // connection first, invalidating the still-open statement).
        StatementUtil.close(stmt);
        ConnectionUtil.close(conn);
    }
}

From source file:jp.ikedam.jenkins.plugins.extensible_choice_parameter.DatabaseChoiceListProvider.java

/**
 * /*  w  ww  .java2s  .  c  o m*/
 * @return
 */
private List<String> performDbQuery() {

    Connection conn = null;
    ResultSet resultat = null;
    List<String> resultList = new ArrayList<String>();
    try {
        String driver = getJdbcDriver();
        if (driver == null || StringUtils.isEmpty(driver)) {
            LOGGER.log(Level.WARNING, "Invalid driver");
        }

        Class.forName(driver);

        String user = getDbUsername();
        if (user == null || StringUtils.isEmpty(user)) {
            LOGGER.log(Level.WARNING, "Invalid user");
        }

        String password = getDbPassword();
        if (password == null || StringUtils.isEmpty(password)) {
            LOGGER.log(Level.WARNING, "Invalid password");
        }

        String urlBase = getJdbcUrl();
        if (urlBase == null || StringUtils.isEmpty(urlBase)) {
            LOGGER.log(Level.WARNING, "Invalid JDBC URL");
        }

        String colomndb = getDbColumn();
        if (colomndb == null || StringUtils.isEmpty(colomndb)) {
            LOGGER.log(Level.WARNING, "Invalid column");
        }

        String namedb = getDbName();
        if (namedb == null || StringUtils.isEmpty(namedb)) {
            LOGGER.log(Level.WARNING, "Invalid DB name");
        }

        String tabledb = getDbTable();
        if (tabledb == null || StringUtils.isEmpty(tabledb)) {
            LOGGER.log(Level.WARNING, "table database invalid");
        }

        conn = DriverManager.getConnection(urlBase, user, password);

        // By default, a SELECT * is performed
        String selectQuery = "select " + colomndb + " from " + namedb + "." + tabledb;

        // Use plain old JDBC to build and execute the query against the configured db
        Statement statement = conn.createStatement();
        boolean result = statement.execute(selectQuery);
        if (result) {
            resultat = statement.executeQuery(selectQuery);
            while (resultat.next()) {
                resultList.add(resultat.getString(1));
            }
        } else {
            LOGGER.log(Level.WARNING, "No result found with the query: " + selectQuery);
        }
    } catch (SQLException se) {
        LOGGER.log(Level.SEVERE, "Unable to access the database: " + dbName + "." + se);
    } catch (ClassNotFoundException e) {
        LOGGER.log(Level.SEVERE, "The driver " + jdbcDriver + " cannot be found in the classpath.");
    } catch (Exception e) {
        LOGGER.log(Level.SEVERE, "Unable to access the database: " + dbName + "." + e);
    } finally {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException se) {
            LOGGER.log(Level.SEVERE, "Fermeture de connection impossible, SQLException : " + se);
        }
    }

    // If no results are returned, read values from the fallback file if configured
    if (resultList.isEmpty()) {

        Scanner scanner = null;
        try {
            scanner = new Scanner(new FileInputStream(getFallbackFilePath()));
            while (scanner.hasNextLine()) {
                String env = scanner.nextLine().trim();
                if (StringUtils.isNotBlank(env)) {
                    resultList.add(env);
                }
            }
        } catch (FileNotFoundException e) {
            LOGGER.log(Level.WARNING, "Unable to read the fallback file: " + e);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }
    }

    // Perform alphabetical sorting on results
    Collections.sort(resultList);

    return resultList;
}

From source file:com.splout.db.engine.MySQLOutputFormat.java

/**
 * Boots an embedded MySQL instance for the given partition, opens a JDBC
 * connection to it, runs the configured pre-SQL and leaves an open
 * transaction (autocommit off, explicit BEGIN).
 *
 * @param partition partition id; also used as the MySQL data-dir name
 * @param local     local working path; the data dir is created next to it
 * @throws IOException wrapping any startup/SQL failure
 */
public void initPartition(int partition, Path local) throws IOException {

    Path mysqlDb = new Path(local.getParent(), partition + "");

    LOG.info("Initializing SQL connection [" + partition + "]");
    try {
        PortLock portLock = PortUtils.getNextAvailablePort(EmbeddedMySQLConfig.DEFAULT_PORT);

        EmbeddedMySQL mySQL = null;
        EmbeddedMySQLConfig config = null;
        HashMap<String, Object> customConfig = new HashMap<String, Object>();

        // Fixing memory for indexation. Main important parameters is myisam_sort_buffer_size
        // and key_buffer_size. See http://dev.mysql.com/doc/refman/5.0/en/server-system-variables.html
        long totalMem = getConf().getLong(GLOBAL_MEMORY_AVAILABLE_FOR_INDEXING, 100 * 1024 * 1024);
        double shareForSortBuffer = 0.9;
        customConfig.put("myisam_sort_buffer_size", (long) (shareForSortBuffer * totalMem));
        customConfig.put("key_buffer_size", (long) ((1 - shareForSortBuffer) * totalMem));
        // Same value as the original literal 9223372036854775807l, but readable.
        customConfig.put("myisam_max_sort_file_size", Long.MAX_VALUE);

        try {
            File mysqlDir = new File(mysqlDb.toString());
            LOG.info("Going to instantiate a MySQLD in: " + mysqlDir + ", port [" + portLock.getPort()
                    + "] (partition: " + partition + ")");

            config = new EmbeddedMySQLConfig(portLock.getPort(), EmbeddedMySQLConfig.DEFAULT_USER,
                    EmbeddedMySQLConfig.DEFAULT_PASS, mysqlDir, customConfig);

            mySQL = new EmbeddedMySQL(config);
            mySQL.start(true);
        } finally {
            // Always give the port lock back, whether or not startup succeeded.
            // (The original catch (Exception e) { throw e; } added nothing.)
            portLock.release();
        }

        mySQLs.put(partition, mySQL);

        // MySQL is successfully started at this point, or an Exception would have been thrown.
        Class.forName(EmbeddedMySQL.DRIVER);
        Connection conn = DriverManager.getConnection(config.getLocalJDBCConnection(GENERATED_DB_NAME),
                config.getUser(), config.getPass());
        conn.setAutoCommit(false);
        connCache.put(partition, conn);
        Statement st = conn.createStatement();

        // Init transaction
        for (String sql : getPreSQL()) {
            LOG.info("Executing: " + sql);
            st.execute(sql);
        }
        st.execute("BEGIN");
        st.close();

        Map<String, PreparedStatement> stMap = new HashMap<String, PreparedStatement>();
        stCache.put(partition, stMap);
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file:jp.co.tis.gsp.tools.dba.dialect.SqlserverDialect.java

@Override
public void createUser(String user, String password, String adminUser, String adminPassword)
        throws MojoExecutionException {
    // Creates the SQL Server login/user pair and grants it db_ddladmin.
    // No-op when the user already exists.
    Connection conn = null;
    Statement stmt = null;
    try {
        conn = DriverManager.getConnection(url, adminUser, adminPassword);
        stmt = conn.createStatement();
        if (existsUser(adminUser, adminPassword, user)) {
            return;
        }
        // T-SQL DDL cannot take bind parameters, so the password has to be
        // embedded as a literal; double any single quotes so a quote in the
        // password cannot break out of (or break) the statement.
        String quotedPassword = password.replace("'", "''");
        stmt.execute("CREATE LOGIN " + user + " WITH PASSWORD = '" + quotedPassword + "'");
        stmt.execute("CREATE USER " + user + " FOR LOGIN " + user);
        stmt.execute("sp_addrolemember 'db_ddladmin','" + user + "'");

    } catch (SQLException e) {
        // Original message was mojibake ("CREATE USER?"); report the user.
        throw new MojoExecutionException("Failed to create user " + user, e);
    } finally {
        StatementUtil.close(stmt);
        ConnectionUtil.close(conn);
    }
}

From source file:com.collective.celos.ci.testing.fixtures.deploy.hive.HiveTableDeployer.java

/**
 * Moves staged rows from the temporary table into the real table, optionally
 * targeting an explicit partition spec built from {@code partDef}.
 */
private void loadFromTempToRealDb(Statement statement, String tableName, List<String> partDef)
        throws SQLException {
    // Empty partition list means "no PARTITION (...) clause at all".
    String partitions = partDef.isEmpty() ? ""
            : String.format(PARTITION_PATTERN, StringUtils.join(partDef, ", "));

    statement.execute(SET_PARTITION_MODE_NONSTRICT);
    statement.executeUpdate(String.format(LOAD_DATA_PATTERN, tableName, partitions, tableName));
}

From source file:com.draagon.meta.manager.db.driver.MSSQLDriver.java

/**
 * Creates a table in the database/*from   w w w  .  j  a  v a 2  s .c o m*/
 */
@Override
public void createTable(Connection c, TableDef table) throws SQLException {
    String query = "CREATE TABLE [" + table + "] (\n";

    boolean multi = (table.getPrimaryKeys().size() > 1);

    boolean hasIdentity = false;

    // Create the individual table fields
    int found = 0;
    for (ColumnDef col : table.getColumns()) {
        String name = col.getName();
        if (name == null || name.length() == 0) {
            throw new IllegalArgumentException("No name defined for column [" + col + "]");
        }

        if (found > 0)
            query += ",\n";
        found++;

        String flags = "";
        if (col.isPrimaryKey() && !multi)
            flags = "PRIMARY KEY ";
        else if (col.isUnique())
            flags = "UNIQUE ";
        //else if (getManager().isIndex(mf)) flags = "NONCLUSTERED ";

        switch (col.getSQLType()) {
        case Types.BOOLEAN:
        case Types.BIT:
            query += "[" + name + "] [bit] " + flags;
            break;
        case Types.TINYINT:
            query += "[" + name + "] [tinyint] " + flags;
            break;
        case Types.SMALLINT:
            query += "[" + name + "] [smallint] " + flags;
            break;
        case Types.INTEGER:
            query += "[" + name + "] [int] " + flags;
            break;
        case Types.BIGINT:
            query += "[" + name + "] [bigint] " + flags;
            break;
        case Types.FLOAT:
            query += "[" + name + "] [float] " + flags;
            break;
        case Types.DOUBLE:
            query += "[" + name + "] [decimal](19,4) " + flags;
            break;
        case Types.TIMESTAMP:
            query += "[" + name + "] [datetime] " + flags;
            break;
        case Types.VARCHAR:
            query += "[" + name + "] [varchar](" + col.getLength() + ") " + flags;
            break;

        default:
            throw new IllegalArgumentException("Table [" + table + "] with Column [" + col
                    + "] is of SQL type (" + col.getSQLType() + ") which is not support by this database");
        }

        // Create the identity columns
        if (col.isAutoIncrementor()) {
            if (hasIdentity)
                throw new MetaException(
                        "Table [" + table + "] cannot have multiple identity (auto id) columns!");

            query += "NOT NULL IDENTITY( " + col.getSequence().getStart() + ", "
                    + col.getSequence().getIncrement() + " ) ";

            hasIdentity = true;
        }
    }

    query += "\n)";

    // This means there were no columns defined for the table
    if (found == 0)
        return;

    if (log.isDebugEnabled()) {
        log.debug("Creating table [" + table + "]: " + query);
    }
    //ystem.out.println( ">>>> Creating table [" + table + "]: " + query);

    Statement s = c.createStatement();
    try {
        s.execute(query);
    } finally {
        s.close();
    }
}

From source file:com.flexive.core.Database.java

/**
 * Looks up the given data source in JNDI and, when its schema has not been
 * initialized yet, creates the schema and runs the supplied initializer
 * against it.
 *
 * @param c              JNDI context used for the lookup
 * @param dataSourceName JNDI name of the data source
 * @param initializer    provides the schema name and populates it
 * @return the data source (possibly uninitialized if init failed),
 *         or null when the JNDI name is not bound
 * @throws SQLException from the initialization connection/statement
 */
private static DataSource tryGetDefaultDataSource(Context c, String dataSourceName,
        DefaultDataSourceInitializer initializer) throws SQLException {
    // try to get and initialize the default datasource in JavaEE 6 containers,
    // as configured in the GlobalConfigurationEngineBean EJB
    Connection con = null;
    Statement stmt = null;
    try {
        DataSource ds = (DataSource) c.lookup(dataSourceName);
        if (ds != null && !isDefaultDataSourceInitialized(ds)) {
            // try to initialize schema
            con = getDefaultInitConnection(c);
            if (con != null) {
                if (LOG.isInfoEnabled()) {
                    LOG.info("Schema not initialized for default datasource " + dataSourceName
                            + ", initializing...");
                }
                stmt = con.createStatement();
                stmt.execute("CREATE SCHEMA " + initializer.getSchema());
                stmt.close();
                try {
                    // Populate the freshly created schema ("H2" storage is
                    // hard-coded here).
                    initializer.initSchema(con, StorageManager.getStorageImpl("H2"));
                } catch (Exception ex) {
                    // Initialization failures are logged but not rethrown; the
                    // data source is still returned to the caller.
                    if (LOG.isErrorEnabled()) {
                        LOG.error("Failed to initialize schema " + initializer.getSchema() + " for "
                                + dataSourceName + ": " + ex.getMessage(), ex);
                    }
                }
            } else {
                if (LOG.isErrorEnabled()) {
                    LOG.error("Default configuration schema not initialized, but failed to retrieve"
                            + " data source " + GlobalConfigurationEngineBean.DEFAULT_DS_INIT);
                }
                // ignore for now, the caller will get an exception when calling getConnection()
                // on the data source anyway
            }
        }
        return ds;
    } catch (NamingException e) {
        // not bound
        return null;
    } finally {
        // stmt may already have been closed above — presumably closeObjects
        // tolerates that; verify against its implementation.
        closeObjects(Database.class, con, stmt);
    }
}

From source file:com.streamsets.pipeline.stage.lib.hive.HiveQueryExecutor.java

/**
 * Executes ALTER TABLE ... ADD COLUMNS for the given table.
 *
 * @param qualifiedTableName qualified table name
 * @param columnTypeMap      new columns and their Hive types, in order
 * @throws StageException in case of any {@link SQLException}
 */
public void executeAlterTableAddColumnsQuery(String qualifiedTableName,
        LinkedHashMap<String, HiveType> columnTypeMap) throws StageException {
    String sql = buildAddColumnsQuery(qualifiedTableName, columnTypeMap);
    // "{}" placeholder so the SQL actually appears in the output — a trailing
    // argument without a placeholder is silently dropped by the logger.
    LOG.debug("Executing SQL: {}", sql);
    try (Connection con = DriverManager.getConnection(jdbcUrl);
            Statement statement = con.createStatement()) {
        statement.execute(sql);
    } catch (SQLException e) {
        LOG.error("SQL Exception happened when adding columns", e);
        throw new StageException(Errors.HIVE_20, sql, e.getMessage());
    }
}

From source file:com.streamsets.pipeline.stage.lib.hive.HiveQueryExecutor.java

/**
 * Execute Alter Table set Table Properties.
 *
 * @param qualifiedTableName qualified table name.
 * @param partitionPath partition path.
 * @throws StageException in case of any {@link SQLException}
 */
public void executeAlterTableSetTblPropertiesQuery(String qualifiedTableName, String partitionPath)
        throws StageException {
    String sql = buildSetTablePropertiesQuery(qualifiedTableName, partitionPath);
    // "{}" placeholder so the SQL actually appears in the output — a trailing
    // argument without a placeholder is silently dropped by the logger.
    LOG.debug("Executing SQL: {}", sql);
    try (Connection con = DriverManager.getConnection(jdbcUrl);
            Statement statement = con.createStatement()) {
        statement.execute(sql);
    } catch (SQLException e) {
        LOG.error("SQL Exception happened when adding partition", e);
        throw new StageException(Errors.HIVE_20, sql, e.getMessage());
    }
}

From source file:com.cloudera.recordbreaker.analyzer.DataQuery.java

/**
 * Ensures a Hive table exists for the given data descriptor and returns its
 * name. On first sight of a file this creates the table, stages an Avro copy
 * of the data, imports it into Hive and refreshes Impala's metadata; later
 * calls are served from tableCache.
 */
String grabTable(DataDescriptor desc) throws SQLException, IOException {
    // Set up Hive table
    Path p = desc.getFilename();
    String tablename = tableCache.get(p);
    if (tablename == null) {
        // Random suffix keeps table names unique across invocations.
        tablename = "datatable" + Math.abs(r.nextInt());
        Statement stmt = hiveCon.createStatement();
        try {
            String creatTxt = desc.getHiveCreateTableStatement(tablename);
            LOG.info("Create: " + creatTxt);
            stmt.execute(creatTxt);
            // NOTE(review): this writes to 'tables' while the lookup above and
            // the final insert use 'tableCache' — confirm both maps are intended.
            tables.put(p, tablename);
        } finally {
            stmt.close();
        }

        // Copy avro version of data into secret location prior to Hive import
        FileSystem fs = FileSystem.get(conf);
        Path tmpTables = new Path(tmpTablesDir);
        if (!fs.exists(tmpTables)) {
            // World-writable so external daemons can read the staged files.
            fs.mkdirs(tmpTables, new FsPermission("-rwxrwxrwx"));
        }
        Path secretDst = new Path(tmpTables, "r" + r.nextInt());
        LOG.info("Preparing Avro data at " + secretDst);
        desc.prepareAvroFile(fs, fs, secretDst, conf);
        fs.setPermission(secretDst, new FsPermission("-rwxrwxrwx"));

        // Import data
        stmt = hiveCon.createStatement();
        try {
            LOG.info("Import data into Hive: " + desc.getHiveImportDataStatement(tablename, secretDst));
            stmt.execute(desc.getHiveImportDataStatement(tablename, secretDst));
            isLoaded.add(p);
        } finally {
            stmt.close();
        }

        // Refresh impala metadata
        stmt = impalaCon.createStatement();
        try {
            try {
                LOG.info("Rebuilding Impala metadata...");
                stmt.execute("INVALIDATE METADATA");
            } catch (Exception iex) {
                // Best-effort: the Hive data is already loaded at this point.
                LOG.info("Impala metadata rebuild failed: " + iex.toString());
            }
        } finally {
            stmt.close();
        }

        // Insert into table cache
        tableCache.put(p, tablename);
    }
    return tablename;
}