Usage examples for java.sql.Statement#execute(String)
boolean execute(String sql) throws SQLException;
From source file:de.klemp.middleware.controller.Controller.java
/** * This method queries a request to the database. If the request expects a * result, then the variable "get" has to be set true. The returned result * is the first column of the resultSet. So this method is only for getting * one variable back. This method does not test the code of the query. * /*from w ww.j av a 2 s . c om*/ * @param query * String with the SQL-query * @param get * true, if the query has a result * @return result String: result of the query. "" if there is no result. */ public static String queryDatabase(String query, boolean get) { String s = ""; createDBConnection(); try { Statement st = conn.createStatement(); if (get == true) { ResultSet result = st.executeQuery(query); if (result.next()) { s = result.getString(1); } } else { st.execute(query); } } catch (SQLException e) { logger.error("Database could not be queried", e); } closeDBConnection(); return s; }
From source file:jp.co.tis.gsp.tools.dba.dialect.SqlserverDialect.java
/**
 * Drops a database object (table, view, sequence, ...) in the given schema.
 * The generated DDL is echoed to stderr before execution.
 *
 * <p>Identifiers cannot be bound as JDBC parameters, so the DDL is built by
 * concatenation; callers must supply trusted object/schema names.
 *
 * @param conn       open connection to execute the DDL on
 * @param schema     schema qualifying the object
 * @param objectType SQL object type keyword (e.g. "TABLE", "VIEW")
 * @param objectName name of the object to drop
 * @throws SQLException if the DROP statement fails
 */
private void dropObject(Connection conn, String schema, String objectType, String objectName)
        throws SQLException {
    // The original wrapped the body in `catch (SQLException e) { throw e; }`,
    // a no-op catch-and-rethrow; removed — the finally still runs either way.
    Statement stmt = null;
    try {
        stmt = conn.createStatement();
        String sql = "DROP " + objectType + " " + schema + "." + objectName;
        System.err.println(sql);
        stmt.execute(sql);
    } finally {
        StatementUtil.close(stmt);
    }
}
From source file:com.emergya.persistenceGeo.dao.impl.PostgisDBManagementDaoHibernateImpl.java
/** * Creates a new database table with the columns specified by * <code>columns</code>. Also add and register a new geometry column geom * in the table./*from w w w . j a v a 2s . c om*/ * * @param tableName * name of the table. * @param columns * list of columns definitions. * @param srsCode * spatial system reference code. * @param geometryType * geometry type (LINE, POINT or POLYGON). * @return <code>true</code> if successful, <code>false</code> if not. */ private boolean createLayerTable(String tableName, List<DynaBean> columns, int srsCode, GeometryType geometryType) { boolean created = true; if (columns == null || columns.size() == 0) { throw new IllegalArgumentException( "No se puede crear una tabla sin columnas. Indique las columnas a crear."); } StringBuilder columnsSQL = new StringBuilder(); for (int i = 0; i < columns.size(); i++) { DynaBean col = columns.get(i); columnsSQL.append(col.get("name")).append(" ").append(col.get("type")); if (col.get("primaryKey") != null && ((Boolean) col.get("primaryKey")) == true) { columnsSQL.append(" PRIMARY KEY"); } if (i != columns.size() - 1) { columnsSQL.append(", "); } } SessionFactoryImplementor sfi = (SessionFactoryImplementor) getSessionFactory(); String schema = sfi.getSettings().getDefaultSchemaName(); final String sql = MessageFormat.format(CREATE_TABLE_SQL, schema, tableName, columnsSQL.toString()); final String addGeometryColumnSql = String.format(ADD_GEOMETRY_COLUMN_SQL, schema, tableName, srsCode, geometryType, 2); try { getSession().doWork(new Work() { @Override public void execute(Connection connection) throws SQLException { Statement stmt = connection.createStatement(); stmt.execute(sql); stmt.execute(addGeometryColumnSql); } }); } catch (Exception e) { logger.error(String.format("Error creando la tabla \"%s\" de tipo %s", tableName, GEOMETRY_TYPE_SQL), e); created = false; } return created; }
From source file:com.splicemachine.derby.impl.sql.execute.operations.InsertOperationIT.java
@Test public void testInsertFromSubOperation() throws Exception { Map<String, Integer> nameCountMap = Maps.newHashMap(); Statement s = methodWatcher.getStatement(); s.execute("insert into Y values('sfines')"); s.execute("insert into Y values('sfines')"); nameCountMap.put("sfines", 2); s.execute("insert into Y values('jzhang')"); s.execute("insert into Y values('jzhang')"); s.execute("insert into Y values('jzhang')"); nameCountMap.put("jzhang", 3); s.execute("insert into Y values('jleach')"); nameCountMap.put("jleach", 1); methodWatcher.commit();//w w w. ja va 2 s . c o m s = methodWatcher.getStatement(); int rowsInserted = s .executeUpdate("insert into Z (name,count) select name,count(name) from Y group by name"); Assert.assertEquals(nameCountMap.size(), rowsInserted); methodWatcher.commit(); ResultSet rs = methodWatcher.executeQuery("select * from Z"); int groupCount = 0; while (rs.next()) { String name = rs.getString(1); Integer count = rs.getInt(2); Assert.assertNotNull("Name is null!", name); Assert.assertNotNull("Count is null!", count); int correctCount = nameCountMap.get(name); Assert.assertEquals("Incorrect count returned for name " + name, correctCount, count.intValue()); groupCount++; } Assert.assertEquals("Incorrect number of groups returned!", nameCountMap.size(), groupCount); }
From source file:com.splicemachine.derby.impl.sql.execute.operations.InsertOperationIT.java
/** * The idea here is to test that PreparedStatement inserts won't barf if you do * multiple inserts with different where clauses each time * * @throws Exception/*from w w w . j a va2 s. c om*/ */ @Test public void testInsertFromBoundedSubSelectThatChanges() throws Exception { Statement s = methodWatcher.getStatement(); s.execute("insert into L (name) values ('gdavis'),('mzweben'),('rreimer')"); PreparedStatement ps = methodWatcher .prepareStatement("insert into J (name) select name from L a where a.name = ?"); ps.setString(1, "rreimer"); ps.executeUpdate(); ResultSet rs = methodWatcher.executeQuery("select * from J"); int count = 0; while (rs.next()) { Assert.assertEquals("Incorrect name inserted!", "rreimer", rs.getString(1)); count++; } Assert.assertEquals("Incorrect number of results returned!", 1, count); ps.setString(1, "mzweben"); ps.executeUpdate(); List<String> correct = Arrays.asList("rreimer", "mzweben"); rs = methodWatcher.executeQuery("select * from J"); count = 0; while (rs.next()) { String next = rs.getString(1); boolean found = false; for (String correctName : correct) { if (correctName.equals(next)) { found = true; break; } } Assert.assertTrue("Value " + next + " unexpectedly appeared!", found); count++; } Assert.assertEquals("Incorrect number of results returned!", correct.size(), count); }
From source file:com.cloudera.sqoop.manager.CubridManagerExportTest.java
public void createTableAndPopulateData(String table) { String fulltableName = manager.escapeTableName(table); Statement stmt = null; // Drop the existing table, if there is one. try {/*from w w w. j a va 2s. com*/ conn = manager.getConnection(); stmt = conn.createStatement(); stmt.execute("DROP TABLE IF EXISTS " + fulltableName); conn.commit(); } catch (SQLException sqlE) { LOG.info("Table was not dropped: " + sqlE.getMessage()); } finally { try { if (null != stmt) { stmt.close(); } } catch (Exception ex) { LOG.warn("Exception while closing stmt", ex); } } // Create and populate table try { conn = manager.getConnection(); conn.setAutoCommit(false); stmt = conn.createStatement(); // create the database table and populate it with data. stmt.executeUpdate( "CREATE TABLE " + fulltableName + " (" + "id INT NOT NULL, " + "name VARCHAR(24) NOT NULL, " + "salary FLOAT, " + "dept VARCHAR(32), " + "PRIMARY KEY (id))"); conn.commit(); } catch (SQLException sqlE) { LOG.error("Encountered SQL Exception: ", sqlE); sqlE.printStackTrace(); fail("SQLException when running test setUp(): " + sqlE); } finally { try { if (null != stmt) { stmt.close(); } } catch (Exception ex) { LOG.warn("Exception while closing connection/stmt", ex); } } }
From source file:org.cfr.capsicum.test.AbstractCayenneJUnit4DbUnitSpringContextTests.java
/** * Executes a set of commands to drop/create database objects. *///from ww w . ja v a2s. c o m protected boolean safeExecute(String sql) throws SQLException { Connection connection = DataSourceUtils.getConnection(dataSource); Statement statement = connection.createStatement(); try { statement.execute(sql); return true; } catch (SQLException ex) { return false; } finally { statement.close(); DataSourceUtils.releaseConnection(connection, dataSource); } }
From source file:namedatabasescraper.NameDatabaseScraper.java
private void attachDatabase() throws CannotLoadJDBCDriverException, SQLException, IOException, ErrorCreatingDatabaseException { try {//from w w w . j a va 2 s . c om Class.forName("org.sqlite.JDBC"); } catch (ClassNotFoundException ex) { logger.log(Level.SEVERE, "Could not load SQLite JDBC driver: {0}", ex.getMessage()); throw new CannotLoadJDBCDriverException(); } String dbFilename = this.settingsDirname + File.separator + "names.sqlite"; File dbFile = new File(dbFilename); this.connection = DriverManager.getConnection("jdbc:sqlite:" + dbFilename); Statement statement = this.connection.createStatement(); try { ResultSet rs = statement.executeQuery("SELECT name FROM names LIMIT 1"); while (rs.next()) { //lastProgramRun = rs.getString("value"); } } catch (SQLException e) { logger.log(Level.INFO, "Creating new name table"); statement.execute("CREATE TABLE names (name TEXT, scraper_id TEXT)"); } }
From source file:fr.jetoile.hadoopunit.component.HiveServer2BootstrapTest.java
/**
 * Integration test: connects to the embedded HiveServer2 over JDBC, creates
 * the test database and an ORC table, validates the table via DESCRIBE
 * FORMATTED (expects 33 output rows), then drops the table.
 *
 * NOTE(review): the Connection and the repeatedly reassigned Statements are
 * never closed, and the first stmt assignment can be skipped if the CREATE
 * DATABASE block throws — acceptable in a test, but worth confirming.
 */
@Test
public void hiveServer2ShouldStart() throws InterruptedException, ClassNotFoundException, SQLException {
    // Load the Hive JDBC driver.
    LOGGER.info("HIVE: Loading the Hive JDBC Driver");
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    // Get the connection; host, port and database come from the test configuration.
    Connection con = DriverManager.getConnection(
            "jdbc:hive2://" + configuration.getString(HadoopUnitConfig.HIVE_SERVER2_HOSTNAME_KEY) + ":"
                    + configuration.getInt(HadoopUnitConfig.HIVE_SERVER2_PORT_KEY) + "/"
                    + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY),
            "user", "pass");

    // Create the DB if it does not exist; failures are printed and ignored.
    Statement stmt;
    try {
        String createDbDdl = "CREATE DATABASE IF NOT EXISTS "
                + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY);
        stmt = con.createStatement();
        LOGGER.info("HIVE: Running Create Database Statement: {}", createDbDdl);
        stmt.execute(createDbDdl);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Drop the table in case it still exists from a previous run.
    String dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY)
            + "." + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);

    // Create the partitioned, bucketed, uncompressed ORC table.
    String createDdl = "CREATE TABLE IF NOT EXISTS "
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY) + " (id INT, msg STRING) "
            + "PARTITIONED BY (dt STRING) " + "CLUSTERED BY (id) INTO 16 BUCKETS "
            + "STORED AS ORC tblproperties(\"orc.compress\"=\"NONE\")";
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Create Table Statement: {}", createDdl);
    stmt.execute(createDdl);

    // Issue a DESCRIBE on the new table and echo every column of every row.
    LOGGER.info("HIVE: Validating Table was Created: ");
    ResultSet resultSet = stmt.executeQuery(
            "DESCRIBE FORMATTED " + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY));
    int count = 0;
    while (resultSet.next()) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            System.out.print(resultSet.getString(i));
        }
        System.out.println();
        count++;
    }
    // 33 = expected number of DESCRIBE FORMATTED output rows for this table.
    assertEquals(33, count);

    // Clean up: drop the table.
    dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);
}
From source file:org.sakaiproject.sitestats.impl.DBHelper.java
/**
 * Preloads the SiteStats default reports by streaming the vendor-specific
 * default_reports.sql from the classpath and executing it line by line on
 * the Hibernate session's JDBC connection. Duplicate-key failures
 * (SQLState 23000) are silently skipped so reloading is idempotent; other
 * per-statement failures are logged and do not abort the run.
 */
public void preloadDefaultReports() {
    HibernateCallback hcb = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException, SQLException {
            Connection c = null;
            InputStreamReader isr = null;
            BufferedReader br = null;
            try {
                // The SQL script is selected by database vendor.
                ClassPathResource defaultReports = new ClassPathResource(dbVendor + "/default_reports.sql");
                LOG.info("init(): - preloading sitestats default reports");
                // NOTE(review): reader uses the platform default charset —
                // confirm the script files are encoded compatibly.
                isr = new InputStreamReader(defaultReports.getInputStream());
                br = new BufferedReader(isr);
                c = session.connection();
                String sqlLine = null;
                while ((sqlLine = br.readLine()) != null) {
                    sqlLine = sqlLine.trim();
                    // Skip blank lines and "--" comment lines.
                    if (!sqlLine.equals("") && !sqlLine.startsWith("--")) {
                        // Strip everything from the first ';' — assumes one
                        // statement per line.
                        if (sqlLine.endsWith(";")) {
                            sqlLine = sqlLine.substring(0, sqlLine.indexOf(";"));
                        }
                        Statement st = null;
                        try {
                            st = c.createStatement();
                            st.execute(sqlLine);
                        } catch (SQLException e) {
                            // 23000 = integrity constraint violation: the
                            // report already exists, so stay quiet.
                            if (!"23000".equals(e.getSQLState())) {
                                LOG.warn("Failed to preload default report: " + sqlLine, e);
                            }
                        } catch (Exception e) {
                            LOG.warn("Failed to preload default report: " + sqlLine, e);
                        } finally {
                            // Close each per-line statement before the next.
                            if (st != null)
                                st.close();
                        }
                    }
                }
            } catch (HibernateException e) {
                LOG.error("Error while preloading default reports", e);
            } catch (Exception e) {
                LOG.error("Error while preloading default reports", e);
            } finally {
                // Best-effort cleanup of reader chain and connection.
                if (br != null) {
                    try {
                        br.close();
                    } catch (IOException e) {
                    }
                }
                if (isr != null) {
                    try {
                        isr.close();
                    } catch (IOException e) {
                    }
                }
                if (c != null) {
                    c.close();
                }
            }
            return null;
        }
    };
    getHibernateTemplate().execute(hcb);
}