Usage examples for the `java.sql.Statement` method `execute`:
boolean execute(String sql) throws SQLException;
From source file:de.unibi.techfak.bibiserv.BiBiToolsTest.java
private static DataSource derbydb() throws Exception { EmbeddedDataSource ds = new EmbeddedDataSource(); String db = "test/testdb_" + System.currentTimeMillis(); // check if database exists File db_dir = new File(db); if (db_dir.exists()) { try {/* w ww . java2s.c o m*/ FileUtils.deleteDirectory(db_dir); } catch (IOException e) { assertTrue(e.getMessage(), false); } } ds.setDatabaseName(db); ds.setCreateDatabase("create"); Connection con = ds.getConnection(); Statement stmt = con.createStatement(); // read SQL Statement from file BufferedReader r = new BufferedReader( new InputStreamReader(new FileInputStream("src/test/config/status.sql"))); String line; StringBuilder sql = new StringBuilder(); while ((line = r.readLine()) != null) { // skip commend lines if (!line.startsWith("--")) { sql.append(line); sql.append('\n'); } } r.close(); // execute sqlcmd's for (String sqlcmd : sql.toString().split(";")) { sqlcmd = sqlcmd.trim(); // ignore trailing/ending whitespaces sqlcmd = sqlcmd.replaceAll("\n\n", "\n"); // remove double newline if (sqlcmd.length() > 1) { // if string contains more than one char, execute sql cmd LOG.debug(sqlcmd + "\n"); stmt.execute(sqlcmd); } } // close stmt stmt.close(); return ds; }
From source file:com.emergya.persistenceGeo.dao.impl.PostgisDBManagementDaoHibernateImpl.java
/**
 * Renames a column of the given table in the session's default schema by
 * executing the {@code ALTER_TABLE_SQL} template as raw SQL.
 *
 * <p>NOTE(review): identifiers are interpolated directly into the SQL via
 * {@link MessageFormat}; callers must not pass untrusted table/column names.</p>
 *
 * @param tableName     table whose column is renamed
 * @param oldColumnName current column name
 * @param newColumnName new column name
 */
@Override
public void changeTableColumnName(String tableName, String oldColumnName, String newColumnName) {
    SessionFactoryImplementor sfi = (SessionFactoryImplementor) getSessionFactory();
    String schema = sfi.getSettings().getDefaultSchemaName();
    final String sql = MessageFormat.format(ALTER_TABLE_SQL, schema, tableName, oldColumnName, newColumnName);
    getSession().doWork(new Work() {
        @Override
        public void execute(Connection connection) throws SQLException {
            // try-with-resources: the original leaked the Statement.
            try (Statement stmt = connection.createStatement()) {
                stmt.execute(sql);
            }
        }
    });
}
From source file:com.thoughtworks.go.server.database.H2DatabaseTest.java
/**
 * Verifies that a failure of the underlying SQL BACKUP statement surfaces as
 * a RuntimeException carrying the SQLException's message.
 */
@Test
void shouldThrowUpWhenBackupFails() throws Exception {
    File destDir = new File(".");
    SystemEnvironment systemEnvironment = mock(SystemEnvironment.class);
    Database database = new H2Database(systemEnvironment);
    Database spy = spy(database);
    BasicDataSource dataSource = mock(BasicDataSource.class);
    Connection connection = mock(Connection.class);
    Statement statement = mock(Statement.class);
    doReturn(dataSource).when(spy).createDataSource();
    when(dataSource.getConnection()).thenReturn(connection);
    when(connection.createStatement()).thenReturn(statement);
    when(statement.execute(anyString())).thenThrow(new SQLException("i failed"));
    try {
        spy.backup(destDir);
        // The original silently passed if backup() did NOT throw; fail loudly.
        // AssertionError is not a RuntimeException, so the catch below won't
        // swallow it.
        throw new AssertionError("expected backup() to propagate the SQLException");
    } catch (RuntimeException e) {
        assertThat(e.getMessage(), is("i failed"));
    }
    verify(statement).execute(anyString());
}
From source file:com.evolveum.midpoint.repo.sql.SqlAuditServiceImpl.java
/** * This method creates temporary table for cleanup audit method. * * @param session//from w ww . java 2 s . c om * @param dialect * @param tempTable */ private void createTemporaryTable(Session session, final Dialect dialect, final String tempTable) { session.doWork(new Work() { @Override public void execute(Connection connection) throws SQLException { //check if table exists try { Statement s = connection.createStatement(); s.execute("select id from " + tempTable + " where id = 1"); //table already exists return; } catch (Exception ex) { //we expect this on the first time } StringBuilder sb = new StringBuilder(); sb.append(dialect.getCreateTemporaryTableString()); sb.append(' ').append(tempTable).append(" (id "); sb.append(dialect.getTypeName(Types.BIGINT)); sb.append(" not null)"); sb.append(dialect.getCreateTemporaryTablePostfix()); Statement s = connection.createStatement(); s.execute(sb.toString()); } }); }
From source file:com.quest.orahive.HiveJdbcClient.java
private static void createOracleTableWithRetry(OraHiveOptions opts, List<OracleTableColumn> oracleColumns, Connection oracleConnection) { try {//from ww w . ja va 2 s .c om Statement statement = oracleConnection.createStatement(); StringBuilder columnClause = new StringBuilder(); for (int idx = 0; idx < oracleColumns.size(); idx++) { OracleTableColumn column = oracleColumns.get(idx); if (idx > 0) columnClause.append(", "); columnClause.append(String.format("%s %s", column.getName(), column.oracleDataType)); } String sql = String.format("CREATE TABLE %s (%s)", getOracleTableName(opts), columnClause.toString()); sql += getOracleTablespaceClause(opts); LOG.info(String.format("Executing SQL: %s", sql)); statement.execute(sql); statement.close(); } catch (SQLException ex) { LOG.error("Unable to create an Oracle table to store the results of the Hive query.", ex); System.out.println( String.format("\nWould you like to retry creating the Oracle table \"%s\"?\n" + "(y/n)", getOracleTableName(opts))); try { if (Utilities.readYNFromStdIn()) // Recurse... createOracleTableWithRetry(opts, oracleColumns, oracleConnection); else System.exit(1); } catch (IOException e) { LOG.error(e.getMessage()); System.exit(1); } } }
From source file:de.erdesignerng.test.sql.AbstractReverseEngineeringTestImpl.java
protected void loadSingleSQL(Connection aConnection, String aResource) throws IOException, SQLException { String theSQL = readResourceFile(aResource); Statement theStatement = aConnection.createStatement(); StringTokenizer theST = new StringTokenizer(theSQL, ";"); while (theST.hasMoreTokens()) { String theSingleSQL = theST.nextToken(); if (StringUtils.isNotEmpty(theSingleSQL)) { theStatement.execute(theSingleSQL); }// w ww .j av a 2 s . c o m } theStatement.close(); }
From source file:com.vmware.upgrade.sql.task.RawSQLTask.java
/**
 * Executes the supplied SQL statement. No response is captured.
 *
 * <p>Drives the task state machine: RUNNING on entry, then COMPLETED on
 * success or FAILED on error. A blank/empty SQL string is treated as a no-op
 * but still counts as completed progress.</p>
 *
 * @throws SQLException
 *             if an {@code SQLException} is encountered while executing the procedure
 * @throws IllegalStateException
 *             if the database is not connected
 */
@Override
public Void call() throws SQLException {
    setState(ExecutionState.RUNNING);
    if (!databaseContext.isConnected()) {
        // Fail fast before touching the connection.
        setState(ExecutionState.FAILED);
        throw new IllegalStateException("Database not connected");
    }
    Statement stmt = null;
    try {
        if (!StringUtils.isEmpty(sql)) {
            stmt = databaseContext.getConnection().createStatement();
            logger.debug("Executing sql ''{0}''", sql);
            stmt.execute(sql);
        }
        // Progress/state updates happen only after a successful execute (or
        // a deliberate no-op for empty SQL).
        incrementProgress();
        setState(ExecutionState.COMPLETED);
    } catch (SQLException sqle) {
        // Mark FAILED but let the caller see the original exception.
        setState(ExecutionState.FAILED);
        throw sqle;
    } finally {
        // Close the statement on every path; a failure to close is logged at
        // trace level rather than masking the primary outcome.
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                logger.trace(e, "Exception encountered when closing a statement.");
            }
        }
    }
    return null;
}
From source file:com.emergya.persistenceGeo.dao.impl.PostgisDBManagementDaoHibernateImpl.java
@Override public void deleteLayerTable(String tableName) { SessionFactoryImplementor sfi = (SessionFactoryImplementor) getSessionFactory(); String schema = sfi.getSettings().getDefaultSchemaName(); final String dropTableSql = MessageFormat.format(DROP_TABLE_SQL, schema, tableName); final String deleteGeometryColumnSql = String.format(REMOVE_GEOMETRY_COLUMN_SQL, tableName); getSession().doWork(new Work() { @Override//from w w w . jav a 2s . c o m public void execute(Connection connection) throws SQLException { Statement stmt = connection.createStatement(); stmt.execute(dropTableSql); stmt.execute(deleteGeometryColumnSql); } }); }
From source file:com.vertica.hivestoragehandler.VerticaOutputFormat.java
/** * Optionally called at the end of a job to optimize any newly created and * loaded tables. Useful for new tables with more than 100k records. * // w w w . j a v a 2 s . co m * @param conf * @throws Exception */ public static void optimize(Configuration conf) throws Exception { VerticaConfiguration vtconfig = new VerticaConfiguration(conf); Connection conn = vtconfig.getConnection(true); // TODO: consider more tables and skip tables with non-temp projections VerticaRelation vTable = new VerticaRelation(vtconfig.getOutputTableName()); Statement stmt = conn.createStatement(); ResultSet rs = null; HashSet<String> tablesWithTemp = new HashSet<String>(); //for now just add the single output table tablesWithTemp.add(vTable.getQualifiedName().toString()); // map from table name to set of projection names HashMap<String, Collection<String>> tableProj = new HashMap<String, Collection<String>>(); rs = stmt.executeQuery("select projection_schema, anchor_table_name, projection_name from projections;"); while (rs.next()) { String ptable = rs.getString(1) + "." 
+ rs.getString(2); if (!tableProj.containsKey(ptable)) { tableProj.put(ptable, new HashSet<String>()); } tableProj.get(ptable).add(rs.getString(3)); } for (String table : tablesWithTemp) { if (!tableProj.containsKey(table)) { throw new RuntimeException("Cannot optimize table with no data: " + table); } } String designName = (new Integer(conn.hashCode())).toString(); stmt.execute("select dbd_create_workspace('" + designName + "')"); stmt.execute("select dbd_create_design('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_add_design_tables('" + designName + "', '" + vTable.getQualifiedName().toString() + "')"); stmt.execute("select dbd_populate_design('" + designName + "', '" + designName + "')"); //Execute stmt.execute("select dbd_create_deployment('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_add_deployment_design('" + designName + "', '" + designName + "', '" + designName + "')"); stmt.execute("select dbd_add_deployment_drop('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_execute_deployment('" + designName + "', '" + designName + "')"); //Cleanup stmt.execute("select dbd_drop_deployment('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_remove_design('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_drop_design('" + designName + "', '" + designName + "')"); stmt.execute("select dbd_drop_workspace('" + designName + "')"); }
From source file:org.apache.hive.jdbc.TestSSL.java
private void setupTestTableWithData(String tableName, Path dataFilePath, Connection hs2Conn) throws Exception { Statement stmt = hs2Conn.createStatement(); stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop table if exists " + tableName); stmt.execute("create table " + tableName + " (under_col int comment 'the under column', value string)"); // load data//from w w w . j a v a 2s. c om stmt.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName); stmt.close(); }