List of usage examples for java.sql Statement execute
boolean execute(String sql) throws SQLException;
From source file:gsn.vsensor.TestStreamExporterVirtualSensor.java
public void testLogStatementIntoMySQLDB() { StreamExporterVirtualSensor vs = new StreamExporterVirtualSensor(); // configure parameters ArrayList<KeyValue> params = new ArrayList<KeyValue>(); params.add(new KeyValueImp(StreamExporterVirtualSensor.PARAM_URL, url)); params.add(new KeyValueImp(StreamExporterVirtualSensor.PARAM_USER, user)); params.add(new KeyValueImp(StreamExporterVirtualSensor.PARAM_PASSWD, passwd)); config.setMainClassInitialParams(params); vs.setVirtualSensorConfiguration(config); vs.initialize();//from ww w. ja v a 2s . c om // configure datastream Vector<DataField> fieldTypes = new Vector<DataField>(); Object[] data = null; for (String type : DataTypes.TYPE_NAMES) fieldTypes.add(new DataField(type, type, type)); int i = 0; for (Object value : DataTypes.TYPE_SAMPLE_VALUES) data[i++] = value; long timeStamp = new Date().getTime(); StreamElement streamElement = new StreamElement(fieldTypes.toArray(new DataField[] {}), (Serializable[]) data, timeStamp); // give datastream to vs vs.dataAvailable(streamName, streamElement); // clean up and control boolean result = true; try { DriverManager.registerDriver(new com.mysql.jdbc.Driver()); Connection connection = DriverManager.getConnection(url, user, passwd); Statement statement = connection.createStatement(); statement.execute("SELECT * FROM " + streamName); System.out.println("result" + result); result = statement.getResultSet().last(); System.out.println("result" + result); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); result = false; } assertTrue(result); }
From source file:com.bc.fiduceo.db.AbstractDriver.java
/**
 * Drops all tables managed by this driver (TIMEAXIS, SATELLITE_OBSERVATION,
 * SENSOR).
 * <p>
 * BUG FIX: the original created a third Statement but discarded the result of
 * {@code connection.createStatement()} and ran the SENSOR drop on the previous
 * statement; none of the statements were ever closed. A single statement in
 * try-with-resources executes all three drops and is always released.
 *
 * @throws SQLException if any DROP statement fails
 */
@Override
public void clear() throws SQLException {
    try (Statement statement = connection.createStatement()) {
        statement.execute("DROP TABLE IF EXISTS TIMEAXIS");
        statement.execute("DROP TABLE IF EXISTS SATELLITE_OBSERVATION");
        statement.execute("DROP TABLE IF EXISTS SENSOR");
    }
}
From source file:geocodingissues.Main.java
public void addColumns() { Statement s = null; try {//from w ww . j av a 2 s.co m s = connection.createStatement(); s.execute("ALTER TABLE issues " + "ADD COLUMN street_no INT, " + "ADD COLUMN street VARCHAR(64), " + "ADD COLUMN neighborhood VARCHAR(64)"); } catch (Exception e) { System.out.println("Problem in altering the database 1"); } }
From source file:com.edgenius.wiki.installation.ConnectionProxy.java
public Statement createStatement() throws SQLException { Statement stmt = conn.createStatement(); if (!StringUtils.isBlank(schema)) { if (Server.DBTYPE_DB2.equalsIgnoreCase(type)) { stmt.execute("set current schema = " + schema); } else if (Server.DBTYPE_ORACLE9I.equalsIgnoreCase(type)) { stmt.execute("alter session set current_schema=" + schema); } else if (Server.DBTYPE_POSTGRESQL.equalsIgnoreCase(type)) { stmt.execute("SET search_path TO " + schema); } else {/* ww w . j av a 2 s.com*/ //TODO: how about MYSQL? does it have schema? } } return stmt; }
From source file:net.sourceforge.vulcan.spring.jdbc.JdbcSchemaMigrator.java
/**
 * Runs every semicolon-separated SQL command found in the given migration
 * script resource, committing after each command when the connection is not
 * in auto-commit mode. Blank fragments between semicolons are skipped.
 *
 * @param resource the migration script to execute
 * @throws IOException if the resource cannot be read
 */
private void executeSql(final Resource resource) throws IOException {
    final String script = loadResource(resource);
    log.info("Running migration script " + resource.getFilename());
    for (final String rawCommand : script.split(";")) {
        final String sql = rawCommand.trim();
        if (StringUtils.isBlank(sql)) {
            continue;
        }
        jdbcTemplate.execute(new StatementCallback() {
            public Object doInStatement(Statement stmt) throws SQLException, DataAccessException {
                stmt.execute(sql);
                // Commit explicitly unless the driver already auto-commits.
                final Connection conn = stmt.getConnection();
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
                return null;
            }
        });
    }
}
From source file:com.spankr.tutorial.TestConnectionDAO.java
/** * @throws SQLException/*from w ww.ja v a 2s. com*/ * */ @Test public void getSomeData() throws SQLException { Connection con = null; try { printActiveConnections(); con = ds.getConnection(); printActiveConnections(); Statement stmt = null; log.info("Creating sample_table"); stmt = con.createStatement(); stmt.execute( "CREATE TABLE sample_table (id INT IDENTITY, first_name VARCHAR(30), last_name VARCHAR(30), age INT)"); DbUtils.closeQuietly(stmt); PreparedStatement pstmt = null; log.info("Inserting a person into sample_table"); pstmt = con.prepareStatement("INSERT INTO sample_table VALUES (null, ?, ?, ?)"); pstmt.setString(1, "Bob"); pstmt.setString(2, "Haskins"); pstmt.setInt(3, 38); Assert.assertTrue(pstmt.executeUpdate() == 1); // success means exactly one row inserted DbUtils.closeQuietly(pstmt); log.info("Getting a count of rows in sample_table"); stmt = con.createStatement(); ResultSet rs = stmt.executeQuery("SELECT count(1) from sample_table"); if (rs.next()) { long l = rs.getLong(1); assertTrue("Shouldn't get a zero count", l > 0); log.debug(String.format("Total records = %s", l)); } else { fail("Nothing returned from the database query"); } } catch (SQLException e) { fail("Unable to create the database table"); } finally { DbUtils.closeQuietly(con); printActiveConnections(); } }
From source file:de.langmi.spring.batch.examples.readers.jdbc.JdbcPagingItemReaderTests.java
/** * Setup Datasource and create table for test. * * @throws Exception //from w w w .ja va 2 s . c o m */ @Before public void setUp() throws Exception { // DataSource Setup, apache commons dataSource = new BasicDataSource(); dataSource.setDriverClassName("org.hsqldb.jdbcDriver"); dataSource.setUrl("jdbc:hsqldb:mem:testdb"); dataSource.setUsername("sa"); dataSource.setPassword(""); // drop table if exists Connection conn = dataSource.getConnection(); Statement st = conn.createStatement(); st.execute(DROP_TEST_TABLE); conn.commit(); st.close(); conn.close(); // create table conn = dataSource.getConnection(); st = conn.createStatement(); st.execute(CREATE_TEST_TABLE); conn.commit(); st.close(); conn.close(); // fill with values conn = dataSource.getConnection(); // prevent auto commit for batching conn.setAutoCommit(false); PreparedStatement ps = conn.prepareStatement(INSERT); // fill with values for (int i = 0; i < EXPECTED_COUNT; i++) { ps.setString(1, String.valueOf(i)); ps.addBatch(); } ps.executeBatch(); conn.commit(); ps.close(); conn.close(); }
From source file:test.other.T_DaoTest.java
public void test1() throws SQLException { System.out.println("Start!"); File file = new File("?.txt"); if (!file.exists() || !file.isFile() || file.isDirectory()) { return;// w w w.j ava 2 s. co m } StringBuilder sBuilder = new StringBuilder((int) (file.length() + 1)); System.out.println(System.nanoTime() / 1000000); try (Scanner sc = new Scanner(file, "UTF-8")) { while (sc.hasNextLine()) { sBuilder.append(sc.nextLine()); } // note that Scanner suppresses exceptions if (sc.ioException() != null) { throw sc.ioException(); } } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } System.out.println(System.nanoTime() / 1000000); String insert = "insert into fc_post(content)values(" + DaoTemplate.quote(sBuilder.toString()) + ")"; System.out.println(System.nanoTime() / 1000000); java.sql.Statement statement = conn.createStatement(); statement.execute(insert); System.out.println(System.nanoTime() / 1000000); System.out.println("finished"); }
From source file:com.linkedin.pinot.integration.tests.RealtimeClusterIntegrationTest.java
@BeforeClass public void setUp() throws Exception { // Start ZK and Kafka startZk();// w ww .j a va2 s . c om kafkaStarters = KafkaStarterUtils.startServers(getKafkaBrokerCount(), KafkaStarterUtils.DEFAULT_KAFKA_PORT, KafkaStarterUtils.DEFAULT_ZK_STR, KafkaStarterUtils.getDefaultKafkaConfiguration()); // Create Kafka topic createKafkaTopic(KAFKA_TOPIC, KafkaStarterUtils.DEFAULT_ZK_STR); // Start the Pinot cluster startController(); startBroker(); startServer(); // Unpack data final List<File> avroFiles = unpackAvroData(_tmpDir, SEGMENT_COUNT); File schemaFile = getSchemaFile(); // Load data into H2 ExecutorService executor = Executors.newCachedThreadPool(); setupH2AndInsertAvro(avroFiles, executor); // Initialize query generator setupQueryGenerator(avroFiles, executor); // Push data into the Kafka topic pushAvroIntoKafka(avroFiles, executor, KAFKA_TOPIC); // Wait for data push, query generator initialization and H2 load to complete executor.shutdown(); executor.awaitTermination(10, TimeUnit.MINUTES); // Create Pinot table setUpTable("mytable", "DaysSinceEpoch", "daysSinceEpoch", KafkaStarterUtils.DEFAULT_ZK_STR, KAFKA_TOPIC, schemaFile, avroFiles.get(0)); // Wait until the Pinot event count matches with the number of events in the Avro files long timeInFiveMinutes = System.currentTimeMillis() + 5 * 60 * 1000L; Statement statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); statement.execute("select count(*) from mytable"); ResultSet rs = statement.getResultSet(); rs.first(); int h2RecordCount = rs.getInt(1); rs.close(); waitForRecordCountToStabilizeToExpectedCount(h2RecordCount, timeInFiveMinutes); }
From source file:de.langmi.spring.batch.examples.readers.support.CompositeCursorItemReaderTest.java
/**
 * Properly tears down a HSQLDB in-memory database by issuing SHUTDOWN.
 * <p>
 * FIX: only the Statement was closed; the Connection leaked. Both are now
 * released via try-with-resources.
 *
 * @param dataSource datasource backed by the in-memory database
 * @throws Exception if the shutdown fails
 */
private void tearDownDataSource(final DataSource dataSource) throws Exception {
    try (Connection conn = dataSource.getConnection();
            Statement st = conn.createStatement()) {
        st.execute("SHUTDOWN");
        conn.commit();
    }
}