List of usage examples for java.sql.Statement.execute
boolean execute(String sql) throws SQLException;
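execute can run any SQL statement; it returns true when the first result is a ResultSet and false when it is an update count or there are no results. A minimal, self-contained sketch (the in-memory HSQLDB URL and the demo table are illustrative placeholders, not taken from the examples below):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ExecuteDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; any JDBC driver follows the same contract.
        try (Connection con = DriverManager.getConnection("jdbc:hsqldb:mem:demo");
                Statement stmt = con.createStatement()) {
            stmt.execute("CREATE TABLE demo (id INT)"); // DDL: returns false
            boolean hasResultSet = stmt.execute("SELECT id FROM demo"); // query: returns true
            if (hasResultSet) {
                try (ResultSet rs = stmt.getResultSet()) {
                    while (rs.next()) {
                        System.out.println(rs.getInt(1));
                    }
                }
            } else {
                System.out.println("Update count: " + stmt.getUpdateCount());
            }
        }
    }
}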
From source file:com.datatorrent.contrib.hive.HiveMockTest.java
public static void hiveInitializeDatabase(HiveStore hiveStore) throws SQLException {
    hiveStore.connect();
    Statement stmt = hiveStore.getConnection().createStatement();
    // show tables
    String sql = "show tables";
    LOG.debug(sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
        LOG.debug("tables are {}", res.getString(1));
    }
    stmt.execute("DROP TABLE " + tablename);
    stmt.execute("CREATE TABLE IF NOT EXISTS " + tablename + " (col1 String) PARTITIONED BY(dt STRING) "
            + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' \n"
            + "STORED AS TEXTFILE ");
    hiveStore.disconnect();
}
From source file:com.ibm.research.rdf.store.runtime.service.sql.StoreHelper.java
protected static void ensureExistenceOfUserTablespaceAndBuiltInRecursiveStoreProcedure(Connection con,
        Store store) throws RdfStoreException {
    if (!(store.getStoreBackend() == Backend.db2)) {
        // TODO for postgres
        return;
    }
    TemporaryTableSpaceCreation cmd = new TemporaryTableSpaceCreation(USERTEMPTABLESPACE_NAME,
            USERTEMPTABLESPACE_FILE);
    Statement st = null;
    try {
        try {
            st = con.createStatement();
            st.execute(cmd.toSQL());
            store.setUserTablespace(USERTEMPTABLESPACE_NAME);
            st.execute(REACHABLENODES_PROC_DEF);
        } catch (SQLException ex) {
            // tablespace or procedure may already exist; ignore
        }
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ex) {
                // ignore failure to close
            }
        }
    }
}
From source file:HSqlManager.java
public static void clearDatabase(Connection connection) throws SQLException {
    Connection db = connection;
    Statement stmt = db.createStatement();
    stmt.execute("TRUNCATE SCHEMA Primerdb RESTART IDENTITY AND COMMIT NO CHECK");
}
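The snippet above never closes its Statement. As a point of comparison, a minimal try-with-resources sketch of the same call (the HSQLDB-specific TRUNCATE SCHEMA statement is unchanged) releases it automatically:

public static void clearDatabase(Connection connection) throws SQLException {
    // The Statement is closed automatically, even if execute() throws.
    try (Statement stmt = connection.createStatement()) {
        stmt.execute("TRUNCATE SCHEMA Primerdb RESTART IDENTITY AND COMMIT NO CHECK");
    }
}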
From source file:ece356.UserDBAO.java
public static Connection getConnection() throws ClassNotFoundException, SQLException, NamingException {
    InitialContext cxt = new InitialContext();
    if (cxt == null) {
        throw new RuntimeException("Unable to create naming context!");
    }
    Context dbContext = (Context) cxt.lookup("java:comp/env");
    DataSource ds = (DataSource) dbContext.lookup("jdbc/myDatasource");
    if (ds == null) {
        throw new RuntimeException("Data source not found!");
    }
    Connection con = ds.getConnection();
    Statement stmt = null;
    try {
        stmt = con.createStatement();
        stmt.execute("USE ece356db_" + nid);
    } finally {
        if (stmt != null) {
            stmt.close();
        }
    }
    return con;
}
From source file:HSqlManager.java
private static void commonInitialize(int bps, Connection connection) throws SQLException, IOException {
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    INSTANCE.parseAllPhages(bps);
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)"
            + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    phages.parallelStream().map(x -> x[0]).collect(Collectors.toSet()).parallelStream().forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).forEach(z -> {
            try {
                List<String> clustphages = phages.stream().filter(a -> a[0].equals(x) && a[1].equals(z))
                        .map(a -> a[2]).collect(Collectors.toList());
                Set<String> primers = Collections.synchronizedSet(CSV
                        .readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
                clustphages.remove(0);
                clustphages.parallelStream().forEach(phage -> {
                    primers.retainAll(
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
                });
                int i = 0;
                for (CharSequence a : primers) {
                    try {
                        st.setInt(1, bps);
                        st.setString(2, a.toString());
                        st.setString(3, x);
                        st.setString(4, z);
                        st.addBatch();
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    i++;
                    if (i == 1000) {
                        i = 0;
                        st.executeBatch();
                        db.commit();
                    }
                }
                if (i > 0) {
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
                System.out.println("Error occurred at " + x + " " + z);
            }
        });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
}
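The insert loop above follows the standard JDBC batching idiom: addBatch per row, then executeBatch and commit every 1,000 rows so no single batch grows unbounded. Distilled into a standalone sketch (the table and column names are hypothetical):

// Flush-every-N batch insert; demo_table and val are hypothetical names.
static void batchInsert(Connection db, List<String> rows) throws SQLException {
    db.setAutoCommit(false);
    try (PreparedStatement st = db.prepareStatement("INSERT INTO demo_table (val) VALUES (?)")) {
        int pending = 0;
        for (String row : rows) {
            st.setString(1, row);
            st.addBatch();
            if (++pending == 1000) { // flush a full batch
                st.executeBatch();
                db.commit();
                pending = 0;
            }
        }
        if (pending > 0) { // flush the remainder
            st.executeBatch();
            db.commit();
        }
    }
}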
From source file:de.unibi.cebitec.bibiserv.util.visualizer.AbstractTest.java
private static DataSource derbydb() throws Exception {
    EmbeddedDataSource ds = new EmbeddedDataSource();
    String db = "test/testdb_" + System.currentTimeMillis();
    // check if database exists
    File db_dir = new File(db);
    if (db_dir.exists()) {
        try {
            FileUtils.deleteDirectory(db_dir);
        } catch (IOException e) {
            LOG.error(e);
            assertTrue(e.getMessage(), false);
        }
    }
    ds.setDatabaseName(db);
    ds.setCreateDatabase("create");
    Connection con = ds.getConnection();
    Statement stmt = con.createStatement();
    // read SQL statements from file
    BufferedReader r = new BufferedReader(
            new InputStreamReader(TestRAEDA.class.getResourceAsStream("/status.sql")));
    String line;
    StringBuilder sql = new StringBuilder();
    while ((line = r.readLine()) != null) {
        // skip comment lines
        if (!line.startsWith("--")) {
            sql.append(line);
            sql.append('\n');
        }
    }
    r.close();
    // execute sql commands
    for (String sqlcmd : sql.toString().split(";")) {
        sqlcmd = sqlcmd.trim(); // strip leading/trailing whitespace
        sqlcmd = sqlcmd.replaceAll("\n\n", "\n"); // remove double newlines
        if (sqlcmd.length() > 1) {
            // if the string contains more than one char, execute the sql cmd
            LOG.debug(sqlcmd + "\n");
            stmt.execute(sqlcmd);
        }
    }
    // close stmt
    stmt.close();
    return ds;
}
From source file:HSqlPrimerDesign.java
public static void checker(Connection connection, int bps) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
    String base = new File("").getAbsolutePath();
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter log = new PrintWriter(new File("checkertest.log"));
    ImportPhagelist.getInstance().parseAllPhagePrimers(bps);
    stat.execute("SET FILES LOG FALSE;\n");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    String strain = "";
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
        if (r[2].equals("xkcd")) {
            strain = r[0];
        }
    }
    String x = strain;
    phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).forEach(z -> {
        System.out.println("Starting:" + z);
        try {
            List<String> primers = new ArrayList<String>();
            Set<String> clustphages = phages.stream().filter(a -> a[0].equals(x) && a[1].equals(z))
                    .map(a -> a[2]).collect(Collectors.toSet());
            ResultSet resultSet = stat.executeQuery("Select * from primerdb.primers"
                    + " where Strain ='" + x + "' and Cluster ='" + z + "' and UniqueP = true"
                    + " and Bp = " + Integer.valueOf(bps) + " and Hairpin = false");
            while (resultSet.next()) {
                primers.add(resultSet.getString("Sequence"));
            }
            if (primers.size() > 0) {
                for (int i = 0; i < 4; i++) {
                    String primer = primers.get(i);
                    for (String clustphage : clustphages) {
                        if (!CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphage + ".csv")
                                .contains(primer))
                            log.println("Problem " + z);
                    }
                }
                Set<String> nonclustphages = phages.stream().filter(a -> a[0].equals(x) && !a[1].equals(z))
                        .map(a -> a[2]).collect(Collectors.toSet());
                log.println("Cluster phages done");
                for (int i = 0; i < 4; i++) {
                    String primer = primers.get(i);
                    for (String nonclustphage : nonclustphages) {
                        if (CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + nonclustphage + ".csv")
                                .contains(primer))
                            log.println("Problem " + z);
                    }
                }
                log.println("NonCluster phages done");
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("Error occurred at " + x + " " + z);
        }
        log.println(z);
        log.flush();
        System.gc();
    });
    stat.execute("SET FILES LOG TRUE\n");
    stat.close();
    System.out.println("Primers Matched");
}
From source file:HSqlManager.java
@SuppressWarnings("Duplicates")
@Deprecated
private static void mycoCommonInitialize(int bps, Connection connection) throws SQLException, IOException {
    long time = System.currentTimeMillis();
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)"
            + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    String strain = "";
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
        if (r[2].equals("xkcd")) {
            strain = r[0];
        }
    }
    call.close();
    String x = strain;
    Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1])
            .collect(Collectors.toSet());
    Map<String, List<String>> clusters = new HashMap<>();
    clust.parallelStream().forEach(cluster -> clusters.put(cluster, phages.stream()
            .filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2]).collect(Collectors.toList())));
    for (String z : clusters.keySet()) {
        try {
            List<String> clustphages = clusters.get(z);
            Set<String> primers = Collections.synchronizedSet(
                    CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
            clustphages.remove(0);
            for (String phage : clustphages) {
                primers.retainAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
            }
            int i = 0;
            for (String a : primers) {
                try {
                    st.setInt(1, bps);
                    st.setString(2, a);
                    st.setString(3, x);
                    st.setString(4, z);
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + z);
                }
                i++;
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
            if (i > 0) {
                st.executeBatch();
                db.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("Error occurred at " + x + " " + z);
        }
        System.out.println(z);
    }
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
}
From source file:br.edu.claudivan.controledegastos.dao.test.utils.ScriptRunner.java
public void execute(BufferedReader bufferedReader) throws IOException, SQLException {
    StringBuilder scriptBuilder = new StringBuilder();
    while (bufferedReader.ready()) {
        scriptBuilder.append(bufferedReader.readLine());
        scriptBuilder.append('\n'); // readLine() strips the terminator; restore it
    }
    // try-with-resources closes the Statement and Connection even if execute() throws
    try (Connection con = this.dataSource.getConnection();
            Statement st = con.createStatement()) {
        st.execute(scriptBuilder.toString());
    }
}
From source file:com.mirth.connect.server.controllers.tests.TestUtils.java
public static void fixMessageIdSequence(String channelId) throws Exception {
    Connection connection = null;
    long localChannelId = ChannelController.getInstance().getLocalChannelId(channelId);
    String database = (String) Donkey.getInstance().getConfiguration().getDonkeyProperties().get("database");
    Long maxId = null;
    if (database.equals("derby") || database.equals("mysql") || database.equals("sqlserver")) {
        Statement statement = null;
        ResultSet result = null;
        try {
            connection = getConnection();
            statement = connection.createStatement();
            result = statement.executeQuery("SELECT MAX(id) FROM d_m" + localChannelId);
            result.next();
            maxId = result.getLong(1) + 1;
            close(result);
            statement.execute("DELETE FROM d_message_sequences WHERE local_channel_id = " + localChannelId);
            statement.execute(
                    "INSERT INTO d_message_sequences (local_channel_id) VALUES (" + localChannelId + ")");
            connection.commit();
        } finally {
            close(result);
            close(statement);
            close(connection);
        }
    }
    logger.debug("Message ID sequence updated to: " + maxId);
}