Example usage for java.sql Statement execute

List of usage examples for java.sql Statement execute

Introduction

On this page you can find usage examples for java.sql Statement execute.

Prototype

boolean execute(String sql) throws SQLException;

Document

Executes the given SQL statement, which may return multiple results.
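
Because execute can return either a ResultSet or an update count, and a single statement may produce several results, callers typically check the boolean return value and then walk the results with getResultSet, getUpdateCount, and getMoreResults. The following is a minimal, self-contained sketch of that pattern; the JDBC URL, credentials, and the SQL text are hypothetical placeholders, not taken from the examples below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ExecuteExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory HSQLDB URL; substitute your own driver and database.
        try (Connection conn = DriverManager.getConnection("jdbc:hsqldb:mem:demo", "SA", "");
                Statement stmt = conn.createStatement()) {
            // execute returns true if the first result is a ResultSet,
            // false if it is an update count or there is no result.
            boolean isResultSet = stmt.execute("CALL 1 + 1"); // hypothetical statement
            while (true) {
                if (isResultSet) {
                    try (ResultSet rs = stmt.getResultSet()) {
                        while (rs.next()) {
                            System.out.println(rs.getObject(1));
                        }
                    }
                } else {
                    int updateCount = stmt.getUpdateCount();
                    if (updateCount == -1) {
                        break; // no more results of any kind
                    }
                    System.out.println("Update count: " + updateCount);
                }
                // Advance to the next result, if any.
                isResultSet = stmt.getMoreResults();
            }
        }
    }
}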

Usage

From source file:com.porvak.bracket.social.jdbc.versioned.SqlDatabaseChange.java

@Override
protected void doExecuteStatement(Statement statement) throws SQLException {
    statement.execute(sql);
}

From source file:HSqlManager.java

public static void primerAnalysis(Connection connection, int bps) throws SQLException, IOException {
    long time = System.currentTimeMillis();
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    INSTANCE.parseAllPhages(bps);
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
    time = System.currentTimeMillis();
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    //        PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers" +
    //                "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" +
    //                " Values(?,?,true,false,false,?,?)");
    PreparedStatement st = db.prepareStatement(
            "INSERT INTO Primerdb.Primers" + "(Bp,Sequence,Strain,Cluster,Tm,GC,UniqueP,CommonP,Hairpin) "
                    + "VALUES(?,?,?,?,?,?,true,true,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);

        //            if(strain.equals("-myco")) {
        //                if (r[2].equals("xkcd")) {
        //                    strain = r[0];
        //                }
        //            }else if(strain.equals("-arthro")){
        //                if (r[2].equals("ArV1")) {
        //                    strain = r[0];
        //                }
        //            }
    }
    call.close();

    Set<String> strains = phages.stream().map(y -> y[0]).collect(Collectors.toSet());
    for (String x : strains) {
        Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1])
                .collect(Collectors.toSet());
        Map<String, Integer> clustersNum = new HashMap<>();
        Map<Integer, String> clustersName = new HashMap<>();
        Map<Integer, List<String>> clusters = new HashMap<>();
        Map<Bytes, Primer> primers = new HashMap<>();
        int i = 0;
        for (String cluster : clust) {
            clustersName.put(i, cluster);
            clustersNum.put(cluster, i);
            i++;
        }
        clust.parallelStream()
                .forEach(cluster -> clusters.put(clustersNum.get(cluster),
                        phages.stream().filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2])
                                .collect(Collectors.toList())));
        for (int z : clusters.keySet()) {
            //            try {
            List<String> clustphages = clusters.get(z);
            for (String phage : clustphages) {
                Set<Bytes> phagprimers =
                        //Read from CSV file here
                        //Premade CSV files of all possible
                        //primers in a phage genome
                        CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv").stream()
                                .map(l -> new Bytes(l.getBytes())).collect(Collectors.toSet());
                for (Bytes primer : phagprimers) {
                    if (!primers.containsKey(primer)) {
                        primers.put(primer, new Primer(z));
                    } else {
                        Primer select = primers.get(primer);
                        select.phageCount++;
                        if (!select.containsCluster(z)) {
                            select.addCluster(z);
                        }
                    }

                }

            }
            System.out.println(clustersName.get(z));
        }
        int count = 0;
        Iterator<Map.Entry<Bytes, Primer>> primersSet = primers.entrySet().iterator();
        while (primersSet.hasNext()) {
            Map.Entry<Bytes, Primer> primer = primersSet.next();
            Primer primerInf = primer.getValue();
            if (primerInf.clusters.length != 1) {
                primer.setValue(null);
            } else {
                int primerClust = -1;
                for (int cluster : primerInf.clusters) {
                    primerClust = cluster;
                }
                if (primerInf.phageCount != clusters.get(primerClust).size()) {
                    primer.setValue(null);
                } else {
                    count++;
                }
            }
        }
        System.out.print("Unique Count: ");
        System.out.println(count);
        System.out.print("Primer Count: ");
        System.out.println(primers.size());
        i = 0;
        for (Bytes a : primers.keySet()) {
            Primer primerInf = primers.get(a);
            if (primerInf != null) {
                String primerClust = "";
                for (int cluster : primerInf.clusters) {
                    primerClust = clustersName.get(cluster);
                }
                String str = new String(a.bytes);
                try {
                    st.setInt(1, bps);
                    st.setString(2, str);
                    st.setString(3, x);
                    st.setString(4, primerClust);
                    //                        st.setDouble(5, HSqlPrimerDesign.primerTm(str, 0, 800, 1.5, 0.2));
                    st.setDouble(5, HSqlPrimerDesign.easytm(str));
                    st.setDouble(6, HSqlPrimerDesign.gcContent(str));
                    st.setBoolean(7, HSqlPrimerDesign.calcHairpin(str, 4));
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + primerClust);
                }
                i++;
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
        }
        if (i > 0) {
            st.executeBatch();
            db.commit();
        }

        //        }

        System.out.println("Unique Updated");
        System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
    }
    stat.execute("SET FILES LOG TRUE;");
    st.close();
    stat.close();
}

From source file:com.adaptris.core.util.JdbcUtil.java

public static Connection testConnection(Connection sqlConnection, String testStatement, boolean debugMode)
        throws SQLException {
    Statement stmt = sqlConnection.createStatement();
    ResultSet rs = null;
    try {
        if (isEmpty(testStatement)) {
            return sqlConnection;
        }
        if (debugMode) {
            rs = stmt.executeQuery(testStatement);
            if (rs.next()) {
                StringBuffer sb = new StringBuffer("TestStatement Results - ");
                ResultSetMetaData rsm = rs.getMetaData();
                for (int i = 1; i <= rsm.getColumnCount(); i++) {
                    sb.append("[");
                    sb.append(rsm.getColumnName(i));
                    sb.append("=");
                    sb.append(rs.getObject(i));
                    sb.append("] ");
                }
                log.trace(sb.toString());
            }
        } else {
            stmt.execute(testStatement);
        }
    } finally {
        JdbcUtil.closeQuietly(rs);
        JdbcUtil.closeQuietly(stmt);
    }
    return sqlConnection;
}

From source file:HSqlManager.java

public static void uniqueDB(Connection connection, int bps) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    if (!written) {
        CSV.makeDirectory(new File(base + "/PhageData"));
        INSTANCE.readFileAll(INSTANCE.path).stream().forEach(x -> {
            try {
                CSV.writeDataCSV(x[1], Fasta.process(x[1], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
    }
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter log = new PrintWriter(new File("javalog.log"));
    stat.execute("SET FILES LOG FALSE;\n");
    PreparedStatement st = db
            .prepareStatement("UPDATE Primerdb.Primers" + " SET UniqueP = true, Tm = ?, GC =?, Hairpin =?"
                    + "WHERE Cluster = ? and Strain = ? and " + "Sequence = ? and Bp = ?");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    phages.stream().map(x -> x[0]).collect(Collectors.toSet()).stream().forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).parallelStream()
                .forEach(z -> {
                    try {
                        Set<String> nonclustphages = phages.stream()
                                .filter(a -> a[0].equals(x) && !a[1].equals(z)).map(a -> a[2])
                                .collect(Collectors.toSet());
                        ResultSet resultSet = stat.executeQuery("Select Sequence from primerdb.primers"
                                + " where Strain ='" + x + "' and Cluster ='" + z + "' and CommonP = true"
                                + " and Bp = " + Integer.valueOf(bps) + " ");
                        Set<CharSequence> primers = Collections.synchronizedSet(new HashSet<>());
                        while (resultSet.next()) {
                            primers.add(resultSet.getString("Sequence"));
                        }
                        for (String phage : nonclustphages) {
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv")
                                    .parallelStream().filter(primer -> primers.contains(primer))
                                    .forEach(primers::remove);

                        }
                        int i = 0;
                        for (CharSequence a : primers) {
                            try {
                                st.setDouble(1, HSqlPrimerDesign.primerTm(a, 0, 800, 1.5, 0.2));
                                st.setDouble(2, HSqlPrimerDesign.gcContent(a));
                                st.setBoolean(3, HSqlPrimerDesign.calcHairpin((String) a, 4));
                                st.setString(4, z);
                                st.setString(5, x);
                                st.setString(6, a.toString());
                                st.setInt(7, bps);
                                st.addBatch();
                            } catch (SQLException e) {
                                e.printStackTrace();
                                System.out.println("Error occurred at " + x + " " + z);
                            }
                            i++;
                            if (i == 1000) {
                                i = 0;
                                st.executeBatch();
                                db.commit();
                            }
                        }
                        if (i > 0) {
                            st.executeBatch();
                            db.commit();
                        }
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    log.println(z);
                    log.flush();
                    System.gc();
                });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Unique Updated");
}

From source file:cc.osint.graphd.db.SQLDB.java

public void shutdown() throws SQLException {
    Statement st = conn.createStatement();
    st.execute("SHUTDOWN");
    conn.close();
}

From source file:com.earldouglas.xjdl.io.JdbcLicenseLoaderTest.java

@Override
protected LicenseLoader[] validLicenseLoaders() throws Exception {
    String key = "encrypt!encrypt!";

    License license = new BasicLicense();
    String encryptedLicense = new LicenseCreator().encryptLicense(license, key);

    Connection connection = basicDataSource.getConnection();
    Statement statement = connection.createStatement();
    statement.execute("insert into licenses values('" + encryptedLicense + "')");

    JdbcLicenseLoader jdbcLicenseLoader = new JdbcLicenseLoader();
    jdbcLicenseLoader.setKey("encrypt!encrypt!");
    jdbcLicenseLoader.setDataSource(basicDataSource);

    return new LicenseLoader[] { jdbcLicenseLoader };
}

From source file:com.espertech.esper.epl.db.TestDatabaseDMConnFactory.java

private void tryAndCloseConnection(Connection connection) throws Exception {
    Statement stmt = connection.createStatement();
    stmt.execute("select 1 from dual");
    ResultSet result = stmt.getResultSet();
    result.next();
    assertEquals(1, result.getInt(1));
    result.close();
    stmt.close();
    connection.close();
}

From source file:com.buddycloud.mediaserver.business.jdbc.schema.Schema.java

@SuppressWarnings("unchecked")
public void runScript(MetaDataSource dataSource, String sqlFile)
        throws IOException, FileNotFoundException, SQLException {
    List<String> readLines = IOUtils.readLines(new FileInputStream(sqlFile));

    Connection connection = dataSource.getConnection();
    StringBuilder statementStr = new StringBuilder();

    for (String line : readLines) {
        statementStr.append(line);
        if (line.endsWith(SQL_DELIMITER)) {
            Statement statement = connection.createStatement();
            statement.execute(statementStr.toString());
            statement.close();
            statementStr.setLength(0);
        }
    }

    connection.close();
}

From source file:cn.kane.osgi.controller.test.DataPreparor.java

private void executeSql(String sql, Connection connection) throws SQLException {
    Statement statement = connection.createStatement();
    statement.execute(sql);
}

From source file:org.jdal.logic.DbAdminManagerSupport.java

public boolean execute(final String sql) throws DataAccessException {
    if (log.isDebugEnabled()) {
        log.debug("Executing SQL statement [" + sql + "]");
    }
    class ExecuteStatementCallback implements StatementCallback<Boolean>, SqlProvider {
        public Boolean doInStatement(Statement stmt) throws SQLException {
            return stmt.execute(sql);
        }

        public String getSql() {
            return sql;
        }
    }

    return template.execute(new ExecuteStatementCallback());
}