Example usage for java.sql Connection setAutoCommit

List of usage examples for java.sql Connection setAutoCommit

Introduction

On this page you can find example usages of java.sql Connection setAutoCommit.

Prototype

void setAutoCommit(boolean autoCommit) throws SQLException;

Document

Sets this connection's auto-commit mode to the given state.
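
The typical pattern is to disable auto-commit so that several statements form a single transaction, commit on success, roll back on failure, and restore the connection's original mode afterwards. Below is a minimal sketch of that pattern; the dataSource variable and the account table are hypothetical placeholders, not taken from the examples that follow.

// Minimal sketch; "dataSource" (a javax.sql.DataSource) and the "account"
// table are hypothetical placeholders.
try (Connection conn = dataSource.getConnection()) {
    boolean previousMode = conn.getAutoCommit();
    conn.setAutoCommit(false); // group the following statements into one transaction
    try (PreparedStatement pst = conn.prepareStatement(
            "UPDATE account SET balance = balance + ? WHERE id = ?")) {
        pst.setInt(1, 100);
        pst.setInt(2, 42);
        pst.executeUpdate();
        conn.commit(); // make the changes permanent
    } catch (SQLException e) {
        conn.rollback(); // undo everything since setAutoCommit(false)
        throw e;
    } finally {
        conn.setAutoCommit(previousMode); // hand the connection back in its original mode
    }
}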

Usage

From source file:com.concursive.connect.web.modules.common.social.rating.dao.Rating.java

public static synchronized RatingBean save(Connection db, int userId, int projectId, int objectId, String vote,
        String table, String uniqueField, int setInappropriateColumn) throws SQLException {
    boolean commit = false;
    try {
        commit = db.getAutoCommit();
        if (commit) {
            db.setAutoCommit(false);
        }
        // Determine the current value
        int existingVote = queryUserRating(db, userId, objectId, table, uniqueField);
        int newVote = Integer.parseInt(vote);
        PreparedStatement pst = null;
        ResultSet rs = null;
        // Determine if an update, insert, or delete is required
        if (existingVote == -1) {
            // Perform an insert
            pst = db.prepareStatement("INSERT INTO " + table + "_rating " + "(project_id, "
                    + (Project.PRIMARY_KEY.equals(uniqueField) ? "" : uniqueField + ", ")
                    + ((setInappropriateColumn != Constants.UNDEFINED) ? "inappropriate, " : "")
                    + "rating, enteredby) " + "VALUES (?, "
                    + (Project.PRIMARY_KEY.equals(uniqueField) ? "" : "?, ")
                    + ((setInappropriateColumn != Constants.UNDEFINED) ? "?, " : "") + "?, ?)");
            int i = 0;
            pst.setInt(++i, projectId);
            if (!Project.PRIMARY_KEY.equals(uniqueField)) {
                pst.setInt(++i, objectId);
            }
            if (setInappropriateColumn != Constants.UNDEFINED) {
                pst.setBoolean(++i, (setInappropriateColumn == Constants.TRUE));
            }
            pst.setInt(++i, newVote);
            pst.setInt(++i, userId);
            pst.execute();
            pst.close();
        } else if (existingVote != newVote) {
            // Try an update
            pst = db.prepareStatement("UPDATE " + table + "_rating " + "SET rating = ?, entered = "
                    + DatabaseUtils.getCurrentTimestamp(db) + " "
                    + ((setInappropriateColumn != Constants.UNDEFINED) ? ", inappropriate = ? " : "") + "WHERE "
                    + uniqueField + " = ? AND enteredby = ? ");
            int i = 0;
            pst.setInt(++i, newVote);
            if (setInappropriateColumn != Constants.UNDEFINED) {
                pst.setBoolean(++i, (setInappropriateColumn == Constants.TRUE));
            }
            pst.setInt(++i, objectId);
            pst.setInt(++i, userId);
            pst.execute();
            pst.close();
        }
        if (existingVote != newVote) {
            // Update the object count and value
            pst = db.prepareStatement("UPDATE " + table + " "
                    + "SET rating_count = rating_count + ?, rating_value = rating_value + ?, "
                    + "rating_avg = ((rating_value + ?) / (rating_count + ?)) " + "WHERE " + uniqueField
                    + " = ? ");
            int i = 0;
            if (existingVote == -1) {
                if (newVote == INAPPROPRIATE_COMMENT) {
                    //rating count is incremented, but no change in rating value, therefore, rating average decreases
                    pst.setInt(++i, 1);
                    pst.setInt(++i, 0);
                    pst.setInt(++i, 0);
                    pst.setInt(++i, 1);
                } else {
                    pst.setInt(++i, 1);
                    pst.setInt(++i, newVote);
                    pst.setInt(++i, newVote);
                    pst.setInt(++i, 1);
                }
            } else {
                if (newVote == INAPPROPRIATE_COMMENT || existingVote == INAPPROPRIATE_COMMENT) {
                    if (newVote == INAPPROPRIATE_COMMENT) {
                        //The effects of the previous rating are reversed.
                        pst.setInt(++i, 0);
                        pst.setInt(++i, (-1) * existingVote);
                        pst.setInt(++i, (-1) * existingVote);
                        pst.setInt(++i, 0);
                    } else if (existingVote == INAPPROPRIATE_COMMENT) {
                        //The new rating by the user is recorded,
                        //as an existing inappropriate comment was never considered towards rating value, no additional math is required
                        pst.setInt(++i, 0);
                        pst.setInt(++i, newVote);
                        pst.setInt(++i, newVote);
                        pst.setInt(++i, 0);
                    }
                } else {
                    pst.setInt(++i, 0);
                    pst.setInt(++i, newVote - existingVote);
                    pst.setInt(++i, newVote - existingVote);
                    pst.setInt(++i, 0);
                }
            }
            pst.setInt(++i, objectId);
            pst.execute();
            pst.close();
        }

        if (setInappropriateColumn != Constants.UNDEFINED) {
            int inappropriateCount = 0;
            pst = db.prepareStatement("SELECT count(*) AS ic " + "FROM " + table + "_rating " + "WHERE "
                    + uniqueField + " = ? AND inappropriate = ?");
            int i = 0;
            pst.setInt(++i, objectId);
            pst.setBoolean(++i, true);
            rs = pst.executeQuery();
            if (rs.next()) {
                inappropriateCount = rs.getInt("ic");
            }
            rs.close();
            pst.close();

            pst = db.prepareStatement("UPDATE " + table + " " + "SET  inappropriate_count = ? " + "WHERE "
                    + uniqueField + " = ? ");
            i = 0;
            pst.setInt(++i, inappropriateCount);
            pst.setInt(++i, objectId);
            pst.execute();
            pst.close();
        }

        // Retrieve the values
        pst = db.prepareStatement(
                "SELECT rating_count, rating_value " + "FROM " + table + " WHERE " + uniqueField + " = ?");
        pst.setInt(1, objectId);
        rs = pst.executeQuery();
        int count = 0;
        int value = 0;
        if (rs.next()) {
            count = rs.getInt("rating_count");
            value = rs.getInt("rating_value");
        }
        rs.close();
        pst.close();
        if (commit) {
            db.commit();
        }
        // Share the rating bean
        RatingBean rating = new RatingBean();
        rating.setItemId(objectId);
        rating.setCount(count);
        rating.setValue(value);
        return rating;
    } catch (Exception e) {
        if (commit) {
            db.rollback();
        }
        LOG.error("save", e);
        throw new SQLException(e.getMessage(), e);
    } finally {
        if (commit) {
            db.setAutoCommit(true);
        }
    }
}

From source file:net.firejack.platform.core.utils.db.DBUtils.java

private static void insertDataToTargetTable(TablesMapping mapping, Connection sourceConnection,
        Connection targetConnection) throws SQLException {
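    // Copies every row of the mapped source table into the target table, adding the
    // inserts to a batch and committing each batch inside one manual transaction.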
    Map<Column, Column> columnMapping = mapping.getColumnMapping();
    if (columnMapping.isEmpty()) {
        logger.warn("No columns are detected - no data to insert.");
    } else {
        ResultSet rs = selectDataFromSource(sourceConnection, mapping);

        String insertQuery = populateInsertQuery(mapping);
        PreparedStatement insertStatement = targetConnection.prepareStatement(insertQuery);
        targetConnection.setAutoCommit(false);
        try {
            int currentStep = 1;
            while (rs.next()) {
                for (int i = 1; i <= columnMapping.size(); i++) {
                    insertStatement.setObject(i, rs.getObject(i));
                }
                insertStatement.addBatch();
                if (++currentStep > DEFAULT_BATCH_SIZE) {
                    insertStatement.executeBatch();
                    targetConnection.commit();
                    currentStep = 1;
                }
            }
            if (currentStep != 1) {
                insertStatement.executeBatch();
                targetConnection.commit();
            }
        } catch (SQLException e) {
            logger.error(e.getMessage(), e);
            targetConnection.rollback();
        } finally {
            insertStatement.close();
            rs.close();
        }
    }
}

From source file:com.aurel.track.admin.customize.category.filter.PredefinedQueryBL.java

/**
 * Adds the hardcoded filters. Uses JDBC because negative objectIDs have to be
 * inserted.
 */
public static void addHardcodedFilters() {
    LOGGER.info("Add hardcoded filters");
    FilterFacade filterFacade = FilterFacadeFactory.getInstance()
            .getFilterFacade(TQueryRepositoryBean.QUERY_PURPOSE.TREE_FILTER, true);
    List<TPersonBean> personBeans = PersonBL.loadPersons();
    // Get not closed stateIDs
    List<TStateBean> notClosedStateBeans = StatusBL.loadNotClosedStates();
    List<Integer> notClosedStateIDs = GeneralUtils.createIntegerListFromBeanList(notClosedStateBeans);
    Integer[] notClosedStatesArr = GeneralUtils.createIntegerArrFromCollection(notClosedStateIDs);
    // get closed stateIDs
    List<TStateBean> closedStateBeans = StatusBL.loadClosedStates();
    List<Integer> closedStateIDs = GeneralUtils.createIntegerListFromBeanList(closedStateBeans);
    Integer[] closedStatesArr = GeneralUtils.createIntegerArrFromCollection(closedStateIDs);
    List<String> predefinedFilterClobStms = new ArrayList<String>();
    List<String> predefinedFilterStms = new ArrayList<String>();
    ILabelBean allItemsFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.ALL_ITEMS);
    if (allItemsFilterBean == null) {
        LOGGER.info("Add 'All issues' filter");
        predefinedFilterClobStms
                .add(addPredefinedQueryClob(PREDEFINED_QUERY.ALL_ITEMS, getAllIssuesExpression()));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.ALL_ITEMS, "All issues"));
    }
    ILabelBean unresolvedBean = filterFacade.getByKey(PREDEFINED_QUERY.OUTSTANDING);
    if (unresolvedBean == null) {
        LOGGER.info("Add 'Outstanding' filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.OUTSTANDING,
                getOutstandingExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.OUTSTANDING, "Outstanding"));
    }
    ILabelBean myFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.MY_ITEMS);
    if (myFilterBean == null) {
        LOGGER.info("Add 'My items' filter");
        predefinedFilterClobStms.add(
                addPredefinedQueryClob(PREDEFINED_QUERY.MY_ITEMS, getMyItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.MY_ITEMS, "My items"));
    }
    ILabelBean managersBean = filterFacade.getByKey(PREDEFINED_QUERY.MANAGERS_ITEMS);
    if (managersBean == null) {
        LOGGER.info("Add manager filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.MANAGERS_ITEMS,
                getManagerItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.MANAGERS_ITEMS, "I''m the manager"));
    }
    ILabelBean responsibleBean = filterFacade.getByKey(PREDEFINED_QUERY.RESPONSIBLES_ITEMS);
    if (responsibleBean == null) {
        LOGGER.info("Add responsible filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.RESPONSIBLES_ITEMS,
                getResponsibleItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.RESPONSIBLES_ITEMS, "I''m responsible"));
    }
    ILabelBean reporterBean = filterFacade.getByKey(PREDEFINED_QUERY.AUTHOR_ITEMS);
    if (reporterBean == null) {
        LOGGER.info("Add author filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.AUTHOR_ITEMS,
                getReporterItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.AUTHOR_ITEMS, "I''m the author"));
    }
    ILabelBean watcherBean = filterFacade.getByKey(PREDEFINED_QUERY.WATCHER_ITEMS);
    if (watcherBean == null) {
        LOGGER.info("Add watcher filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.WATCHER_ITEMS,
                getWatcherItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.WATCHER_ITEMS, "I''m watcher"));
    }
    ILabelBean meetingsBean = filterFacade.getByKey(PREDEFINED_QUERY.MEETINGS);
    if (meetingsBean == null) {
        LOGGER.info("Add 'Meetings' filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.MEETINGS,
                getMeetingItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.MEETINGS, "Meetings"));
    }
    ILabelBean unscheduledBean = filterFacade.getByKey(PREDEFINED_QUERY.UNSCHEDULED);
    if (unscheduledBean == null) {
        LOGGER.info("Add 'Unscheduled' filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.UNSCHEDULED,
                getUnscheduledItemsExpression(notClosedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.UNSCHEDULED, "Unscheduled"));
    }
    ILabelBean closedRecentlyBean = filterFacade.getByKey(PREDEFINED_QUERY.CLOSED_RECENTLY);
    if (closedRecentlyBean == null) {
        LOGGER.info("Add 'Closed recently' filter");
        predefinedFilterClobStms.add(addPredefinedQueryClob(PREDEFINED_QUERY.CLOSED_RECENTLY,
                getRecentlyClosedItemsExpression(closedStatesArr)));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.CLOSED_RECENTLY, "Closed recently"));
    }
    ILabelBean addedRecentlyBean = filterFacade.getByKey(PREDEFINED_QUERY.ADDED_RECENTLY);
    if (addedRecentlyBean == null) {
        LOGGER.info("Add 'Added recently' filter");
        predefinedFilterClobStms.add(
                addPredefinedQueryClob(PREDEFINED_QUERY.ADDED_RECENTLY, getRecentlyAddedItemsExpression()));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.ADDED_RECENTLY, "Added recently"));
    }
    ILabelBean updatedRecentlyBean = filterFacade.getByKey(PREDEFINED_QUERY.UPDATED_RECENTLY);
    if (updatedRecentlyBean == null) {
        LOGGER.info("Add 'Updated recently' filter");
        predefinedFilterClobStms.add(
                addPredefinedQueryClob(PREDEFINED_QUERY.UPDATED_RECENTLY, getRecentlyUpdatedItemsExpression()));
        predefinedFilterStms.add(addPredefinedQuery(PREDEFINED_QUERY.UPDATED_RECENTLY, "Updated recently"));
    }
    ILabelBean scrumBoardFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.SCRUM_BOARD);
    if (scrumBoardFilterBean == null) {
        LOGGER.info("Add 'Scrum board' filter");
        predefinedFilterClobStms
                .add(addPredefinedQueryClob(PREDEFINED_QUERY.SCRUM_BOARD, getAllIssuesExpression()));
        predefinedFilterStms.add(addPredefinedFilterWitView(PREDEFINED_QUERY.SCRUM_BOARD, "Scrum board",
                IssueListViewDescriptor.CARD));
    }

    ILabelBean kanbanBoardFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.KANBAN_BOARD);
    if (kanbanBoardFilterBean == null) {
        LOGGER.info("Add 'Kanban board' filter");
        predefinedFilterClobStms
                .add(addPredefinedQueryClob(PREDEFINED_QUERY.KANBAN_BOARD, getAllIssuesExpression()));
        predefinedFilterStms.add(addPredefinedFilterWitView(PREDEFINED_QUERY.KANBAN_BOARD, "Kanban board",
                IssueListViewDescriptor.CARD));
    }
    Connection cono = null;
    try {
        cono = InitDatabase.getConnection();
        Statement ostmt = cono.createStatement();
        cono.setAutoCommit(false);
        for (String filterClobStmt : predefinedFilterClobStms) {
            ostmt.executeUpdate(filterClobStmt);
        }
        for (String filterStmt : predefinedFilterStms) {
            ostmt.executeUpdate(filterStmt);
        }
        cono.commit();
        cono.setAutoCommit(true);
    } catch (Exception e) {
        LOGGER.debug(ExceptionUtils.getStackTrace(e));
    } finally {
        try {
            if (cono != null) {
                cono.close();
            }
        } catch (Exception e) {
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    }
    // include in menu for all users
    List<Integer> filterIDs = MenuitemFilterBL.getFilterIDsToSubscribe();
    LOGGER.info("Number of filters to subscribe " + filterIDs.size());
    if (!filterIDs.isEmpty()) {
        for (TPersonBean personBean : personBeans) {
            Integer personID = personBean.getObjectID();
            MenuitemFilterBL.subscribePersonsToFilters(personID, filterIDs);
        }
    }
    scrumBoardFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.SCRUM_BOARD);
    if (scrumBoardFilterBean != null) {
        addCardLayout(scrumBoardFilterBean.getObjectID(), SystemFields.INTEGER_STATE,
                SystemFields.INTEGER_PRIORITY, (List) notClosedStateBeans);
    }
    kanbanBoardFilterBean = filterFacade.getByKey(PREDEFINED_QUERY.KANBAN_BOARD);
    if (kanbanBoardFilterBean != null) {
        addCardLayout(kanbanBoardFilterBean.getObjectID(), SystemFields.INTEGER_RESPONSIBLE,
                SystemFields.INTEGER_PRIORITY, (List) PersonBL.loadActivePersons());
    }
}

From source file:com.ibm.research.rdf.store.runtime.service.sql.StoreHelper.java

public static void releaseAutoCommit(Connection conn, boolean changedAutoCommit) {
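    // Commits any pending work and re-enables auto-commit, but only when the caller
    // had previously switched it off.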
    if (changedAutoCommit) {

        try {
            conn.commit();
            conn.setAutoCommit(true);
        } catch (SQLException e) {
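            // Best-effort cleanup: failures while committing or restoring auto-commit are ignored.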
        }
    }
}

From source file:com.trackplus.ddl.DataWriter.java

private static void executeUpdate(Connection con, Statement stmt, String s) throws DDLException {
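    // Runs a single SQL statement; on failure it logs the offending statement, rolls
    // back, restores auto-commit, and rethrows the error as a DDLException.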
    try {
        stmt.executeUpdate(s);
    } catch (SQLException e) {
        LOGGER.error("Error executing script line: " + e.getMessage());
        LOGGER.warn("-------------\n\n");
        LOGGER.warn(s);
        LOGGER.warn("-------------\n\n");
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
        try {
            stmt.close();
            con.rollback();
            con.setAutoCommit(true);
        } catch (SQLException ex) {
            throw new DDLException(e.getMessage(), e);
        }
        throw new DDLException(e.getMessage(), e);
    }
}

From source file:HSqlManager.java

public static void getClusterSizes(Connection connection) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
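    // Writes clustercount.log: for each cluster, its phage count and the number of
    // primers flagged both common and unique at 18, 22, and 25 bp.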

    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter unique = new PrintWriter(new File("clustercount.log"));
    ResultSet call = stat.executeQuery("Select DISTINCT Cluster From Primerdb.Primers;");
    Set<String> clust = new HashSet<>();
    while (call.next()) {
        clust.add(call.getString("Cluster"));
    }
    unique.print("Cluster\t");
    unique.print("Phage Count\t");
    unique.print("18bp Unique Count\t");
    unique.print("22bp Unique Count\t");
    unique.print("25bp Unique Count\n");
    unique.flush();
    for (String c : clust) {
        call = stat.executeQuery("Select * From Primerdb.Primers where Cluster = '" + c + "' and Bp = "
                + Integer.toString(18) + " and CommonP = true and UniqueP = true;");
        int count = 0;
        while (call.next()) {
            count++;
        }
        call = stat.executeQuery("Select * From Primerdb.Primers where Cluster = '" + c + "' and Bp = "
                + Integer.toString(22) + " and CommonP = true and UniqueP = true;");
        int count2 = 0;
        while (call.next()) {
            count2++;
        }
        call = stat.executeQuery("Select * From Primerdb.Primers where Cluster = '" + c + "' and Bp = "
                + Integer.toString(25) + " and CommonP = true and UniqueP = true;");
        int count3 = 0;
        while (call.next()) {
            count3++;
        }
        call = stat.executeQuery("Select * From Primerdb.Phages where Cluster = '" + c + "';");
        int count4 = 0;
        while (call.next()) {
            count4++;
        }
        unique.print(c + "\t");
        unique.print(count4 + "\t");
        unique.print(count + "\t");
        unique.print(count2 + "\t");
        unique.print(count3 + "\n");
        unique.flush();
    }
    System.out.println("Meta recorded");
}

From source file:HSqlManager.java

private static void commonInitialize(int bps, Connection connection) throws SQLException, IOException {
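    // For each strain and cluster, intersects the candidate-primer CSVs of every phage
    // in the cluster and batch-inserts the primers common to all of them.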
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    INSTANCE.parseAllPhages(bps);
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    phages.parallelStream().map(x -> x[0]).collect(Collectors.toSet()).parallelStream().forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).forEach(z -> {
            try {
                List<String> clustphages = phages.stream().filter(a -> a[0].equals(x) && a[1].equals(z))
                        .map(a -> a[2]).collect(Collectors.toList());
                Set<String> primers = Collections.synchronizedSet(CSV
                        .readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
                clustphages.remove(0);
                clustphages.parallelStream().forEach(phage -> {
                    primers.retainAll(
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
                });
                int i = 0;
                for (CharSequence a : primers) {
                    try {
                        //finish update
                        st.setInt(1, bps);
                        st.setString(2, a.toString());
                        st.setString(3, x);
                        st.setString(4, z);
                        st.addBatch();
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    i++;
                    if (i == 1000) {
                        i = 0;
                        st.executeBatch();
                        db.commit();
                    }
                }
                if (i > 0) {
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
                System.out.println("Error occurred at " + x + " " + z);
            }
        });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
}

From source file:HSqlManager.java

public static void uniqueDB(Connection connection, int bps) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
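    // Flags cluster-common primers as unique when no phage outside the cluster
    // contains them, and records melting temperature, GC content, and hairpin results.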
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    if (!written) {
        CSV.makeDirectory(new File(base + "/PhageData"));
        INSTANCE.readFileAll(INSTANCE.path).stream().forEach(x -> {
            try {
                CSV.writeDataCSV(x[1], Fasta.process(x[1], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
    }
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter log = new PrintWriter(new File("javalog.log"));
    stat.execute("SET FILES LOG FALSE;\n");
    PreparedStatement st = db
            .prepareStatement("UPDATE Primerdb.Primers" + " SET UniqueP = true, Tm = ?, GC =?, Hairpin =?"
                    + "WHERE Cluster = ? and Strain = ? and " + "Sequence = ? and Bp = ?");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    phages.stream().map(x -> x[0]).collect(Collectors.toSet()).stream().forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).parallelStream()
                .forEach(z -> {
                    try {
                        Set<String> nonclustphages = phages.stream()
                                .filter(a -> a[0].equals(x) && !a[1].equals(z)).map(a -> a[2])
                                .collect(Collectors.toSet());
                        ResultSet resultSet = stat.executeQuery("Select Sequence from primerdb.primers"
                                + " where Strain ='" + x + "' and Cluster ='" + z + "' and CommonP = true"
                                + " and Bp = " + Integer.valueOf(bps) + " ");
                        Set<CharSequence> primers = Collections.synchronizedSet(new HashSet<>());
                        while (resultSet.next()) {
                            primers.add(resultSet.getString("Sequence"));
                        }
                        for (String phage : nonclustphages) {
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv")
                                    .parallelStream().filter(primer -> primers.contains(primer))
                                    .forEach(primers::remove);

                        }
                        int i = 0;
                        for (CharSequence a : primers) {
                            try {
                                st.setDouble(1, HSqlPrimerDesign.primerTm(a, 0, 800, 1.5, 0.2));
                                st.setDouble(2, HSqlPrimerDesign.gcContent(a));
                                st.setBoolean(3, HSqlPrimerDesign.calcHairpin((String) a, 4));
                                st.setString(4, z);
                                st.setString(5, x);
                                st.setString(6, a.toString());
                                st.setInt(7, bps);
                                st.addBatch();
                            } catch (SQLException e) {
                                e.printStackTrace();
                                System.out.println("Error occurred at " + x + " " + z);
                            }
                            i++;
                            if (i == 1000) {
                                i = 0;
                                st.executeBatch();
                                db.commit();
                            }
                        }
                        if (i > 0) {
                            st.executeBatch();
                            db.commit();
                        }
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    log.println(z);
                    log.flush();
                    System.gc();
                });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Unique Updated");
}

From source file:HSqlManager.java

public static void commonClusterNewPhages(Connection connection, int bps) throws SQLException, IOException,
        ClassNotFoundException, IllegalAccessException, InstantiationException {
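    // Re-checks cluster-common primers against each newly added phage and clears the
    // CommonP/UniqueP flags for primers the new phage does not contain.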
    Connection db = connection;
    String base = new File("").getAbsolutePath();
    db.setAutoCommit(false);
    PreparedStatement st = db.prepareStatement("UPDATE Primerdb.Primers SET CommonP = False,"
            + " UniqueP = False" + " WHERE Cluster = ? and " + "Strain = ? and Sequence = ? and Bp =?");
    Statement stat = db.createStatement();
    if (newPhages != null) {
        List<String[]> phages = newPhages;
        phages.forEach(x -> {
            try {
                CSV.writeDataCSV(x[0], Fasta.process(x[0], bps), bps);
                CSV.writeDataCSV(x[0], Fasta.processPrimers(x[0], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
            Set<CharSequence> primers = new HashSet<>();
            try {
                ResultSet rs = stat.executeQuery(
                        "SELECT * FROM Primerdb.Primers WHERE" + " Sequence = '" + x[1] + "' and Clusters = '"
                                + x[2] + "' and CommonP = True" + " and Bp = " + Integer.valueOf(bps));
                while (rs.next()) {
                    primers.add((CharSequence) rs.getString("Sequence"));
                }
                primers.removeAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + x[0] + ".csv"));
                if (primers.size() != 0) {
                    primers.forEach(y -> {
                        try {
                            //finish update
                            st.setString(1, x[1]);
                            st.setString(2, x[2]);
                            st.setString(3, y.toString());
                            st.setInt(4, bps);
                            st.addBatch();
                        } catch (SQLException e) {
                            e.printStackTrace();
                        }
                    });
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        });
    }
    System.out.println("Common Updated");
    st.close();
}

From source file:HSqlManager.java

@SuppressWarnings("Duplicates")
@Deprecated
private static void mycoCommonInitialize(int bps, Connection connection) throws SQLException, IOException {
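    // Deprecated variant of commonInitialize limited to the strain containing the
    // phage "xkcd"; assumes the per-phage CSV files have already been generated.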
    long time = System.currentTimeMillis();
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    //        INSTANCE.parseAllPhages(bps);
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers"
            + "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" + " Values(?,?,true,false,false,?,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    String strain = "";
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
        if (r[2].equals("xkcd")) {
            strain = r[0];
        }
    }
    call.close();
    String x = strain;
    Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet());
    Map<String, List<String>> clusters = new HashMap<>();
    clust.parallelStream().forEach(cluster -> clusters.put(cluster, phages.stream()
            .filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2]).collect(Collectors.toList())));
    for (String z : clusters.keySet()) {
        try {
            List<String> clustphages = clusters.get(z);
            Set<String> primers = Collections.synchronizedSet(
                    CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + clustphages.get(0) + ".csv"));
            clustphages.remove(0);
            for (String phage : clustphages) {
                primers.retainAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv"));
            }
            int i = 0;
            for (String a : primers) {
                try {
                    //finish update
                    st.setInt(1, bps);
                    st.setString(2, a);
                    st.setString(3, x);
                    st.setString(4, z);
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + z);
                }
                i++;
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
            if (i > 0) {
                st.executeBatch();
                db.commit();
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("Error occurred at " + x + " " + z);
        }
        System.out.println(z);
    }
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Common Updated");
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
}