Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

On this page you can find usage examples for java.sql PreparedStatement addBatch.

Prototype

void addBatch() throws SQLException;

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.
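
As a quick orientation before the examples, here is a minimal sketch of the pattern they all follow: bind the parameters for one row, call addBatch() to queue that parameter set, then call executeBatch() (plus commit() when auto-commit is off) to flush the queued commands, usually in fixed-size chunks. The users table, its columns, and the insertAll helper below are hypothetical; only the JDBC calls are the point.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class AddBatchSketch {

    /** Inserts (id, name) rows in chunks of 1000 using addBatch()/executeBatch(). */
    static void insertAll(Connection conn, List<Object[]> rows) throws SQLException {
        final int chunkSize = 1000;
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(
                "INSERT INTO users (id, name) VALUES (?, ?)")) {
            int count = 0;
            for (Object[] row : rows) {
                ps.setLong(1, (Long) row[0]);    // bind the parameters for this row
                ps.setString(2, (String) row[1]);
                ps.addBatch();                   // queue this parameter set
                if (++count % chunkSize == 0) {  // flush periodically to bound memory use
                    ps.executeBatch();
                    conn.commit();
                }
            }
            ps.executeBatch();                   // flush any remaining queued rows
            conn.commit();
        }
    }
}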

Usage

From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java

public/* synchronized */Map<String, HotRestoreInfo> pause(Map<String, HotRestoreInfo> hriMap) {
    if (isClosed())
        return null;
    if (!USE_CHECKPOINTS_AS_PAUSE_TABLE)
        return pauseBatchIndividually(hriMap);

    String updateCmdNoLogIndex = "update " + (USE_CHECKPOINTS_AS_PAUSE_TABLE ? getCTable() : getPTable())
            + " set serialized=?, has_serialized=true where  paxos_id=?";

    Map<String, HotRestoreInfo> paused = new HashMap<String, HotRestoreInfo>();
    HotRestoreInfo[] hris = hriMap.values().toArray(new HotRestoreInfo[0]);
    PreparedStatement pstmt = null;
    Connection conn = null;
    try {
        Map<String, HotRestoreInfo> batch = new HashMap<String, HotRestoreInfo>();
        for (int i = 0; i < hris.length; i++) {
            String paxosID = hris[i].paxosID;
            if (conn == null) {
                conn = this.getDefaultConn();
                conn.setAutoCommit(false);
                pstmt = conn.prepareStatement(updateCmdNoLogIndex);
            }
            pstmt.setString(1, hriMap.get(paxosID).toString());
            pstmt.setString(2, paxosID);

            pstmt.addBatch();
            batch.put(paxosID, hris[i]);
            if ((i + 1) % MAX_DB_BATCH_SIZE == 0 || (i + 1) == hriMap.size()) {
                pstmt.executeBatch();
                conn.commit();
                pstmt.clearBatch();
                paused.putAll(batch);

                log.log(Level.FINE, "{0} paused [{1}] ,[{2}]",
                        new Object[] { this, Util.truncatedLog(batch.keySet(), 16) });
                batch.clear();
            }
        }
    } catch (SQLException e) {
        log.severe(this + " failed to pause batch " + Util.truncatedLog(hriMap.keySet(), 10));
        e.printStackTrace();
    } finally {
        cleanup(pstmt);
        cleanup(conn);
    }
    paused.putAll(this.pauseBatchIndividually(this.diffHRI(hriMap, paused)));
    return paused;
}

From source file:com.flexive.ejb.beans.PhraseEngineBean.java

private void updatePhrasePosition(Connection con, int category, long assignmentOwner, long nodeId,
        long nodeMandator, long phraseId, long phraseMandator, long pos, boolean checkPositioning)
        throws SQLException {
    PreparedStatement psUpdate = null, psFetch = null;
    try {
        //                                                                     1                2            3                  4              5               6         7
        psUpdate = con.prepareStatement("UPDATE " + TBL_PHRASE_MAP
                + " SET POS=? WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND PHRASEID=? AND PMANDATOR=? AND CAT=?");
        psUpdate.setLong(2, assignmentOwner);
        psUpdate.setLong(3, nodeId);
        psUpdate.setLong(4, nodeMandator);
        psUpdate.setInt(7, category);
        if (checkPositioning) {
            //                                                                                                         1            2                  3         4
            psFetch = con.prepareStatement("SELECT POS, PHRASEID, PMANDATOR FROM " + TBL_PHRASE_MAP
                    + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND CAT=? AND DIRECT=TRUE ORDER BY POS");
            psFetch.setLong(1, assignmentOwner);
            psFetch.setLong(2, nodeId);
            psFetch.setLong(3, nodeMandator);
            psFetch.setInt(4, category);
            ResultSet rs = psFetch.executeQuery();
            long currPos = 0;
            while (rs != null && rs.next()) {
                long _pos = rs.getLong(1);
                long _phraseId = rs.getLong(2);
                long _phraseMandatorId = rs.getLong(3);
                //                    if (phraseId == _phraseId && phraseMandator == _phraseMandatorId)
                //                        continue;
                currPos++;
                if (currPos == pos)
                    continue;
                if (_pos == currPos)
                    continue;
                psUpdate.setLong(1, currPos);
                psUpdate.setLong(5, _phraseId);
                psUpdate.setLong(6, _phraseMandatorId);
                psUpdate.addBatch();
            }
        }
        psUpdate.setLong(1, pos);
        psUpdate.setLong(5, phraseId);
        psUpdate.setLong(6, phraseMandator);
        psUpdate.addBatch();
        psUpdate.executeBatch();
    } finally {
        Database.closeObjects(PhraseEngineBean.class, psFetch, psUpdate);
    }
}

From source file:HSqlManager.java

public static void primerAnalysis(Connection connection, int bps) throws SQLException, IOException {
    long time = System.currentTimeMillis();
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    CSV.makeDirectory(new File(base + "/PhageData"));
    INSTANCE = ImportPhagelist.getInstance();
    INSTANCE.parseAllPhages(bps);
    System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
    time = System.currentTimeMillis();
    written = true;
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    stat.execute("SET FILES LOG FALSE\n");
    //        PreparedStatement st = db.prepareStatement("Insert INTO Primerdb.Primers" +
    //                "(Bp,Sequence, CommonP, UniqueP, Picked, Strain, Cluster)" +
    //                " Values(?,?,true,false,false,?,?)");
    PreparedStatement st = db.prepareStatement(
            "INSERT INTO Primerdb.Primers" + "(Bp,Sequence,Strain,Cluster,Tm,GC,UniqueP,CommonP,Hairpin) "
                    + "VALUES(?,?,?,?,?,?,true,true,?)");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);

        //            if(strain.equals("-myco")) {
        //                if (r[2].equals("xkcd")) {
        //                    strain = r[0];
        //                }
        //            }else if(strain.equals("-arthro")){
        //                if (r[2].equals("ArV1")) {
        //                    strain = r[0];
        //                }
        //            }
    }
    call.close();

    Set<String> strains = phages.stream().map(y -> y[0]).collect(Collectors.toSet());
    for (String x : strains) {
        Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1])
                .collect(Collectors.toSet());
        Map<String, Integer> clustersNum = new HashMap<>();
        Map<Integer, String> clustersName = new HashMap<>();
        Map<Integer, List<String>> clusters = new HashMap<>();
        Map<Bytes, Primer> primers = new HashMap<>();
        int i = 0;
        for (String cluster : clust) {
            clustersName.put(i, cluster);
            clustersNum.put(cluster, i);
            i++;
        }
        clust.parallelStream()
                .forEach(cluster -> clusters.put(clustersNum.get(cluster),
                        phages.stream().filter(a -> a[0].equals(x) && a[1].equals(cluster)).map(a -> a[2])
                                .collect(Collectors.toList())));
        for (int z : clusters.keySet()) {
            //            try {
            List<String> clustphages = clusters.get(z);
            for (String phage : clustphages) {
                Set<Bytes> phagprimers =
                        //Read from CSV file here
                        //Premade CSV files of all possible
                        //primers in a phage genome
                        CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv").stream()
                                .map(l -> new Bytes(l.getBytes())).collect(Collectors.toSet());
                for (Bytes primer : phagprimers) {
                    if (!primers.containsKey(primer)) {
                        primers.put(primer, new Primer(z));
                    } else {
                        Primer select = primers.get(primer);
                        select.phageCount++;
                        if (!select.containsCluster(z)) {
                            select.addCluster(z);
                        }
                    }

                }

            }
            System.out.println(clustersName.get(z));
        }
        int count = 0;
        Iterator<Map.Entry<Bytes, Primer>> primersSet = primers.entrySet().iterator();
        while (primersSet.hasNext()) {
            Map.Entry<Bytes, Primer> primer = primersSet.next();
            Primer primerInf = primer.getValue();
            if (primerInf.clusters.length != 1) {
                primer.setValue(null);
            } else {
                int primerClust = -1;
                for (int cluster : primerInf.clusters) {
                    primerClust = cluster;
                }
                if (primerInf.phageCount != clusters.get(primerClust).size()) {
                    primer.setValue(null);
                } else {
                    count++;
                }
            }
        }
        System.out.print("Unique Count: ");
        System.out.println(count);
        System.out.print("Primer Count: ");
        System.out.println(primers.size());
        i = 0;
        for (Bytes a : primers.keySet()) {
            Primer primerInf = primers.get(a);
            if (primerInf != null) {
                String primerClust = "";
                for (int cluster : primerInf.clusters) {
                    primerClust = clustersName.get(cluster);
                }
                String str = new String(a.bytes);
                try {
                    st.setInt(1, bps);
                    st.setString(2, str);
                    st.setString(3, x);
                    st.setString(4, primerClust);
                    //                        st.setDouble(5, HSqlPrimerDesign.primerTm(str, 0, 800, 1.5, 0.2));
                    st.setDouble(5, HSqlPrimerDesign.easytm(str));
                    st.setDouble(6, HSqlPrimerDesign.gcContent(str));
                    st.setBoolean(7, HSqlPrimerDesign.calcHairpin(str, 4));
                    st.addBatch();
                } catch (SQLException e) {
                    e.printStackTrace();
                    System.out.println("Error occurred at " + x + " " + primerClust);
                }
                i++;
                if (i == 1000) {
                    i = 0;
                    st.executeBatch();
                    db.commit();
                }
            }
        }
        if (i > 0) {
            st.executeBatch();
            db.commit();
        }

        //        }

        System.out.println("Unique Updated");
        System.out.println((System.currentTimeMillis() - time) / Math.pow(10, 3) / 60);
    }
    stat.execute("SET FILES LOG TRUE;");
    st.close();
    stat.close();
}

From source file:com.flexive.ejb.beans.PhraseEngineBean.java

/**
 * {@inheritDoc}
 */
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void assignPhrases(int category, long position, long assignmentOwner, long nodeId, long nodeMandator,
        FxPhrase[] phrases) throws FxApplicationException {
    if (phrases == null || phrases.length == 0)
        return;
    checkMandatorAccess(assignmentOwner, FxContext.getUserTicket());
    Connection con = null;
    PreparedStatement ps = null;
    try {
        // Obtain a database connection
        con = Database.getDbConnection();
        //check categories
        ps = con.prepareStatement("SELECT ID FROM " + TBL_PHRASE_TREE + " WHERE ID=? AND MANDATOR=? AND CAT=?");
        ps.setLong(1, nodeId);
        ps.setLong(2, nodeMandator);
        ps.setInt(3, category);
        ResultSet rs = ps.executeQuery();
        if (rs == null || !(rs.next()))
            throw new FxNotFoundException("ex.phrases.node.notFound.id", nodeId, nodeMandator);
        ps.close();
        long startPhraseId = -1, startPhraseMandator = -1;
        ps = con.prepareStatement("SELECT PHRASEID,PMANDATOR FROM " + TBL_PHRASE_MAP
                + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND POS>=? AND CAT=? AND DIRECT=TRUE ORDER BY POS ASC");
        ps.setLong(1, assignmentOwner);
        ps.setLong(2, nodeId);
        ps.setLong(3, nodeMandator);
        ps.setLong(4, position);
        ps.setInt(5, category);
        rs = ps.executeQuery();
        while (rs != null && rs.next()) {
            long pid = rs.getLong(1);
            long mid = rs.getLong(2);
            if (!phrasesContains(phrases, pid, mid)) {
                startPhraseId = pid;
                startPhraseMandator = mid;
                break;
            }
        }
        ps.close();
        boolean useMaxPos = startPhraseId == -1 || startPhraseMandator == -1;
        //remove all contained phrases
        ps = con.prepareStatement("DELETE FROM " + TBL_PHRASE_MAP
                + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND PHRASEID=? AND PMANDATOR=? AND CAT=? AND DIRECT=TRUE");
        ps.setLong(1, assignmentOwner);
        ps.setLong(2, nodeId);
        ps.setLong(3, nodeMandator);
        ps.setInt(6, category);
        for (FxPhrase rm : phrases) {
            ps.setLong(4, rm.getId());
            ps.setLong(5, rm.getMandator());
            ps.addBatch();
        }
        ps.executeBatch();
        ps.close();
        //close gaps and reposition
        updatePhrasePosition(con, category, assignmentOwner, nodeId, nodeMandator, -1, -1, 0, true);
        int insertPos = -1;
        if (!useMaxPos) {
            ps = con.prepareStatement("SELECT POS FROM " + TBL_PHRASE_MAP
                    + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND PHRASEID=? AND PMANDATOR=? AND CAT=? AND DIRECT=?");
            ps.setLong(1, assignmentOwner);
            ps.setLong(2, nodeId);
            ps.setLong(3, nodeMandator);
            ps.setLong(4, startPhraseId);
            ps.setLong(5, startPhraseMandator);
            ps.setInt(6, category);
            ps.setBoolean(7, true);
            rs = ps.executeQuery();
            if (rs != null && rs.next())
                insertPos = rs.getInt(1);
            ps.close();
        }
        if (insertPos == -1)
            useMaxPos = true;
        if (!useMaxPos) {
            //make room for the phrases to insert
            ps = con.prepareStatement("UPDATE " + TBL_PHRASE_MAP + " SET POS=POS+" + (phrases.length)
                    + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND POS>? AND CAT=? AND DIRECT=?");
            ps.setLong(1, assignmentOwner);
            ps.setLong(2, nodeId);
            ps.setLong(3, nodeMandator);
            ps.setLong(4, insertPos);
            ps.setInt(5, category);
            ps.setBoolean(6, true);
            ps.executeUpdate();
            ps.close();
        } else {
            ps = con.prepareStatement("SELECT MAX(POS) FROM " + TBL_PHRASE_MAP
                    + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND CAT=? AND DIRECT=?");
            ps.setLong(1, assignmentOwner);
            ps.setLong(2, nodeId);
            ps.setLong(3, nodeMandator);
            ps.setInt(4, category);
            ps.setBoolean(5, true);
            rs = ps.executeQuery();
            if (rs != null && rs.next())
                insertPos = rs.getInt(1);
            else
                insertPos = 1; //fallback: first entry
            ps.close();
        }
        ps = con.prepareStatement("INSERT INTO " + TBL_PHRASE_MAP
                + "(MANDATOR,CAT,NODEID,NODEMANDATOR,PHRASEID,PMANDATOR,POS,DIRECT)VALUES(?,?,?,?,?,?,?,?)");
        ps.setLong(1, assignmentOwner);
        ps.setInt(2, category);
        ps.setLong(3, nodeId);
        ps.setLong(4, nodeMandator);
        ps.setBoolean(8, true);
        for (FxPhrase phrase : phrases) {
            ps.setLong(5, phrase.getId());
            ps.setLong(6, phrase.getMandator());
            ps.setLong(7, ++insertPos);
            ps.addBatch();
        }
        ps.executeBatch();
        if (phrases.length > 10)
            rebuildPhraseChildMapping(con, assignmentOwner, category, -1, -1);
        else {
            for (FxPhrase phrase : phrases) {
                rebuildPhraseChildMapping(con, assignmentOwner, category, phrase.getId(), phrase.getMandator());
            }
        }
    } catch (SQLException exc) {
        EJBUtils.rollback(ctx);
        throw new FxDbException(LOG, exc, "ex.db.sqlError", exc.getMessage()).asRuntimeException();
    } finally {
        Database.closeObjects(PhraseEngineBean.class, con, ps);
    }
}

From source file:org.jamwiki.db.AnsiQueryHandler.java

/**
 *
 */
public void insertTopicVersions(List<TopicVersion> topicVersions, Connection conn) throws SQLException {
    PreparedStatement stmt = null;
    ResultSet rs = null;
    boolean useBatch = (topicVersions.size() > 1);
    try {
        if (!this.autoIncrementPrimaryKeys()) {
            stmt = conn.prepareStatement(STATEMENT_INSERT_TOPIC_VERSION);
        } else if (useBatch) {
            // generated keys don't work in batch mode
            stmt = conn.prepareStatement(STATEMENT_INSERT_TOPIC_VERSION_AUTO_INCREMENT);
        } else {
            stmt = conn.prepareStatement(STATEMENT_INSERT_TOPIC_VERSION_AUTO_INCREMENT,
                    Statement.RETURN_GENERATED_KEYS);
        }
        int topicVersionId = -1;
        if (!this.autoIncrementPrimaryKeys() || useBatch) {
            // manually retrieve next topic version id when using batch
            // mode or when the database doesn't support generated keys.
            topicVersionId = this.nextTopicVersionId(conn);
        }
        for (TopicVersion topicVersion : topicVersions) {
            if (!this.autoIncrementPrimaryKeys() || useBatch) {
                // FIXME - if two threads update the database simultaneously then
                // it is possible that this code could set the topic version ID
                // to a value that is different from what the database ends up
                // using.
                topicVersion.setTopicVersionId(topicVersionId++);
            }
            this.prepareTopicVersionStatement(topicVersion, stmt);
            if (useBatch) {
                stmt.addBatch();
            } else {
                stmt.executeUpdate();
            }
            if (this.autoIncrementPrimaryKeys() && !useBatch) {
                rs = stmt.getGeneratedKeys();
                if (!rs.next()) {
                    throw new SQLException("Unable to determine auto-generated ID for database record");
                }
                topicVersion.setTopicVersionId(rs.getInt(1));
            }
        }
        if (useBatch) {
            stmt.executeBatch();
        }
    } finally {
        // close only the statement and result set - leave the connection open for further use
        DatabaseConnection.closeConnection(null, stmt, rs);
        stmt = null;
        rs = null;
    }
}

From source file:org.pentaho.di.core.database.Database.java

/**
 * Insert a row into the database using a prepared statement that has all values set.
 *
 * @param ps
 *          The prepared statement
 * @param batch
 *          True if you want to use batch inserts (size = commit size)
 * @param handleCommit
 *          True if you want to handle the commit here after the commit size (False e.g. in case the step handles
 *          this, see TableOutput)
 * @return true if the rows are safe: if batch of rows was sent to the database OR if a commit was done.
 * @throws KettleDatabaseException
 */
public boolean insertRow(PreparedStatement ps, boolean batch, boolean handleCommit)
        throws KettleDatabaseException {
    String debug = "insertRow start";
    boolean rowsAreSafe = false;
    boolean isBatchUpdate = false;

    try {
        // Unique connections and Batch inserts don't mix when you want to roll
        // back on certain databases.
        // That's why we disable the batch insert in that case.
        //
        boolean useBatchInsert = batch && getDatabaseMetaData().supportsBatchUpdates()
                && databaseMeta.supportsBatchUpdates() && Const.isEmpty(connectionGroup);

        //
        // Add support for batch inserts...
        //
        if (!isAutoCommit()) {
            if (useBatchInsert) {
                debug = "insertRow add batch";
                ps.addBatch(); // Add the batch, but don't forget to run the batch
            } else {
                debug = "insertRow exec update";
                ps.executeUpdate();
            }
        } else {
            ps.executeUpdate();
        }

        written++;

        if (handleCommit) { // some steps handle the commit themselves (see e.g.
                            // TableOutput step)
            if (!isAutoCommit() && (written % commitsize) == 0) {
                if (useBatchInsert) {
                    isBatchUpdate = true;
                    debug = "insertRow executeBatch commit";
                    ps.executeBatch();
                    commit();
                    ps.clearBatch();
                } else {
                    debug = "insertRow normal commit";
                    commit();
                }
                written = 0;
                rowsAreSafe = true;
            }
        }

        return rowsAreSafe;
    } catch (BatchUpdateException ex) {
        throw createKettleDatabaseBatchException("Error updating batch", ex);
    } catch (SQLException ex) {
        if (isBatchUpdate) {
            throw createKettleDatabaseBatchException("Error updating batch", ex);
        } else {
            throw new KettleDatabaseException("Error inserting/updating row", ex);
        }
    } catch (Exception e) {
        // System.out.println("Unexpected exception in ["+debug+"] : "+e.getMessage());
        throw new KettleDatabaseException("Unexpected error inserting/updating row in part [" + debug + "]", e);
    }
}

From source file:com.viettel.logistic.wms.dao.StockGoodsSerialDAO.java

public ResultDTO importStockGoodsSerialBatch(StockTransDTO stockTrans, StockTransDetailDTO stockTransDetail,
        List<StockTransSerialDTO> lstStockTransSerial, Connection connection, String serialStatus) {
    ResultDTO resultDTO = new ResultDTO();
    //connection.
    // Import quantity information
    Double amount = 0D;
    Double amountIssue = 0D;
    //PREPARE STATEMENTS
    PreparedStatement prstmtInsertStockTransSerial;
    PreparedStatement prstmtInsertStockGoodsSerial;
    //SQL
    StringBuilder sqlStockGoodsSerial = new StringBuilder();
    StringBuilder sqlStockTransSerial = new StringBuilder();
    String serial;
    String prefixSerial = "";
    String suffixFromSerial;
    String suffixToSerial;
    String fromSerial;
    String toSerial;
    int numberNeedToCommit = 0;
    int numberOfSuccess = 0;
    int numberOfFail = 0;
    //
    List paramsStockTransSerial;
    List paramsStockGoodsSerial;

    try {
        // 1. Initialize the session
        // 2.1 Build the STOCK_GOODS_SERIAL insert statement
        sqlStockGoodsSerial
                .append(" INSERT INTO stock_goods_serial (id, cust_id, owner_id, owner_type, goods_id,");
        sqlStockGoodsSerial.append("       goods_state, status,sale_type, change_user,");
        sqlStockGoodsSerial.append("       price,channel_type_id, barcode, change_date,");
        sqlStockGoodsSerial.append(
                "       import_date, sale_date, bincode, add_infor, cell_code,serial,partner_id,import_stock_trans_id,order_id) ");
        sqlStockGoodsSerial.append(
                " VALUES (STOCK_GOODS_SERIAL_SEQ.nextval,?,?,?,?,?,?,?,?,TO_NUMBER(?),TO_NUMBER(?),?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),to_date(?,'dd/MM/yyyy hh24:mi:ss'),?,?,?,?,?,TO_NUMBER(?),TO_NUMBER(?),TO_NUMBER(?)) ");
        sqlStockGoodsSerial.append(" LOG ERRORS REJECT LIMIT UNLIMITED ");
        // 2.2 Build the STOCK_TRANS_SERIAL insert statement
        sqlStockTransSerial.append("INSERT INTO stock_trans_serial ");
        sqlStockTransSerial.append(" ( stock_trans_serial_id, stock_trans_id,"
                + "       stock_trans_detail_id, stock_trans_date, goods_id,"
                + "       goods_code, goods_name, goods_state, goods_unit_type,"
                + "       from_serial, to_serial," + "       amount_order, amount_real, bincode, barcode, "
                + "       create_datetime," + "       cell_code ) ");
        sqlStockTransSerial.append(
                " VALUES (STOCK_TRANS_SERIAL_SEQ.nextval,?,?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),?,?,?,?,?,?,?,?,?,?,?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),?) LOG ERRORS REJECT LIMIT UNLIMITED ");
        // 3. Create the prepared statements
        prstmtInsertStockTransSerial = connection.prepareStatement(sqlStockTransSerial.toString());
        prstmtInsertStockGoodsSerial = connection.prepareStatement(sqlStockGoodsSerial.toString());
        // Serial details
        for (StockTransSerialDTO stockTransSerial : lstStockTransSerial) {
            fromSerial = stockTransSerial.getFromSerial();
            toSerial = stockTransSerial.getToSerial();
            numberNeedToCommit++;
            //SET PARAMS FOR STOCK_TRANS_SERIAL
            paramsStockTransSerial = setParamsStockTransSerial(stockTransSerial);
            //SET PARAMS AND ADD TO BATCH
            for (int idx = 0; idx < paramsStockTransSerial.size(); idx++) {
                prstmtInsertStockTransSerial.setString(idx + 1,
                        DataUtil.nvl(paramsStockTransSerial.get(idx), "").toString());
            }
            prstmtInsertStockTransSerial.addBatch();
            // Insert serial details
            if (!StringUtils.isInteger(fromSerial) || !StringUtils.isInteger(toSerial)) { // serial is non-numeric
                serial = fromSerial;
                paramsStockGoodsSerial = setParamsStockGoodsSerial(stockTrans, stockTransDetail,
                        stockTransSerial, serial, serialStatus);
                //SET PARAMS AND ADD TO BATCH
                for (int idx = 0; idx < paramsStockGoodsSerial.size(); idx++) {
                    try {
                        prstmtInsertStockGoodsSerial.setString(idx + 1,
                                DataUtil.nvl(paramsStockGoodsSerial.get(idx), "").toString());
                    } catch (Exception e) {
                        System.out.println(idx);
                    }
                }
                prstmtInsertStockGoodsSerial.addBatch();
                // also update the issued amount
            } else { // serial is numeric
                // Check the serial length: if it is longer than 19 characters, split it, since a Long holds at most 19 digits
                int iLengthSuffixSerial = 0;
                if (fromSerial.length() > Constants.SERIAL_LIMIT) {
                    prefixSerial = fromSerial.substring(0, fromSerial.length() - Constants.SERIAL_LIMIT);
                    suffixFromSerial = fromSerial.substring(fromSerial.length() - Constants.SERIAL_LIMIT,
                            fromSerial.length());
                    suffixToSerial = toSerial.substring(toSerial.length() - Constants.SERIAL_LIMIT,
                            toSerial.length());
                    iLengthSuffixSerial = suffixFromSerial.length();
                } else {
                    suffixFromSerial = fromSerial;
                    suffixToSerial = toSerial;
                    iLengthSuffixSerial = fromSerial.length();
                }
                //
                String tmpSuffixSerial;
                for (Long lSerial = Long.parseLong(suffixFromSerial); lSerial <= Long
                        .parseLong(suffixToSerial); lSerial++) {
                    tmpSuffixSerial = DataUtil.lPad(lSerial.toString(), "0", iLengthSuffixSerial);
                    serial = prefixSerial + tmpSuffixSerial;
                    paramsStockGoodsSerial = setParamsStockGoodsSerial(stockTrans, stockTransDetail,
                            stockTransSerial, serial, serialStatus);
                    //SET PARAMS AND ADD TO BATCH
                    for (int idx = 0; idx < paramsStockGoodsSerial.size(); idx++) {
                        prstmtInsertStockGoodsSerial.setString(idx + 1,
                                DataUtil.nvl(paramsStockGoodsSerial.get(idx), "").toString());
                    }
                    prstmtInsertStockGoodsSerial.addBatch();
                    // also update the issued-amount information
                    //amountIssue++;
                }
            } //END IF
            if (numberNeedToCommit >= Constants.COMMIT_NUM) {
                try {
                    prstmtInsertStockGoodsSerial.executeBatch();
                    prstmtInsertStockTransSerial.executeBatch();
                    numberOfSuccess = numberOfSuccess + numberNeedToCommit;
                } catch (Exception ex) {
                    numberOfFail = numberOfFail + numberNeedToCommit;
                }
                numberNeedToCommit = 0;
            }
        } //END FOR
        if (numberNeedToCommit > 0) {
            try {
                prstmtInsertStockTransSerial.executeBatch();
                prstmtInsertStockGoodsSerial.executeBatch();
                numberOfSuccess += numberNeedToCommit;
            } catch (Exception ex) {
                //                    connection.rollback();
                numberOfFail += numberNeedToCommit;
            }
        }
        prstmtInsertStockTransSerial.close();
        prstmtInsertStockGoodsSerial.close();
    } catch (SQLException | NumberFormatException e) {
        Logger.getLogger(StockGoodsSerialDAO.class.getName()).log(Level.SEVERE, null, e);
    }

    // count the goods rows inserted into the err$_ error table
    List<StockGoodsSerialInforDTO> lstError = getListErrorImportRevoke(stockTrans.getStockTransId());
    int amountError = 0;
    if (lstError != null) {
        amountError = lstError.size();
    }
    Double strAmount = Double.parseDouble(stockTransDetail.getAmountReal() + "");
    numberOfSuccess = Integer.parseInt(String.format("%.0f", strAmount)) - amountError; // subtract failed inserts => number of successfully inserted goods
    numberOfFail = amountError; // goods that failed because the customer's serial already exists in stock
    amountIssue = (double) numberOfSuccess;
    //
    resultDTO.setMessage(ParamUtils.SUCCESS);
    resultDTO.setQuantityFail(numberOfFail);
    resultDTO.setQuantitySucc(numberOfSuccess);
    resultDTO.setAmount(amount);
    resultDTO.setAmountIssue(amountIssue);
    // return the list of failed serials
    resultDTO.setLstStockGoodsSerialInforDTO(lstError);
    return resultDTO;
}

From source file:com.flexive.ejb.beans.PhraseEngineBean.java

/**
 * {@inheritDoc}
 */
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void moveTreeNodeAssignment(int category, long assignmentOwner, long nodeId, long nodeMandatorId,
        long phraseId, long phraseMandator, int delta) throws FxNotFoundException, FxNoAccessException {
    if (delta == 0)
        return;
    checkMandatorAccess(assignmentOwner, FxContext.getUserTicket());
    Connection con = null;
    PreparedStatement ps = null;
    try {
        // Obtain a database connection
        con = Database.getDbConnection();

        List<Long> positionsId = Lists.newArrayListWithCapacity(50);
        List<Long> positionsMandator = Lists.newArrayListWithCapacity(50);

        //0..phrase id, 1..phrase mandator,2..pos
        ps = con.prepareStatement("SELECT PHRASEID,PMANDATOR,POS FROM " + TBL_PHRASE_MAP
                + " WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND CAT=? ORDER BY POS");
        ps.setLong(1, assignmentOwner);
        ps.setLong(2, nodeId);
        ps.setLong(3, nodeMandatorId);
        ps.setInt(4, category);
        ResultSet rs = ps.executeQuery();
        long currPos = 1;
        int index = -1;
        while (rs != null && rs.next()) {
            if (index == -1 && phraseId == rs.getLong(1))
                index = (int) currPos - 1;
            positionsId.add(rs.getLong(1));
            positionsMandator.add(rs.getLong(2));
            currPos++;
        }
        if (positionsId.size() < 2 || index == -1) //only one node or node not found, can not change position
            return;
        int newIndex = index + delta;
        if (newIndex < 0)
            newIndex = 0;
        if (delta > 0)
            newIndex++;
        if (newIndex > (positionsId.size() - 1))
            newIndex = positionsId.size();
        positionsId.add(newIndex, phraseId);
        positionsMandator.add(newIndex, phraseMandator);
        if (newIndex > index) {
            positionsId.remove(index);
            positionsMandator.remove(index);
        } else {
            positionsId.remove(index + 1);
            positionsMandator.remove(index + 1);
        }
        //write back new positionsId
        ps.close();
        ps = con.prepareStatement("UPDATE " + TBL_PHRASE_MAP
                + " SET POS=? WHERE MANDATOR=? AND NODEID=? AND NODEMANDATOR=? AND PHRASEID=? AND PMANDATOR=? AND CAT=?");
        ps.setLong(2, assignmentOwner);
        ps.setLong(3, nodeId);
        ps.setLong(4, nodeMandatorId);
        ps.setInt(7, category);
        for (int i = 1; i <= positionsId.size(); i++) {
            ps.setLong(1, i);
            ps.setLong(5, positionsId.get(i - 1));
            ps.setLong(6, positionsMandator.get(i - 1));
            ps.addBatch();
        }
        ps.executeBatch();
    } catch (SQLException exc) {
        EJBUtils.rollback(ctx);
        throw new FxDbException(LOG, exc, "ex.db.sqlError", exc.getMessage()).asRuntimeException();
    } finally {
        Database.closeObjects(PhraseEngineBean.class, con, ps);
    }
}

From source file:i5.las2peer.services.mobsos.SurveyService.java

@POST
@Consumes(MediaType.APPLICATION_JSON)
@Path("surveys/{id}/responses")
@Summary("submit response data to given survey.")
@ApiResponses(value = { @ApiResponse(code = 200, message = "Survey response submitted successfully."),
        @ApiResponse(code = 400, message = "Survey response invalid -or- questionnaire form invalid. Cause: ..."),
        @ApiResponse(code = 404, message = "Survey does not exist -or- No questionnaire defined for survey."),
        @ApiResponse(code = 400, message = "Survey response already submitted."), })
public HttpResponse submitSurveyResponseJSON(@PathParam("id") int id, @ContentParam String answerJSON) {
    Date now = new Date();
    String onAction = "submitting response to survey " + id;
    try {

        // retrieve survey by id;
        HttpResponse rs = getSurvey(id);
        if (rs.getStatus() != 200) {
            return rs;
        }

        JSONObject s = (JSONObject) JSONValue.parse(rs.getResult());

        // check if survey expired/not started
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        df.setTimeZone(TimeZone.getTimeZone("GMT"));

        Date start = df.parse((String) s.get("start"));
        Date end = df.parse((String) s.get("end"));

        if (now.getTime() > end.getTime()) {
            HttpResponse resp = new HttpResponse("Cannot submit response. Survey expired.");
            resp.setStatus(403);
            return resp;
        } else if (now.getTime() < start.getTime()) {
            HttpResponse resp = new HttpResponse("Cannot submit response. Survey has not begun, yet.");
            resp.setStatus(403);
            return resp;
        }

        // check for questionnaire form
        int qid = Integer.parseInt(s.get("qid") + "");

        if (qid == -1) {
            HttpResponse result = new HttpResponse("No questionnaire defined for survey " + id + "!");
            result.setStatus(404);
            return result;
        }

        // retrieve questionnaire form for survey to do answer validation
        HttpResponse r = downloadQuestionnaireForm(qid);

        if (200 != r.getStatus()) {
            // if questionnaire form does not exist, pass on response containing error status
            return r;
        }

        Document form;
        JSONObject answer;

        // parse form to XML document incl. validation
        try {
            form = validateQuestionnaireData(r.getResult());
        } catch (SAXException e) {
            HttpResponse result = new HttpResponse("Questionnaire form is invalid! Cause: " + e.getMessage());
            result.setStatus(400);
            return result;
        }

        try {
            //System.out.println(answerJSON);

            answer = (JSONObject) JSONValue.parseWithException(answerJSON);
        } catch (ParseException e) {
            HttpResponse result = new HttpResponse(
                    "Survey response is not valid JSON! Cause: " + e.getMessage());
            result.setStatus(400);
            return result;
        }

        JSONObject answerFieldTable;

        // validate if answer matches form.
        try {
            answerFieldTable = validateResponse(form, answer);
        } catch (IllegalArgumentException e) {
            HttpResponse result = new HttpResponse("Survey response is invalid! Cause: " + e.getMessage());
            result.setStatus(400);
            return result;
        }

        // after all validation finally persist survey response in database
        int surveyId = id;

        String sub = (String) getActiveUserInfo().get("sub");

        if (getActiveAgent().getId() == getActiveNode().getAnonymous().getId()) {
            sub += now.getTime();
        }

        Connection conn = null;
        PreparedStatement stmt = null;
        ResultSet rset = null;

        try {
            conn = dataSource.getConnection();
            stmt = conn.prepareStatement(
                    "insert into " + jdbcSchema + ".response(uid,sid,qkey,qval,time) values (?,?,?,?,?)");

            Iterator<String> it = answerFieldTable.keySet().iterator();
            while (it.hasNext()) {

                String qkey = it.next();
                String qval = "" + answerFieldTable.get(qkey);

                stmt.setString(1, sub);
                stmt.setInt(2, surveyId);
                stmt.setString(3, qkey);
                stmt.setString(4, qval);
                stmt.setTimestamp(5, new Timestamp(now.getTime()));
                stmt.addBatch();

            }
            stmt.executeBatch();

            HttpResponse result = new HttpResponse("Response to survey " + id + " submitted successfully.");
            result.setStatus(200);
            return result;

        } catch (SQLException | UnsupportedOperationException e) {
            if (0 <= e.getMessage().indexOf("Duplicate")) {
                HttpResponse result = new HttpResponse("Survey response already submitted!");
                result.setStatus(409);
                return result;
            } else {
                e.printStackTrace();
                return internalError(onAction);
            }
        } finally {
            try {
                if (rset != null)
                    rset.close();
            } catch (Exception e) {
                e.printStackTrace();
                return internalError(onAction);
            }
            try {
                if (stmt != null)
                    stmt.close();
            } catch (Exception e) {
                e.printStackTrace();
                return internalError(onAction);
            }
            try {
                if (conn != null)
                    conn.close();
            } catch (Exception e) {
                e.printStackTrace();
                return internalError(onAction);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        return internalError(onAction);
    }
}

From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java

private/* synchronized */Set<String> pauseLogIndex(Map<String, LogIndex> toCommit) {
    if (isClosed())
        return null;
    if (!USE_CHECKPOINTS_AS_PAUSE_TABLE)
        return this.pauseLogIndexIndividually(toCommit);
    String updateCmd = "update " + (USE_CHECKPOINTS_AS_PAUSE_TABLE ? getCTable() : getPTable())
            + " set logindex=? where  paxos_id=?";

    PreparedStatement pstmt = null;
    Connection conn = null;
    Set<String> paused = new HashSet<String>();
    Set<String> batch = new HashSet<String>();
    synchronized (this.messageLog) {
        try {
            int i = 0;
            for (String paxosID : toCommit.keySet()) {
                LogIndex logIndex = toCommit.get(paxosID);
                if (conn == null) {
                    conn = this.getDefaultConn();
                    conn.setAutoCommit(false);
                    pstmt = conn.prepareStatement(updateCmd);
                }

                byte[] logIndexBytes = logIndex != null ? deflate(logIndex.toString().getBytes(CHARSET)) : null;
                if (logIndexBytes != null && ENABLE_INSTRUMENTATION && Util.oneIn(Integer.MAX_VALUE))
                    DelayProfiler.updateMovAvg("logindex_size", logIndexBytes.length);
                Blob blob = conn.createBlob();
                if (logIndexBytes != null)
                    blob.setBytes(1, logIndexBytes);
                pstmt.setBlob(1, logIndexBytes != null ? blob : null);
                pstmt.setString(2, paxosID);
                pstmt.addBatch();
                batch.add(paxosID);
                if ((i + 1) % MAX_DB_BATCH_SIZE == 0 || (i + 1) == toCommit.size()) {
                    pstmt.executeBatch();
                    conn.commit();
                    pstmt.clearBatch();
                    paused.addAll(batch);

                    log.log(Level.FINE, "{0} paused logIndex batch {1}",
                            new Object[] { this, Util.truncatedLog(batch, 16) });
                    batch.clear();
                }
                i++;
            }
        } catch (SQLException | IOException sqle) {
            log.severe(this + " failed to pause logIndex batch");
            sqle.printStackTrace();
        } finally {
            cleanup(pstmt);
            cleanup(conn);
        }
        // free up memory
        for (String paxosID : paused)
            this.messageLog.uncache(paxosID);
    }
    if (paused.size() != toCommit.size())
        paused.addAll(this.pauseLogIndexIndividually(diffLI(toCommit, paused)));
    return paused;
}