Example usage for java.sql ResultSet CONCUR_UPDATABLE

List of usage examples for java.sql ResultSet CONCUR_UPDATABLE

Introduction

On this page you can find usage examples for java.sql ResultSet CONCUR_UPDATABLE.

Prototype

int CONCUR_UPDATABLE

Document

The constant indicating the concurrency mode for a ResultSet object that may be updated.
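Before the real-world examples below, here is a minimal, self-contained sketch of the typical pattern: request a scrollable, updatable ResultSet, then modify rows in place with the updateXxx methods and updateRow. The JDBC URL, credentials, and the accounts/balance table are illustrative assumptions, not taken from the examples on this page.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class UpdatableResultSetSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical connection details; substitute your own driver, URL and credentials.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "");
                // Ask the driver for a scrollable ResultSet whose rows can be updated in place.
                Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                        ResultSet.CONCUR_UPDATABLE);
                ResultSet rs = stmt.executeQuery("SELECT id, balance FROM accounts")) {
            while (rs.next()) {
                // Change the current row through the ResultSet instead of issuing a separate UPDATE.
                rs.updateDouble("balance", rs.getDouble("balance") * 1.05);
                rs.updateRow();
            }
        }
    }
}

Note that drivers may downgrade the requested concurrency: if the query cannot be made updatable (for example, it omits the primary key or joins several tables), the driver can return a CONCUR_READ_ONLY result set and report a SQLWarning instead of failing.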

Usage

From source file:com.taobao.datax.plugins.writer.oraclejdbcwriter.OracleJdbcWriter.java

@Override
public int prepare(PluginParam param) {
    this.setParam(param);

    DBSource.register(this.sourceUniqKey, this.genProperties());

    if (StringUtils.isBlank(this.pre))
        return PluginStatus.SUCCESS.value();

    Statement stmt = null;
    try {
        this.connection = DBSource.getConnection(this.sourceUniqKey);

        stmt = this.connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);

        for (String subSql : this.pre.split(";")) {
            this.logger.info(String.format("Excute prepare sql %s .", subSql));
            stmt.execute(subSql);
        }
        this.connection.commit();
        return PluginStatus.SUCCESS.value();
    } catch (Exception e) {
        throw new DataExchangeException(e.getCause());
    } finally {
        try {
            if (null != stmt) {
                stmt.close();
            }
            if (null != this.connection) {
                this.connection.close();
                this.connection = null;
            }
        } catch (SQLException e) {
        }
    }
}

From source file:org.etudes.jforum.dao.sqlserver.SqlServerPostDAO.java

/** 
 * @see org.etudes.jforum.dao.PostDAO#selectAllByTopicByLimit(int, int, int)
 */
public List selectAllByTopicByLimit(int topicId, int startFrom, int count) throws Exception {
    List l = new ArrayList();

    String top = SystemGlobals.getSql("GenericModel.selectByLimit");

    PreparedStatement p = JForum.getConnection().prepareStatement(
            top + " " + count + " " + SystemGlobals.getSql("PostModel.selectAllByTopicByLimit1") + " " + top
                    + " " + startFrom + " " + SystemGlobals.getSql("PostModel.selectAllByTopicByLimit2"),
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
    if (logger.isDebugEnabled())
        logger.debug(top + " " + count + " " + SystemGlobals.getSql("PostModel.selectAllByTopicByLimit1") + " "
                + top + " " + startFrom + " " + SystemGlobals.getSql("PostModel.selectAllByTopicByLimit2"));
    p.setInt(1, topicId);
    p.setInt(2, topicId);

    ResultSet rs = p.executeQuery();

    while (rs.next()) {
        l.add(this.makePost(rs));
    }

    rs.close();
    p.close();

    return l;
}

From source file:org.biblionum.ouvrage.modele.OuvrageModele.java

/**
 * Updates a row in the ouvrage table via an updatable ResultSet.
 *
 * @param ds (DataSource used to obtain the connection)
 * @param keyId
 * @param auteur
 * @param editeur
 * @param annee_edition
 * @param resume
 * @param nb_page
 * @param emplacement
 * @param couverture
 * @param ouvrageTipeid
 * @param categorieOuvrageid
 * @param niveauid_niveau
 * @param filiereid
 * @param titre
 * @return boolean (true on success)
 * @throws SQLException
 */
public boolean updateUtilisateur(DataSource ds, int keyId, String auteur, String editeur, int annee_edition,
        String resume, int nb_page, String emplacement, String couverture, int ouvrageTipeid,
        int categorieOuvrageid, int niveauid_niveau, int filiereid, String titre) throws SQLException {
    con = ds.getConnection();
    String sql = "SELECT * FROM ouvrage WHERE id = ?";
    PreparedStatement statement = con.prepareStatement(sql, ResultSet.TYPE_SCROLL_SENSITIVE,
            ResultSet.CONCUR_UPDATABLE);
    statement.setInt(1, keyId);
    ResultSet entry = statement.executeQuery();

    entry.last();
    int rows = entry.getRow();
    entry.beforeFirst();
    if (rows == 0) {
        entry.close();
        statement.close();
        con.close();
        return false;
    }
    entry.next();

    if (auteur != null) {
        entry.updateString("auteur", auteur);
    }
    if (editeur != null) {
        entry.updateString("editeur", editeur);
    }
    entry.updateInt("annee_edition", annee_edition);
    if (resume != null) {
        entry.updateString("resume", resume);
    }
    entry.updateInt("nb_page", nb_page);
    if (emplacement != null) {
        entry.updateString("emplacement", emplacement);
    }
    if (couverture != null) {
        entry.updateString("couverture", couverture);
    }
    entry.updateInt("ouvrageTipeid", ouvrageTipeid);
    entry.updateInt("categorieOuvrageid", categorieOuvrageid);
    entry.updateInt("niveauid_niveau", niveauid_niveau);
    entry.updateInt("filiereid", filiereid);
    if (titre != null) {
        entry.updateString("titre", titre);
    }

    entry.updateRow();
    entry.close();
    statement.close();
    con.close();
    return true;
}

From source file:org.apache.ambari.server.checks.CheckDatabaseHelper.java

protected void checkForNotMappedConfigsToCluster() {
    String GET_NOT_MAPPED_CONFIGS_QUERY = "select type_name from clusterconfig where type_name not in (select type_name from clusterconfigmapping)";
    Set<String> nonSelectedConfigs = new HashSet<>();
    ResultSet rs = null;
    try {
        Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                ResultSet.CONCUR_UPDATABLE);
        rs = statement.executeQuery(GET_NOT_MAPPED_CONFIGS_QUERY);
        if (rs != null) {
            while (rs.next()) {
                nonSelectedConfigs.add(rs.getString("type_name"));
            }
        }
        if (!nonSelectedConfigs.isEmpty()) {
            LOG.warn(
                    "You have config(s): {} that is(are) not mapped (in clusterconfigmapping table) to any cluster!",
                    StringUtils.join(nonSelectedConfigs, ","));
            warningAvailable = true;
        }
    } catch (SQLException e) {
        LOG.error("Exception occurred during check for not mapped configs to cluster procedure: ", e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("Exception occurred during result set closing procedure: ", e);
            }
        }
    }
}

From source file:net.solarnetwork.node.dao.jdbc.AbstractBatchableJdbcDao.java

private BatchResult batchProcessInternal(final BatchCallback<T> callback, final BatchOptions options) {
    final String querySql = getBatchJdbcStatement(options);
    final AtomicInteger rowCount = new AtomicInteger(0);
    getJdbcTemplate().execute(new ConnectionCallback<Object>() {

        @Override
        public net.solarnetwork.node.dao.BatchableDao.BatchResult doInConnection(Connection con)
                throws SQLException, DataAccessException {
            PreparedStatement queryStmt = null;
            ResultSet queryResult = null;
            try {
                queryStmt = con.prepareStatement(querySql,
                        (options.isUpdatable() ? ResultSet.TYPE_SCROLL_SENSITIVE : ResultSet.TYPE_FORWARD_ONLY),
                        (options.isUpdatable() ? ResultSet.CONCUR_UPDATABLE : ResultSet.CONCUR_READ_ONLY),
                        ResultSet.CLOSE_CURSORS_AT_COMMIT);
                queryResult = queryStmt.executeQuery();
                while (queryResult.next()) {
                    T entity = getBatchRowEntity(options, queryResult, rowCount.incrementAndGet());
                    BatchCallbackResult rowResult = callback.handle(entity);
                    switch (rowResult) {
                    case CONTINUE:
                        break;
                    case STOP:
                        return null;
                    case DELETE:
                        queryResult.deleteRow();
                        break;
                    case UPDATE:
                    case UPDATE_STOP:
                        updateBatchRowEntity(options, queryResult, rowCount.intValue(), entity);
                        queryResult.updateRow();
                        if (rowResult == BatchCallbackResult.UPDATE_STOP) {
                            return null;
                        }
                        break;
                    }
                }
            } finally {
                if (queryResult != null) {
                    queryResult.close();
                }
                if (queryStmt != null) {
                    queryStmt.close();
                }
            }

            return null;
        }
    });
    return new BasicBatchResult(rowCount.intValue());
}

From source file:net.ymate.platform.persistence.jdbc.scaffold.EntityGenerator.java

/**
 * @param dbName     the database (catalog) name
 * @param dbUserName the database user (schema) name
 * @param tableName  the table name
 * @return the table metadata (primary key fields and column info), or null if the table has no primary key
 */
private TableMeta getTableMeta(String dbName, String dbUserName, String tableName) {
    IConnectionHolder _connHolder = null;
    Statement _statement = null;
    ResultSet _resultSet = null;
    Map<String, ColumnInfo> _tableFields = new LinkedHashMap<String, ColumnInfo>();
    List<String> _pkFields = new LinkedList<String>();
    TableMeta _meta = new TableMeta(_pkFields, _tableFields);
    try {
        _connHolder = __jdbc.getDefaultConnectionHolder();
        String _dbType = _connHolder.getDialect().getName();
        DatabaseMetaData _dbMetaData = _connHolder.getConnection().getMetaData();
        System.out.println(">>> Catalog: " + dbName);
        System.out.println(">>> Schema: " + dbUserName);
        System.out.println(">>> Table: " + tableName);
        _resultSet = _dbMetaData.getPrimaryKeys(dbName,
                _dbType.equalsIgnoreCase("oracle") ? dbUserName.toUpperCase() : dbUserName, tableName);
        if (_resultSet == null) {
            System.err.println("Database table \"" + tableName + "\" primaryKey resultSet is null, ignored");
            return null;
        } else {
            while (_resultSet.next()) {
                _pkFields.add(_resultSet.getString(4).toLowerCase());
            }
            if (_pkFields.isEmpty()) {
                System.err
                        .println("Database table \"" + tableName + "\" does not set the primary key, ignored");
                return null;
            } else {
                //
                System.out.println(">>> " + "COLUMN_NAME / " + "COLUMN_CLASS_NAME / " + "PRIMARY_KEY / "
                        + "AUTO_INCREMENT / " + "SIGNED / " + "PRECISION / " + "SCALE / " + "NULLABLE / "
                        + "DEFAULT / " + "REMARKS");
                //
                _statement = _connHolder.getConnection().createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                        ResultSet.CONCUR_UPDATABLE);
                _resultSet = _statement.executeQuery(
                        "SELECT * FROM ".concat(_connHolder.getDialect().wrapIdentifierQuote(tableName)));
                ResultSetMetaData _rsMetaData = _resultSet.getMetaData();
                //
                for (int _idx = 1; _idx <= _rsMetaData.getColumnCount(); _idx++) {
                    // Fetch column metadata for the current column from DatabaseMetaData
                    ResultSet _column = _dbMetaData.getColumns(dbName,
                            _dbType.equalsIgnoreCase("oracle") ? dbUserName.toUpperCase() : dbUserName,
                            tableName, _rsMetaData.getColumnName(_idx));
                    if (_column.next()) {
                        // Record the column name, type and attributes
                        _tableFields.put(_rsMetaData.getColumnName(_idx).toLowerCase(),
                                new ColumnInfo(_rsMetaData.getColumnName(_idx).toLowerCase(),
                                        _rsMetaData.getColumnClassName(_idx), _rsMetaData.isAutoIncrement(_idx),
                                        _rsMetaData.isSigned(_idx), _rsMetaData.getPrecision(_idx),
                                        _rsMetaData.getScale(_idx), _rsMetaData.isNullable(_idx),
                                        _column.getString("COLUMN_DEF"), _column.getString("REMARKS")));
                        System.out.println("--> " + _rsMetaData.getColumnName(_idx).toLowerCase() + "\t"
                                + _rsMetaData.getColumnClassName(_idx) + "\t"
                                + _pkFields.contains(_rsMetaData.getColumnName(_idx).toLowerCase()) + "\t"
                                + _rsMetaData.isAutoIncrement(_idx) + "\t" + _rsMetaData.isSigned(_idx) + "\t"
                                + _rsMetaData.getPrecision(_idx) + "\t" + _rsMetaData.getScale(_idx) + "\t"
                                + _rsMetaData.isNullable(_idx) + "\t" + _column.getString("COLUMN_DEF") + "\t"
                                + _column.getString("REMARKS"));
                    }
                    _column.close();
                }
            }
        }
    } catch (Exception e) {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        throw new RuntimeException(e);
    } finally {
        if (_statement != null) {
            try {
                _statement.close();
            } catch (SQLException e) {
                _LOG.warn("", e);
            }
        }
        if (_resultSet != null) {
            try {
                _resultSet.close();
            } catch (SQLException e) {
                _LOG.warn("", e);
            }
        }
        if (_connHolder != null) {
            _connHolder.release();
        }
    }
    return _meta;
}

From source file:eu.optimis_project.monitoring.storage.MySQLStorageManager.java

private void createTable() throws SQLException {
    final String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(" + ENTRIES_COLUMNNAME_SERVICEID
            + " VARCHAR(256) NOT NULL, " + ENTRIES_COLUMNNAME_INSTANCEID + " VARCHAR(256) NOT NULL, "
            + ENTRIES_COLUMNNAME_NAME + " VARCHAR(256) NOT NULL, " + ENTRIES_COLUMNNAME_DATA
            + " VARCHAR(256) NOT NULL, " + ENTRIES_COLUMNNAME_TIMESTAMP + " BIGINT NOT NULL, " + "PRIMARY KEY ("
            + ENTRIES_COLUMNNAME_SERVICEID + ", " + ENTRIES_COLUMNNAME_TIMESTAMP + ", "
            + ENTRIES_COLUMNNAME_NAME + ")" + ");";

    Statement statement = null;
    try {
        log.debug("Executing query: " + createTable);
        statement = getConnection().createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_UPDATABLE);
        statement.execute(createTable);
    } catch (SQLException e) {
        throw e;
    } finally {
        try {
            if (statement != null) {
                statement.close();
            }
        } catch (SQLException e) {
            log.debug("Failed to close statement.", e);
        }
    }
}

From source file:com.handu.open.dubbo.monitor.dao.base.DubboInvokeBaseDAO.java

/**
 * Executes a raw SQL query and returns each row as a map of column name to value.
 *
 * @param sql the SQL statement to execute
 * @return List<Map>
 */
public List<Map> querySql(String sql) {
    List<Map> list = Lists.newArrayList();
    try {
        ResultSet rs = getSqlSession().getConnection()
                .prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE)
                .executeQuery();
        try {
            ResultSetMetaData rsm = rs.getMetaData(); // result set metadata
            int col = rsm.getColumnCount(); // number of columns
            String[] colName = new String[col];
            // collect the column names into colName
            for (int i = 0; i < col; i++) {
                colName[i] = rsm.getColumnName(i + 1);
            }
            rs.beforeFirst();
            while (rs.next()) {
                Map<String, String> map = Maps.newHashMap();
                for (String aColName : colName) {
                    map.put(aColName, rs.getString(aColName));
                }
                list.add(map);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            return null;
        }
    } catch (SQLException e) {
        e.printStackTrace();
    }
    return list;
}

From source file:com.adaptris.jdbc.connection.FailoverDatasourceTest.java

@Test
public void testStatements() throws Exception {
    Connection conn = new MyProxy();
    try {
        try {
            conn.nativeSQL("SELECT * FROM SEQUENCES");
        } catch (Exception e) {

        }
        try {
            conn.createStatement();
        } catch (Exception e) {

        }
        try {
            conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE);
        } catch (Exception e) {

        }
        try {
            conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE);
        } catch (Exception e) {

        }
        try {
            conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE,
                    ResultSet.CLOSE_CURSORS_AT_COMMIT);
        } catch (Exception e) {

        }
        try {
            conn.prepareStatement("SELECT * FROM SEQUENCES");
        } catch (Exception e) {

        }
        try {
            conn.prepareStatement("SELECT * FROM SEQUENCES", ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY);
        } catch (Exception e) {

        }
        try {
            conn.prepareStatement("SELECT * FROM SEQUENCES", ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        } catch (Exception e) {

        }
        try {
            conn.prepareStatement("INSERT INTO sequences (id, seq_number) values ('id', 2)",
                    Statement.NO_GENERATED_KEYS);
        } catch (Exception e) {
        }
        try {
            conn.prepareStatement("INSERT INTO sequences (id, seq_number) values ('id', 2)", new int[0]);
        } catch (Exception e) {
        }
        try {
            conn.prepareStatement("INSERT INTO sequences (id, seq_number) values ('id', 2)", new String[0]);
        } catch (Exception e) {

        }

        try {
            conn.prepareCall("SELECT * FROM SEQUENCES");
        } catch (Exception e) {

        }
        try {
            conn.prepareCall("SELECT * FROM SEQUENCES", ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY);
        } catch (Exception e) {

        }
        try {
            conn.prepareCall("SELECT * FROM SEQUENCES", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                    ResultSet.CLOSE_CURSORS_AT_COMMIT);
        } catch (Exception e) {

        }
    } finally {
        JdbcUtil.closeQuietly(conn);

    }
}

From source file:net.pms.database.TableMusicBrainzReleases.java

/**
 * Stores the MBID with information from this {@link Tag} in the database.
 *
 * @param mBID the MBID to store
 * @param tagInfo the {@link Tag} whose information should be associated with
 *        the given MBID
 */
public static void writeMBID(final String mBID, final CoverArtArchiveTagInfo tagInfo) {
    boolean trace = LOGGER.isTraceEnabled();

    try (Connection connection = database.getConnection()) {
        String query = "SELECT * FROM " + TABLE_NAME + constructTagWhere(tagInfo, true);
        if (trace) {
            LOGGER.trace("Searching for release MBID with \"{}\" before update", query);
        }

        tableLock.writeLock().lock();
        try (Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                ResultSet.CONCUR_UPDATABLE)) {
            connection.setAutoCommit(false);
            try (ResultSet result = statement.executeQuery(query)) {
                if (result.next()) {
                    if (StringUtil.hasValue(mBID) || !StringUtil.hasValue(result.getString("MBID"))) {
                        if (trace) {
                            LOGGER.trace("Updating row {} to MBID \"{}\"", result.getInt("ID"), mBID);
                        }
                        result.updateTimestamp("MODIFIED", new Timestamp(System.currentTimeMillis()));
                        if (StringUtil.hasValue(mBID)) {
                            result.updateString("MBID", mBID);
                        } else {
                            result.updateNull("MBID");
                        }
                        result.updateRow();
                    } else if (trace) {
                        LOGGER.trace("Leaving row {} alone since previous information seems better",
                                result.getInt("ID"));
                    }
                } else {
                    if (trace) {
                        LOGGER.trace(
                                "Inserting new row for MBID \"{}\":\n" + "     Artist    \"{}\"\n"
                                        + "     Album     \"{}\"\n" + "     Title     \"{}\"\n"
                                        + "     Year      \"{}\"\n" + "     Artist ID \"{}\"\n"
                                        + "     Track ID  \"{}\"\n",
                                mBID, tagInfo.artist, tagInfo.album, tagInfo.title, tagInfo.year,
                                tagInfo.artistId, tagInfo.trackId);
                    }

                    result.moveToInsertRow();
                    result.updateTimestamp("MODIFIED", new Timestamp(System.currentTimeMillis()));
                    if (StringUtil.hasValue(mBID)) {
                        result.updateString("MBID", mBID);
                    }
                    if (StringUtil.hasValue(tagInfo.album)) {
                        result.updateString("ALBUM", left(tagInfo.album, 1000));
                    }
                    if (StringUtil.hasValue(tagInfo.artist)) {
                        result.updateString("ARTIST", left(tagInfo.artist, 1000));
                    }
                    if (StringUtil.hasValue(tagInfo.title)) {
                        result.updateString("TITLE", left(tagInfo.title, 1000));
                    }
                    if (StringUtil.hasValue(tagInfo.year)) {
                        result.updateString("YEAR", left(tagInfo.year, 20));
                    }
                    if (StringUtil.hasValue(tagInfo.artistId)) {
                        result.updateString("ARTIST_ID", tagInfo.artistId);
                    }
                    if (StringUtil.hasValue(tagInfo.trackId)) {
                        result.updateString("TRACK_ID", tagInfo.trackId);
                    }
                    result.insertRow();
                }
            } finally {
                connection.commit();
            }
        } finally {
            tableLock.writeLock().unlock();
        }
    } catch (SQLException e) {
        LOGGER.error("Database error while writing Music Brainz ID \"{}\" for \"{}\": {}", mBID, tagInfo,
                e.getMessage());
        LOGGER.trace("", e);
    }
}