Example usage for java.sql Connection setReadOnly

List of usage examples for java.sql Connection setReadOnly

Introduction

On this page you can find example usage of java.sql Connection setReadOnly.

Prototype

void setReadOnly(boolean readOnly) throws SQLException;

Document

Puts this connection in read-only mode as a hint to the driver to enable database optimizations.
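
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: switch a connection into read-only mode for the duration of a query and restore the previous mode afterwards. The DataSource parameter, class name, and "person" table are assumptions for illustration only and are not taken from any of the projects listed; also note that setReadOnly should not be called while a transaction is in progress.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;

public class ReadOnlyQueryExample {

    // Counts rows of a hypothetical "person" table using a read-only connection.
    public static int countPersons(DataSource dataSource) throws SQLException {
        try (Connection con = dataSource.getConnection()) {
            boolean wasReadOnly = con.isReadOnly();
            if (!wasReadOnly) {
                // Hint to the driver that only reads will follow; must not be
                // called while a transaction is in progress.
                con.setReadOnly(true);
            }
            try (Statement stmt = con.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM person")) {
                return rs.next() ? rs.getInt(1) : 0;
            } finally {
                // Restore the original mode before the connection is reused or pooled.
                con.setReadOnly(wasReadOnly);
            }
        }
    }
}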

Usage

From source file:org.horizontaldb.integration.InterceptorMockEnvironmentTest.java

@Test
public void shouldValidateMultiLevelShardedCalls() throws SQLException {
    ConversationRegistry mockRegistry = EasyMock.createMock(ConversationRegistry.class);
    TenantContext mockTenantContext = EasyMock.createMock(TenantContext.class);
    org.apache.tomcat.jdbc.pool.DataSource mockDataSource = EasyMock
            .createMock(org.apache.tomcat.jdbc.pool.DataSource.class);
    DataSourceResource mockDataSourceResource = new DataSourceResource(mockDataSource);
    ShardBeanResolver mockShardBeanResolver = EasyMock.createMock(ShardBeanResolver.class);
    ShardBeanEnricher mockShardBeanEnricher = EasyMock.createMock(ShardBeanEnricher.class);
    Connection mockConnection = EasyMock.createMock(Connection.class);
    PreparedStatement mockStatement = EasyMock.createMock(PreparedStatement.class);
    ResultSet mockResultset = EasyMock.createMock(ResultSet.class);

    conversationRegistryMockProxy.setMockRegistry(mockRegistry);
    tenantContextMockProxy.setMockTenantContext(mockTenantContext);
    dataSourceFactoryMockProxy.setMockDataSourceResource(mockDataSourceResource);
    shardBeanResolverMockProxy.setMockResolver(mockShardBeanResolver);
    shardBeanEnricherMockProxy.setMockEnricher(mockShardBeanEnricher);

    // This is the protocol that the interceptors should follow during a sharded call
    mockRegistry.startConversation(testUserHelper.getJoeToken());
    expect(mockRegistry.hasConversation(TestUser.JOE.name())).andReturn(true);
    expect(mockTenantContext.resolveCurrentTenantIdentifier()).andReturn(TestUser.JOE.name());
    mockRegistry.addResource(TestUser.JOE.name(), mockDataSourceResource);

    // resolve Dao for TestServiceTwo
    expect(mockShardBeanResolver.getBean(same(PersonDao.class), anyObject(ShardContext.class))).andReturn(null);
    mockRegistry.addResource(same(TestUser.JOE.name()), anyObject(PersonDao.class));
    mockShardBeanEnricher.setup(anyObject(PersonDao.class), anyObject(ShardContext.class));
    mockShardBeanEnricher.tearDown(anyObject(PersonDao.class), anyObject(ShardContext.class));

    // Hibernate transaction flow
    expect(mockDataSource.getConnection()).andReturn(mockConnection);
    mockConnection.setReadOnly(true);
    expect(mockConnection.getAutoCommit()).andReturn(false);
    expect(mockConnection.prepareStatement(anyObject(String.class))).andReturn(mockStatement);
    expect(mockStatement.executeQuery()).andReturn(mockResultset);
    expect(mockStatement.getWarnings()).andReturn(null);
    mockStatement.clearWarnings();
    expect(mockStatement.getMaxRows()).andReturn(0);
    expect(mockStatement.getQueryTimeout()).andReturn(0);
    expect(mockResultset.next()).andReturn(true);
    expect(mockResultset.next()).andReturn(false);
    expect(mockResultset.getLong(anyObject(String.class))).andReturn(0L);
    expect(mockResultset.wasNull()).andReturn(false);
    expect(mockResultset.getLong(anyObject(String.class))).andReturn(0L);
    expect(mockResultset.wasNull()).andReturn(true);
    expect(mockResultset.getString(anyObject(String.class))).andReturn("mockPerson");
    expect(mockResultset.wasNull()).andReturn(false);
    mockResultset.close();
    mockStatement.close();
    mockConnection.commit();
    // end Hibernate transaction

    // resolve Dao for TestServiceThree
    expect(mockRegistry.hasConversation(TestUser.JOE.name())).andReturn(true);
    expect(mockShardBeanResolver.getBean(same(DepartmentDao.class), anyObject(ShardContext.class)))
            .andReturn(null);
    mockRegistry.addResource(same(TestUser.JOE.name()), anyObject(DepartmentDaoImpl.class));
    mockShardBeanEnricher.setup(anyObject(DepartmentDaoImpl.class), anyObject(ShardContext.class));
    mockShardBeanEnricher.tearDown(anyObject(DepartmentDaoImpl.class), anyObject(ShardContext.class));

    // Hibernate transaction flow
    expect(mockConnection.prepareStatement(anyObject(String.class))).andReturn(mockStatement);
    expect(mockStatement.executeQuery()).andReturn(mockResultset);
    expect(mockStatement.getWarnings()).andReturn(null);
    mockStatement.clearWarnings();
    expect(mockStatement.getMaxRows()).andReturn(0);
    expect(mockStatement.getQueryTimeout()).andReturn(0);
    expect(mockResultset.next()).andReturn(true);
    expect(mockResultset.next()).andReturn(false);
    expect(mockResultset.getLong(anyObject(String.class))).andReturn(0L);
    expect(mockResultset.wasNull()).andReturn(false);
    expect(mockResultset.getString(anyObject(String.class))).andReturn("mockDepartment");
    expect(mockResultset.wasNull()).andReturn(false);
    mockResultset.close();
    mockStatement.close();
    // end Hibernate transaction

    // cleanup after service calls
    mockDataSource.close(true);
    mockRegistry.teardownConversation(testUserHelper.getJoeToken());

    replay(mockRegistry, mockTenantContext, mockShardBeanResolver, mockShardBeanEnricher, mockDataSource,
            mockConnection, mockStatement, mockResultset);

    try {
        testService.authenticate(testUserHelper.getJoeToken());

        testService.callNestedServiceChain(TestUser.JOE.name());
    } finally {
        testService.logoff(testUserHelper.getJoeToken());
    }

    verify(mockRegistry, mockTenantContext, mockShardBeanResolver, mockShardBeanEnricher, mockDataSource,
            mockConnection, mockStatement, mockResultset);
}
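
The test above records mockConnection.setReadOnly(true) as one step in a longer expected call sequence. As a standalone illustration only (the class name below is invented and is not part of the project above), the same expectation can be recorded and verified in isolation with EasyMock:

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.sql.Connection;
import java.sql.SQLException;

public class SetReadOnlyExpectationSketch {

    public static void main(String[] args) throws SQLException {
        Connection mockConnection = createMock(Connection.class);

        // Record the expectation: the code under test must put the
        // connection into read-only mode exactly once.
        mockConnection.setReadOnly(true);
        expectLastCall().once();

        replay(mockConnection);

        // Stand-in for the code under test.
        mockConnection.setReadOnly(true);

        // Fails if setReadOnly(true) was not called as recorded.
        verify(mockConnection);
    }
}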

From source file:helma.objectmodel.db.NodeManager.java

/**
 * Count the nodes contained in the child collection of the home node
 * which is defined by Relation rel.
 */
public int countNodes(Node home, Relation rel) throws Exception {
    DbMapping type = rel == null ? null : rel.otherType;
    if (type == null || !type.isRelational()) {
        // this should never be called for embedded nodes
        throw new RuntimeException("countNodes called for non-relational node " + home);
    }
    int retval = 0;
    Connection con = type.getConnection();
    // set connection to read-only mode
    if (!con.isReadOnly())
        con.setReadOnly(true);

    Statement stmt = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;
    String query = null;

    try {
        StringBuffer b = rel.getCountSelect();

        if (home.getSubnodeRelation() != null) {
            // use the manually set subnoderelation of the home node
            query = b.append(" ").append(home.getSubnodeRelation()).toString();
        } else {
            // let relation object build the query
            rel.buildQuery(b, home, false, true);
            query = b.toString();
        }

        stmt = con.createStatement();
        ResultSet rs = stmt.executeQuery(query);

        if (!rs.next()) {
            retval = 0;
        } else {
            retval = rs.getInt(1);
        }
    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_COUNT", type.getTableName(), logTimeStart, logTimeStop, query);
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return (rel.maxSize > 0) ? Math.min(rel.maxSize, retval) : retval;
}

From source file:helma.objectmodel.db.NodeManager.java

/**
 * Generates an ID for the table by finding out the maximum current value
 */
synchronized String generateMaxID(DbMapping map) throws Exception {
    String retval = null;
    Statement stmt = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;
    String q = new StringBuffer("SELECT MAX(").append(map.getIDField()).append(") FROM ")
            .append(map.getTableName()).toString();

    try {
        Connection con = map.getConnection();
        // set connection to read-only mode
        if (!con.isReadOnly())
            con.setReadOnly(true);

        stmt = con.createStatement();

        ResultSet rs = stmt.executeQuery(q);

        // check for empty table
        if (!rs.next()) {
            long currMax = map.getNewID(0);

            retval = Long.toString(currMax);
        } else {
            long currMax = rs.getLong(1);

            currMax = map.getNewID(currMax);
            retval = Long.toString(currMax);
        }
    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_MAX", map.getTableName(), logTimeStart, logTimeStop, q);
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return retval;
}

From source file:helma.objectmodel.db.NodeManager.java

/**
 *  Performs the actual deletion of a node from either the embedded or an external
 *  SQL database.
 */
public void deleteNode(IDatabase db, ITransaction txn, Node node) throws Exception {
    DbMapping dbm = node.getDbMapping();

    if ((dbm == null) || !dbm.isRelational()) {
        db.deleteNode(txn, node.getID());
    } else {
        Statement st = null;
        long logTimeStart = logSql ? System.currentTimeMillis() : 0;
        String str = new StringBuffer("DELETE FROM ").append(dbm.getTableName()).append(" WHERE ")
                .append(dbm.getIDField()).append(" = ").append(node.getID()).toString();

        try {
            Connection con = dbm.getConnection();
            // set connection to write mode
            if (con.isReadOnly())
                con.setReadOnly(false);

            st = con.createStatement();

            st.executeUpdate(str);

        } finally {
            if (logSql) {
                long logTimeStop = System.currentTimeMillis();
                logSqlStatement("SQL DELETE", dbm.getTableName(), logTimeStart, logTimeStop, str);
            }
            if (st != null) {
                try {
                    st.close();
                } catch (Exception ignore) {
                }
            }
        }
    }

    // node may still be cached via non-primary keys. mark as invalid
    node.setState(Node.INVALID);
}

From source file:helma.objectmodel.db.NodeManager.java

/**
 *  Similar to getNodeIDs, but returns a List that contains the nodes' property names instead of IDs
 */
public Vector getPropertyNames(Node home, Relation rel) throws Exception {
    DbMapping type = rel == null ? null : rel.otherType;
    if (type == null || !type.isRelational()) {
        // this should never be called for embedded nodes
        throw new RuntimeException("getPropertyNames called for non-relational node " + home);
    }
    Vector retval = new Vector();

    Connection con = rel.otherType.getConnection();
    // set connection to read-only mode
    if (!con.isReadOnly())
        con.setReadOnly(true);

    Statement stmt = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;
    String query = null;

    try {
        // NOTE: we explicitly convert the StringBuffer to a String
        // before appending to be compatible with JDK 1.3
        StringBuffer b = rel.getNamesSelect();

        if (home.getSubnodeRelation() != null) {
            b.append(" ").append(home.getSubnodeRelation());
        } else {
            // let relation object build the query
            rel.buildQuery(b, home, true, false);
        }

        stmt = con.createStatement();

        query = b.toString();

        ResultSet rs = stmt.executeQuery(query);

        while (rs.next()) {
            String n = rs.getString(1);

            if (n != null) {
                retval.add(n);
            }
        }
    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_ACCESSNAMES", type.getTableName(), logTimeStart, logTimeStop, query);
        }

        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return retval;
}

From source file:helma.objectmodel.db.NodeManager.java

/**
 *  Loads subnodes via subnode relation. This is similar to getNodeIDs, but it
 *  actually loads all nodes in one go, which is better for small node collections.
 *  This method is used when xxx.loadmode=aggressive is specified.
 */
public List getNodes(Node home, Relation rel) throws Exception {
    if ((rel == null) || (rel.otherType == null) || !rel.otherType.isRelational()) {
        // this should never be called for embedded nodes
        throw new RuntimeException("getNodes called for non-relational node " + home);
    }

    // This does not apply for groupby nodes - use getNodeIDs instead
    assert rel.groupby == null;

    List retval = new ArrayList();
    DbMapping dbm = rel.otherType;

    Connection con = dbm.getConnection();
    // set connection to read-only mode
    if (!con.isReadOnly())
        con.setReadOnly(true);

    Statement stmt = con.createStatement();
    DbColumn[] columns = dbm.getColumns();
    Relation[] joins = dbm.getJoins();
    String query = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;

    try {
        StringBuffer b = dbm.getSelect(rel);

        if (home.getSubnodeRelation() != null) {
            b.append(home.getSubnodeRelation());
        } else {
            // let relation object build the query
            rel.buildQuery(b, home, true, false);
        }

        query = b.toString();

        if (rel.maxSize > 0) {
            stmt.setMaxRows(rel.maxSize);
        }

        ResultSet rs = stmt.executeQuery(query);

        while (rs.next()) {
            // create new Nodes.
            Node node = createNode(rel.otherType, rs, columns, 0);
            if (node == null) {
                continue;
            }
            Key primKey = node.getKey();

            retval.add(new NodeHandle(primKey));

            registerNewNode(node, null);

            fetchJoinedNodes(rs, joins, columns.length);
        }

    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_ALL", dbm.getTableName(), logTimeStart, logTimeStop, query);
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return retval;
}

From source file:helma.objectmodel.db.NodeManager.java

String generateSequenceID(DbMapping map) throws Exception {
    Statement stmt = null;
    String retval = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;
    String q;
    if (map.isOracle()) {
        q = new StringBuffer("SELECT ").append(map.getIDgen()).append(".nextval FROM dual").toString();
    } else if (map.isPostgreSQL() || map.isH2()) {
        q = new StringBuffer("SELECT nextval('").append(map.getIDgen()).append("')").toString();
    } else {
        throw new RuntimeException("Unable to generate sequence: unknown DB");
    }

    try {
        Connection con = map.getConnection();
        // TODO is it necessary to set connection to write mode here?
        if (con.isReadOnly())
            con.setReadOnly(false);

        stmt = con.createStatement();

        ResultSet rs = stmt.executeQuery(q);

        if (!rs.next()) {
            throw new SQLException("Error creating ID from Sequence: empty recordset");
        }

        retval = rs.getString(1);
    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_NEXTVAL", map.getTableName(), logTimeStart, logTimeStop, q);
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return retval;
}

From source file:helma.objectmodel.db.NodeManager.java

/**
 *  Loads subnodes via subnode relation. Only the ID index is loaded; the nodes are
 *  loaded later on demand.
 */
public List getNodeIDs(Node home, Relation rel) throws Exception {
    DbMapping type = rel == null ? null : rel.otherType;
    if (type == null || !type.isRelational()) {
        // this should never be called for embedded nodes
        throw new RuntimeException("getNodeIDs called for non-relational node " + home);
    }
    List retval = new ArrayList();

    // if we do a groupby query (creating an intermediate layer of groupby nodes),
    // retrieve the value of that field instead of the primary key
    Connection con = type.getConnection();
    // set connection to read-only mode
    if (!con.isReadOnly())
        con.setReadOnly(true);

    Statement stmt = null;
    long logTimeStart = logSql ? System.currentTimeMillis() : 0;
    String query = null;

    try {
        StringBuffer b = rel.getIdSelect();

        if (home.getSubnodeRelation() != null) {
            // subnode relation was explicitly set
            query = b.append(" ").append(home.getSubnodeRelation()).toString();
        } else {
            // let relation object build the query
            rel.buildQuery(b, home, true, false);
            query = b.toString();
        }

        stmt = con.createStatement();

        if (rel.maxSize > 0) {
            stmt.setMaxRows(rel.maxSize);
        }

        ResultSet result = stmt.executeQuery(query);

        // problem: how do we derive a SyntheticKey from a not-yet-persistent Node?
        Key k = (rel.groupby != null) ? home.getKey() : null;

        while (result.next()) {
            String kstr = result.getString(1);

            // jump over null values - this can happen especially when the selected
            // column is a group-by column.
            if (kstr == null) {
                continue;
            }

            // make the proper key for the object, either a generic DB key or a groupby key
            Key key = (rel.groupby == null) ? (Key) new DbKey(rel.otherType, kstr)
                    : (Key) new SyntheticKey(k, kstr);
            retval.add(new NodeHandle(key));

            // if these are groupby nodes, evict nullNode keys
            if (rel.groupby != null) {
                Node n = (Node) cache.get(key);

                if ((n != null) && n.isNullNode()) {
                    evictKey(key);
                }
            }
        }
    } finally {
        if (logSql) {
            long logTimeStop = System.currentTimeMillis();
            logSqlStatement("SQL SELECT_IDS", type.getTableName(), logTimeStart, logTimeStop, query);
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ignore) {
            }
        }
    }

    return retval;
}

From source file:helma.objectmodel.db.NodeManager.java

private Node getNodeByKey(ITransaction txn, DbKey key) throws Exception {
    // Note: Key must be a DbKey, otherwise will not work for relational objects
    Node node = null;
    DbMapping dbm = app.getDbMapping(key.getStorageName());
    String kstr = key.getID();

    if ((dbm == null) || !dbm.isRelational()) {
        node = (Node) db.getNode(txn, kstr);
        if ((node != null) && (dbm != null)) {
            node.setDbMapping(dbm);
        }
    } else {
        String idfield = dbm.getIDField();

        Statement stmt = null;
        String query = null;
        long logTimeStart = logSql ? System.currentTimeMillis() : 0;

        try {
            Connection con = dbm.getConnection();
            // set connection to read-only mode
            if (!con.isReadOnly())
                con.setReadOnly(true);

            stmt = con.createStatement();

            DbColumn[] columns = dbm.getColumns();
            Relation[] joins = dbm.getJoins();

            StringBuffer b = dbm.getSelect(null).append("WHERE ");
            dbm.appendCondition(b, idfield, kstr);
            dbm.addJoinConstraints(b, " AND ");
            query = b.toString();

            ResultSet rs = stmt.executeQuery(query);

            if (!rs.next()) {
                return null;
            }
            node = createNode(dbm, rs, columns, 0);

            fetchJoinedNodes(rs, joins, columns.length);

            if (rs.next()) {
                app.logError("Warning: More than one value returned for query " + query);
            }
        } finally {
            if (logSql) {
                long logTimeStop = System.currentTimeMillis();
                logSqlStatement("SQL SELECT_BYKEY", dbm.getTableName(), logTimeStart, logTimeStop, query);
            }
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (Exception ignore) {
                    // ignore
                }
            }
        }
    }

    return node;
}

From source file:helma.objectmodel.db.NodeManager.java

private Node getNodeByRelation(ITransaction txn, Node home, String kstr, Relation rel, DbMapping dbm)
        throws Exception {
    Node node = null;

    if (rel != null && rel.virtual) {
        if (rel.needsPersistence()) {
            node = (Node) home.createNode(kstr);
        } else {
            node = new Node(home, kstr, safe, rel.prototype);
        }

        // set prototype and dbmapping on the newly created virtual/collection node
        node.setPrototype(rel.prototype);
        node.setDbMapping(rel.getVirtualMapping());
    } else if (rel != null && rel.groupby != null) {
        node = home.getGroupbySubnode(kstr, false);

        if (node == null && (dbm == null || !dbm.isRelational())) {
            node = (Node) db.getNode(txn, kstr);
        }
        return node;
    } else if (rel == null || dbm == null || !dbm.isRelational()) {
        node = (Node) db.getNode(txn, kstr);
        node.setDbMapping(dbm);
        return node;
    } else {
        Statement stmt = null;
        String query = null;
        long logTimeStart = logSql ? System.currentTimeMillis() : 0;

        try {
            Connection con = dbm.getConnection();
            // set connection to read-only mode
            if (!con.isReadOnly())
                con.setReadOnly(true);
            DbColumn[] columns = dbm.getColumns();
            Relation[] joins = dbm.getJoins();
            StringBuffer b = dbm.getSelect(rel);

            if (home.getSubnodeRelation() != null && !rel.isComplexReference()) {
                // combine our key with the constraints in the manually set subnode relation
                b.append(" WHERE ");
                dbm.appendCondition(b, rel.accessName, kstr);
                // add join constraints in case this is an old oracle style join
                dbm.addJoinConstraints(b, " AND ");
                // add potential constraints from manually set subnodeRelation
                String subrel = home.getSubnodeRelation().trim();
                if (subrel.length() > 5) {
                    b.append(" AND (");
                    b.append(subrel.substring(5).trim());
                    b.append(")");
                }
            } else {
                rel.buildQuery(b, home, dbm, kstr, false, false);
            }

            stmt = con.createStatement();

            query = b.toString();

            ResultSet rs = stmt.executeQuery(query);

            if (!rs.next()) {
                return null;
            }

            node = createNode(dbm, rs, columns, 0);

            fetchJoinedNodes(rs, joins, columns.length);

            if (rs.next()) {
                app.logError("Warning: More than one value returned for query " + query);
            }

        } finally {
            if (logSql) {
                long logTimeStop = System.currentTimeMillis();
                logSqlStatement("SQL SELECT_BYRELATION", dbm.getTableName(), logTimeStart, logTimeStop, query);
            }
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (Exception ignore) {
                    // ignore
                }
            }
        }
    }

    return node;
}