List of usage examples for java.sql PreparedStatement setMaxRows

void setMaxRows(int max) throws SQLException;

Sets the limit for the maximum number of rows that any ResultSet object generated by this Statement object can contain to the given number. If the limit is exceeded, the excess rows are silently dropped.
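Before the real-world examples, a minimal, self-contained sketch of the call in isolation. The JDBC URL, the users table, and the column names here are hypothetical placeholders, not taken from any of the projects below:

import java.sql.*;

public class SetMaxRowsDemo {
    public static void main(String[] args) throws SQLException {
        // JDBC URL and schema are hypothetical; any driver on the classpath behaves the same way.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             PreparedStatement ps = con.prepareStatement("SELECT id, name FROM users WHERE email = ?")) {
            ps.setString(1, "alice@example.com");
            ps.setMaxRows(1); // hard cap: the driver silently drops any rows beyond the first
            try (ResultSet rs = ps.executeQuery()) {
                if (rs.next()) {
                    System.out.println(rs.getLong("id") + " " + rs.getString("name"));
                }
            }
        }
    }
}

Passing 0 removes the limit again; a negative value makes the driver throw SQLException.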
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Load a KiWiNamespace with the given prefix, or null if the namespace does not exist. The method will first
 * look in the node cache for cached nodes. If no cache entry is found, it will run a database query
 * ("load.namespace_prefix").
 *
 * @param prefix the prefix to look for
 * @return the KiWiNamespace with this prefix or null if it does not exist
 * @throws SQLException
 */
public KiWiNamespace loadNamespaceByPrefix(String prefix) throws SQLException {
    KiWiNamespace element = namespacePrefixCache.get(prefix);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    // prepare a query; we will only iterate once, read only, and need only one result row since the prefix is unique
    PreparedStatement query = getPreparedStatement("load.namespace_prefix");
    query.setString(1, prefix);
    query.setMaxRows(1);

    // run the database query and if it yields a result, construct a new namespace; the method call will take
    // care of caching the constructed namespace for future calls
    ResultSet result = query.executeQuery();
    try {
        if (result.next()) {
            return constructNamespaceFromDatabase(result);
        } else {
            return null;
        }
    } finally {
        result.close();
    }
}
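A point worth keeping in mind with this single-row lookup pattern: setMaxRows is a hard cap on how many rows the ResultSet may ever contain, whereas setFetchSize is only a hint about how many rows the driver should transfer per round trip and may be ignored. A short fragment in the context of the example above:

// setMaxRows truncates; setFetchSize merely advises the driver.
PreparedStatement query = getPreparedStatement("load.namespace_prefix");
query.setString(1, prefix);
query.setMaxRows(1);   // hard limit: the ResultSet will never expose more than one row
query.setFetchSize(1); // hint only: a driver is free to ignore it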
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Load a KiWiNamespace with the given uri, or null if the namespace does not exist. The method will first
 * look in the node cache for cached nodes. If no cache entry is found, it will run a database query
 * ("load.namespace_uri").
 *
 * @param uri the uri to look for
 * @return the KiWiNamespace with this uri or null if it does not exist
 * @throws SQLException
 */
public KiWiNamespace loadNamespaceByUri(String uri) throws SQLException {
    KiWiNamespace element = namespaceUriCache.get(uri);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    // prepare a query; we will only iterate once, read only, and need only one result row since the uri is unique
    PreparedStatement query = getPreparedStatement("load.namespace_uri");
    query.setString(1, uri);
    query.setMaxRows(1);

    // run the database query and if it yields a result, construct a new namespace; the method call will take
    // care of caching the constructed namespace for future calls
    ResultSet result = query.executeQuery();
    try {
        if (result.next()) {
            return constructNamespaceFromDatabase(result);
        } else {
            return null;
        }
    } finally {
        result.close();
    }
}
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Load a KiWiNode by database ID. The method will first look in the node cache for cached nodes. If
 * no cache entry is found, it will run a database query ('load.node_by_id') on the NODES table and
 * construct an appropriate subclass instance of KiWiNode with the obtained values. The result will be
 * constructed based on the value of the NTYPE column as follows:
 * <ul>
 * <li>'uri' - KiWiUriResource using the id and svalue (as URI) columns</li>
 * <li>'bnode' - KiWiAnonResource using the id and svalue (as AnonId) columns</li>
 * <li>'string' - KiWiStringLiteral using the id, svalue (literal value), lang (literal
 *     language) and ltype (literal type) columns</li>
 * <li>'int' - KiWiIntLiteral using the id, svalue (string value), ivalue (integer value)
 *     and ltype (literal type) columns</li>
 * <li>'double' - KiWiDoubleLiteral using the id, svalue (string value), dvalue (double
 *     value) and ltype (literal type) columns</li>
 * <li>'boolean' - KiWiBooleanLiteral using the id, svalue (string value), bvalue (boolean
 *     value) and ltype (literal type) columns</li>
 * <li>'date' - KiWiDateLiteral using the id, svalue (string value), tvalue (time value)
 *     and ltype (literal type) columns</li>
 * </ul>
 * When a node is loaded from the database, it will be added to the different caches to speed up
 * subsequent requests.
 *
 * @param id the database id of the node to load
 * @return an instance of a KiWiNode subclass representing the node with the given database id;
 *         type depends on value of the ntype column
 */
public KiWiNode loadNodeById(long id) throws SQLException {
    // look in cache
    KiWiNode element = nodeCache.get(id);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    // prepare a query; we will only iterate once, read only, and need only one result row since the id is unique
    PreparedStatement query = getPreparedStatement("load.node_by_id");
    synchronized (query) {
        query.setLong(1, id);
        query.setMaxRows(1);

        // run the database query and if it yields a result, construct a new node; the method call will take care
        // of caching the constructed node for future calls
        ResultSet result = query.executeQuery();
        try {
            if (result.next()) {
                return constructNodeFromDatabase(result);
            } else {
                return null;
            }
        } finally {
            result.close();
        }
    }
}
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Batch load the nodes with the given ids. This method aims to offer performance improvements by reducing
 * database roundtrips.
 *
 * @param ids array of ids to retrieve
 * @return array of nodes corresponding to these ids (in the same order)
 * @throws SQLException
 */
public KiWiNode[] loadNodesByIds(long... ids) throws SQLException {
    requireJDBCConnection();

    KiWiNode[] result = new KiWiNode[ids.length];

    // first look in the cache for any ids that have already been loaded
    ArrayList<Long> toFetch = new ArrayList<>(ids.length);
    for (int i = 0; i < ids.length; i++) {
        if (ids[i] != 0) {
            result[i] = nodeCache.get(ids[i]);
            if (result[i] == null) {
                toFetch.add(ids[i]);
            }
        }
    }

    if (toFetch.size() > 0) {
        // declare variables before to optimize stack allocation
        int position = 0;
        int nextBatchSize;
        PreparedStatement query;
        KiWiNode node;

        while (position < toFetch.size()) {
            nextBatchSize = computeBatchSize(position, toFetch.size());

            query = getPreparedStatement("load.nodes_by_ids", nextBatchSize);
            synchronized (query) {
                for (int i = 0; i < nextBatchSize; i++) {
                    query.setLong(i + 1, toFetch.get(position + i));
                }
                query.setMaxRows(nextBatchSize);

                // run the database query and if it yields a result, construct a new node; the method call will
                // take care of caching the constructed node for future calls
                ResultSet rows = query.executeQuery();
                try {
                    while (rows.next()) {
                        node = constructNodeFromDatabase(rows);
                        for (int i = 0; i < ids.length; i++) {
                            if (ids[i] == node.getId()) {
                                result[i] = node;
                            }
                        }
                    }
                } finally {
                    rows.close();
                }

                position += nextBatchSize;
            }
        }
    }

    return result;
}
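The helper computeBatchSize is not shown on this page. A plausible sketch, assuming the intent is to clamp each batch to a fixed maximum so that only a handful of "load.nodes_by_ids" statement variants (one per batch size) ever need to be prepared and cached; the constant and its value are assumptions, not taken from the Marmotta source:

// Hypothetical sketch of computeBatchSize: limit the number of ids fetched per
// roundtrip so the parameterized IN-list (and thus setMaxRows) stays bounded.
private static final int MAX_BATCH_SIZE = 512; // assumed constant, not from the source

private int computeBatchSize(int position, int total) {
    return Math.min(MAX_BATCH_SIZE, total - position);
}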
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
public KiWiTriple loadTripleById(long id) throws SQLException {
    // look in cache
    KiWiTriple element = tripleCache.get(id);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    // prepare a query; we will only iterate once, read only, and need only one result row since the id is unique
    PreparedStatement query = getPreparedStatement("load.triple_by_id");
    query.setLong(1, id);
    query.setMaxRows(1);

    // run the database query and if it yields a result, construct a new triple; the method call will take care
    // of caching the constructed triple for future calls
    ResultSet result = query.executeQuery();
    try {
        if (result.next()) {
            return constructTripleFromDatabase(result);
        } else {
            return null;
        }
    } finally {
        result.close();
    }
}
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Load a KiWiUriResource by URI. The method will first look in the node cache for cached nodes. If
 * no cache entry is found, it will run a database query ('load.uri_by_uri') on the NODES table and
 * construct a new KiWiUriResource using the values of the id and svalue columns.
 * <p/>
 * When a node is loaded from the database, it will be added to the different caches to speed up
 * subsequent requests.
 *
 * @param uri the URI of the resource to load
 * @return the KiWiUriResource identified by the given URI or null if it does not exist
 */
public KiWiUriResource loadUriResource(String uri) throws SQLException {
    Preconditions.checkNotNull(uri);

    // look in cache
    KiWiUriResource element = uriCache.get(uri);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    uriLock.lock();
    try {
        // prepare a query; we will only iterate once, read only, and need only one result row since the uri is unique
        PreparedStatement query = getPreparedStatement("load.uri_by_uri");
        query.setString(1, uri);
        query.setMaxRows(1);

        // run the database query and if it yields a result, construct a new node; the method call will take care
        // of caching the constructed node for future calls
        ResultSet result = query.executeQuery();
        try {
            if (result.next()) {
                return (KiWiUriResource) constructNodeFromDatabase(result);
            } else {
                return null;
            }
        } finally {
            result.close();
        }
    } finally {
        uriLock.unlock();
    }
}
From source file: org.apache.marmotta.kiwi.persistence.KiWiConnection.java
/**
 * Load a KiWiAnonResource by anonymous ID. The method will first look in the node cache for
 * cached nodes. If no cache entry is found, it will run a database query ('load.bnode_by_anonid')
 * on the NODES table and construct a new KiWiAnonResource using the values of the id and
 * svalue columns.
 * <p/>
 * When a node is loaded from the database, it will be added to the different caches to speed up
 * subsequent requests.
 *
 * @param id the anonymous ID of the resource to load
 * @return the KiWiAnonResource identified by the given internal ID or null if it does not exist
 */
public KiWiAnonResource loadAnonResource(String id) throws SQLException {
    // look in cache
    KiWiAnonResource element = bnodeCache.get(id);
    if (element != null) {
        return element;
    }

    requireJDBCConnection();

    bnodeLock.lock();
    try {
        // prepare a query; we will only iterate once, read only, and need only one result row since the
        // anonymous id is unique
        PreparedStatement query = getPreparedStatement("load.bnode_by_anonid");
        query.setString(1, id);
        query.setMaxRows(1);

        // run the database query and if it yields a result, construct a new node; the method call will take care
        // of caching the constructed node for future calls
        ResultSet result = query.executeQuery();
        try {
            if (result.next()) {
                return (KiWiAnonResource) constructNodeFromDatabase(result);
            } else {
                return null;
            }
        } finally {
            result.close();
        }
    } finally {
        bnodeLock.unlock();
    }
}
From source file: org.apache.ode.scheduler.simple.jdbc.SchedulerDAOConnectionImpl.java
@SuppressWarnings("unchecked") public List<JobDAO> dequeueImmediate(String nodeId, long maxtime, int maxjobs) throws DatabaseException { ArrayList<JobDAO> ret = new ArrayList<JobDAO>(maxjobs); Connection con = null;/*w w w. j a va 2 s . c o m*/ PreparedStatement ps = null; try { con = getConnection(); ps = con.prepareStatement(SCHEDULE_IMMEDIATE); ps.setString(1, nodeId); ps.setLong(2, maxtime); ps.setMaxRows(maxjobs); ResultSet rs = ps.executeQuery(); while (rs.next()) { Scheduler.JobDetails details = new Scheduler.JobDetails(); details.instanceId = asLong(rs.getObject("instanceId")); details.mexId = (String) rs.getObject("mexId"); details.processId = (String) rs.getObject("processId"); details.type = (String) rs.getObject("type"); details.channel = (String) rs.getObject("channel"); details.correlatorId = (String) rs.getObject("correlatorId"); details.correlationKeySet = (String) rs.getObject("correlationKeySet"); details.retryCount = asInteger(rs.getObject("retryCount")); details.inMem = rs.getBoolean("inMem"); if (rs.getObject("detailsExt") != null) { try { ObjectInputStream is = new ObjectInputStream(rs.getBinaryStream("detailsExt")); details.detailsExt = (Map<String, Object>) is.readObject(); is.close(); } catch (Exception e) { throw new DatabaseException("Error deserializing job detailsExt", e); } } { //For compatibility reasons, we check whether there are entries inside //jobDetailsExt blob, which correspond to extracted entries. If so, we //use them. Map<String, Object> detailsExt = details.getDetailsExt(); if (detailsExt.get("type") != null) { details.type = (String) detailsExt.get("type"); } if (detailsExt.get("iid") != null) { details.instanceId = (Long) detailsExt.get("iid"); } if (detailsExt.get("pid") != null) { details.processId = (String) detailsExt.get("pid"); } if (detailsExt.get("inmem") != null) { details.inMem = (Boolean) detailsExt.get("inmem"); } if (detailsExt.get("ckey") != null) { details.correlationKeySet = (String) detailsExt.get("ckey"); } if (detailsExt.get("channel") != null) { details.channel = (String) detailsExt.get("channel"); } if (detailsExt.get("mexid") != null) { details.mexId = (String) detailsExt.get("mexid"); } if (detailsExt.get("correlatorId") != null) { details.correlatorId = (String) detailsExt.get("correlatorId"); } if (detailsExt.get("retryCount") != null) { details.retryCount = Integer.parseInt((String) detailsExt.get("retryCount")); } } JobDAO job = new JobDAOImpl(rs.getLong("ts"), rs.getString("jobid"), rs.getBoolean("transacted"), details); ret.add(job); } rs.close(); ps.close(); // mark jobs as scheduled, UPDATE_SCHEDULED_SLOTS at a time int j = 0; int updateCount = 0; ps = con.prepareStatement(UPDATE_SCHEDULED); for (int updates = 1; updates <= (ret.size() / UPDATE_SCHEDULED_SLOTS) + 1; updates++) { for (int i = 1; i <= UPDATE_SCHEDULED_SLOTS; i++) { ps.setString(i, j < ret.size() ? ret.get(j).getJobId() : ""); j++; } ps.execute(); updateCount += ps.getUpdateCount(); } if (updateCount != ret.size()) { __log.error("Updating scheduled jobs failed to update all jobs; expected=" + ret.size() + " actual=" + updateCount); return null; } } catch (SQLException se) { throw new DatabaseException(se); } finally { close(ps); close(con); } return ret; }
From source file: org.apache.ode.scheduler.simple.JdbcDelegate.java
@SuppressWarnings("unchecked") public List<Job> dequeueImmediate(String nodeId, long maxtime, int maxjobs) throws DatabaseException { ArrayList<Job> ret = new ArrayList<Job>(maxjobs); Connection con = null;//from w w w . j a va 2s.co m PreparedStatement ps = null; try { con = getConnection(); ps = con.prepareStatement(SCHEDULE_IMMEDIATE); ps.setString(1, nodeId); ps.setLong(2, maxtime); ps.setMaxRows(maxjobs); ResultSet rs = ps.executeQuery(); while (rs.next()) { Scheduler.JobDetails details = new Scheduler.JobDetails(); details.instanceId = asLong(rs.getObject("instanceId")); details.mexId = (String) rs.getObject("mexId"); details.processId = (String) rs.getObject("processId"); details.type = (String) rs.getObject("type"); details.channel = (String) rs.getObject("channel"); details.correlatorId = (String) rs.getObject("correlatorId"); details.correlationKeySet = (String) rs.getObject("correlationKeySet"); details.retryCount = asInteger(rs.getObject("retryCount")); details.inMem = asBoolean(rs.getInt("inMem")); if (rs.getObject("detailsExt") != null) { try { ObjectInputStream is = new ObjectInputStream(rs.getBinaryStream("detailsExt")); details.detailsExt = (Map<String, Object>) is.readObject(); is.close(); } catch (Exception e) { throw new DatabaseException("Error deserializing job detailsExt", e); } } { //For compatibility reasons, we check whether there are entries inside //jobDetailsExt blob, which correspond to extracted entries. If so, we //use them. Map<String, Object> detailsExt = details.getDetailsExt(); if (detailsExt.get("type") != null) { details.type = (String) detailsExt.get("type"); } if (detailsExt.get("iid") != null) { details.instanceId = (Long) detailsExt.get("iid"); } if (detailsExt.get("pid") != null && detailsExt.get("pid") instanceof String) { details.processId = (String) detailsExt.get("pid"); } if (detailsExt.get("inmem") != null) { details.inMem = (Boolean) detailsExt.get("inmem"); } if (detailsExt.get("ckey") != null) { details.correlationKeySet = (String) detailsExt.get("ckey"); } if (detailsExt.get("channel") != null) { details.channel = (String) detailsExt.get("channel"); } if (detailsExt.get("mexid") != null) { details.mexId = (String) detailsExt.get("mexid"); } if (detailsExt.get("correlatorId") != null) { details.correlatorId = (String) detailsExt.get("correlatorId"); } if (detailsExt.get("retryCount") != null) { details.retryCount = Integer.parseInt((String) detailsExt.get("retryCount")); } } Job job = new Job(rs.getLong("ts"), rs.getString("jobid"), asBoolean(rs.getInt("transacted")), details); ret.add(job); } rs.close(); ps.close(); } catch (SQLException se) { throw new DatabaseException(se); } finally { close(ps); close(con); } return ret; }
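Both ODE variants cap the dequeue with setMaxRows(maxjobs) rather than putting a row limit into the SQL itself, which keeps SCHEDULE_IMMEDIATE portable across databases whose limiting syntax differs (LIMIT, TOP, FETCH FIRST). A stripped-down sketch of just that idea; the SQL text and column names are illustrative, not the actual SCHEDULE_IMMEDIATE query:

// Portable top-N: cap rows in the driver instead of using vendor-specific SQL
// (LIMIT on MySQL/H2, TOP on SQL Server, FETCH FIRST on Derby/Oracle 12c+).
PreparedStatement ps = con.prepareStatement(
        "SELECT jobid, ts FROM ODE_JOB WHERE nodeid = ? AND ts < ? ORDER BY ts");
ps.setString(1, nodeId);
ps.setLong(2, maxtime);
ps.setMaxRows(maxjobs); // at most maxjobs rows are ever returned to the caller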
From source file: org.apache.sqoop.repository.common.CommonRepositoryHandler.java
/**
 * {@inheritDoc}
 */
@Override
public MSubmission findLastSubmissionForJob(long jobId, Connection conn) {
    PreparedStatement stmt = null;
    ResultSet rs = null;

    try {
        stmt = conn.prepareStatement(crudQueries.getStmtSelectSubmissionsForJob());
        stmt.setLong(1, jobId);
        stmt.setMaxRows(1);

        rs = stmt.executeQuery();

        if (!rs.next()) {
            return null;
        }

        return loadSubmission(rs, conn);
    } catch (SQLException ex) {
        logException(ex, jobId);
        throw new SqoopException(CommonRepositoryError.COMMON_0037, ex);
    } finally {
        closeResultSets(rs);
        closeStatements(stmt);
    }
}
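For comparison, the same lookup written with try-with-resources instead of the closeResultSets/closeStatements helpers. This is an alternative sketch reusing the names from the example above, not code from the Sqoop repository:

@Override
public MSubmission findLastSubmissionForJob(long jobId, Connection conn) {
    try (PreparedStatement stmt = conn.prepareStatement(crudQueries.getStmtSelectSubmissionsForJob())) {
        stmt.setLong(1, jobId);
        stmt.setMaxRows(1); // presumably the query orders submissions so the newest row comes first
        try (ResultSet rs = stmt.executeQuery()) {
            return rs.next() ? loadSubmission(rs, conn) : null;
        }
    } catch (SQLException ex) {
        logException(ex, jobId);
        throw new SqoopException(CommonRepositoryError.COMMON_0037, ex);
    }
}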