Example usage for java.sql PreparedStatement getResultSet

List of usage examples for java.sql PreparedStatement getResultSet

Introduction

On this page you can find example usages for java.sql PreparedStatement getResultSet.

Prototype

ResultSet getResultSet() throws SQLException;

Document

Retrieves the current result as a ResultSet object.
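
The project examples in the Usage section below all follow the same basic pattern: call execute(), which returns true when the first result is a ResultSet, then fetch that result with getResultSet(). Here is a minimal sketch of that pattern; the connection variable, table, and column names are hypothetical.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GetResultSetExample {

    /** Prints the first column of every matching row (hypothetical table and column). */
    public static void printNames(Connection conn) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement(
                "SELECT name FROM example_table WHERE name LIKE ?")) {
            ps.setString(1, "A%");
            // execute() returns true if the first result is a ResultSet
            if (ps.execute()) {
                try (ResultSet rs = ps.getResultSet()) {
                    while (rs.next()) {
                        System.out.println(rs.getString("name"));
                    }
                }
            }
        }
    }
}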

Usage

From source file:org.zaproxy.zap.extension.websocket.db.TableWebSocket.java

/**
 * Filter out and count messages according to payloadFilter.
 *
 * @param criteria message criteria used to build the selection statement
 * @param opcodes Null when all opcodes should be retrieved.
 * @param inScopeChannelIds channel ids that are in scope
 * @param payloadFilter Null when all payloads should be retrieved.
 * @param payloadLength maximum number of payload characters to read, or -1 to read all
 * @return number of messages that fulfill the given criteria and payload filter
 * @throws DatabaseException
 */
private int countMessageWithPayloadFilter(WebSocketMessageDTO criteria, List<Integer> opcodes,
        List<Integer> inScopeChannelIds, WebSocketMessagesPayloadFilter payloadFilter, int payloadLength)
        throws DatabaseException {
    String query = "SELECT m.opcode, m.payload_utf8 FROM websocket_message AS m "
            + "LEFT OUTER JOIN websocket_message_fuzz f "
            + "ON m.message_id = f.message_id AND m.channel_id = f.channel_id " + "<where> ";
    int count = 0;
    try {
        PreparedStatement stmt = buildMessageCriteriaStatement(query, criteria, opcodes, inScopeChannelIds);
        stmt.execute();
        ResultSet resultSet = stmt.getResultSet();
        try {
            while (resultSet.next()) {
                String payload;
                // read payload
                if (resultSet.getInt("opcode") != WebSocketMessage.OPCODE_BINARY) {

                    if (payloadLength == -1) {
                        // load all characters
                        payload = resultSet.getString("payload_utf8");
                    } else {
                        Clob clob = resultSet.getClob("payload_utf8");
                        int length = Math.min(payloadLength, (int) clob.length());
                        payload = clob.getSubString(1, length);
                        clob.free();
                    }
                    if (payloadFilter.isStringValidWithPattern(payload)) {
                        count++;
                    }
                }
            }
        } finally {
            resultSet.close();
            stmt.close();
        }
    } catch (SQLException e) {
        throw new DatabaseException(e);
    }

    return count;
}

From source file:ch.elexis.data.Query.java

/**
 * Execute the {@link PreparedStatement} on the database.
 *
 * @param ps the prepared statement to execute
 * @param values string values bound to the statement parameters, in order
 * @return the first column of each result row, or null if the statement
 *         produced no result set
 */
public ArrayList<String> execute(final PreparedStatement ps, final String[] values) {
    try {
        for (int i = 0; i < values.length; i++) {
            ps.setString(i + 1, values[i]);
        }
        if (ps.execute()) {
            ArrayList<String> ret = new ArrayList<String>();
            ResultSet res = ps.getResultSet();
            while (res.next()) {
                ret.add(res.getString(1));
            }
            return ret;
        }
    } catch (Exception ex) {
        ElexisStatus status = new ElexisStatus(ElexisStatus.ERROR, CoreHub.PLUGIN_ID, ElexisStatus.CODE_NONE,
                "Fehler beim Ausfhren von " + sql.toString(), ex, ElexisStatus.LOG_ERRORS);
        throw new PersistenceException(status);
    }
    return null;
}

From source file:csiro.pidsvc.mappingstore.ManagerJson.java

@SuppressWarnings("unchecked")
public JSONArray getSettings() throws SQLException {
    PreparedStatement pst = null;
    ResultSet rs = null;
    JSONArray ret = new JSONArray();

    try {
        pst = _connection.prepareStatement("SELECT * FROM configuration");
        if (pst.execute()) {
            for (rs = pst.getResultSet(); rs.next();)
                ret.add(JSONObjectHelper.create("name", rs.getString(1), "value", rs.getString(2)));
        }
    } finally {
        if (rs != null)
            rs.close();
        if (pst != null)
            pst.close();
    }
    return ret;
}

From source file:net.solarnetwork.node.dao.jdbc.AbstractJdbcDao.java

/**
 * Persist a domain object, without using auto-generated keys.
 *
 * @param obj
 *        the domain object to persist
 * @param sqlInsert
 *        the SQL insert statement to use
 * @return the primary key created for the domain object
 */
protected Long storeDomainObjectWithoutAutogeneratedKeys(final T obj, final String sqlInsert) {
    Object result = getJdbcTemplate().execute(new PreparedStatementCreator() {

        @Override
        public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
            PreparedStatement ps = con.prepareStatement(sqlInsert);
            setStoreStatementValues(obj, ps);
            return ps;
        }
    }, new PreparedStatementCallback<Object>() {

        @Override
        public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException {
            ps.execute();
            int count = ps.getUpdateCount();
            if (count == 1 && ps.getMoreResults()) {
                ResultSet rs = ps.getResultSet();
                if (rs.next()) {
                    return rs.getObject(1);
                }
            }
            return null;
        }
    });
    if (result instanceof Long) {
        return (Long) result;
    } else if (result instanceof Number) {
        return Long.valueOf(((Number) result).longValue());
    }
    if (log.isWarnEnabled()) {
        log.warn("Unexpected (non-number) primary key returned: " + result);
    }
    return null;
}

From source file:org.openmrs.util.databasechange.MigrateConceptReferenceTermChangeSet.java

/**
 * @see liquibase.change.custom.CustomTaskChange#execute(liquibase.database.Database)
 */
@Override
public void execute(Database database) throws CustomChangeException {
    final JdbcConnection connection = (JdbcConnection) database.getConnection();
    Boolean prevAutoCommit = null;

    PreparedStatement selectTypes = null;
    PreparedStatement batchUpdateMap = null;
    PreparedStatement selectMap = null;
    PreparedStatement updateMapTerm = null;
    PreparedStatement insertTerm = null;
    PreparedStatement updateMapType = null;

    try {
        prevAutoCommit = connection.getAutoCommit();
        connection.setAutoCommit(false);

        //Prepare a list of types and their ids.
        Map<String, Integer> typesToIds = new HashMap<String, Integer>();

        selectTypes = connection.prepareStatement("select * from concept_map_type");
        selectTypes.execute();
        ResultSet selectTypeResult = selectTypes.getResultSet();

        while (selectTypeResult.next()) {
            typesToIds.put(selectTypeResult.getString("name").trim().toUpperCase(),
                    selectTypeResult.getInt("concept_map_type_id"));
        }
        selectTypes.close();

        //The FK on concept_reference_term_id is not yet created so we are safe to copy over IDs. 
        //The trims are done to be able to compare properly.
        batchUpdateMap = connection.prepareStatement(
                "update concept_reference_map set" + " concept_reference_term_id = concept_map_id,"
                        + " source_code = trim(source_code), comment = trim(comment)");
        batchUpdateMap.execute();
        batchUpdateMap.close();

        //Preparing statements for use in the loop.
        updateMapTerm = connection.prepareStatement(
                "update concept_reference_map set" + " concept_reference_term_id = ? where concept_map_id = ?");
        insertTerm = connection.prepareStatement("insert into concept_reference_term"
                + " (concept_reference_term_id, uuid, concept_source_id, code, creator, date_created, description)"
                + " values (?, ?, ?, ?, ?, ?, ?)");
        updateMapType = connection.prepareStatement(
                "update concept_reference_map set" + " concept_map_type_id = ? where concept_map_id = ?");

        int prevSource = -1;
        String prevSourceCode = null;
        String prevComment = null;
        int prevInsertedTerm = -1;

        //In addition to source and source_code we order by UUID to always insert the same term if run on different systems.
        selectMap = connection.prepareStatement(
                "select * from concept_reference_map" + " order by source, source_code, uuid");
        selectMap.execute();

        final ResultSet selectMapResult = selectMap.getResultSet();

        while (selectMapResult.next()) {
            final int conceptMapId = selectMapResult.getInt("concept_map_id");
            final int source = selectMapResult.getInt("source");
            final String sourceCode = selectMapResult.getString("source_code");
            final String comment = selectMapResult.getString("comment");
            final int creator = selectMapResult.getInt("creator");
            final Date dateCreated = selectMapResult.getDate("date_created");
            final String uuid = selectMapResult.getString("uuid");

            final Integer mapTypeId = determineMapTypeId(comment, typesToIds);
            final int updatedMapTypeId = (mapTypeId == null) ? typesToIds.get(DEFAULT_CONCEPT_MAP_TYPE)
                    : mapTypeId;
            updateMapType.setInt(1, updatedMapTypeId);
            updateMapType.setInt(2, conceptMapId);
            updateMapType.execute();
            if (updateMapType.getUpdateCount() != 1) {
                throw new CustomChangeException("Failed to set map type: " + mapTypeId + " for map: "
                        + conceptMapId + ", updated rows: " + updateMapType.getUpdateCount());
            }

            if (source == prevSource && (sourceCode == prevSourceCode
                    || (sourceCode != null && sourceCode.equals(prevSourceCode)))) {
                if (mapTypeId == null && comment != null && !comment.equals(prevComment)) {
                    log.warn("Lost comment '" + comment + "' for map " + conceptMapId + ". Preserved comment "
                            + prevComment);
                }

                //We need to use the last inserted term.
                updateMapTerm.setInt(1, prevInsertedTerm);
                updateMapTerm.setInt(2, conceptMapId);

                updateMapTerm.execute();
                if (updateMapTerm.getUpdateCount() != 1) {
                    throw new CustomChangeException(
                            "Failed to set reference term: " + prevInsertedTerm + " for map: " + conceptMapId
                                    + ", updated rows: " + updateMapTerm.getUpdateCount());
                }
            } else {
                insertTerm.setInt(1, conceptMapId);
                //We need to guarantee that UUIDs are always the same when run on different systems.
                insertTerm.setString(2, UUID.nameUUIDFromBytes(uuid.getBytes()).toString());
                insertTerm.setInt(3, source);
                insertTerm.setString(4, sourceCode);
                insertTerm.setInt(5, creator);
                insertTerm.setDate(6, dateCreated);
                if (mapTypeId == null) {
                    insertTerm.setString(7, comment);
                } else {
                    insertTerm.setString(7, null);
                }

                insertTerm.execute();

                prevInsertedTerm = conceptMapId;
            }

            prevSource = source;
            prevSourceCode = sourceCode;
            prevComment = comment;
        }
        selectMap.close();
        updateMapType.close();
        updateMapTerm.close();
        insertTerm.close();

        connection.commit();
    } catch (Exception e) {
        try {
            if (connection != null) {
                connection.rollback();
            }
        } catch (Exception ex) {
            log.error("Failed to rollback", ex);
        }

        throw new CustomChangeException(e);
    } finally {
        closeStatementQuietly(selectTypes);
        closeStatementQuietly(batchUpdateMap);
        closeStatementQuietly(selectMap);
        closeStatementQuietly(updateMapTerm);
        closeStatementQuietly(insertTerm);
        closeStatementQuietly(updateMapType);

        if (connection != null && prevAutoCommit != null) {
            try {
                connection.setAutoCommit(prevAutoCommit);
            } catch (DatabaseException e) {
                log.error("Failed to reset auto commit", e);
            }
        }
    }
}

From source file:at.alladin.rmbt.statisticServer.export.ExportResource.java

@Get
public Representation request(final String entity) {
    //Before doing anything => check if a cached file already exists and is new enough
    String property = System.getProperty("java.io.tmpdir");

    final String filename_zip;
    final String filename_csv;

    //allow filtering by month/year
    int year = -1;
    int month = -1;
    int hours = -1;
    boolean hoursExport = false;
    boolean dateExport = false;

    if (getRequest().getAttributes().containsKey("hours")) { // export by hours
        try {
            hours = Integer.parseInt(getRequest().getAttributes().get("hours").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (hours <= 7 * 24 && hours >= 1) { //limit to 1 week (avoid DoS)
            hoursExport = true;
        }
    } else if (!hoursExport && getRequest().getAttributes().containsKey("year")) { // export by month/year 
        try {
            year = Integer.parseInt(getRequest().getAttributes().get("year").toString());
            month = Integer.parseInt(getRequest().getAttributes().get("month").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (year < 2099 && month > 0 && month <= 12 && year > 2000) {
            dateExport = true;
        }
    }

    if (hoursExport) {
        filename_zip = FILENAME_ZIP_HOURS.replace("%HOURS%", String.format("%03d", hours));
        filename_csv = FILENAME_CSV_HOURS.replace("%HOURS%", String.format("%03d", hours));
        cacheThresholdMs = 5 * 60 * 1000; //5 minutes
    } else if (dateExport) {
        filename_zip = FILENAME_ZIP.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        filename_csv = FILENAME_CSV.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        cacheThresholdMs = 23 * 60 * 60 * 1000; //23 hours
    } else {
        filename_zip = FILENAME_ZIP_CURRENT;
        filename_csv = FILENAME_CSV_CURRENT;
        cacheThresholdMs = 3 * 60 * 60 * 1000; //3 hours
    }

    final File cachedFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
    final File generatingFile = new File(
            property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
    if (cachedFile.exists()) {

        //check if file has been recently created OR a file is currently being created
        if (((cachedFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())
                || (generatingFile.exists()
                        && (generatingFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())) {

            //if so, return the cached file instead of a cost-intensive new one
            final OutputRepresentation result = new OutputRepresentation(
                    zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {

                @Override
                public void write(OutputStream out) throws IOException {
                    InputStream is = new FileInputStream(cachedFile);
                    IOUtils.copy(is, out);
                    out.close();
                }

            };
            if (zip) {
                final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
                disposition.setFilename(filename_zip);
                result.setDisposition(disposition);
            }
            return result;

        }
    }

    final String timeClause;

    if (dateExport)
        timeClause = " AND (EXTRACT (month FROM t.time AT TIME ZONE 'UTC') = " + month
                + ") AND (EXTRACT (year FROM t.time AT TIME ZONE 'UTC') = " + year + ") ";
    else if (hoursExport)
        timeClause = " AND time > now() - interval '" + hours + " hours' ";
    else
        timeClause = " AND time > current_date - interval '31 days' ";

    final String sql = "SELECT" + " ('P' || t.open_uuid) open_uuid,"
            + " ('O' || t.open_test_uuid) open_test_uuid,"
            + " to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') time_utc,"
            + " nt.group_name cat_technology," + " nt.name network_type,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_lat" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_lat*1111)/1111" + " ELSE null"
            + " END) lat,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_long" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_long*741)/741 " + " ELSE null"
            + " END) long," + " (CASE WHEN ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN"
            + " 'rastered'" + //make raster transparent
            " ELSE t.geo_provider" + " END) loc_src,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') "
            + " THEN round(t.geo_accuracy::float * 10)/10 "
            + " WHEN (t.geo_accuracy < 100) AND ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN 100"
            + // limit accuracy to 100m
            " WHEN (t.geo_accuracy < ?) THEN round(t.geo_accuracy::float * 10)/10"
            + " ELSE null END) loc_accuracy, "
            + " (CASE WHEN (t.zip_code < 1000 OR t.zip_code > 9999) THEN null ELSE t.zip_code END) zip_code,"
            + " t.gkz gkz," + " t.country_location country_location," + " t.speed_download download_kbit,"
            + " t.speed_upload upload_kbit," + " round(t.ping_median::float / 100000)/10 ping_ms,"
            + " t.lte_rsrp," + " t.lte_rsrq," + " ts.name server_name," + " duration test_duration,"
            + " num_threads," + " t.plattform platform," + " COALESCE(adm.fullname, t.model) model,"
            + " client_software_version client_version," + " network_operator network_mcc_mnc,"
            + " network_operator_name network_name," + " network_sim_operator sim_mcc_mnc," + " nat_type,"
            + " public_ip_asn asn," + " client_public_ip_anonymized ip_anonym,"
            + " (ndt.s2cspd*1000)::int ndt_download_kbit," + " (ndt.c2sspd*1000)::int ndt_upload_kbit,"
            + " COALESCE(t.implausible, false) implausible," + " t.signal_strength" + " FROM test t"
            + " LEFT JOIN network_type nt ON nt.uid=t.network_type"
            + " LEFT JOIN device_map adm ON adm.codename=t.model"
            + " LEFT JOIN test_server ts ON ts.uid=t.server_id" + " LEFT JOIN test_ndt ndt ON t.uid=ndt.test_id"
            + " WHERE " + " t.deleted = false" + timeClause + " AND status = 'FINISHED'" + " ORDER BY t.uid";

    final String[] columns;
    final List<String[]> data = new ArrayList<>();
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(sql);

        //insert filter for accuracy
        double accuracy = Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT"));
        ps.setDouble(1, accuracy);
        ps.setDouble(2, accuracy);
        ps.setDouble(3, accuracy);
        ps.setDouble(4, accuracy);
        ps.setDouble(5, accuracy);
        ps.setDouble(6, accuracy);

        if (!ps.execute())
            return null;
        rs = ps.getResultSet();

        final ResultSetMetaData meta = rs.getMetaData();
        final int colCnt = meta.getColumnCount();
        columns = new String[colCnt];
        for (int i = 0; i < colCnt; i++)
            columns[i] = meta.getColumnName(i + 1);

        while (rs.next()) {
            final String[] line = new String[colCnt];

            for (int i = 0; i < colCnt; i++) {
                final Object obj = rs.getObject(i + 1);
                line[i] = obj == null ? null : obj.toString();
            }

            data.add(line);
        }
    } catch (final SQLException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (ps != null)
                ps.close();
        } catch (final SQLException e) {
            e.printStackTrace();
        }
    }

    final OutputRepresentation result = new OutputRepresentation(
            zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {
        @Override
        public void write(OutputStream out) throws IOException {
            //cache in file => create a temporary file first (to
            // handle errors while fulfilling a request)
            String property = System.getProperty("java.io.tmpdir");
            final File cachedFile = new File(
                    property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
            OutputStream outf = new FileOutputStream(cachedFile);

            if (zip) {
                final ZipOutputStream zos = new ZipOutputStream(outf);
                final ZipEntry zeLicense = new ZipEntry("LIZENZ.txt");
                zos.putNextEntry(zeLicense);
                final InputStream licenseIS = getClass().getResourceAsStream("DATA_LICENSE.txt");
                IOUtils.copy(licenseIS, zos);
                licenseIS.close();

                final ZipEntry zeCsv = new ZipEntry(filename_csv);
                zos.putNextEntry(zeCsv);
                outf = zos;
            }

            final OutputStreamWriter osw = new OutputStreamWriter(outf);
            final CSVPrinter csvPrinter = new CSVPrinter(osw, csvFormat);

            for (final String c : columns)
                csvPrinter.print(c);
            csvPrinter.println();

            for (final String[] line : data) {
                for (final String f : line)
                    csvPrinter.print(f);
                csvPrinter.println();
            }
            csvPrinter.flush();

            if (zip)
                outf.close();

            //if we reach this code, the data is now cached in a temporary file
            //so, rename the file for "production use"
            //concurrency issues should be solved by the operating system
            File newCacheFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
            Files.move(cachedFile.toPath(), newCacheFile.toPath(), StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);

            FileInputStream fis = new FileInputStream(newCacheFile);
            IOUtils.copy(fis, out);
            fis.close();
            out.close();
        }
    };
    if (zip) {
        final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
        disposition.setFilename(filename_zip);
        result.setDisposition(disposition);
    }

    return result;
}

From source file:com.alibaba.wasp.jdbc.TestPreparedStatement.java

public void testGetMoreResults() throws SQLException {
    Statement stat = conn.createStatement();
    PreparedStatement prep;
    ResultSet rs;
    stat.execute("CREATE TABLE TEST(ID INT)");
    stat.execute("INSERT INTO TEST VALUES(1)");

    prep = conn.prepareStatement("SELECT * FROM TEST");
    // just to check if it doesn't throw an exception - it may be null
    prep.getMetaData();
    assertTrue(prep.execute());
    rs = prep.getResultSet();
    assertFalse(prep.getMoreResults());
    assertEquals(-1, prep.getUpdateCount());
    // supposed to be closed now
    assertThrows(SQLErrorCode.OBJECT_CLOSED, rs).next();
    assertEquals(-1, prep.getUpdateCount());

    prep = conn.prepareStatement("UPDATE TEST SET ID = 2");
    assertFalse(prep.execute());
    assertEquals(1, prep.getUpdateCount());
    assertFalse(prep.getMoreResults(Statement.CLOSE_CURRENT_RESULT));
    assertEquals(-1, prep.getUpdateCount());
    // supposed to be closed now
    assertThrows(SQLErrorCode.OBJECT_CLOSED, rs).next();
    assertEquals(-1, prep.getUpdateCount());

    prep = conn.prepareStatement("DELETE FROM TEST");
    prep.executeUpdate();
    assertFalse(prep.getMoreResults());
    assertEquals(-1, prep.getUpdateCount());
}

From source file:org.openbravo.test.accounting.RecordID2Test.java

private BigDecimal getBalance(String strRecordID2, String strTableId, String strCAcctSchemaId,
        String strAccountId) {
    String sql = "select coalesce(sum(amtacctdr-amtacctcr),0) " //
            + "from fact_Acct where record_ID2 = ?" + " and ad_table_id = ?"
            + " and c_acctschema_id = ? and account_id = ?";

    PreparedStatement sqlQuery = null;
    ResultSet rs = null;
    try {
        sqlQuery = new DalConnectionProvider(false).getPreparedStatement(sql);
        sqlQuery.setString(1, strRecordID2);
        sqlQuery.setString(2, strTableId);
        sqlQuery.setString(3, strCAcctSchemaId);
        sqlQuery.setString(4, strAccountId);
        sqlQuery.setMaxRows(1); // must be set before execute() to take effect
        sqlQuery.execute();
        rs = sqlQuery.getResultSet();
        while (rs.next()) {
            return rs.getBigDecimal(1);
        }
    } catch (Exception e) {
        assertFalse(true);
        log.error("Error when executing query", e);
    } finally {
        try {
            if (sqlQuery != null) {
                sqlQuery.close();
            }
            if (rs != null) {
                rs.close();
            }
        } catch (Exception e) {
            log.error("Error when closing statement", e);
        }
    }
    return BigDecimal.ZERO;
}

From source file:csiro.pidsvc.mappingstore.ManagerJson.java

public JSONObject checkMappingPathExists(String mappingPath) throws SQLException {
    PreparedStatement pst = null;
    ResultSet rs = null;
    boolean exists = false;

    try {
        pst = _connection.prepareStatement("SELECT 1 FROM mapping WHERE mapping_path = ?");
        pst.setString(1, mappingPath);

        if (pst.execute()) {
            rs = pst.getResultSet();
            exists = rs.next();
        }
    } catch (Exception e) {
        _logger.debug(e);
    } finally {
        if (rs != null)
            rs.close();
        if (pst != null)
            pst.close();
    }
    return JSONObjectHelper.create("exists", exists, "mapping_path", mappingPath);
}

From source file:csiro.pidsvc.mappingstore.ManagerJson.java

@SuppressWarnings("unchecked")
public JSONObject getConditionSetConfig(String name) throws SQLException {
    PreparedStatement pst = null;
    ResultSet rs = null;
    JSONObject ret = new JSONObject();

    try {
        pst = _connection.prepareStatement("SELECT * FROM condition_set WHERE name = ?;");
        pst.setString(1, name);

        if (pst.execute()) {
            rs = pst.getResultSet();
            if (rs.next()) {
                ret.put("name", rs.getString("name"));
                ret.put("description", rs.getString("description"));
                ret.put("conditions", getConditionsBySetId(rs.getInt("condition_set_id")));
            }
        }
    } finally {
        if (rs != null)
            rs.close();
        if (pst != null)
            pst.close();
    }
    return ret;
}