Example usage for java.sql SQLException getLocalizedMessage

Introduction

This page collects example usages of java.sql.SQLException.getLocalizedMessage() from real-world source files.

Prototype

public String getLocalizedMessage() 

Document

Creates a localized description of this throwable.
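
Note that Throwable's default getLocalizedMessage() returns the same result as getMessage(), and SQLException does not override it, so the two methods usually produce identical text; subclasses may override it for locale-specific messages. The following minimal sketch shows the typical logging pattern; the in-memory H2 JDBC URL is only an assumption for illustration, and any JDBC driver on the classpath would do.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class LocalizedMessageDemo {
    public static void main(String[] args) {
        // Connect to a hypothetical in-memory database and run a query
        // that is guaranteed to fail, to trigger an SQLException.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            conn.createStatement().executeQuery("SELECT * FROM no_such_table");
        } catch (SQLException ex) {
            // getLocalizedMessage() falls back to getMessage() for SQLException,
            // so this prints the driver's error text alongside the SQL state
            // and vendor error code.
            System.err.println("SQLState=" + ex.getSQLState()
                    + ", errorCode=" + ex.getErrorCode()
                    + ", message=" + ex.getLocalizedMessage());
        }
    }
}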

Usage

From source file:chanupdater.ChanUpdater.java

private void cleanUp() throws WebUtilException, MalformedURLException, IOException {
    try {
        chnTbl.optimize();
        PageItemCache pic = new PageItemCache(db);
        String cmd = "DELETE FROM " + pic.getName() + " WHERE name='ChannelStats'";
        db.execute(cmd);

        // (commented-out code: obtain a Kerberos ticket, then fetch the
        // ChannelStats viewer page to repopulate the cache)
    } catch (SQLException ex) {
        String ermsg = "Error optimizing table: " + ex.getClass().getSimpleName();
        ermsg += " - " + ex.getLocalizedMessage();
        throw new WebUtilException(ermsg);
    }
}

From source file:com.splicemachine.derby.impl.load.HdfsUnsafeImportIT.java

@Test
public void testConstaintsImportNullBadDir() throws Exception {
    // DB-5017: When bad record dir is null or empty, the input file dir becomes the bad record dir
    String inputFileName = "constraintViolation.csv";
    String inputFileOrigin = getResourceDirectory() + inputFileName;
    // copy the given input file under a temp folder so that it will get cleaned up
    // this used to go under the "target/test-classes" folder but doesn't work when we execute test from
    // a different location.
    File newImportFile = tempFolder.newFile(inputFileName);
    FileUtils.copyFile(new File(inputFileOrigin), newImportFile);
    assertTrue("Import file copy failed: " + newImportFile.getCanonicalPath(), newImportFile.exists());
    String badFileName = newImportFile.getParent() + "/" + inputFileName + ".bad";

    PreparedStatement ps = methodWatcher
            .prepareStatement(format("call SYSCS_UTIL.IMPORT_DATA_UNSAFE(" + "'%s'," + // schema name
                    "'%s'," + // table name
                    "null," + // insert column list
                    "'%s'," + // file path
                    "','," + // column delimiter
                    "null," + // character delimiter
                    "null," + // timestamp format
                    "null," + // date format
                    "null," + // time format
                    "%d," + // max bad records
                    "null," + // bad record dir
                    "null," + // has one line records
                    "null)", // char set
                    spliceSchemaWatcher.schemaName, TABLE_20, newImportFile.getCanonicalPath(), 0));
    try {
        ps.execute();
        fail("Too many bad records.");
    } catch (SQLException e) {
        assertEquals("Expected too many bad records, but got: " + e.getLocalizedMessage(), "SE009",
                e.getSQLState());
    }
    boolean exists = existsBadFile(new File(newImportFile.getParent()), inputFileName + ".bad");
    assertTrue("Bad file " + badFileName + " does not exist.", exists);
}

From source file:org.rhq.plugins.postgres.PostgresServerComponent.java

/**
 * Get data about the database server. Currently we have two categories:
 * <ul>
 * <li>Database.* are metrics that are obtained from the database server itself</li>
 * <li>Process.* are metrics obtained from the native system.</li>
 * </ul>
 *
 * @param  report  the report where all collected measurement data will be added
 * @param  metrics the schedule of what needs to be collected when
 *
 */
public void getValues(MeasurementReport report, Set<MeasurementScheduleRequest> metrics) {

    for (MeasurementScheduleRequest request : metrics) {
        String property = request.getName();
        if (property.startsWith("Process.")) {
            if (aggregateProcessInfo != null) {
                aggregateProcessInfo.refresh();

                //report.addData(new MeasurementDataNumeric(request, getProcessProperty(request.getName())));

                Object val = lookupAttributeProperty(aggregateProcessInfo,
                        property.substring("Process.".length()));
                if (val != null && val instanceof Number) {
                    //                        aggregateProcessInfo.getAggregateMemory().Cpu().getTotal()
                    report.addData(new MeasurementDataNumeric(request, ((Number) val).doubleValue()));
                }
            }
        } else if (property.startsWith("Database")) {
            try {
                if (property.endsWith("startTime")) {
                    // db start time
                    ResultSet rs = getConnection().createStatement()
                            .executeQuery("SELECT pg_postmaster_start_time()");
                    try {
                        if (rs.next()) {
                            report.addData(new MeasurementDataTrait(request, rs.getTimestamp(1).toString()));
                        }
                    } finally {
                        rs.close();
                    }
                } else if (property.endsWith("backends")) {
                    // number of connected backends
                    ResultSet rs = getConnection().createStatement()
                            .executeQuery("select count(*) from pg_stat_activity");
                    try {
                        if (rs.next()) {
                            report.addData(new MeasurementDataNumeric(request, (double) rs.getLong(1)));
                        }
                    } finally {
                        rs.close();
                    }
                }

            } catch (SQLException e) {
                log.warn("Can not collect property: " + property + ": " + e.getLocalizedMessage());
            }
        }
    }
}

From source file:com.ibm.research.rdf.store.jena.impl.DB2QueryExecutionImpl.java

public void abort() {
    if (rs != null && rs.getResultSet() != null) {
        try {
            rs.getResultSet().getStatement().cancel();
        } catch (SQLException e) {
            log.error("Cannot abort transaction", e);
            throw new RdfStoreException(e.getLocalizedMessage(), e);
        } finally {
            try {
                DB2CloseObjects.close(rs.getResultSet(), rs.getResultSet().getStatement());
            } catch (SQLException e) {
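                // close failures here are secondary and deliberately ignored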

            }
        }
    }
}

From source file:no.sintef.jarfter.PostgresqlInteractor.java

public InputStream getFilesFile(String fileid) throws JarfterException {
    checkConnection();

    try {
        PreparedStatement st = conn.prepareStatement("SELECT file FROM files WHERE fileid = ?");
        st.setString(1, fileid);
        ResultSet rs = st.executeQuery();
        while (rs.next()) {
            InputStream fileStream = rs.getBinaryStream("file");
            return fileStream;
        }
        throw new JarfterException(JarfterException.Error.SQL_NO_DATABASE_ENTRY);
    } catch (SQLException sqle) {
        log("getFileFiles - got SQLException...");
        error(sqle);
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    }
}

From source file:no.sintef.jarfter.PostgresqlInteractor.java

public String getFilesFilename(String fileid) throws JarfterException {
    checkConnection();

    try {
        PreparedStatement st = conn.prepareStatement("SELECT filename FROM files WHERE fileid = ?");
        st.setString(1, fileid);
        ResultSet rs = st.executeQuery();
        while (rs.next()) {
            String filename = rs.getString("filename");
            return filename; // fileid should be unique.
        }
        throw new JarfterException(JarfterException.Error.SQL_NO_DATABASE_ENTRY,
                "No filename entry with fileid " + fileid);
    } catch (SQLException sqle) {
        log("getFileFilename - got SQLException...");
        error(sqle);
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    }
}

From source file:no.sintef.jarfter.PostgresqlInteractor.java

public void disconnect() {
    log("disconnect - Disconnecting from database!");
    if (conn != null) {
        try {
            conn.close();
        } catch (SQLException e) {
            log("disconnect - conn.close() threw SQLException");
            error(e);
            throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, e.getLocalizedMessage());
        }
    }
}

From source file:no.sintef.jarfter.PostgresqlInteractor.java

public JsonObject selectAllTransfomations(String filter_uri) throws JarfterException {
    checkConnection();

    String table = "transformations";
    String uri = "uri";
    String name = "name";
    String metadata = "metadata";
    String clojure = "clojure";
    JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
    try {
        PreparedStatement st = conn.prepareStatement("SELECT " + uri + ", " + name + ", " + metadata + ", "
                + clojure + " FROM transformations WHERE uri ~ ?;");
        if (filter_uri == null) {
            st.setString(1, ".*");
        } else {
            st.setString(1, filter_uri);
        }
        log("selectAllTransfomations - calling executeQuery");
        ResultSet rs = st.executeQuery();
        while (rs.next()) {
            JsonObjectBuilder job = Json.createObjectBuilder();
            job.add(uri, rs.getString(uri));
            job.add(name, rs.getString(name));
            job.add(metadata, rs.getString(metadata));
            job.add(clojure, rs.getString(clojure));
            jsonArrayBuilder.add(job.build());
        }

        rs.close();
        st.close();
    } catch (SQLException sqle) {
        log("selectAllTransformations - got SQLException");
        error(sqle);
        throw new JarfterException(JarfterException.Error.SQL_UNKNOWN_ERROR, sqle.getLocalizedMessage());
    }
    return Json.createObjectBuilder().add(table, jsonArrayBuilder.build()).build();
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateMetadataHelper.java

public Object doInPreparedStatement(PreparedStatement ps) throws SQLException {
    // batch-insert each metadata instance belonging to this meta-model

    Map<String, String> mAttrs = metaModel.getMAttrs();
    boolean hasChildMetaModel = metaModel.isHasChildMetaModel();

    // the metadata instances to persist
    List<AbstractMetadata> metadatas = metaModel.getMetadatas();
    int size = metadatas.size();
    String code = "";
    String metaModelCode = "";
    MMMetadata parentMetadata = null;
    String logMsg = "";
    try {
        for (int i = 0; i < size; i++) {

            MMMetadata metadata = (MMMetadata) metadatas.get(i);
            if (metadata.isHasExist()) {
                // already persisted; skip it
                continue;
            }

            parentMetadata = metadata.getParentMetadata();
            if (parentMetadata == null) {
                String error = new StringBuilder("?:").append(metadata.getCode())
                        .append(" ,??!!").toString();
                log.error(error);
                throw new SQLException(error);
            }
            String metadataNamespace = genNamespace(parentMetadata, metadata.getId(), hasChildMetaModel);

            // metadata ID
            ps.setString(1, metadata.getId());
            code = metadata.getCode();
            // metadata code
            ps.setString(2, code);
            // metadata name (fall back to the code when empty)
            ps.setString(3,
                    (metadata.getName() == null || metadata.getName().equals("")) ? code : metadata.getName());
            // meta-model (classifier) code
            metaModelCode = metaModel.getCode();
            ps.setString(4, metaModelCode);

            // namespace ID and parent metadata ID
            ps.setString(5, metadataNamespace);
            ps.setString(6, parentMetadata.getId());
            // START_TIME: the global extraction timestamp
            ps.setLong(7, this.getGlobalTime());

            int index = setAttrs(ps, metadata, mAttrs);

            setPs(ps, metadata, index + 7);

            if (log.isDebugEnabled()) {
                log.debug(new StringBuilder().append(":parent_id:").append(parentMetadata.getId())
                        .append(",parent_code:").append(parentMetadata.getCode()).append(",instance_code:")
                        .append(code).append(",classifier_id:").append(metaModelCode).toString());
            }
            ps.addBatch();
            // clear parameters for the next batch row
            ps.clearParameters();

            if (++super.count % super.batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }
        }

        if (super.count % super.batchSize != 0) {
            ps.executeBatch();
            ps.clearBatch();

        }
    } catch (SQLException e) {
        logMsg = new StringBuilder().append("?,?:parent_id:")
                .append(parentMetadata.getId()).append(",parent_code:").append(parentMetadata.getCode())
                .append(",instance_code:").append(code).append(",classifier_id:").append(metaModelCode)
                .append("  ?:").append(e.getLocalizedMessage()).toString();
        log.error(logMsg);
        AdapterExtractorContext.addExtractorLog(ExtractorLogLevel.ERROR, logMsg);
        throw e;
    }
    return null;
    // test for callback
    // throw new SQLException();
}

From source file:org.pentaho.platform.dataaccess.datasource.wizard.service.impl.DatasourceServiceImpl.java

public boolean testDataSourceConnection(String connectionName) throws DatasourceServiceException {
    if (!hasDataAccessPermission()) {
        logger.error(
                Messages.getInstance().getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED"));//$NON-NLS-1$
        throw new DatasourceServiceException(
                Messages.getInstance().getErrorString("DatasourceServiceImpl.ERROR_0001_PERMISSION_DENIED"));//$NON-NLS-1$
    }
    Connection conn = null;
    try {
        conn = DatasourceServiceHelper.getDataSourceConnection(connectionName,
                PentahoSessionHolder.getSession());
        if (conn == null) {
            logger.error(Messages.getInstance().getErrorString(
                    "DatasourceServiceImpl.ERROR_0018_UNABLE_TO_TEST_CONNECTION", connectionName));//$NON-NLS-1$
            throw new DatasourceServiceException(Messages.getInstance().getErrorString(
                    "DatasourceServiceImpl.ERROR_0018_UNABLE_TO_TEST_CONNECTION", connectionName)); //$NON-NLS-1$
        }
    } finally {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            logger.error(Messages.getInstance().getErrorString(
                    "DatasourceServiceImpl.ERROR_0018_UNABLE_TO_TEST_CONNECTION", connectionName, //$NON-NLS-1$
                    e.getLocalizedMessage()), e);
            throw new DatasourceServiceException(Messages.getInstance().getErrorString(
                    "DatasourceServiceImpl.ERROR_0018_UNABLE_TO_TEST_CONNECTION", connectionName, //$NON-NLS-1$
                    e.getLocalizedMessage()), e);
        }
    }
    return true;
}