Example usage for java.sql ResultSetMetaData getColumnLabel

List of usage examples for java.sql ResultSetMetaData getColumnLabel

Introduction

This page lists example usages of java.sql ResultSetMetaData getColumnLabel.

Prototype

String getColumnLabel(int column) throws SQLException;

Document

Gets the designated column's suggested title for use in printouts and displays.
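
A minimal sketch of typical usage, assuming an open java.sql.Connection named conn and a hypothetical table users:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnLabelExample {

    // Print the label of every column in the result set.
    // getColumnLabel returns the alias given with "AS"; if no alias was
    // specified, it is the same value as getColumnName.
    static void printLabels(Connection conn) throws SQLException {
        try (Statement st = conn.createStatement();
                ResultSet rs = st.executeQuery("SELECT id, name AS full_name FROM users")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                // JDBC column indexes start at 1
                System.out.println(meta.getColumnLabel(i));
            }
        }
    }
}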

Usage

From source file:org.dspace.storage.rdbms.MockDatabaseManager.java

/**
 * Return the names of all the columns of the ResultSet.
 *
 * @param meta
 *            The ResultSetMetaData
 * @return The names of all the columns of the given table, as a List. Each
 *         element of the list is a String.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static List<String> getColumnNames(ResultSetMetaData meta) throws SQLException {
    List<String> results = new ArrayList<String>();
    int columns = meta.getColumnCount();

    for (int i = 0; i < columns; i++) {
        results.add(meta.getColumnLabel(i + 1));
    }

    return results;
}

From source file:at.ac.univie.isc.asio.engine.sql.WebRowSetWriter.java

private void columnDefinition(final int idx, final ResultSetMetaData context)
        throws XMLStreamException, SQLException {
    // @formatter:off
    xml.writeStartElement(WRS, "column-definition");
    tag("column-index", idx);
    tag("auto-increment", context.isAutoIncrement(idx));
    tag("case-sensitive", context.isCaseSensitive(idx));
    tag("currency", context.isCurrency(idx));
    tag("nullable", context.isNullable(idx));
    tag("signed", context.isSigned(idx));
    tag("searchable", context.isSearchable(idx));
    tag("column-display-size", context.getColumnDisplaySize(idx));
    tag("column-label", context.getColumnLabel(idx));
    tag("column-name", context.getColumnName(idx));
    tag("schema-name", context.getSchemaName(idx));
    tag("column-precision", context.getPrecision(idx));
    tag("column-scale", context.getScale(idx));
    tag("table-name", context.getTableName(idx));
    tag("catalog-name", context.getCatalogName(idx));
    tag("column-type", context.getColumnType(idx));
    tag("column-type-name", context.getColumnTypeName(idx));
    xml.writeEndElement();
    // @formatter:on
}

From source file:org.wandora.modules.GenericDatabaseInterface.java

public Rows makeRows(ResultSet rs) throws SQLException {
    Rows ret = new Rows();
    ResultSetMetaData md = rs.getMetaData();

    int columnCount = md.getColumnCount();
    String[] columns = new String[columnCount];
    for (int i = 0; i < columnCount; i++) {
        columns[i] = md.getColumnLabel(i + 1).toLowerCase();
    }

    while (rs.next()) {
        Row row = new Row();
        for (int i = 0; i < columnCount; i++) {
            Object o = rs.getObject(i + 1);
            row.put(columns[i], o);
        }
        ret.add(row);
    }

    return ret;
}

From source file:org.liveSense.api.beanprocessors.DbStandardBeanProcessor.java

/**
 * The positions in the returned array represent column numbers.  The 
 * values stored at each position represent the index in the 
 * <code>PropertyDescriptor[]</code> for the bean property that matches 
 * the column name.  If no bean property was found for a column, the 
 * position is set to <code>PROPERTY_NOT_FOUND</code>.
 *
 * @param rsmd The <code>ResultSetMetaData</code> containing column 
 * information.
 * 
 * @param props The bean property descriptors.
 * 
 * @param type  The bean type. It's required for annotations
 * 
 * @throws SQLException if a database access error occurs
 *
 * @return An int[] with column index to property index mappings.  The 0th 
 * element is meaningless because JDBC column indexing starts at 1.
 */
protected int[] mapColumnsToProperties(ResultSetMetaData rsmd, PropertyDescriptor[] props, Class<?> type)
        throws SQLException {

    int cols = rsmd.getColumnCount();
    int columnToProperty[] = new int[cols + 1];
    Arrays.fill(columnToProperty, PROPERTY_NOT_FOUND);

    // The original mapping code
    for (int col = 1; col <= cols; col++) {
        String columnName = rsmd.getColumnLabel(col);
        if (null == columnName || 0 == columnName.length()) {
            columnName = rsmd.getColumnName(col);
        }

        // Process the column name. In this case it's simple, because
        // we just remove underscores.
        String originalRsName = columnName;
        columnName = columnName.replaceAll("_", "");

        for (int i = 0; i < props.length; i++) {

            // First, try to determine the column name from annotations
            Annotation[] annotations = AnnotationHelper.findFieldAnnotation(type, props[i].getName());
            if (annotations != null) {
                for (int j = 0; j < annotations.length; j++) {
                    if (annotations[j] instanceof Column) {
                        Column cAn = (Column) annotations[j];
                        if (cAn.name().equalsIgnoreCase(originalRsName)) {
                            columnToProperty[col] = i;
                        }
                    }
                }
            }

            if (columnName.equalsIgnoreCase(props[i].getName())) {
                // If already mapped via annotation, do nothing
                if (columnToProperty[col] == PROPERTY_NOT_FOUND)
                    columnToProperty[col] = i;
                break;
            }
        }
    }

    return columnToProperty;
}

From source file:kenh.xscript.database.beans.ResultSetBean.java

/**
 * Use a result set to initialize a bean.
 * 
 * @param rs
 * @param includeFieldName
 * @throws SQLException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
public ResultSetBean(ResultSet rs, boolean includeFieldName)
        throws SQLException, IllegalAccessException, InstantiationException {
    include_field_name = includeFieldName;

    LazyDynaClass beanClass = new LazyDynaClass();

    ResultSetMetaData m = rs.getMetaData();
    for (int i = 1; i <= m.getColumnCount(); i++) {
        Column c = new Column();

        try {
            c.catalogName = m.getCatalogName(i);
        } catch (SQLException e) {
        }
        try {
            c.className = m.getColumnClassName(i);
        } catch (SQLException e) {
        }
        try {
            c.displaySize = m.getColumnDisplaySize(i);
        } catch (SQLException e) {
        }
        try {
            c.label = m.getColumnLabel(i);
        } catch (SQLException e) {
        }
        try {
            c.name = m.getColumnName(i);
        } catch (SQLException e) {
        }
        try {
            c.type = m.getColumnType(i);
        } catch (SQLException e) {
        }
        try {
            c.typeName = m.getColumnTypeName(i);
        } catch (SQLException e) {
        }
        try {
            c.precision = m.getPrecision(i);
        } catch (SQLException e) {
        }
        try {
            c.scale = m.getScale(i);
        } catch (SQLException e) {
        }
        try {
            c.schemaName = m.getSchemaName(i);
        } catch (SQLException e) {
        }
        try {
            c.tableName = m.getTableName(i);
        } catch (SQLException e) {
        }

        beanClass.add(m.getColumnLabel(i).toLowerCase());
        beanClass.add("" + i);

        cols.add(c);
    }

    DynaBean colBean = beanClass.newInstance();
    int i = 1;
    for (Column col : cols) {
        String field = col.getLabel().toLowerCase();
        colBean.set(field, col.getLabel());
        colBean.set("" + i, col.getLabel());
        i++;
    }

    if (include_field_name)
        rows.add(colBean);

    while (rs.next()) {
        DynaBean bean = beanClass.newInstance();
        i = 1;
        for (Column c : cols) {
            String field = c.getLabel().toLowerCase();
            Object obj = rs.getObject(field);
            bean.set(field, obj);
            bean.set("" + i, obj);
            i++;
        }
        rows.add(bean);
    }

}

From source file:com.sangupta.fileanalysis.db.DBResultViewer.java

/**
 * View results of a {@link ResultSet}.
 * 
 * @param resultSet
 * @throws SQLException 
 */
public void viewResult(ResultSet resultSet) throws SQLException {
    if (resultSet == null) {
        // nothing to do
        return;
    }

    // collect the meta
    ResultSetMetaData meta = resultSet.getMetaData();

    final int numColumns = meta.getColumnCount();
    final int[] displaySizes = new int[numColumns + 1];
    final int[] colType = new int[numColumns + 1];

    for (int index = 1; index <= numColumns; index++) {
        colType[index] = meta.getColumnType(index);
        displaySizes[index] = getColumnSize(meta.getTableName(index), meta.getColumnName(index),
                colType[index]);
    }

    // display the header row
    for (int index = 1; index <= numColumns; index++) {
        center(meta.getColumnLabel(index), displaySizes[index]);
    }
    System.out.println("|");
    for (int index = 1; index <= numColumns; index++) {
        System.out.print("+" + StringUtils.repeat('-', displaySizes[index] + 2));
    }
    System.out.println("+");

    // start iterating over the result set
    int rowsDisplayed = 0;
    int numRecords = 0;
    while (resultSet.next()) {
        // read and display the value
        rowsDisplayed++;
        numRecords++;

        for (int index = 1; index <= numColumns; index++) {
            switch (colType[index]) {
            case Types.DECIMAL:
            case Types.DOUBLE:
            case Types.REAL:
                format(resultSet.getDouble(index), displaySizes[index]);
                continue;

            case Types.INTEGER:
            case Types.SMALLINT:
                format(resultSet.getInt(index), displaySizes[index]);
                continue;

            case Types.VARCHAR:
                format(resultSet.getString(index), displaySizes[index], false);
                continue;

            case Types.TIMESTAMP:
                format(resultSet.getTimestamp(index), displaySizes[index]);
                continue;

            case Types.BIGINT:
                format(resultSet.getBigDecimal(index), displaySizes[index]);
                continue;
            }
        }

        // terminator for row and new line
        System.out.println("|");

        // check for rows displayed
        if (rowsDisplayed == 20) {
            // ask the user if more data needs to be displayed
            String cont = ConsoleUtils.readLine("Type \"it\" for more: ", true);
            if (!"it".equalsIgnoreCase(cont)) {
                break;
            }

            // continue;
            rowsDisplayed = 0;
            continue;
        }
    }

    System.out.println("\nTotal number of records found: " + numRecords);
}

From source file:com.baifendian.swordfish.execserver.engine.hive.HiveSqlExec.java

/**
 * Executes the given SQL statements: the create-function statements are run first,
 * then each SQL statement is executed and its result is reported through the callback.
 *
 * @param createFuncs statements that create functions, executed before the queries
 * @param sqls the SQL statements to execute
 * @param isContinue whether to continue with the remaining statements when one fails
 * @param resultCallback callback invoked with the result of each statement
 * @param queryLimit maximum number of rows returned by a query
 * @param remainTime remaining time budget; execution is skipped if it is not positive
 */
public boolean execute(List<String> createFuncs, List<String> sqls, boolean isContinue,
        ResultCallback resultCallback, Integer queryLimit, int remainTime) {

    // no time remaining, abort
    if (remainTime <= 0) {
        return false;
    }

    // fall back to the default query limit if none was given
    queryLimit = (queryLimit != null) ? queryLimit : defaultQueryLimit;

    HiveConnection hiveConnection = null;
    Statement sta = null;
    Thread logThread = null;

    // obtain the HiveServer2 connection information
    HiveService2ConnectionInfo hiveService2ConnectionInfo = hiveUtil.getHiveService2ConnectionInfo(userName);

    logger.info("execution connection information:{}", hiveService2ConnectionInfo);

    HiveService2Client hiveService2Client = hiveUtil.getHiveService2Client();

    try {
        try {
            hiveConnection = hiveService2Client.borrowClient(hiveService2ConnectionInfo);

            sta = hiveConnection.createStatement();
            //        sta.setQueryTimeout(remainTime);

            // start a daemon thread that tails the JDBC logs
            logThread = new Thread(new JdbcLogRunnable(sta));
            logThread.setDaemon(true);
            logThread.start();

            // set queue
            if (queueSQL != null) {
                logger.info("hive queue : {}", queueSQL);
                sta.execute(queueSQL);
            }

            // create the user-defined functions
            if (createFuncs != null) {
                for (String createFunc : createFuncs) {
                    logger.info("hive create function sql: {}", createFunc);
                    sta.execute(createFunc);
                }
            }
        } catch (Exception e) {
            logger.error("execute query exception", e);

            // connection or setup failed: mark all statements as failed
            handlerResults(0, sqls, FlowStatus.FAILED, resultCallback);

            return false;
        }

        // execute each SQL statement in order
        for (int index = 0; index < sqls.size(); ++index) {
            String sql = sqls.get(index);

            Date startTime = new Date();

            logger.info("hive execute sql: {}", sql);

            ExecResult execResult = new ExecResult();
            execResult.setIndex(index);
            execResult.setStm(sql);

            try {
                // for query-like and show-like statements, fetch the result set
                if (HiveUtil.isTokQuery(sql) || HiveUtil.isLikeShowStm(sql)) {
                    sta.setMaxRows(queryLimit);
                    ResultSet res = sta.executeQuery(sql);

                    ResultSetMetaData resultSetMetaData = res.getMetaData();
                    int count = resultSetMetaData.getColumnCount();

                    List<String> colums = new ArrayList<>();
                    for (int i = 1; i <= count; i++) {
                        colums.add(resultSetMetaData.getColumnLabel(
                                i)/*parseColumnName(resultSetMetaData.getColumnLabel(i), colums)*/);
                    }

                    execResult.setTitles(colums);

                    List<List<String>> datas = new ArrayList<>();

                    // more than one column, or a real query: collect each row as a list of strings
                    if (count > 1 || HiveUtil.isTokQuery(sql)) {
                        while (res.next()) {
                            List<String> values = new ArrayList<>();
                            for (int i = 1; i <= count; ++i) {
                                values.add(res.getString(i));
                            }

                            datas.add(values);
                        }
                    } else {
                        StringBuffer buffer = new StringBuffer();

                        while (res.next()) {
                            buffer.append(res.getString(1));
                            buffer.append("\n");
                        }

                        List<String> values = new ArrayList<>();
                        values.add(buffer.toString().trim());

                        datas.add(values);
                    }

                    execResult.setValues(datas);
                } else {
                    sta.execute(sql);
                }

                // mark the statement as successful
                execResult.setStatus(FlowStatus.SUCCESS);

                // report the result through the callback
                if (resultCallback != null) {
                    Date endTime = new Date();
                    resultCallback.handleResult(execResult, startTime, endTime);
                }
            } catch (SQLTimeoutException e) {
                // the SQL statement timed out
                logger.error("executeQuery timeout exception", e);

                handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                return false;
            } catch (DaoSemanticException | HiveSQLException e) {
                // semantic or Hive SQL error
                logger.error("executeQuery exception", e);

                if (isContinue) {
                    handlerResult(index, sql, FlowStatus.FAILED, resultCallback);
                } else {
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }
            } catch (Exception e) {
                // TTransport error
                if (e.toString().contains("TTransportException")) {
                    logger.error("Get TTransportException return a client", e);
                    // the underlying client may be broken
                    //            hiveService2Client.invalidateObject(hiveService2ConnectionInfo, hiveConnection);
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }

                // socket error: clear the connection pool
                if (e.toString().contains("SocketException")) {
                    logger.error("SocketException clear pool", e);
                    hiveService2Client.clear();
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }

                logger.error("executeQuery exception", e);

                if (isContinue) {
                    handlerResult(index, sql, FlowStatus.FAILED, resultCallback);
                } else {
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }
            }
        }
    } finally {
        // close the statement
        try {
            if (sta != null) {
                sta.close();
            }
        } catch (Exception e) {
            logger.error("Catch an exception", e);
        }

        try {
            // release the connection
            if (hiveConnection != null) {
                // close the connection
                hiveConnection.close();

                // return the client to the pool
                hiveService2Client.returnClient(hiveService2ConnectionInfo, hiveConnection);
            }
        } catch (Exception e) {
            logger.error("Catch an exception", e);
        }

        // stop the log thread
        try {
            if (logThread != null) {
                logThread.interrupt();
                logThread.join(HiveUtil.DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT);
            }
        } catch (Exception e) {
            //        logger.error("Catch an exception", e);
        }
    }

    return true;
}

From source file:org.seasar.dbflute.logic.sql2entity.cmentity.DfCustomizeEntityMetaExtractor.java

public Map<String, DfColumnMeta> extractColumnMetaInfoMap(ResultSet rs, String sql,
        DfForcedJavaNativeProvider forcedJavaNativeProvider) throws SQLException {
    final Map<String, DfColumnMeta> columnMetaInfoMap = StringKeyMap.createAsFlexibleOrdered();
    final ResultSetMetaData md = rs.getMetaData();
    for (int i = 1; i <= md.getColumnCount(); i++) {
        final DfColumnMeta metaInfo = new DfColumnMeta();

        String sql2EntityRelatedTableName = null;
        try {
            sql2EntityRelatedTableName = md.getTableName(i);
        } catch (SQLException ignored) {
            // Because this table name is not required. This is for classification.
            String msg = "ResultSetMetaData.getTableName(" + i + ") threw the exception:";
            msg = msg + " " + ignored.getMessage();
            _log.info(msg);
        }
        metaInfo.setSql2EntityRelatedTableName(sql2EntityRelatedTableName);

        String columnName = md.getColumnLabel(i);
        final String relatedColumnName = md.getColumnName(i);
        metaInfo.setSql2EntityRelatedColumnName(relatedColumnName);
        if (columnName == null || columnName.trim().length() == 0) {
            columnName = relatedColumnName;
        }
        if (columnName == null || columnName.trim().length() == 0) {
            final String ln = ln();
            String msg = "The columnName is invalid: columnName=" + columnName + ln;
            msg = msg + "ResultSetMetaData returned invalid value." + ln;
            msg = msg + "sql=" + sql;
            throw new IllegalStateException(msg);
        }
        metaInfo.setColumnName(columnName);

        final int columnType = md.getColumnType(i);
        metaInfo.setJdbcDefValue(columnType);

        final String columnTypeName = md.getColumnTypeName(i);
        metaInfo.setDbTypeName(columnTypeName);

        int columnSize = md.getPrecision(i);
        if (!DfColumnExtractor.isColumnSizeValid(columnSize)) {
            // ex) sum(COLUMN)
            columnSize = md.getColumnDisplaySize(i);
        }
        metaInfo.setColumnSize(columnSize);

        final int scale = md.getScale(i);
        metaInfo.setDecimalDigits(scale);

        if (forcedJavaNativeProvider != null) {
            final String sql2entityForcedJavaNative = forcedJavaNativeProvider.provide(columnName);
            metaInfo.setSql2EntityForcedJavaNative(sql2entityForcedJavaNative);
        }

        // column comment is not set here (no comment on meta data)
        // if select column comment is specified, comment will be set later

        columnMetaInfoMap.put(columnName, metaInfo);
    }
    return columnMetaInfoMap;
}

From source file:org.orbisgis.orbisserver.baseserver.model.Session.java

/**
 * Returns the DatabaseContent object which contains the representation of the Database.
 * @return The DatabaseContent object.
 */
public DatabaseContent getDatabaseContent() {
    DatabaseContent dbContent = new DatabaseContent();
    try (Connection connection = ds.getConnection()) {
        for (String tableName : JDBCUtilities.getTableNames(connection.getMetaData(), null, null, null,
                new String[] { "TABLE", "LINKED TABLE", "VIEW", "EXTERNAL", "UIodfsghjmodfhjgodujhfg" })) {
            DatabaseTable dbTable = new DatabaseTable(TableLocation.parse(tableName));
            //Get the list of the columns of a table
            ResultSet rs1 = connection.createStatement()
                    .executeQuery(String.format("select * from %s limit 1", dbTable.getName()));
            ResultSetMetaData metaData = rs1.getMetaData();
            //If the column isn't a geometry, add it to the map
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                if (!metaData.getColumnTypeName(i).equalsIgnoreCase("GEOMETRY")) {
                    dbTable.addField(metaData.getColumnLabel(i), metaData.getColumnTypeName(i));
                }
            }
            //Once the non-geometric columns have been added, do the same for the geometric ones.
            Statement statement = connection.createStatement();
            String query = String.format("SELECT * FROM GEOMETRY_COLUMNS WHERE F_TABLE_NAME LIKE '%s';",
                    TableLocation.parse(dbTable.getName()).getTable());
            ResultSet rs = statement.executeQuery(query);
            while (rs.next()) {
                dbTable.addField(rs.getString(4), SFSUtilities.getGeometryTypeNameFromCode(rs.getInt(6)));
            }
            dbContent.addTable(dbTable);
        }
    } catch (SQLException e) {
        LOGGER.error("Unable to get the database information.\nCause : " + e.getMessage());
    }
    return dbContent;
}

From source file:org.sakaiproject.webservices.SakaiReport.java

protected Document toDocument(ResultSet rs) throws ParserConfigurationException, SQLException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = factory.newDocumentBuilder();
    Document doc = builder.newDocument();

    Element results = doc.createElement("Results");
    doc.appendChild(results);

    ResultSetMetaData rsmd = rs.getMetaData();
    int colCount = rsmd.getColumnCount();

    while (rs.next()) {
        Element row = doc.createElement("Row");
        results.appendChild(row);

        for (int i = 1; i <= colCount; i++) {
            String columnName = rsmd.getColumnLabel(i);
            Object value = null;
            try {
                value = getColumnValue(rs, rsmd.getColumnType(i), i);
                Element node = doc.createElement(columnName);
                node.appendChild(doc.createTextNode(stripInvalidXmlCharacters(value.toString())));
                row.appendChild(node);
            } catch (IOException e) {
                // probably shouldn't just ignore an issue...
                e.printStackTrace();
            }

        }
    }
    return doc;
}