Example usage for java.sql ResultSetMetaData getColumnName

Introduction

This page collects usage examples for java.sql.ResultSetMetaData.getColumnName.

Prototype

String getColumnName(int column) throws SQLException;

Document

Get the designated column's name.
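Before the full examples, here is a minimal self-contained sketch of the pattern they all share: obtain the ResultSetMetaData from a ResultSet and read each column's name by its 1-based index. The H2 in-memory URL and the query are illustrative placeholders, not part of any example below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnNamesExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder JDBC URL and query; substitute your own driver and database.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT 1 AS id, 2 AS total")) {
            ResultSetMetaData meta = rs.getMetaData();
            // JDBC columns are numbered from 1, not 0.
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                System.out.println(i + " -> " + meta.getColumnName(i));
            }
        }
    }
}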

Usage

From source file:fll.web.developer.QueryHandler.java

@SuppressFBWarnings(value = {
        "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE" }, justification = "Executing query from user")
@Override
protected void processRequest(final HttpServletRequest request, final HttpServletResponse response,
        final ServletContext application, final HttpSession session) throws IOException, ServletException {
    final List<String> columnNames = new LinkedList<String>();
    final List<Map<String, String>> data = new LinkedList<Map<String, String>>();
    String error = null;

    DataSource datasource = ApplicationAttributes.getDataSource(application);
    Statement stmt = null;
    ResultSet rs = null;
    Connection connection = null;
    try {
        connection = datasource.getConnection();
        final String query = request.getParameter(QUERY_PARAMETER);
        stmt = connection.createStatement();
        rs = stmt.executeQuery(query);

        ResultSetMetaData meta = rs.getMetaData();
        for (int columnNum = 1; columnNum <= meta.getColumnCount(); ++columnNum) {
            columnNames.add(meta.getColumnName(columnNum).toLowerCase());
        }
        while (rs.next()) {
            final Map<String, String> row = new HashMap<String, String>();
            for (final String columnName : columnNames) {
                final String value = rs.getString(columnName);
                row.put(columnName, value);
            }
            data.add(row);
        }

    } catch (final SQLException e) {
        error = e.getMessage();
        LOGGER.error("Exception doing developer query", e);
    } finally {
        SQLFunctions.close(rs);
        SQLFunctions.close(stmt);
        SQLFunctions.close(connection);
    }

    response.setContentType("application/json");
    response.setCharacterEncoding(Utilities.DEFAULT_CHARSET.name());

    final ResultData result = new ResultData(columnNames, data, error);
    final ObjectMapper jsonMapper = new ObjectMapper();
    final Writer writer = response.getWriter();

    jsonMapper.writeValue(writer, result);
}
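Note: the handler lowercases each column name and then passes it to rs.getString(columnName). This is safe because JDBC resolves column names case-insensitively, so the lowercased keys still match the result set's columns.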

From source file:net.mlw.vlh.adapter.jdbc.util.ResultSetMapGenerator.java

public ResultSetMapGenerator(ResultSet result, boolean useName, boolean lowerCase) throws SQLException {
    this.result = result;
    ResultSetMetaData metadata = result.getMetaData();

    int columnCount = metadata.getColumnCount();
    names = new String[columnCount];
    for (int i = 0; i < columnCount; i++) {
        names[i] = (useName) ? metadata.getColumnName(i + 1) : metadata.getColumnLabel(i + 1);
        if (names[i] == null || names[i].length() == 0) {
            names[i] = (useName) ? metadata.getColumnLabel(i + 1) : metadata.getColumnName(i + 1);
        }

        if (lowerCase) {
            names[i] = names[i].toLowerCase();
        }
    }
    LOGGER.debug(names);
}
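The constructor above falls back between getColumnName and getColumnLabel, which differ for aliased columns. A minimal sketch of the distinction, assuming a hypothetical person table with a first_name column (the exact value getColumnName returns for an aliased column is driver-dependent):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

class NameVsLabelDemo {
    // Table and column names here are hypothetical.
    static void show(Connection conn) throws SQLException {
        try (Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT first_name AS fn FROM person")) {
            ResultSetMetaData md = rs.getMetaData();
            // getColumnLabel returns the alias from the AS clause: "fn".
            System.out.println(md.getColumnLabel(1));
            // getColumnName returns the underlying column name, typically
            // "first_name", though some drivers return the label here as well.
            System.out.println(md.getColumnName(1));
        }
    }
}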

From source file:org.schedoscope.metascope.util.HiveQueryExecutor.java

@Transactional
public HiveQueryResult executeQuery(String databaseName, String tableName, String fields,
        Set<MetascopeField> parameters, Map<String, String> params) {
    List<List<String>> rows = new ArrayList<List<String>>();

    HiveServerConnection hiveConn = new HiveServerConnection(config);

    hiveConn.connect();

    if (hiveConn.getConnection() == null) {
        return new HiveQueryResult("Could not connect to HiveServer2");
    }

    String where = "";
    List<String> values = new ArrayList<String>();
    if (params != null) {
        for (Entry<String, String> param : params.entrySet()) {
            if (param.getKey().equals("fqdn") || param.getKey().equals("_csrf")) {
                continue;
            }
            if (!param.getValue().isEmpty()) {
                boolean parameterExists = false;
                for (MetascopeField parameter : parameters) {
                    if (parameter.getFieldName().equals(param.getKey())) {
                        parameterExists = true;
                    }
                }
                if (!parameterExists) {
                    hiveConn.close();
                    return new HiveQueryResult("Query not allowed");
                }

                if (!where.isEmpty()) {
                    where += " AND ";
                }
                where += param.getKey() + "=?";
                values.add(param.getValue());
            }
        }
    }

    String sql = " SELECT " + fields;
    for (MetascopeField parameter : parameters) {
        sql += "," + parameter.getFieldName();
    }
    sql += " FROM " + databaseName + "." + tableName;
    sql += where.isEmpty() ? "" : " WHERE " + where;
    sql += " LIMIT 10";

    List<String> header = new ArrayList<String>();
    try {
        PreparedStatement pstmt = hiveConn.getConnection().prepareStatement(sql);
        for (int i = 1; i <= values.size(); i++) {
            pstmt.setString(i, values.get(i - 1));
        }
        ResultSet rs = pstmt.executeQuery();
        ResultSetMetaData rsmd = rs.getMetaData();

        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            header.add(rsmd.getColumnName(i));
        }

        while (rs.next()) {
            List<String> row = new ArrayList<String>();
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                Object val = rs.getObject(i);
                String strVal = (val == null ? null : val.toString());
                row.add(strVal);
            }
            rows.add(row);
        }
    } catch (SQLException e) {
        LOG.error("Could not execute query", e);
        hiveConn.close();
        return new HiveQueryResult(e.getMessage());
    }

    hiveConn.close();
    return new HiveQueryResult(header, rows);
}

From source file:db.migration.V023__UpdateOrganisationToimipisteKoodi.java

public void migrate(JdbcTemplate jdbcTemplate) throws Exception {
    LOG.info("migrate()...");

    // Get all organisations
    List<Map> resultSet = jdbcTemplate.query("SELECT * FROM organisaatio o", new RowMapper<Map>() {
        @Override
        public Map mapRow(ResultSet rs, int rowNum) throws SQLException {
            Map r = new HashMap<String, Object>();

            ResultSetMetaData metadata = rs.getMetaData();
            for (int i = 1; i <= metadata.getColumnCount(); i++) {
                String cname = metadata.getColumnName(i);
                int ctype = metadata.getColumnType(i);

                switch (ctype) {
                case Types.VARCHAR:
                    r.put(cname, rs.getString(cname));
                    break;

                default:
                    break;
                }
            }

            LOG.debug("  read from db : org = {}", r);

            _organisations.put((String) r.get("oid"), r);
            return r;
        }
    });

    // Generate and update initial values for toimipistekoodis
    for (Map org : resultSet) {
        if (isToimipiste(org, jdbcTemplate)) {
            String tpKoodi = calculateToimipisteKoodi(org, jdbcTemplate);
            updateToimipisteKoodi(org, tpKoodi, jdbcTemplate);
        }
    }

    LOG.info("  Processed {} organisations, updated {} Opetuspistes", _organisations.size(), _numUpdated);

    LOG.info("migrate()... done.");
}

From source file:cc.osint.graphd.db.SQLDB.java

private JSONObject jsonizeResultSet(ResultSet rs) throws Exception {
    List<JSONObject> results = new ArrayList<JSONObject>();
    ResultSetMetaData md = rs.getMetaData();
    int colmax = md.getColumnCount();
    while (rs.next()) {
        JSONObject result = new JSONObject();
        for (int i = 1; i <= colmax; i++) {
            String colName = md.getColumnName(i).toLowerCase();
            String colClassName = md.getColumnClassName(i);
            String colType = md.getColumnTypeName(i);
            Object obj = rs.getObject(i);
            result.put(colName, obj);
            // Plain concatenation tolerates SQL NULL (obj == null), unlike obj.toString().
            log.info(colName + ": " + colClassName + ": " + colType + ": " + obj);
        }
        results.add(result);
    }
    JSONObject result = new JSONObject();
    result.put("results", results);
    return result;
}

From source file:com.aw.core.util.QTTbBnMapperBasicRowProcessor.java

private void initForBean(ResultSet rs) throws SQLException {
    if (!beanMapper.isMetatadaBuilt()) {
        ResultSetMetaData metaData = rs.getMetaData();
        List<String> colNames = new ArrayList<String>();
        for (int i = 0; i < metaData.getColumnCount(); i++) {
            String colName = metaData.getColumnName(i + 1);
            colNames.add(colName);
        }

        beanMapper.buildMetadata(colNames);
        dataRowProvider = new BeanDataRowProvider(rs);
    }
}

From source file:org.apache.zeppelin.phoenix.PhoenixInterpreter.java

private InterpreterResult executeSql(String sql) {
    try {
        if (exceptionOnConnect != null) {
            return new InterpreterResult(Code.ERROR, exceptionOnConnect.getMessage());
        }

        currentStatement = getJdbcConnection().createStatement();

        boolean isExplain = StringUtils.containsIgnoreCase(sql, EXPLAIN_PREDICATE);
        StringBuilder msg = (isExplain) ? new StringBuilder() : new StringBuilder(TABLE_MAGIC_TAG);

        ResultSet res = null;
        try {
            boolean hasResult = currentStatement.execute(sql);
            if (hasResult) { //If query had results
                res = currentStatement.getResultSet();
                //Append column names
                ResultSetMetaData md = res.getMetaData();
                String row = clean(isExplain, md.getColumnName(1));
                for (int i = 2; i < md.getColumnCount() + 1; i++)
                    row += TAB + clean(isExplain, md.getColumnName(i));
                msg.append(row + NEWLINE);

                //Append rows
                int rowCount = 0;
                while (res.next() && rowCount < getMaxResult()) {
                    row = clean(isExplain, res.getString(1));
                    for (int i = 2; i < md.getColumnCount() + 1; i++)
                        row += TAB + clean(isExplain, res.getString(i));
                    msg.append(row + NEWLINE);
                    rowCount++;
                }
            } else { // May have been upsert or DDL
                msg.append(UPDATE_HEADER + NEWLINE + "Rows affected: " + currentStatement.getUpdateCount()
                        + NEWLINE);
            }

        } finally {
            try {
                if (res != null)
                    res.close();
                getJdbcConnection().commit();
                currentStatement.close();
            } finally {
                currentStatement = null;
            }
        }

        return new InterpreterResult(Code.SUCCESS, msg.toString());
    } catch (SQLException ex) {
        logger.error("Can not run " + sql, ex);
        return new InterpreterResult(Code.ERROR, ex.getMessage());
    }
}

From source file:org.schedoscope.metascope.service.MetascopeDataDistributionService.java

@Async("background")
public void calculateDistribution(MetascopeTable table) {
    runningJobs.put(table.getFqdn(), true);

    String sql = DataDistributionSqlUtil.buildSql(table);

    HiveServerConnection hiveConn = new HiveServerConnection(config);

    hiveConn.connect();

    if (hiveConn.getConnection() == null) {
        runningJobs.put(table.getFqdn(), false);
        return;
    }

    try {
        Statement stmt = hiveConn.getConnection().createStatement();
        ResultSet rs = stmt.executeQuery(sql);

        ResultSetMetaData rsmd = rs.getMetaData();

        List<String> columnNames = new ArrayList<>();
        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            columnNames.add(rsmd.getColumnName(i));
        }

        long ts = System.currentTimeMillis();
        if (rs.next()) {
            for (String columnName : columnNames) {
                MetascopeDataDistribution mdd = new MetascopeDataDistribution();
                mdd.setId(table.getFqdn() + "." + columnName);
                mdd.setFqdn(table.getFqdn());
                mdd.setMetric(columnName);
                mdd.setValue(rs.getString(columnName));
                metascopeDataDistributionRepository.save(mdd);
            }
        }
    } catch (SQLException e) {
        hiveConn.close();
        runningJobs.put(table.getFqdn(), false);
        LOG.error("Could not execute hive query", e);
    }

    hiveConn.close();

    runningJobs.put(table.getFqdn(), false);
}

From source file:org.aludratest.cloud.web.report.SqlBean.java

public synchronized void execute() {
    resultColumns = null;
    resultRows = null;

    if (sql == null) {
        return;
    }

    sql = sql.trim();
    while (sql.endsWith(";")) {
        sql = sql.substring(0, sql.length() - 1);
    }

    // easy basic check
    if (!sql.toLowerCase(Locale.US).startsWith("select ")) {
        FacesContext.getCurrentInstance().addMessage(null,
                JSFUtil.createErrorMessage("Only SELECT statements are allowed"));
        return;
    }

    try {
        RowSet rs = CloudManagerApplicationHolder.getInstance().getDatabase().populateQuery(sql);

        resultColumns = new ArrayList<String>();
        ResultSetMetaData meta = rs.getMetaData();
        for (int i = 1; i <= meta.getColumnCount(); i++) {
            resultColumns.add(meta.getColumnName(i));
        }

        rs.beforeFirst();
        resultRows = new ArrayList<Map<String, String>>();

        while (rs.next()) {
            Map<String, String> row = new HashMap<String, String>();

            // TODO nicer formatting etc.
            for (String col : resultColumns) {
                row.put(col, rs.getString(col));
            }

            resultRows.add(row);
        }
    } catch (SQLException se) {
        FacesContext.getCurrentInstance().addMessage(null,
                JSFUtil.createErrorMessage("SQLException: " + se.getMessage()));
    }
}

From source file:org.wte4j.impl.service.SimpleDbViewModelService.java

@Override
public WteDataModel createModel(Template<?> template, Object input) {
    String viewName = template.getProperties().get(VIEW_NAME);
    String pkColumnName = template.getProperties().get(PRIMARY_KEY_COLUMN_NAME);
    Integer pk = (Integer) input;
    Map<String, Object> dataMap = new HashMap<String, Object>(); //TODO PreparedStatement!
    String query = "select * from " + viewName + " where " + pkColumnName + "=" + pk;
    try {
        Connection connection = null;
        Statement statement = null;
        try {
            connection = ds.getConnection();
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            ResultSetMetaData metaData = rs.getMetaData();
            while (rs.next()) {
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    String columnName = metaData.getColumnName(i).toLowerCase();
                    dataMap.put(columnName, rs.getObject(i));
                }
            }

        } finally {
            if (statement != null) {
                statement.close();
            }
            if (connection != null) {
                connection.close();
            }
        }
    } catch (SQLException e) {
        throw new WteException("error in createModel (" + viewName + ", " + pkColumnName + ", " + pk + ")", e);
    }
    return new WteMapModel(dataMap);
}