Example usage for java.sql ResultSet getMetaData

List of usage examples for java.sql ResultSet getMetaData

Introduction

On this page you can find example usage for java.sql ResultSet getMetaData.

Prototype

ResultSetMetaData getMetaData() throws SQLException;

Source Link

Document

Retrieves the number, types and properties of this ResultSet object's columns.

Usage

From source file:com.vangent.hieos.logbrowser.util.TableModel.java

/**
 * Builds the table model by running the given SQL request and loading every
 * row into {@code dataVector}, with column names in {@code headerVector}.
 * Cell values are post-processed via {@code getFormattedData}.
 *
 * @param sqlRequest      the SQL query to execute
 * @param fieldsAndFormats map of column names to their display formats
 * @param c               an open JDBC connection (not closed by this method)
 * @throws SQLException if the query or metadata access fails
 */
public TableModel(String sqlRequest, Map fieldsAndFormats, Connection c) throws SQLException {
    this.fieldsAndFormats = fieldsAndFormats;
    log.debug("TABLE_MODEL_SYSLOG: database connection created\n");

    dataVector = new Vector<Vector<Object>>();
    headerVector = new Vector<String>();

    // try/finally guarantees the Statement and ResultSet are released even
    // when an SQLException is thrown mid-load (previously they leaked).
    Statement statement = c.createStatement();
    try {
        log.debug("TABLE_MODEL_SYSLOG: statement created\n");
        ResultSet statementResult = statement.executeQuery(sqlRequest);
        try {
            log.debug("TABLE_MODEL_SYSLOG: Query executed\n");
            log.debug("<--" + new GregorianCalendar().getTime() + " TableModel close Database \n");

            ResultSetMetaData metaData = statementResult.getMetaData();
            int columnCount = metaData.getColumnCount();

            log.debug("TABLE_MODEL_SYSLOG: colomn count : " + columnCount + "\n");
            log.debug("TABLE_MODEL_SYSLOG: Table--------------------------------------");
            for (int i = 0; i < columnCount; i++) {
                headerVector.add(metaData.getColumnName(i + 1));
                log.debug(metaData.getColumnName(i + 1) + "\t");
            }

            while (statementResult.next()) {
                Vector<Object> tmp = new Vector<Object>(columnCount);
                for (int j = 0; j < columnCount; j++) {
                    String columnName = getColumnName(j);
                    Object columnData = statementResult.getObject(columnName);
                    // apply per-column formatting before storing the cell
                    columnData = getFormattedData(columnName, columnData);
                    tmp.add(columnData);
                    log.debug(columnData + "\t");
                }
                log.debug("\n");
                dataVector.add(tmp);
            }
        } finally {
            statementResult.close();
        }
    } finally {
        statement.close();
    }
}

From source file:com.ewcms.plugin.report.generate.factory.ChartFactory.java

/**
 * Builds a JFreeChart category dataset by executing the report's chart SQL
 * against either the default data source or the report's own data source.
 *
 * The query must return either 2 columns (value + category key) or 3 columns
 * (value + row key + column key). Column order is probed by attempting the
 * numeric conversion and falling back on failure.
 *
 * @param report the chart report holding the data source and SQL
 * @param pageParams request parameters substituted into the SQL
 * @return the populated DefaultCategoryDataset
 * @throws BaseException on SQL, conversion or class-loading failure, or when
 *         the query returns a column count other than 2 or 3
 */
@SuppressWarnings("rawtypes")
private DefaultCategoryDataset buildDataset(ChartReport report, Map<String, String> pageParams)
        throws BaseException {
    DefaultCategoryDataset dataset = new DefaultCategoryDataset();
    Connection con = null;
    EwcmsDataSourceServiceable service = null;
    Statement st = null;
    ResultSet rs = null;
    int colCount;
    try {
        BaseDS dataSet = report.getBaseDS();
        String executableSQL = report.getChartSql();

        executableSQL = replaceParam(pageParams, report.getParameters(), executableSQL, true);

        if (dataSet == null) {
            con = dataSource.getConnection();
        } else {
            DataSourceFactoryable factory = (DataSourceFactoryable) getAlqcDataSourceFactory()
                    .getBean(dataSet.getClass());
            service = factory.createService(dataSet);
            con = service.openConnection();
        }

        st = con.createStatement();
        rs = st.executeQuery(executableSQL);

        colCount = rs.getMetaData().getColumnCount();

        if (colCount == 2) {
            while (rs.next()) {
                try {
                    // Probe column order: try (value, category), fall back to
                    // (category, value) when column 1 is not numeric.
                    try {
                        dataset.addValue(rs.getDouble(1), "", (Comparable) rs.getObject(2));
                    } catch (Exception e) {
                        dataset.addValue(rs.getDouble(2), "", (Comparable) rs.getObject(1));
                    }
                } catch (Exception e) {
                    logger.error("SQL?", e);
                    throw new BaseException("SQL?", "SQL?");
                }
            }
        } else if (colCount == 3) {
            while (rs.next()) {
                try {
                    // Probe which of the three columns holds the numeric value.
                    try {
                        dataset.addValue(rs.getDouble(3), (Comparable) rs.getObject(1),
                                (Comparable) rs.getObject(2));
                    } catch (Exception e) {
                        try {
                            dataset.addValue(rs.getDouble(2), (Comparable) rs.getObject(1),
                                    (Comparable) rs.getObject(3));
                        } catch (Exception ex) {
                            dataset.addValue(rs.getDouble(1), (Comparable) rs.getObject(2),
                                    (Comparable) rs.getObject(3));
                        }
                    }
                } catch (Exception e) {
                    logger.error("SQL?", e);
                    throw new BaseException("SQL?", "SQL?");
                }
            }
        } else {
            logger.error("SQL??12");
            throw new BaseException("?12", "?12");
        }
    } catch (SQLException e) {
        throw new BaseException(e.toString(), e.toString());
    } catch (ConvertException e) {
        throw new BaseException(e.toString(), e.toString());
    } catch (ClassNotFoundException e) {
        throw new BaseException(e.toString(), e.toString());
    } finally {
        // Close JDBC resources in reverse acquisition order. Previously the
        // statement was closed before the result set (closing a Statement
        // already closes its ResultSet) and neither was closed on error paths.
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        if (st != null) {
            try {
                st.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
        if (service != null) {
            service.closeConnection();
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                // ignore close failure
            }
            con = null;
        }
    }
    return dataset;
}

From source file:org.syncope.core.util.ImportExport.java

/**
 * Streams every row of the given table as an XML element named after the
 * table, with one attribute per non-null column value.
 *
 * @param handler   SAX handler receiving the generated elements
 * @param conn      open JDBC connection (not closed here)
 * @param tableName table to export; also used as the XML element name.
 *                  NOTE(review): concatenated into the SQL as an identifier —
 *                  callers must pass trusted table names only.
 * @throws SQLException if the query fails
 * @throws SAXException if the handler rejects an event
 */
private void doExportTable(final TransformerHandler handler, final Connection conn, final String tableName)
        throws SQLException, SAXException {

    AttributesImpl atts = new AttributesImpl();

    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        stmt = conn.prepareStatement("SELECT * FROM " + tableName + " a");
        rs = stmt.executeQuery();

        // Metadata is constant across rows: fetch it once instead of per row.
        ResultSetMetaData metaData = rs.getMetaData();
        int columnCount = metaData.getColumnCount();

        while (rs.next()) {
            atts.clear();

            for (int i = 0; i < columnCount; i++) {
                String columnName = metaData.getColumnName(i + 1);
                String value = rs.getString(columnName);
                if (value != null) {
                    atts.addAttribute("", "", columnName, "CDATA", value);
                }
            }

            handler.startElement("", "", tableName, atts);
            handler.endElement("", "", tableName);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                // fixed copy-pasted message: this closes the statement
                LOG.error("While closing statement", e);
            }
        }
    }
}

From source file:com.ws.WS_TCS201.java

@Path("/GetTCD")
@JSONP(queryParam = "callback")
@GET
@Produces({ "application/x-javascript" })
/**
 * Returns all TCSTCC codes (excluding A/L/N/J/R/E) as a JSONP record list,
 * each record keyed by column name.
 *
 * @param callback JSONP callback name supplied by the client
 * @return JSON string of the form {"record":[{col:value,...},...]}
 */
public String GetTCD(@QueryParam("callback") String callback) {

    JSONObject obj1 = new JSONObject();
    LinkedList l1 = new LinkedList();

    PreparedStatement prepStmt = null;

    try {
        String cSQL = "SELECT tcctcd,CONCAT(tcctcd,\" - \",trim(tcctxt)) AS name FROM TCSTCC "
                + "WHERE tcctcd NOT IN (\"A\",\"L\",\"N\",\"J\",\"R\",\"E\") " + "ORDER BY tcctcd ";
        prepStmt = connection.prepareStatement(cSQL);
        ResultSet result = prepStmt.executeQuery();
        ResultSetMetaData rsmd = result.getMetaData();
        int numcols = rsmd.getColumnCount();

        while (result.next()) {
            LinkedHashMap m1 = new LinkedHashMap();
            for (int j = 1; j <= numcols; j++) {
                Object obj = result.getObject(j);
                // getColumnName already returns a String; guard against NULL
                // column values (previously a NullPointerException).
                m1.put(rsmd.getColumnName(j), obj == null ? null : obj.toString());
            }
            l1.add(m1);
        }
        obj1.put("record", l1);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // The statement (and its result set) was previously never closed.
        if (prepStmt != null) {
            try {
                prepStmt.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
    }
    return obj1.toString();
}

From source file:com.ws.WS_TCS201.java

@Path("/GetID/{com}")
@JSONP(queryParam = "callback")
@GET
@Produces({ "application/x-javascript" })
/**
 * Returns employee id/name pairs for the given company, filtering out staff
 * who left more than ~100 days ago, as a JSONP record list.
 *
 * @param callback JSONP callback name supplied by the client
 * @param com      company code (path parameter), bound as an SQL parameter
 * @return JSON string of the form {"record":[{col:value,...},...]}
 */
public String GetID(@QueryParam("callback") String callback, @PathParam("com") String com) {
    JSONObject obj1 = new JSONObject();
    LinkedList l1 = new LinkedList();

    PreparedStatement prepStmt = null;

    DateFormat day = new SimpleDateFormat("yyyyMMdd");
    String tmpday = day.format(new java.util.Date());

    try {
        // Both values are bound as parameters; tmpday was previously
        // concatenated into the SQL string as a quoted literal.
        String cSQL = "SELECT tceemp,tcenam FROM TCSTCE "
                + "WHERE tcecom = ? AND ( tceljd=0 OR tceljd + 100 > ? ) "
                + "ORDER BY tceemp,tcecom ";
        prepStmt = connection.prepareStatement(cSQL);
        prepStmt.setString(1, com);
        prepStmt.setString(2, tmpday);
        ResultSet result = prepStmt.executeQuery();
        ResultSetMetaData rsmd = result.getMetaData();
        int numcols = rsmd.getColumnCount();

        while (result.next()) {
            LinkedHashMap m1 = new LinkedHashMap();
            for (int j = 1; j <= numcols; j++) {
                Object obj = result.getObject(j);
                // Guard against NULL column values (previously a NullPointerException).
                m1.put(rsmd.getColumnName(j), obj == null ? null : obj.toString());
            }
            l1.add(m1);
        }
        obj1.put("record", l1);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // The statement (and its result set) was previously never closed.
        if (prepStmt != null) {
            try {
                prepStmt.close();
            } catch (SQLException e) {
                // ignore close failure
            }
        }
    }
    return obj1.toString();
}

From source file:com.baifendian.swordfish.execserver.engine.hive.HiveSqlExec.java

/**
 * Executes a list of SQL statements against HiveServer2, optionally creating
 * temporary functions first, and reports per-statement results through the
 * given callback.
 *
 * @param createFuncs    "create function" statements to run before the main SQLs (may be null)
 * @param sqls           SQL statements to execute, in order
 * @param isContinue     whether to keep executing remaining statements after a
 *                       per-statement failure (transport/socket failures always abort)
 * @param resultCallback receiver for per-statement execution results (may be null)
 * @param queryLimit     max rows to fetch per query; defaults to defaultQueryLimit when null
 * @param remainTime     remaining time budget — presumably seconds, TODO confirm unit;
 *                       non-positive aborts immediately
 * @return true when all statements were processed (individually failed
 *         statements may still be reported FAILED when isContinue is set);
 *         false when execution was aborted
 */
public boolean execute(List<String> createFuncs, List<String> sqls, boolean isContinue,
        ResultCallback resultCallback, Integer queryLimit, int remainTime) {

    // Abort immediately when the time budget is already exhausted.
    if (remainTime <= 0) {
        return false;
    }

    // Fall back to the configured default row limit when none was given.
    queryLimit = (queryLimit != null) ? queryLimit : defaultQueryLimit;

    HiveConnection hiveConnection = null;
    Statement sta = null;
    Thread logThread = null;

    // Resolve the HiveServer2 connection info for the current user.
    HiveService2ConnectionInfo hiveService2ConnectionInfo = hiveUtil.getHiveService2ConnectionInfo(userName);

    logger.info("execution connection information:{}", hiveService2ConnectionInfo);

    HiveService2Client hiveService2Client = hiveUtil.getHiveService2Client();

    try {
        try {
            // Borrow a pooled connection; returned in the finally block below.
            hiveConnection = hiveService2Client.borrowClient(hiveService2ConnectionInfo);

            sta = hiveConnection.createStatement();
            //        sta.setQueryTimeout(remainTime);

            // Daemon thread that streams the statement's JDBC logs while SQLs run.
            logThread = new Thread(new JdbcLogRunnable(sta));
            logThread.setDaemon(true);
            logThread.start();

            // set queue
            if (queueSQL != null) {
                logger.info("hive queue : {}", queueSQL);
                sta.execute(queueSQL);
            }

            // Run any "create function" statements before the main SQLs.
            if (createFuncs != null) {
                for (String createFunc : createFuncs) {
                    logger.info("hive create function sql: {}", createFunc);
                    sta.execute(createFunc);
                }
            }
        } catch (Exception e) {
            logger.error("execute query exception", e);

            // Setup failed: mark every statement FAILED and bail out.
            handlerResults(0, sqls, FlowStatus.FAILED, resultCallback);

            return false;
        }

        // Execute each SQL statement in order.
        for (int index = 0; index < sqls.size(); ++index) {
            String sql = sqls.get(index);

            Date startTime = new Date();

            logger.info("hive execute sql: {}", sql);

            ExecResult execResult = new ExecResult();
            execResult.setIndex(index);
            execResult.setStm(sql);

            try {
                // Queries and SHOW-like statements fetch a result set;
                // everything else goes through plain execute().
                if (HiveUtil.isTokQuery(sql) || HiveUtil.isLikeShowStm(sql)) {
                    sta.setMaxRows(queryLimit);
                    ResultSet res = sta.executeQuery(sql);

                    ResultSetMetaData resultSetMetaData = res.getMetaData();
                    int count = resultSetMetaData.getColumnCount();

                    List<String> colums = new ArrayList<>();
                    for (int i = 1; i <= count; i++) {
                        colums.add(resultSetMetaData.getColumnLabel(
                                i)/*parseColumnName(resultSetMetaData.getColumnLabel(i), colums)*/);
                    }

                    execResult.setTitles(colums);

                    List<List<String>> datas = new ArrayList<>();

                    // Multi-column results (or genuine queries) are collected
                    // row by row; single-column SHOW-like output is flattened
                    // into one newline-joined value instead.
                    if (count > 1 || HiveUtil.isTokQuery(sql)) {
                        while (res.next()) {
                            List<String> values = new ArrayList<>();
                            for (int i = 1; i <= count; ++i) {
                                values.add(res.getString(i));
                            }

                            datas.add(values);
                        }
                    } else {
                        StringBuffer buffer = new StringBuffer();

                        while (res.next()) {
                            buffer.append(res.getString(1));
                            buffer.append("\n");
                        }

                        List<String> values = new ArrayList<>();
                        values.add(buffer.toString().trim());

                        datas.add(values);
                    }

                    execResult.setValues(datas);
                } else {
                    sta.execute(sql);
                }

                // Statement succeeded.
                execResult.setStatus(FlowStatus.SUCCESS);

                // Report the result (with timing) to the callback, if any.
                if (resultCallback != null) {
                    Date endTime = new Date();
                    resultCallback.handleResult(execResult, startTime, endTime);
                }
            } catch (SQLTimeoutException e) {
                // SQL timed out: fail this and all remaining statements.
                logger.error("executeQuery timeout exception", e);

                handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                return false;
            } catch (DaoSemanticException | HiveSQLException e) {
                // Semantic / Hive SQL error: fail this statement only when
                // isContinue is set, otherwise fail the rest and stop.
                logger.error("executeQuery exception", e);

                if (isContinue) {
                    handlerResult(index, sql, FlowStatus.FAILED, resultCallback);
                } else {
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }
            } catch (Exception e) {
                // Thrift transport error: the connection is unusable — fail
                // the remaining statements and abort.
                if (e.toString().contains("TTransportException")) {
                    logger.error("Get TTransportException return a client", e);
                    // NOTE(review): invalidating the pooled client is disabled — confirm intent
                    //            hiveService2Client.invalidateObject(hiveService2ConnectionInfo, hiveConnection);
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }

                // Socket error: clear the whole connection pool, then abort.
                if (e.toString().contains("SocketException")) {
                    logger.error("SocketException clear pool", e);
                    hiveService2Client.clear();
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }

                logger.error("executeQuery exception", e);

                if (isContinue) {
                    handlerResult(index, sql, FlowStatus.FAILED, resultCallback);
                } else {
                    handlerResults(index, sqls, FlowStatus.FAILED, resultCallback);
                    return false;
                }
            }
        }
    } finally {
        // Always close the statement first.
        try {
            if (sta != null) {
                sta.close();
            }
        } catch (Exception e) {
            logger.error("Catch an exception", e);
        }

        try {
            // Close the Hive connection...
            if (hiveConnection != null) {
                hiveConnection.close();

                // ...and hand the client back to the pool.
                hiveService2Client.returnClient(hiveService2ConnectionInfo, hiveConnection);
            }
        } catch (Exception e) {
            logger.error("Catch an exception", e);
        }

        // Stop the log-streaming thread and wait briefly for it to finish.
        try {
            if (logThread != null) {
                logThread.interrupt();
                logThread.join(HiveUtil.DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT);
            }
        } catch (Exception e) {
            //        logger.error("Catch an exception", e);
        }
    }

    return true;
}

From source file:edu.ku.brc.specify.toycode.mexconabio.DataObjTableModel.java

/**
 * The Data members must be set to call this:
 *     numColumns/*  w  ww. j  ava  2 s.c o  m*/
 *     itemsList
 * 
 */
protected void fillModels() {
    final String sqlStr = buildSQL();

    TimeLogger tml = new TimeLogger("Fetching Rows");
    values = new Vector<Object[]>();

    if (StringUtils.isNotEmpty(sqlStr)) {
        log.debug(sqlStr);
        try {
            PreparedStatement pStmt = conn.prepareStatement(sqlStr);
            if (searchValue != null) {
                pStmt.setObject(1, searchValue);
            }
            log.debug(sqlStr + " [" + searchValue + "]");

            tml.restart("Query");
            ResultSet rs = pStmt.executeQuery();
            ResultSetMetaData rsmd = rs.getMetaData();

            tml.restart("Loading");
            while (rs.next()) {
                Object[] row = new Object[numColumns];
                for (int i = 0; i < rsmd.getColumnCount(); i++) {
                    Object obj = rs.getObject(i + 1);
                    row[i] = obj instanceof String ? obj.toString().trim() : obj;
                }
                rowInfoList.add(new DataObjTableModelRowInfo(rs.getInt(1), false, false));
                values.add(row);
                processColumns(row);
            }
            rs.close();
            pStmt.close();
            tml.end();

        } catch (SQLException ex) {
            ex.printStackTrace();
        }
    }
    tml.restart("Step 2 - Addl Rows");
    addAdditionalRows(colDefItems, rowInfoList);

    tml.restart("Step 3");

    sameValues = new ArrayList<Boolean>(numColumns);
    hasDataList = new ArrayList<Boolean>(numColumns);
    for (int i = 0; i < numColumns; i++) {
        sameValues.add(true);
        hasDataList.add(false);
    }

    for (Object[] col : values) {
        for (int i = 0; i < numColumns; i++) {
            Object data = col[i];
            boolean hasData = data != null;

            if (hasData && !hasDataList.get(i)) {
                hasDataList.set(i, true);
                hasDataCols++;
            }
        }
    }

    tml.restart("Step  4 - adj cols");
    adjustHasDataColumns();

    tml.restart("Step  5 - Map");
    mapInx = new int[hasDataCols];
    int colInx = 0;
    //log.debug("-------------Has Data----------------------");
    for (int i = 0; i < numColumns; i++) {
        if (hasDataList.get(i)) {
            //log.debug(itemsList.get(i).getTitle());
            mapInx[colInx] = i;
            indexHash.put(i, colInx);
            //System.out.print("indexHash: "+i +" -> "+colInx);
            //log.debug("  mapInx:    "+colInx +" -> "+i);
            colInx++;
        }
    }

    tml.restart("Step  6 - same data");
    for (int i = 0; i < mapInx.length; i++) {
        colInx = mapInx[i];

        if (hasDataList.get(colInx)) {
            Object data = null;
            for (Object[] col : values) {
                Object newData = col[colInx];

                if (data == null) {
                    if (newData != null) {
                        data = newData;
                    }
                    continue;
                }

                if (newData != null && !data.equals(newData)) {
                    sameValues.set(colInx, false);
                    break;
                }
            }
        }
    }
    tml.end();
    /*
    log.debug("-----------Same------------------------");
    for (int i=0;i<mapInx.length;i++)
    {
        colInx = mapInx[i];
        if (sameValues.get(colInx))
        {
            log.debug(colInx + " " + itemsList.get(colInx).getTitle());
        }
    }*/

    items = new ArrayList<DBInfoBase>(colDefItems);
    doneFillingModels(values);
}

From source file:com.arcane.dao.Impl.PatternDaoImpl.java

@Override
/**
 * Loads every row of the table named by {@code type} and maps each to a
 * Pattern bean, sorted with the class comparator.
 *
 * NOTE(review): {@code type} is concatenated into the SQL as a table name and
 * cannot be bound as a JDBC parameter — callers must guarantee it is a
 * trusted, whitelisted identifier or this is an SQL-injection vector.
 *
 * @param type the table name identifying the pattern type
 * @return the sorted list of patterns
 */
public List<Pattern> getAllPatternList(String type) {
    // "{}" placeholder was missing, so 'type' was silently dropped from the log
    LOG.info("Returning requested pattern {}", type);
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    final String sql = "SELECT * from " + type;
    List<Pattern> templistPattern = jdbcTemplate.query(sql, new RowMapper<Pattern>() {

        @Override
        public Pattern mapRow(ResultSet rs, int rowNumber) throws SQLException {
            Pattern pattern1 = new Pattern();
            pattern1.setId(rs.getInt("id"));
            pattern1.setStock(rs.getString("stock"));
            pattern1.setTimeStamp(rs.getString("breakPoint"));
            // table name from metadata; may be empty on some drivers — kept for parity
            pattern1.setName(rs.getMetaData().getTableName(1));
            return pattern1;
        }

    });

    Collections.sort(templistPattern, comparator);
    return templistPattern;
}

From source file:com.p5solutions.core.jpa.orm.rowbinder.EntityRowBinder.java

/**
 * Gets the basic result object./*  w ww . java2 s.com*/
 * 
 * @param rs
 *          the rs
 * @return the basic result object
 * @throws SQLException
 *           the sQL exception
 */
private Object getBasicResultObject(ResultSet rs) throws SQLException {
    final boolean isPlainClass = Object.class.equals(entityClass);
    final boolean isBasicClass = ReflectionUtility.isBasicClass(entityClass);

    ResultSetMetaData metaData = rs.getMetaData();

    // if the entity is a of sub-type object.class or is some sort of
    // primitive class such as BigDecimal, Integer, Double, Short, etc.
    if (isPlainClass || isBasicClass) {

        // / obviously if we have more than one column, we cannot
        // possibly map it
        // / to a plain old java object of type Object.class, since
        // there are no
        // / members to map the columns to!
        if (metaData.getColumnCount() > 1) {
            String error = "Cannot return multi-column resultset into "
                    + "a plain object of type Object.class. If you need to map a multi-column "
                    + "resultset, please use an object marked with @" + Entity.class + " annotation.";
            logger.error(error);

            throw new RuntimeException(error);
        }

        // // THIS SHOULD NEVER HAPPEN, QUERY EXCEPTION SHOULD
        // // BE THROWN IF THERE IS A SYNTAX ERROR IN THE QUERY.
        // if (metaData.getColumnCount() == 0) { }

        // Otherwise if there is only 1 column, and its within the scope
        // of plain object.class
        return (T) rs.getObject(1);
    }

    return null;
}

From source file:com.dexcoder.dal.spring.mapper.JdbcRowMapper.java

/**
 * Extract the values for all columns in the current row into a new instance
 * of the mapped class.
 * <p>Utilizes public setters and result set metadata.
 *
 * @param rs the result set, positioned on the row to map
 * @param rowNumber zero-based row index (used only for first-row debug logging)
 * @return the populated instance of the mapped class
 * @throws SQLException if a column value cannot be read
 * @see java.sql.ResultSetMetaData
 */
public T mapRow(ResultSet rs, int rowNumber) throws SQLException {
    Assert.state(this.mappedClass != null, "Mapped class was not specified");
    T mappedObject = BeanUtils.instantiate(this.mappedClass);
    BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(mappedObject);
    initBeanWrapper(bw);

    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    Set<String> populatedProperties = (isCheckFullyPopulated() ? new HashSet<String>() : null);

    for (int index = 1; index <= columnCount; index++) {
        String column = JdbcUtils.lookupColumnName(rsmd, index);
        // normalize "First Name" -> "firstname" to match the mappedFields keys
        String field = lowerCaseName(column.replaceAll(" ", ""));
        PropertyDescriptor pd = this.mappedFields.get(field);
        if (pd != null) {
            try {
                Object value = getColumnValue(rs, index, pd);
                if (rowNumber == 0 && logger.isDebugEnabled()) {
                    logger.debug("Mapping column '" + column + "' to property '" + pd.getName() + "' of type ["
                            + ClassUtils.getQualifiedName(pd.getPropertyType()) + "]");
                }
                try {
                    bw.setPropertyValue(pd.getName(), value);
                } catch (TypeMismatchException ex) {
                    // A null SQL value hitting a primitive property is tolerated
                    // (the primitive keeps its default) when the flag is set;
                    // any other type mismatch is a real error and is rethrown.
                    if (value == null && this.primitivesDefaultedForNullValue) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("Intercepted TypeMismatchException for row " + rowNumber
                                    + " and column '" + column + "' with null value when setting property '"
                                    + pd.getName() + "' of type ["
                                    + ClassUtils.getQualifiedName(pd.getPropertyType()) + "] on object: "
                                    + mappedObject, ex);
                        }
                    } else {
                        throw ex;
                    }
                }
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException(
                        "Unable to map column '" + column + "' to property '" + pd.getName() + "'", ex);
            }
        } else {
            // No PropertyDescriptor found
            if (rowNumber == 0 && logger.isDebugEnabled()) {
                logger.debug("No property found for column '" + column + "' mapped to field '" + field + "'");
            }
        }
    }

    // When strict population checking is on, every mapped property must have
    // been set from some column, otherwise the row is considered incomplete.
    if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
        throw new InvalidDataAccessApiUsageException(
                "Given ResultSet does not contain all fields " + "necessary to populate object of class ["
                        + this.mappedClass.getName() + "]: " + this.mappedProperties);
    }

    return mappedObject;
}