Example usage for java.sql ResultSet getMetaData

Introduction

This page collects usage examples for java.sql.ResultSet.getMetaData from open source projects.

Prototype

ResultSetMetaData getMetaData() throws SQLException;

Documentation

Retrieves the number, types and properties of this ResultSet object's columns.
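
A minimal sketch of the call in isolation (the java.sql imports, the open Connection named conn, and the ITEMS table are illustrative assumptions, not taken from the examples below):

// Print each column's name and SQL type for an arbitrary query.
static void describeColumns(Connection conn) throws SQLException {
    try (Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery("SELECT * FROM ITEMS")) {
        ResultSetMetaData md = rs.getMetaData();
        for (int i = 1; i <= md.getColumnCount(); i++) { // JDBC columns are 1-based
            System.out.println(md.getColumnName(i) + " : " + md.getColumnTypeName(i));
        }
    }
}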

Usage

From source file: io.pivotal.dataflow.task.app.jdbcgemfire.common.JdbcColumnToPojoRowMapper.java
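
A Spring RowMapper that uses the metadata's column count to build a column-name-to-value map for each row, which is then handed to a custom column mapper for post-processing.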

@Override
public Map<String, Object> mapRow(ResultSet rs, int arg1) throws SQLException {
    Map<String, Object> builder = new HashMap<>();
    Map<String, Object> processed = new HashMap<>();
    ColumnStandardMapper mapper = new ColumnStandardMapper();

    ResultSetMetaData metaData = rs.getMetaData();
    for (int i = 1; i <= metaData.getColumnCount(); i++) {
        builder.put(JdbcUtils.lookupColumnName(metaData, i), JdbcUtils.getResultSetValue(rs, i));
    }
    // send map to groovyColumnProcessor.process()
    processed = mapper.process(builder);

    return processed;
}

From source file: com.datatorrent.contrib.enrichment.JDBCLoader.java
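
When no include fields are configured, this enrichment loader falls back to the metadata to discover the column names, then reads each listed column from the row.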

protected ArrayList<Object> getDataFrmResult(Object result) throws RuntimeException {
    try {
        ResultSet resultSet = (ResultSet) result;
        if (resultSet.next()) {
            ResultSetMetaData rsdata = resultSet.getMetaData();
            // If includeFields is empty, populate it from the ResultSetMetaData
            if (CollectionUtils.isEmpty(includeFields)) {
                if (includeFields == null) {
                    includeFields = new ArrayList<String>();
                }
                for (int i = 1; i <= rsdata.getColumnCount(); i++) {
                    includeFields.add(rsdata.getColumnName(i));
                }
            }
            ArrayList<Object> res = new ArrayList<Object>();
            for (String f : includeFields) {
                res.add(resultSet.getObject(f));
            }
            return res;
        } else {
            return null;
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}

From source file: com.novartis.opensource.yada.format.ResultSetResultDelimitedConverter.java
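
This converter reads the column names from the metadata to emit (optionally harmonized) headers, skipping an internal row-number column, before collecting each row's values as delimited strings.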

/**
 * Converts the columns of data in a {@link java.sql.ResultSet} into a collection
 * of {@link List} objects containing the values, which is stored in the current
 * {@link YADAQueryResult#getConvertedResults()} structure.
 *
 * @param rs
 *          the result set to convert
 * @throws SQLException
 *           when {@link ResultSet} or {@link ResultSetMetaData} iteration
 *           fails
 */
protected void getDelimitedRows(ResultSet rs) throws SQLException {
    JSONObject h = (JSONObject) this.harmonyMap;
    ResultSetMetaData rsmd = rs.getMetaData();
    if (rsmd == null) // TODO What happens to headers when rsmd is null, or
                      // resultSet is empty?
        rsmd = new RowSetMetaDataImpl();
    int colCount = rsmd.getColumnCount();
    boolean hasYadaRnum = rsmd.getColumnName(colCount).toLowerCase().equals(JDBCAdaptor.ROWNUM_ALIAS);

    // handle headers
    // TODO How to suppress headers?
    for (int j = 1; j <= colCount; j++) {
        String colName = rsmd.getColumnName(j);
        if (!hasYadaRnum || !colName.toLowerCase().equals(JDBCAdaptor.ROWNUM_ALIAS)) {
            String col = colName;
            if (isHarmonized()) {
                if (h.has(colName)) {
                    col = h.getString(colName);
                }
            }
            getYADAQueryResult().addConvertedHeader(this.wrap(col));
        }
    }
    List<List<String>> convertedResult = new ArrayList<>();
    while (rs.next()) {
        List<String> resultsRow = new ArrayList<>();
        String colValue;
        for (int j = 1; j <= colCount; j++) {
            String colName = rsmd.getColumnName(j);
            if (!hasYadaRnum || !colName.toLowerCase().equals(JDBCAdaptor.ROWNUM_ALIAS)) {
                if (null == rs.getString(colName) || "null".equals(rs.getString(colName))) {
                    colValue = NULL_REPLACEMENT;
                } else {
                    colValue = this.wrap(rs.getString(colName));
                }
                resultsRow.add(colValue);
            }
        }
        convertedResult.add(resultsRow);
    }
    getYADAQueryResult().getConvertedResults().add(convertedResult);
}

From source file: com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java
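
A longer example: the column count, labels and vendor-normalized types from the metadata drive Kryo serialization of the result set into a byte stream, stopping early when a chunk size limit is reached.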

public static RawMatrixStreamExecRes streamExecutionItemToByteArray(IExecutionItem item,
        long nbLinesExpectedLeft) throws IOException, SQLException {

    RawMatrixStreamExecRes res = new RawMatrixStreamExecRes();
    long metter_start = System.currentTimeMillis();
    ByteArrayOutputStream baout = new ByteArrayOutputStream();
    Output kout = new Output(baout);

    Kryo kryo = new Kryo();
    kryo.setRegistrationRequired(true);
    kryo.setReferences(false);

    ResultSet result = item.getResultSet();
    IJDBCDataFormatter formatter = item.getDataFormatter();
    //
    // if(logger.isDebugEnabled()){logger.debug(("Formatter ="+formatter.getClass().toString()));}

    ResultSetMetaData metadata = result.getMetaData();
    int nbColumns = metadata.getColumnCount();

    IVendorSupport vendorSpecific = VendorSupportRegistry.INSTANCE.getVendorSupport(item.getDatabase());

    int[] colTypes = vendorSpecific.getVendorMetadataSupport().normalizeColumnType(result);

    // get columns #, type and names
    String[] colNames = new String[nbColumns];
    int i = 0;
    while (i < nbColumns) {
        colNames[i] = metadata.getColumnLabel(i + 1);
        i++;
    }
    // register
    // Class mapping have to be registered before we start writing
    HashMap<String, Integer> registration = new HashMap<String, Integer>();
    for (int val : colTypes) {
        if (!isPrimitiveType(val)) {
            String className = getJavaDatatype(val);
            try {
                if (registration.get(className) == null) {
                    registration.put(className, kryo.register(Class.forName(className)).getId());
                }
            } catch (ClassNotFoundException e0) {
                logger.info("Class " + className + " not found");
            } catch (NullPointerException e1) {
                logger.info("Class " + className + " not found");
            }
        }
    }

    // Register Hadoop type
    // registration.put("org.apache.hadoop.io.Text",kryo.register(org.apache.hadoop.io.Text.class).getId());
    // registration.put("byte[]", kryo.register(byte[].class).getId());

    // start writing!

    // registration
    kout.writeInt(registration.keySet().size());
    for (String s : registration.keySet()) {
        kout.writeString(s);
        kout.writeInt(registration.get(s));
        // logger.info(s + " " + registration.get(s));
    }

    // version
    int version = VERSION;
    if (version >= 1) {
        kout.writeInt(-1); // this is for V0 compatibility, which is missing the version information
        kout.writeInt(version);
    }

    // Redis cache type
    kout.writeInt(RedisCacheType.RAW_MATRIX.ordinal());

    // nb of columns
    kout.writeInt(nbColumns);

    // columns names
    for (String n : colNames)
        kout.writeString(n);

    // column type
    for (Integer t : colTypes)
        kout.writeInt(t);

    // rows
    // we need a different dictionary to check for first occurrences
    HashMap<String, Integer> tempsDict = new HashMap<String, Integer>();
    int count = 0;
    int index = 0;
    boolean moreData = false;
    boolean maxSizeReached = false;

    while ((!maxSizeReached) && (moreData = result.next())) {
        i = 0;
        kout.writeBoolean(true);
        while (i < nbColumns) {
            Object value = result.getObject(i + 1);
            Object unbox = formatter.unboxJDBCObject(value, colTypes[i]);
            // if(logger.isDebugEnabled()){logger.debug(("unbox value is "+unbox));}
            if (unbox instanceof String) {
                String stringVal = (String) unbox;
                // System.out.println(stringVal);
                Integer ref = tempsDict.get(stringVal);
                if (ref != null) {
                    kout.write(MEMBER_REFERENCE);// 4
                    kout.writeInt(ref);// 5
                } else {
                    kout.write(MEMBER_DEFINITION);// 4
                    kout.writeString(stringVal);
                    tempsDict.put(stringVal, new Integer(index));
                    index++;
                }
            } else {
                kout.write(MEMBER_VALUE);// 4
                // if(logger.isDebugEnabled()){logger.debug(("member unbox " + unbox.toString()));}
                // if(logger.isDebugEnabled()){logger.debug(("member value " + value.toString()));}
                kryo.writeClassAndObject(kout, unbox);
            }
            i++;
        }

        count++;

        // stats: display the time taken for the first 100 rows
        if (count == 100) {
            long intermediate = new Date().getTime();
            // logger.info("SQLQuery#" + item.getID() + " proceeded
            // first 100 items in "+(intermediate-metter_start)+" ms");
            logger.info("task=RawMatrix" + " method=streamExecutionItemToByteArray" + " duration="
                    + ((intermediate - metter_start)) + " error=false status=running queryid=" + item.getID());

        }
        // if the max chunk size of 50MB is reached, stop
        if (count % 100 == 0) {
            float size = Math.round(baout.size() / 1048576);
            if (size >= maxChunkSizeInMB) {
                logger.info("Max size of " + maxChunkSizeInMB + "MB for one chunk reached");
                maxSizeReached = true;
            }
        }

        // DEBUG CODE TO CREATE SMALLER CHUNKS
        /*
        if (count == 250) {
            maxSizeReached = true;
            // logger.info("Max debug size of 250 items reached");
        }
        */
    }

    // WRITE moredata
    kout.writeBoolean(false);// close the data stream, begin metadata

    // we stop either if maxSize was reached or if there was no more data to read
    boolean moreToRead = false; // we did not reach the end of the resultset
    boolean moreThanLimit = false; // in case of a limit query, did we reach the limit
    if (maxSizeReached) { // we stopped because we reached the hard memory limit for one chunk
        if ((nbLinesExpectedLeft > -1) && (!(count < nbLinesExpectedLeft))) { // we read exactly as many lines as the limit
            moreThanLimit = true;
        } else {
            moreThanLimit = true;
            moreToRead = true;
        }
    } else {
        if (!moreData) { //no more lines to read
            if (nbLinesExpectedLeft > -1) { // limit
                if (!(count < nbLinesExpectedLeft)) { //we read as many lines as the limit
                    moreThanLimit = true;
                }
            }
        }
    }

    kout.writeBoolean(moreThanLimit);
    // -- V1 only
    if (version >= 1) {
        kout.writeLong(item.getExecutionDate().getTime()); // the computeDate
    }

    // stats: display total
    long metter_finish = new Date().getTime();
    // logger.info("SQLQuery#" + item.getID() + " serialized
    // "+(count-1)+" row(s) in "+(metter_finish-metter_start)+" ms,
    // compressed resulset size is "+size+" Mbytes");
    /*   logger.info("task=RawMatrix" + " method=streamExecutionItemToByteArray" + " duration="
    + (metter_finish - metter_start) / 1000 + " error=false status=done driver="
    + item.getDatabase().getName() + " queryid=" + item.getID() + " size= " + size + " SQLQuery#"
    + item.getID() + " serialized " + (count - 1) + " row(s) in " + (metter_finish - metter_start)
    + " ms, compressed resulset size is " + size + " Mbytes"); */
    // TODO Get project
    SQLStats queryLog = new SQLStats(Integer.toString(item.getID()), "streamExecutionItemToByteArray", "",
            (metter_finish - metter_start), item.getDatabase().getName());
    queryLog.setError(false);
    PerfDB.INSTANCE.save(queryLog);

    kout.close();

    res.setHasMore(moreToRead);
    res.setExecutionTime(metter_finish - metter_start);
    res.setNbLines(count);
    res.setStreamedMatrix(baout.toByteArray());

    return res;

}

From source file: com.chiorichan.account.adapter.sql.SqlAdapter.java
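
Here a LIMIT 0 query is issued only to obtain the table's column names from the metadata; those names are then matched against the configured account fields to build the actual lookup query.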

@Override
public AccountMetaData readAccount(String id) throws LoginException {
    try {
        AccountMetaData meta = new AccountMetaData();

        if (id == null || id.isEmpty())
            throw new LoginException(LoginExceptionReason.emptyUsername);

        Set<String> accountFieldSet = new HashSet<String>(accountFields);
        Set<String> accountColumnSet = new HashSet<String>();

        accountFieldSet.add("acctId");
        accountFieldSet.add("username");

        ResultSet rs = sql.query("SELECT * FROM `" + table + "` LIMIT 0;");

        ResultSetMetaData rsmd = rs.getMetaData();
        int columnCount = rsmd.getColumnCount();

        do {
            for (int i = 1; i < columnCount + 1; i++) {
                accountColumnSet.add(rsmd.getColumnName(i));
            }
        } while (rs.next());

        String additionalAccountFields = "";
        for (String f : accountFieldSet) {
            if (!f.isEmpty())
                if (accountColumnSet.contains(f))
                    additionalAccountFields += " OR `" + f + "` = '" + id + "'";
                else
                    for (String c : accountColumnSet) {
                        if (c.equalsIgnoreCase(f)) {
                            additionalAccountFields += " OR `" + c + "` = '" + id + "'";
                            break;
                        }
                    }
        }

        rs = sql.query("SELECT * FROM `" + table + "` WHERE " + additionalAccountFields.substring(4) + ";");

        if (rs == null || sql.getRowCount(rs) < 1)
            throw new LoginException(LoginExceptionReason.incorrectLogin);

        meta.setAll(DatabaseEngine.convertRow(rs));

        meta.set("displayName", (rs.getString("fname").isEmpty()) ? rs.getString("name")
                : rs.getString("fname") + " " + rs.getString("name"));

        return meta;
    } catch (SQLException e) {
        throw new LoginException(e);
    }
}

From source file: org.owasp.webgoat.plugin.introduction.SqlInjectionLesson5a.java
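
In this WebGoat SQL injection lesson the metadata is passed to a table-rendering helper so the columns returned by the (possibly injected) query can be shown to the user.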

protected AttackResult injectableQuery(String accountName) {
    try {
        Connection connection = DatabaseUtilities.getConnection(getWebSession());
        String query = "SELECT * FROM user_data WHERE last_name = '" + accountName + "'";

        try {
            Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            ResultSet results = statement.executeQuery(query);

            if ((results != null) && (results.first())) {
                ResultSetMetaData resultsMetaData = results.getMetaData();
                StringBuffer output = new StringBuffer();

                output.append(writeTable(results, resultsMetaData));
                results.last();

                // If they get back more than one user they succeeded
                if (results.getRow() >= 6) {
                    return trackProgress(success().feedback("sql-injection.5a.success")
                            .feedbackArgs(output.toString()).build());
                } else {
                    return trackProgress(failed().output(output.toString()).build());
                }
            } else {
                return trackProgress(failed().feedback("sql-injection.5a.no.results").build());

            }
        } catch (SQLException sqle) {

            return trackProgress(failed().output(sqle.getMessage()).build());
        }
    } catch (Exception e) {
        return trackProgress(failed().output(this.getClass().getName() + " : " + e.getMessage()).build());
    }
}

From source file: com.datatorrent.contrib.enrich.JDBCLoader.java
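
A newer variant of the enrichment loader above: when no field info is configured, a FieldInfo entry is created for every column name found in the metadata, defaulting the type to OBJECT.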

protected ArrayList<Object> getDataFrmResult(Object result) throws RuntimeException {
    try {
        ResultSet resultSet = (ResultSet) result;
        if (resultSet.next()) {
            ResultSetMetaData rsdata = resultSet.getMetaData();
            // If includeFieldInfo is empty, populate it from the ResultSetMetaData
            if (CollectionUtils.isEmpty(includeFieldInfo)) {
                if (includeFieldInfo == null) {
                    includeFieldInfo = new ArrayList<>();
                }
                for (int i = 1; i <= rsdata.getColumnCount(); i++) {
                    String columnName = rsdata.getColumnName(i);
                    // TODO: Take care of type conversion.
                    includeFieldInfo.add(new FieldInfo(columnName, columnName, FieldInfo.SupportType.OBJECT));
                }
            }

            ArrayList<Object> res = new ArrayList<Object>();
            for (FieldInfo f : includeFieldInfo) {
                res.add(getConvertedData(resultSet.getObject(f.getColumnName()), f));
            }
            return res;
        } else {
            return null;
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}

From source file: io.github.benas.jql.shell.StringResultSetExtractor.java
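
The extractor takes the column count from the metadata to format every row as a pipe-delimited line beneath a header built from the column names.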

@Override
public String extractData(ResultSet resultSet) throws SQLException, DataAccessException {
    RowCountCallbackHandler rowCountCallbackHandler = new RowCountCallbackHandler();
    rowCountCallbackHandler.processRow(resultSet);
    int columnCount = resultSet.getMetaData().getColumnCount();
    List<String> columnNames = asList(rowCountCallbackHandler.getColumnNames());
    String header = getHeader(columnNames);

    StringBuilder result = new StringBuilder(header);
    result.append("\n");

    while (resultSet.next()) {
        StringBuilder stringBuilder = new StringBuilder();
        int i = 1;
        while (i <= columnCount) {
            stringBuilder.append(resultSet.getObject(i));
            if (i < columnCount) {
                stringBuilder.append(" | ");
            }
            i++;
        }
        result.append(stringBuilder.toString()).append("\n");
    }

    return result.toString();
}

From source file: com.openddal.test.BaseTestCase.java
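
A simple debugging helper that prints a header of column names taken from the metadata, followed by the value of every column in every row.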

public static void printResultSet(ResultSet rs) {
    try {
        if (rs != null) {
            ResultSetMetaData md = rs.getMetaData();
            int cols = md.getColumnCount();
            StringBuffer sb = new StringBuffer();
            for (int i = 0; i < cols; i++) {
                sb.append(md.getColumnName(i + 1) + " ");
            }
            sb.append('\n');
            for (int i = 0; i < cols; i++) {
                sb.append("-------------------");
            }
            sb.append('\n');
            while (rs.next()) {
                for (int i = 0; i < cols; i++) {
                    sb.append(rs.getString(i + 1) + " ");
                }
                sb.append('\n'); // end each row on its own line so rows do not run together
            }
            sb.append("\n");
            System.out.println(sb.toString());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file: com.mvdb.etl.dao.impl.JdbcOrderDAO.java
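
Here each column's label, name, type code and type name are copied from the metadata into a ColumnMetadata value object, keyed by column name.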

@Override
public Map<String, ColumnMetadata> findMetadata() {
    String sql = "SELECT * FROM ORDERS limit 1";
    final Map<String, ColumnMetadata> metaDataMap = new HashMap<String, ColumnMetadata>();

    getJdbcTemplate().query(sql, new RowCallbackHandler() {

        @Override
        public void processRow(ResultSet row) throws SQLException {
            ResultSetMetaData rsm = row.getMetaData();
            int columnCount = rsm.getColumnCount();
            for (int column = 1; column < (columnCount + 1); column++) {
                ColumnMetadata metadata = new ColumnMetadata();
                metadata.setColumnLabel(rsm.getColumnLabel(column));
                metadata.setColumnName(rsm.getColumnName(column));
                metadata.setColumnType(rsm.getColumnType(column));
                metadata.setColumnTypeName(rsm.getColumnTypeName(column));

                metaDataMap.put(rsm.getColumnName(column), metadata);
            }

        }
    });

    return metaDataMap;
}