Example usage for java.sql ResultSet getMetaData

List of usage examples for java.sql ResultSet getMetaData

Introduction

On this page you can find example usages of java.sql ResultSet getMetaData.

Prototype

ResultSetMetaData getMetaData() throws SQLException;

Document

Retrieves the number, types and properties of this ResultSet object's columns.
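
Before the project examples below, a minimal sketch of the typical pattern may help: obtain the ResultSetMetaData from a ResultSet and iterate its columns. The query and table name (my_table) are illustrative placeholders, not taken from the examples that follow.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ResultSetMetaDataSketch {

    // Print each column's label and SQL type name for an arbitrary query.
    static void printColumns(Connection connection) throws SQLException {
        try (Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT * FROM my_table")) {
            ResultSetMetaData rsmd = rs.getMetaData();
            // JDBC column indexes are 1-based.
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                System.out.println(rsmd.getColumnLabel(i) + " : " + rsmd.getColumnTypeName(i));
            }
        }
    }
}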

Usage

From source file:co.nubetech.apache.hadoop.DateSplitter.java

public List<InputSplit> split(Configuration conf, ResultSet results, String colName) throws SQLException {

    long minVal;
    long maxVal;

    int sqlDataType = results.getMetaData().getColumnType(1);
    minVal = resultSetColToLong(results, 1, sqlDataType);
    maxVal = resultSetColToLong(results, 2, sqlDataType);

    String lowClausePrefix = colName + " >= ";
    String highClausePrefix = colName + " < ";

    int numSplits = conf.getInt(MRJobConfig.NUM_MAPS, 1);
    if (numSplits < 1) {
        numSplits = 1;
    }

    if (minVal == Long.MIN_VALUE && maxVal == Long.MIN_VALUE) {
        // The range of acceptable dates is NULL to NULL. Just create a
        // single split.
        List<InputSplit> splits = new ArrayList<InputSplit>();
        splits.add(
                new DataDrivenDBInputFormat.DataDrivenDBInputSplit(colName + " IS NULL", colName + " IS NULL"));
        return splits;
    }

    // Gather the split point integers
    List<Long> splitPoints = split(numSplits, minVal, maxVal);
    List<InputSplit> splits = new ArrayList<InputSplit>();

    // Turn the split points into a set of intervals.
    long start = splitPoints.get(0);
    Date startDate = longToDate(start, sqlDataType);
    if (sqlDataType == Types.TIMESTAMP) {
        // The lower bound's nanos value needs to match the actual
        // lower-bound nanos.
        try {
            ((java.sql.Timestamp) startDate).setNanos(results.getTimestamp(1).getNanos());
        } catch (NullPointerException npe) {
            // If the lower bound was NULL, we'll get an NPE; just ignore it
            // and don't set nanos.
        }
    }

    for (int i = 1; i < splitPoints.size(); i++) {
        long end = splitPoints.get(i);
        Date endDate = longToDate(end, sqlDataType);

        if (i == splitPoints.size() - 1) {
            if (sqlDataType == Types.TIMESTAMP) {
                // The upper bound's nanos value needs to match the actual
                // upper-bound nanos.
                try {
                    ((java.sql.Timestamp) endDate).setNanos(results.getTimestamp(2).getNanos());
                } catch (NullPointerException npe) {
                    // If the upper bound was NULL, we'll get an NPE; just
                    // ignore it and don't set nanos.
                }
            }
            // This is the last one; use a closed interval.
            splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
                    lowClausePrefix + dateToString(startDate), colName + " <= " + dateToString(endDate)));
        } else {
            // Normal open-interval case.
            splits.add(new DataDrivenDBInputFormat.DataDrivenDBInputSplit(
                    lowClausePrefix + dateToString(startDate), highClausePrefix + dateToString(endDate)));
        }

        start = end;
        startDate = endDate;
    }

    if (minVal == Long.MIN_VALUE || maxVal == Long.MIN_VALUE) {
        // Add an extra split to handle the null case that we saw.
        splits.add(
                new DataDrivenDBInputFormat.DataDrivenDBInputSplit(colName + " IS NULL", colName + " IS NULL"));
    }

    return splits;
}

From source file:com.abixen.platform.module.chart.service.impl.AbstractDatabaseService.java

private DataSourceValueWeb getDataFromColumn(ResultSet row, String columnName) {
    try {
        ResultSetMetaData resultSetMetaData = row.getMetaData();
        String columnTypeName = resultSetMetaData.getColumnTypeName(row.findColumn(columnName));
        if ("BIGINT".equals(columnTypeName)) {
            columnTypeName = "INTEGER";
        }
        if ("VARCHAR".equals(columnTypeName)) {
            columnTypeName = "STRING";
        }
        return getValueAsDataSourceValue(row, columnName, DataValueType.valueOf(columnTypeName));
    } catch (SQLException e) {
        throw new DataSourceValueException("Error when getting value from column. " + e.getMessage());
    }
}

From source file:io.stallion.dataAccess.BeanListHandler.java

public List<T> handle(ResultSet rs) throws SQLException {
    List<T> beans = list();
    String[] columnToProperty = null;
    while (rs.next()) {
        try {
            if (columnToProperty == null) {
                columnToProperty = makeColumnToProperty(rs.getMetaData(), PropertyUtils.getPropertyNames(type));
            }
            T bean = (T) type.newInstance();
            for (int i = 1; i < columnToProperty.length; i++) {
                Object val = rs.getObject(i);
                String propertyName = columnToProperty[i];
                if (empty(propertyName)) {
                    continue;
                }
                PropertyUtils.setProperty(bean, propertyName, val);
            }
            beans.add(bean);
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }
    return beans;
}

From source file:net.hydromatic.optiq.test.JdbcTest.java

static String toString(ResultSet resultSet) throws SQLException {
    StringBuilder buf = new StringBuilder();
    while (resultSet.next()) {
        int n = resultSet.getMetaData().getColumnCount();
        String sep = "";
        for (int i = 1; i <= n; i++) {
            buf.append(sep).append(resultSet.getMetaData().getColumnLabel(i)).append("=")
                    .append(resultSet.getObject(i));
            sep = "; ";
        }
        buf.append("\n");
    }
    return buf.toString();
}

From source file:it.unibas.spicy.persistence.idgenerator.utils.ReadDB.java

public ArrayList<InputDataModel> readTargetDatabase(String[] targetColumns)
        throws SQLException, IOException, ClassNotFoundException {
    ArrayList<InputDataModel> inputData = new ArrayList<>();
    ArrayList<String> configurationProperties = getExportDatabaseConfig();
    Connection connection = null;
    try {
        connection = getConnectionToDatabase(configurationProperties.get(0),
                configurationProperties.get(1) + configurationProperties.get(4), configurationProperties.get(2),
                configurationProperties.get(3));
        Statement statement = connection.createStatement();

        String columnsToQuery = "";
        for (String column : targetColumns) {
            columnsToQuery += column + ",";
        }

        ResultSet tableRows = statement.executeQuery(
                "SELECT " + columnsToQuery.substring(0, columnsToQuery.length() - 1) + " FROM " + table + ";");

        ResultSetMetaData rsmd = tableRows.getMetaData();
        int columnsNumber = rsmd.getColumnCount();
        while (tableRows.next()) {
            InputDataModel idm = new InputDataModel();
            for (int i = 1; i <= columnsNumber; i++) {
                idm.addValue(String.valueOf(tableRows.getObject(i)));
            }
            inputData.add(idm);
        }
    } catch (ClassNotFoundException | SQLException e) {
        System.err.println(e.getMessage());
        System.exit(-1);
    } finally {
        if (connection != null)
            connection.close();
    }
    return inputData;
}

From source file:org.syncope.core.AbstractTest.java

private void logTableContent(final Connection conn, final String tableName) throws SQLException {

    LOG.debug("Table: " + tableName);

    Statement stmt = null;
    ResultSet rs = null;
    try {
        stmt = conn.createStatement();
        rs = stmt.executeQuery("SELECT * FROM " + tableName);

        final StringBuilder row = new StringBuilder();
        while (rs.next()) {
            for (int i = 0; i < rs.getMetaData().getColumnCount(); i++) {
                row.append(rs.getMetaData().getColumnLabel(i + 1)).append("=").append(rs.getString(i + 1))
                        .append(" ");
            }

            LOG.debug(row.toString());

            row.delete(0, row.length());
        }
    } catch (SQLException sqle) {
        LOG.error("While dumping " + tableName + " content", sqle);
    } finally {
        if (rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
    }
}

From source file:ca.on.gov.jus.icon.common.util.LookupNestedMapHandler.java

/**
 * Iterates through the query result set and creates a correctly
 * indexed map of maps.  This is only intended to work with a result
 * set that contains three columns and the first column must
 * cast successfully to a <code>Integer</code> datatype.
 *
 * @param   rs      The result set that is used to obtain the name/value data.
 * @return   Object   The <code>java.util.Map</code> collection containing the results. 
 * @see org.apache.commons.dbutils.ResultSetHandler#handle(java.sql.ResultSet)
 */
public Object handle(ResultSet rs) throws SQLException {
    Map results = new HashMap();
    Map nestedMap = new HashMap();
    Integer currentId = null;

    // Get the number of columns in the result set.
    ResultSetMetaData rsmd = rs.getMetaData();
    int cols = rsmd.getColumnCount();

    // Populate the map of maps with the key/value pairs
    // if the result set contains three columns of data.
    if (cols == MAP_RESULT_SET_COLUMNS) {
        while (rs.next()) {
            Object primaryId = rs.getObject(1);
            Object secondaryId = rs.getObject(2);
            Object secondaryValue = rs.getObject(3);

            // Initialize the current id on the first iteration.
            if (currentId == null) {
                currentId = (Integer) primaryId;
            }

            // Check if the primary id is not the same as the current id.         
            if (currentId.compareTo((Integer) primaryId) != 0) {
                // Put the current nested map into the result map
                // and create a new nested map.
                results.put(currentId, nestedMap);
                nestedMap = new HashMap();
                currentId = (Integer) primaryId;
            }

            // Put the key & value into the current nested map.
            // This occurs after checking if a new nested map is required.
            nestedMap.put(secondaryId, secondaryValue);
        }

        // Put the final nested map into the results map
        // once the iterations are complete.
        results.put(currentId, nestedMap);
    }

    return results;
}
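
The handler above is designed to be passed to Apache Commons DbUtils, as the Javadoc's @see reference to org.apache.commons.dbutils.ResultSetHandler indicates. A hedged usage sketch follows; the DataSource, table, and column names are illustrative placeholders, not part of the original source.

import java.sql.SQLException;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;

public class LookupNestedMapHandlerUsageSketch {

    // Hypothetical invocation: QueryRunner executes the query and hands the
    // ResultSet to LookupNestedMapHandler, which returns the map of maps.
    static Map loadLookups(DataSource dataSource) throws SQLException {
        QueryRunner runner = new QueryRunner(dataSource);
        return (Map) runner.query(
                "SELECT group_id, item_code, item_label FROM lookup_item ORDER BY group_id",
                new LookupNestedMapHandler());
    }
}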

From source file:com.abixen.platform.module.chart.service.impl.AbstractDatabaseService.java

public List<String> getColumns(Connection connection, String tableName) {

    List<String> columns = new ArrayList<>();

    try {
        Statement stmt = connection.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT * FROM " + tableName);
        ResultSetMetaData rsmd = rs.getMetaData();

        int columnCount = rsmd.getColumnCount();

        IntStream.range(1, columnCount + 1).forEach(i -> {
            try {
                columns.add(rsmd.getColumnName(i));
            } catch (SQLException e) {
                e.printStackTrace();
            }
        });

    } catch (SQLException e) {
        e.printStackTrace();
    }

    return columns;
}

From source file:annis.sqlgen.AnnotatedSpanExtractor.java

@Override
public AnnotatedSpan mapRow(ResultSet resultSet, int rowNum) throws SQLException {
    long id = resultSet.getLong("id");
    String coveredText = resultSet.getString("span");

    Array arrayAnnotation = resultSet.getArray("annotations");
    ResultSetMetaData rsMeta = resultSet.getMetaData();
    Array arrayMeta = null;
    for (int i = 1; i <= rsMeta.getColumnCount(); i++) {
        if ("metadata".equals(rsMeta.getColumnName(i))) {
            arrayMeta = resultSet.getArray(i);
            break;
        }
    }

    List<Annotation> annotations = extractAnnotations(arrayAnnotation);
    List<Annotation> metaData = arrayMeta == null ? new LinkedList<Annotation>()
            : extractAnnotations(arrayMeta);

    // create key
    Array sqlKey = resultSet.getArray("key");
    Validate.isTrue(!resultSet.wasNull(), "Match group identifier must not be null");
    Validate.isTrue(sqlKey.getBaseType() == Types.BIGINT,
            "Key in database must be from the type \"bigint\" but was \"" + sqlKey.getBaseTypeName() + "\"");

    List<Long> key = Arrays.asList((Long[]) sqlKey.getArray());

    return new AnnotatedSpan(id, coveredText, annotations, metaData, key);
}

From source file:com.simplymeasured.prognosticator.ThreadedQueryRunnable.java

@Override
public void run() {
    NamedParameterJdbcTemplate template = new NamedParameterJdbcTemplate(dataSource);

    try {
        template.query(query, parameters, new RowCallbackHandler() {
            @Override
            public void processRow(ResultSet resultSet) throws SQLException {
                try {
                    Map<String, Object> result = Maps.newHashMap();

                    final ResultSetMetaData metadata = resultSet.getMetaData();

                    for (int i = 1; i <= metadata.getColumnCount(); i++) {
                        String columnTypeName = metadata.getColumnTypeName(i);

                        final Object value;

                        if ("array".equalsIgnoreCase(columnTypeName)) {
                            String stringValue = resultSet.getString(i);

                            if (stringValue != null) {
                                value = objectMapper.readValue(stringValue, List.class);
                            } else {
                                value = null;
                            }
                        } else if ("map".equalsIgnoreCase(columnTypeName)
                                || "struct".equalsIgnoreCase(columnTypeName)) {
                            String stringValue = resultSet.getString(i);

                            if (stringValue != null) {
                                value = objectMapper.readValue(stringValue, Map.class);
                            } else {
                                value = null;
                            }
                        } else {
                            value = resultSet.getObject(i);
                        }

                        result.put(metadata.getColumnName(i), value);
                    }

                    resultQueue.put(result);
                } catch (SQLException se) {
                    LOG.warn("Database error!", se);
                    throw new RuntimeException("Database error!", se);
                } catch (InterruptedException ie) {
                    LOG.warn("Query killed!", ie);
                    throw new RuntimeException("Query killed!", ie);
                } catch (Exception ex) {
                    LOG.warn("Unable to parse row!", ex);
                    throw new RuntimeException("Unable to parse row!", ex);
                }
            }
        });

        resultQueue.put(Collections.<String, Object>emptyMap());
    } catch (DataAccessException dae) {
        try {
            resultQueue.put(Collections.<String, Object>emptyMap());
        } catch (InterruptedException ie) {
            LOG.warn("Queue is dead!", ie);
        }

        LOG.warn("Unable to execute query - attempting to clean up", dae);
    } catch (InterruptedException ie) {
        LOG.warn("Queue is dead!", ie);
    }
}