Example usage for java.sql ResultSetMetaData getColumnType

List of usage examples for java.sql ResultSetMetaData getColumnType

Introduction

On this page you can find example usage for java.sql ResultSetMetaData getColumnType.

Prototype

int getColumnType(int column) throws SQLException;

Document

Retrieves the designated column's SQL type, returned as a type code defined in java.sql.Types.
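
Before the full examples, here is a minimal sketch of the typical pattern: run a query, obtain the ResultSetMetaData, loop over the columns, and compare getColumnType(i) against the java.sql.Types constants. The class and method names below are illustrative only, and the Connection and table name are assumed to be supplied by the caller.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class ColumnTypeSketch {

    /** Prints a rough type category for every column of the given table. */
    public static void describeColumns(Connection conn, String table) throws SQLException {
        // "where 1=0" returns no rows but still exposes the table's column metadata.
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from " + table + " where 1=0")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                int type = meta.getColumnType(i); // one of the java.sql.Types constants
                String label = meta.getColumnLabel(i);
                if (type == Types.CHAR || type == Types.VARCHAR || type == Types.LONGVARCHAR) {
                    System.out.println(label + ": character type");
                } else if (type == Types.INTEGER || type == Types.BIGINT || type == Types.SMALLINT) {
                    System.out.println(label + ": integer type");
                } else if (type == Types.DECIMAL || type == Types.NUMERIC) {
                    System.out.println(label + ": exact numeric, scale " + meta.getScale(i));
                } else {
                    System.out.println(label + ": JDBC type code " + type);
                }
            }
        }
    }
}

The real-world examples below follow the same pattern, mapping the returned type code to framework-specific field types or per-column metadata.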

Usage

From source file:org.openiot.gsn.storage.StorageManager.java

public DataField[] tableToStructure(CharSequence tableName, Connection connection) throws SQLException {
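    // "select * ... where 1=0" returns no rows, but its ResultSetMetaData still describes every column.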
    StringBuilder sb = new StringBuilder("select * from ").append(tableName).append(" where 1=0 ");
    ResultSet rs = null;
    DataField[] toReturn = null;
    try {
        rs = executeQueryWithResultSet(sb, connection);
        ResultSetMetaData structure = rs.getMetaData();
        ArrayList<DataField> toReturnArr = new ArrayList<DataField>();
        for (int i = 1; i <= structure.getColumnCount(); i++) {
            String colName = structure.getColumnLabel(i);
            if (colName.equalsIgnoreCase("pk"))
                continue;
            int colType = structure.getColumnType(i);
            byte colTypeInGSN = convertLocalTypeToGSN(colType);
            toReturnArr.add(new DataField(colName, colTypeInGSN));
        }
        toReturn = toReturnArr.toArray(new DataField[] {});
    } finally {
        if (rs != null)
            close(rs);
    }
    return toReturn;
}

From source file:org.fastcatsearch.datasource.reader.DBReader.java

private void fill() throws IRException {

    bulkCount = 0;
    try {
        ResultSetMetaData rsMeta = null;
        // Delete any temporary LOB files left over from the previous batch.
        deleteTmpLob();

        try {
            rsMeta = r.getMetaData();
        } catch (SQLException e) {
            return;
        }
        while (r.next()) {

            Map<String, Object> keyValueMap = new HashMap<String, Object>();

            for (int i = 0; i < columnCount; i++) {
                int columnIdx = i + 1;
                int type = rsMeta.getColumnType(columnIdx);

                String str = "";

                String lobType = null;
                if (type == Types.BLOB || type == Types.BINARY || type == Types.LONGVARBINARY
                        || type == Types.VARBINARY || type == Types.JAVA_OBJECT) {
                    lobType = LOB_BINARY;
                } else if (type == Types.CLOB || type == Types.NCLOB || type == Types.SQLXML
                        || type == Types.LONGVARCHAR || type == Types.LONGNVARCHAR) {
                    lobType = LOB_STRING;
                }

                if (lobType == null) {
                    str = r.getString(columnIdx);

                    if (str != null) {
                        keyValueMap.put(columnName[i], str);
                    } else {
                        // Store an empty string when the column value is NULL.
                        keyValueMap.put(columnName[i], "");
                    }
                } else {
                    File file = null;

                    if (lobType == LOB_BINARY) {
                        // logger.debug("Column-"+columnIdx+" is BLOB!");
                        // Read the BLOB contents (into a temp file or an in-memory buffer).
                        ByteArrayOutputStream buffer = null;
                        try {
                            if (!useBlobFile) {
                                buffer = new ByteArrayOutputStream();
                            }
                            file = readTmpBlob(i, columnIdx, rsMeta, buffer);
                            if (useBlobFile) {
                                keyValueMap.put(columnName[i], file);
                            } else {
                                keyValueMap.put(columnName[i], buffer.toByteArray());
                            }
                        } finally {
                            if (buffer != null) {
                                try {
                                    buffer.close();
                                } catch (IOException ignore) {
                                }
                            }
                        }
                    } else if (lobType == LOB_STRING) {
                        StringBuilder sb = null;
                        if (!useBlobFile) {
                            sb = new StringBuilder();
                        }
                        file = readTmpClob(i, columnIdx, rsMeta, sb);
                        if (useBlobFile) {
                            keyValueMap.put(columnName[i], file);
                        } else {
                            keyValueMap.put(columnName[i], sb.toString());
                        }
                    }

                    // Remember the temporary file so it can be cleaned up later.
                    if (file != null) {
                        tmpFile.add(file);
                    }
                }
            }

            dataSet[bulkCount] = keyValueMap;
            bulkCount++;
            totalCnt++;

            if (bulkCount >= BULK_SIZE) {
                break;
            }
        }

    } catch (Exception e) {

        logger.debug("", e);

        try {
            if (r != null) {
                r.close();
            }
        } catch (SQLException ignore) {
        }

        try {
            if (pstmt != null) {
                pstmt.close();
            }
        } catch (SQLException ignore) {
        }

        try {
            if (con != null && !con.isClosed()) {
                con.close();
            }
        } catch (SQLException ignore) {
        }

        throw new IRException(e);
    }
}

From source file:gsn.storage.StorageManager.java

public DataField[] tableToStructure(CharSequence tableName, Connection connection) throws SQLException {
    StringBuilder sb = new StringBuilder("select * from ").append(tableName).append(" where 1=0 ");
    ResultSet rs = null;
    DataField[] toReturn = null;
    try {
        rs = executeQueryWithResultSet(sb, connection);
        ResultSetMetaData structure = rs.getMetaData();
        ArrayList<DataField> toReturnArr = new ArrayList<DataField>();
        for (int i = 1; i <= structure.getColumnCount(); i++) {
            String colName = structure.getColumnLabel(i);
            if (colName.equalsIgnoreCase("pk"))
                continue;
            int colType = structure.getColumnType(i);
            byte colTypeInGSN = convertLocalTypeToGSN(colType);
            if (colTypeInGSN == -100) {
                logger.error(
                        "The type can't be converted to GSN form - error description: virtual sensor name is: "
                                + tableName + ", field name is: " + colName + ", query is: " + sb);
            }
            toReturnArr.add(new DataField(colName, colTypeInGSN));
        }
        toReturn = toReturnArr.toArray(new DataField[] {});
    } finally {
        if (rs != null)
            close(rs);
    }
    return toReturn;
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java

public void setColumnSpecificHeaders(Record record, Set<String> knownTableNames, ResultSetMetaData metaData,
        String jdbcNameSpacePrefix) throws SQLException {
    Record.Header header = record.getHeader();
    Set<String> tableNames = new HashSet<>();
    for (int i = 1; i <= metaData.getColumnCount(); i++) {
        header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".jdbcType",
                String.valueOf(metaData.getColumnType(i)));

        // Additional headers per various types
        switch (metaData.getColumnType(i)) {
        case Types.DECIMAL:
        case Types.NUMERIC:
            header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".scale",
                    String.valueOf(metaData.getScale(i)));
            header.setAttribute(jdbcNameSpacePrefix + metaData.getColumnLabel(i) + ".precision",
                    String.valueOf(metaData.getPrecision(i)));
            break;
        }

        String tableName = metaData.getTableName(i);

        // Store the column's table name (if not empty)
        if (StringUtils.isNotEmpty(tableName)) {
            tableNames.add(tableName);
        }
    }

    if (tableNames.isEmpty()) {
        tableNames.addAll(knownTableNames);
    }

    header.setAttribute(jdbcNameSpacePrefix + "tables", Joiner.on(",").join(tableNames));
}

From source file:gsn.storage.StorageManager.java

/**
 * Returns false if the table doesn't exist. If the table exists but its
 * structure is not compatible with the specified fields, the method throws
 * GSNRuntimeException. Note that this method doesn't close the connection.
 *
 * @param tableName
 * @param connection (this method will not close it and the caller is responsible
 *                   for closing the connection)
 * @return
 * @throws SQLException
 * @throws GSNRuntimeException
 */

public boolean tableExists(CharSequence tableName, DataField[] fields, Connection connection)
        throws SQLException, GSNRuntimeException {
    if (!ValidityTools.isValidJavaVariable(tableName))
        throw new GSNRuntimeException("Table name is not valid");
    StringBuilder sb = new StringBuilder("select * from ").append(tableNameGeneratorInString(tableName))
            .append(" where 1=0 ");
    ResultSet rs = null;
    try {
        rs = executeQueryWithResultSet(sb, connection);
        ResultSetMetaData structure = rs.getMetaData();
        if (fields != null && fields.length > 0)
            nextField: for (DataField field : fields) {
                for (int i = 1; i <= structure.getColumnCount(); i++) {
                    String colName = structure.getColumnLabel(i);
                    int colType = structure.getColumnType(i);
                    int colTypeScale = structure.getScale(i);
                    if (field.getName().equalsIgnoreCase(colName)) {
                        byte gsnType = convertLocalTypeToGSN(colType, colTypeScale);
                        if (gsnType == -100) {
                            logger.error(
                                    "The type can't be converted to GSN form - error description: virtual sensor name is: "
                                            + tableName + ", field name is: " + colName + ", query is: " + sb);
                        }
                        if (field.getDataTypeID() == gsnType)
                            continue nextField;
                        else
                            throw new GSNRuntimeException("The column : " + colName + " in the >" + tableName
                                    + "< table is not compatible with type : " + field.getType()
                                    + ". The actual type for this table (currently in the database): "
                                    + colType);
                    }
                }
                throw new GSNRuntimeException("The table " + tableName + " in the database, doesn't have the >"
                        + field.getName() + "< column.");
            }
    } catch (SQLException e) {
        if (e.getErrorCode() == getTableNotExistsErrNo() || e.getMessage().contains("does not exist"))
            return false;
        else {
            logger.error(e.getMessage());
            throw e;
        }
    } finally {
        close(rs);
    }
    return true;
}

From source file:com.kylinolap.rest.service.QueryService.java

/**
 * @param sql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String sql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;
    List<List<String>> results = new LinkedList<List<String>>();
    List<SelectedColumnMeta> columnMetas = new LinkedList<SelectedColumnMeta>();

    try {
        conn = getOLAPDataSource(sqlRequest.getProject()).getConnection();

        if (sqlRequest instanceof PrepareSqlRequest) {
            PreparedStatement preparedState = conn.prepareStatement(sql);

            for (int i = 0; i < ((PrepareSqlRequest) sqlRequest).getParams().length; i++) {
                setParam(preparedState, i + 1, ((PrepareSqlRequest) sqlRequest).getParams()[i]);
            }

            resultSet = preparedState.executeQuery();
        } else {
            stat = conn.createStatement();
            resultSet = stat.executeQuery(sql);
        }

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        List<String> oneRow = new LinkedList<String>();

        // fill in results
        while (resultSet.next()) {
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(new LinkedList<String>(oneRow));
            oneRow.clear();
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        close(resultSet, stat, conn);
    }

    boolean isPartialResult = false;
    String cube = "";
    long totalScanCount = 0;
    for (OLAPContext ctx : OLAPContext.getThreadLocalContexts()) {
        isPartialResult |= ctx.storageContext.isPartialResultReturned();
        cube = ctx.cubeInstance.getName();
        totalScanCount += ctx.storageContext.getTotalScanCount();
    }

    SQLResponse response = new SQLResponse(columnMetas, results, cube, 0, false, null, isPartialResult);
    response.setTotalScanCount(totalScanCount);

    return response;
}

From source file:org.apache.hadoop.chukwa.analysis.salsa.visualization.Heatmap.java

/**
 * Interfaces with database to get data and 
 * populate data structures for rendering
 */
public HeatmapData getData() {
    // preliminary setup
    OfflineTimeHandler time_offline;
    TimeHandler time_online;
    long start, end, min, max;

    if (offline_use) {
        time_offline = new OfflineTimeHandler(param_map, this.timezone);
        start = time_offline.getStartTime();
        end = time_offline.getEndTime();
    } else {
        time_online = new TimeHandler(this.request, this.timezone);
        start = time_online.getStartTime();
        end = time_online.getEndTime();
    }

    DatabaseWriter dbw = new DatabaseWriter(this.cluster);

    // setup query
    String query;
    if (this.query_state != null && this.query_state.equals("read")) {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table
                + "] where finish_time between '[start]' and '[end]' and (state_name like 'read_local' or state_name like 'read_remote')";
    } else if (this.query_state != null && this.query_state.equals("write")) {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table
                + "] where finish_time between '[start]' and '[end]' and (state_name like 'write_local' or state_name like 'write_remote' or state_name like 'write_replicated')";
    } else {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table + "] where finish_time between '[start]' and '[end]' and state_name like '"
                + query_state + "'";
    }
    Macro mp = new Macro(start, end, query);
    query = mp.toString() + " order by start_time";

    ArrayList<HashMap<String, Object>> events = new ArrayList<HashMap<String, Object>>();

    ResultSet rs = null;

    log.debug("Query: " + query);
    // run query, extract results
    try {
        rs = dbw.query(query);
        ResultSetMetaData rmeta = rs.getMetaData();
        int col = rmeta.getColumnCount();
        while (rs.next()) {
            HashMap<String, Object> event = new HashMap<String, Object>();
            long event_time = 0;
            for (int i = 1; i <= col; i++) {
                if (rmeta.getColumnType(i) == java.sql.Types.TIMESTAMP) {
                    event.put(rmeta.getColumnName(i), rs.getTimestamp(i).getTime());
                } else {
                    event.put(rmeta.getColumnName(i), rs.getString(i));
                }
            }
            events.add(event);
        }
    } catch (SQLException ex) {
        // handle any errors
        log.error("SQLException: " + ex.getMessage());
        log.error("SQLState: " + ex.getSQLState());
        log.error("VendorError: " + ex.getErrorCode());
    } finally {
        dbw.close();
    }
    SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");

    log.info(events.size() + " results returned.");

    HashSet<String> host_set = new HashSet<String>();
    HashMap<String, Integer> host_indices = new HashMap<String, Integer>();
    HashMap<Integer, String> host_rev_indices = new HashMap<Integer, String>();

    // collect hosts, name unique hosts
    for (int i = 0; i < events.size(); i++) {
        HashMap<String, Object> event = events.get(i);
        String curr_host = (String) event.get("hostname");
        String other_host = (String) event.get("other_host");
        host_set.add(curr_host);
        host_set.add(other_host);
    }
    int num_hosts = host_set.size();

    Iterator<String> host_iter = host_set.iterator();
    for (int i = 0; i < num_hosts && host_iter.hasNext(); i++) {
        String curr_host = host_iter.next();
        host_indices.put(curr_host, new Integer(i));
        host_rev_indices.put(new Integer(i), curr_host);
    }

    System.out.println("Number of hosts: " + num_hosts);
    long stats[][] = new long[num_hosts][num_hosts];
    long count[][] = new long[num_hosts][num_hosts]; // used for averaging

    int start_millis = 0, end_millis = 0;

    // deliberate design choice to duplicate code PER possible operation
    // otherwise we have to do the mode check N times, for N states returned
    //
    // compute aggregate statistics
    log.info("Query statistic type: " + this.query_stat_type);
    if (this.query_stat_type.equals("transaction_count")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();

            // to, from
            stats[other_host_idx][this_host_idx] += 1;
        }
    } else if (this.query_stat_type.equals("avg_duration")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();

            long curr_val = end_millis - start_millis + ((end - start) * 1000);

            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
            count[other_host_idx][this_host_idx] += 1;
        }
        for (int i = 0; i < num_hosts; i++) {
            for (int j = 0; j < num_hosts; j++) {
                if (count[i][j] > 0)
                    stats[i][j] = stats[i][j] / count[i][j];
            }
        }
    } else if (this.query_stat_type.equals("avg_volume")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();

            long curr_val = Long.parseLong((String) event.get("bytes"));

            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
            count[other_host_idx][this_host_idx] += 1;
        }
        for (int i = 0; i < num_hosts; i++) {
            for (int j = 0; j < num_hosts; j++) {
                if (count[i][j] > 0)
                    stats[i][j] = stats[i][j] / count[i][j];
            }
        }
    } else if (this.query_stat_type.equals("total_duration")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();

            double curr_val = end_millis - start_millis + ((end - start) * 1000);

            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
        }
    } else if (this.query_stat_type.equals("total_volume")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();

            long curr_val = Long.parseLong((String) event.get("bytes"));

            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
        }
    }

    int[] permute = null;
    if (sort_nodes) {
        permute = hClust(stats);
        stats = doPermute(stats, permute);
    }

    Table agg_tab = new Table();
    agg_tab.addColumn("stat", long.class);
    min = Long.MAX_VALUE;
    max = Long.MIN_VALUE;
    agg_tab.addRows(num_hosts * num_hosts);

    // row-wise placement (row1, followed by row2, etc.)
    for (int i = 0; i < num_hosts; i++) {
        for (int j = 0; j < num_hosts; j++) {
            agg_tab.setLong((i * num_hosts) + j, "stat", stats[i][j]);
            if (stats[i][j] > max)
                max = stats[i][j];
            if (stats[i][j] > 0 && stats[i][j] < min)
                min = stats[i][j];
        }
    }
    if (min == Long.MAX_VALUE)
        min = 0;

    log.info(agg_tab);

    // collate data
    HeatmapData hd = new HeatmapData();
    hd.stats = new long[num_hosts][num_hosts];
    hd.stats = stats;
    hd.min = min;
    hd.max = max;
    hd.num_hosts = num_hosts;
    hd.agg_tab = agg_tab;

    this.add_info_extra = new String("\nState: " + this.prettyStateNames.get(this.query_state) + " ("
            + events.size() + " " + this.query_state + "'s [" + this.query_stat_type + "])\n"
            + "Plotted value range: [" + hd.min + "," + hd.max + "] (Zeros in black)");

    hd.hostnames = new String[num_hosts];
    for (int i = 0; i < num_hosts; i++) {
        // permute is only populated when sort_nodes is true; otherwise use the
        // identity mapping so host_rev_indices is not indexed through a null array.
        int hostIdx = sort_nodes ? permute[i] : i;
        hd.hostnames[i] = host_rev_indices.get(Integer.valueOf(hostIdx));
    }

    return hd;
}

From source file:org.agnitas.util.AgnUtils.java

/**
 * Getter for property bshInterpreter.
 *
 * @return Value of property bshInterpreter.
 */
public static Interpreter getBshInterpreter(int cID, int customerID, ApplicationContext con) {
    DataSource ds = (DataSource) con.getBean("dataSource");
    Interpreter aBsh = new Interpreter();
    NameSpace aNameSpace = aBsh.getNameSpace();
    aNameSpace.importClass("org.agnitas.util.AgnUtils");

    String sqlStatement = "select * from customer_" + cID + "_tbl cust where cust.customer_id=" + customerID;
    Connection dbCon = DataSourceUtils.getConnection(ds);

    try {
        Statement stmt = dbCon.createStatement();
        ResultSet rset = stmt.executeQuery(sqlStatement);
        ResultSetMetaData aMeta = rset.getMetaData();

        if (rset.next()) {
            for (int i = 1; i <= aMeta.getColumnCount(); i++) {
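                // Choose the Java type of the BeanShell variable from the column's SQL type code.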
                switch (aMeta.getColumnType(i)) {
                case java.sql.Types.BIGINT:
                case java.sql.Types.INTEGER:
                case java.sql.Types.NUMERIC:
                case java.sql.Types.SMALLINT:
                case java.sql.Types.TINYINT:
                    if (rset.getObject(i) != null) {
                        aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Integer.class,
                                new Integer(rset.getInt(i)), null);
                    } else {
                        aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Integer.class, null,
                                null);
                    }
                    break;

                case java.sql.Types.DECIMAL:
                case java.sql.Types.DOUBLE:
                case java.sql.Types.FLOAT:
                    if (rset.getObject(i) != null) {
                        aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Double.class,
                                new Double(rset.getDouble(i)), null);
                    } else {
                        aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Double.class, null, null);
                    }
                    break;

                case java.sql.Types.CHAR:
                case java.sql.Types.LONGVARCHAR:
                case java.sql.Types.VARCHAR:
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.String.class,
                            rset.getString(i), null);
                    break;

                case java.sql.Types.DATE:
                case java.sql.Types.TIME:
                case java.sql.Types.TIMESTAMP:
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.util.Date.class,
                            rset.getTimestamp(i), null);
                    break;
                default:
                    logger.error("Ignoring: " + aMeta.getColumnName(i));
                }
            }
        }
        rset.close();
        stmt.close();
        // add virtual column "sysdate"
        aNameSpace.setTypedVariable(AgnUtils.getHibernateDialect().getCurrentTimestampSQLFunctionName(),
                Date.class, new Date(), null);
    } catch (Exception e) {
        sendExceptionMail("Sql: " + sqlStatement, e);
        logger.error("getBshInterpreter: " + e.getMessage());
        aBsh = null;
    }
    DataSourceUtils.releaseConnection(dbCon, ds);
    return aBsh;
}

From source file:org.apache.bigtop.itest.hive.TestJdbc.java

@Test
public void preparedStmtAndResultSet() throws SQLException {
    final String tableName = "bigtop_jdbc_psars_test_table";
    try (Statement stmt = conn.createStatement()) {
        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, "
                + "i int, lo bigint, sh smallint, st varchar(32))");
    }

    // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
    // try to put them in the query.
    try (PreparedStatement ps = conn
            .prepareStatement("insert into " + tableName + " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
        ps.setBoolean(1, true);
        ps.setByte(2, (byte) 1);
        ps.setDouble(3, 3.141592654);
        ps.setFloat(4, 3.14f);
        ps.setInt(5, 3);
        ps.setLong(6, 10L);
        ps.setShort(7, (short) 20);
        ps.setString(8, "abc");
        ps.executeUpdate();
    }

    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " + "values(?, ?)",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
        ps.setNull(1, Types.INTEGER);
        ps.setObject(2, "mary had a little lamb");
        ps.executeUpdate();
        ps.setNull(1, Types.INTEGER, null);
        ps.setString(2, "its fleece was white as snow");
        ps.clearParameters();
        ps.setNull(1, Types.INTEGER, null);
        ps.setString(2, "its fleece was white as snow");
        ps.execute();

    }

    try (Statement stmt = conn.createStatement()) {

        ResultSet rs = stmt.executeQuery("select * from " + tableName);

        ResultSetMetaData md = rs.getMetaData();

        int colCnt = md.getColumnCount();
        LOG.debug("Column count is " + colCnt);

        for (int i = 1; i <= colCnt; i++) {
            LOG.debug("Looking at column " + i);
            String strrc = md.getColumnClassName(i);
            LOG.debug("Column class name is " + strrc);

            int intrc = md.getColumnDisplaySize(i);
            LOG.debug("Column display size is " + intrc);

            strrc = md.getColumnLabel(i);
            LOG.debug("Column label is " + strrc);

            strrc = md.getColumnName(i);
            LOG.debug("Column name is " + strrc);

            intrc = md.getColumnType(i);
            LOG.debug("Column type is " + intrc);

            strrc = md.getColumnTypeName(i);
            LOG.debug("Column type name is " + strrc);

            intrc = md.getPrecision(i);
            LOG.debug("Precision is " + intrc);

            intrc = md.getScale(i);
            LOG.debug("Scale is " + intrc);

            boolean boolrc = md.isAutoIncrement(i);
            LOG.debug("Is auto increment? " + boolrc);

            boolrc = md.isCaseSensitive(i);
            LOG.debug("Is case sensitive? " + boolrc);

            boolrc = md.isCurrency(i);
            LOG.debug("Is currency? " + boolrc);

            intrc = md.getScale(i);
            LOG.debug("Scale is " + intrc);

            intrc = md.isNullable(i);
            LOG.debug("Is nullable? " + intrc);

            boolrc = md.isReadOnly(i);
            LOG.debug("Is read only? " + boolrc);

        }

        while (rs.next()) {
            LOG.debug("bo = " + rs.getBoolean(1));
            LOG.debug("bo = " + rs.getBoolean("bo"));
            LOG.debug("ti = " + rs.getByte(2));
            LOG.debug("ti = " + rs.getByte("ti"));
            LOG.debug("db = " + rs.getDouble(3));
            LOG.debug("db = " + rs.getDouble("db"));
            LOG.debug("fl = " + rs.getFloat(4));
            LOG.debug("fl = " + rs.getFloat("fl"));
            LOG.debug("i = " + rs.getInt(5));
            LOG.debug("i = " + rs.getInt("i"));
            LOG.debug("lo = " + rs.getLong(6));
            LOG.debug("lo = " + rs.getLong("lo"));
            LOG.debug("sh = " + rs.getShort(7));
            LOG.debug("sh = " + rs.getShort("sh"));
            LOG.debug("st = " + rs.getString(8));
            LOG.debug("st = " + rs.getString("st"));
            LOG.debug("tm = " + rs.getObject(8));
            LOG.debug("tm = " + rs.getObject("st"));
            LOG.debug("tm was null " + rs.wasNull());
        }
        LOG.debug("bo is column " + rs.findColumn("bo"));

        int intrc = rs.getConcurrency();
        LOG.debug("concurrency " + intrc);

        intrc = rs.getFetchDirection();
        LOG.debug("fetch direction " + intrc);

        intrc = rs.getType();
        LOG.debug("type " + intrc);

        Statement copy = rs.getStatement();

        SQLWarning warning = rs.getWarnings();
        while (warning != null) {
            LOG.debug("Found a warning: " + warning.getMessage());
            warning = warning.getNextWarning();
        }
        rs.clearWarnings();
    }
}

From source file:com.thinkbiganalytics.schema.QueryRunner.java

/**
 * Initializes the query result with the specified metadata.
 *
 * @param queryResult the query result to initialize
 * @param rsMetaData  the result set metadata for the query
 * @throws SQLException if the metadata is not available
 */
private void initQueryResult(@Nonnull final DefaultQueryResult queryResult,
        @Nonnull final ResultSetMetaData rsMetaData) throws SQLException {
    final List<QueryResultColumn> columns = new ArrayList<>();
    final Map<String, Integer> displayNameMap = new HashMap<>();

    for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
        final DefaultQueryResultColumn column = new DefaultQueryResultColumn();
        column.setField(rsMetaData.getColumnName(i));
        String displayName = rsMetaData.getColumnLabel(i);
        column.setHiveColumnLabel(displayName);
        //remove the table name if it exists
        displayName = StringUtils.contains(displayName, ".") ? StringUtils.substringAfterLast(displayName, ".")
                : displayName;
        Integer count = 0;
        if (displayNameMap.containsKey(displayName)) {
            count = displayNameMap.get(displayName);
            count++;
        }
        displayNameMap.put(displayName, count);
        column.setDisplayName(displayName + "" + (count > 0 ? count : ""));

        column.setTableName(StringUtils.substringAfterLast(rsMetaData.getColumnName(i), "."));
        column.setDataType(ParserHelper.sqlTypeToHiveType(rsMetaData.getColumnType(i)));
        column.setNativeDataType(rsMetaData.getColumnTypeName(i));
        columns.add(column);
    }

    queryResult.setColumns(columns);
}