List of usage examples for java.sql ResultSetMetaData getColumnName
String getColumnName(int column) throws SQLException;
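getColumnName returns the name of the column at the given 1-based index of a ResultSet. Before the real-world examples below, here is a minimal sketch of the basic pattern, shown against a hypothetical in-memory H2 database; the JDBC URL, query, and class name are illustrative assumptions and not taken from any of the examples that follow.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnNameExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical H2 in-memory URL; substitute your own driver URL and query.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1 AS id, 'alice' AS name")) {
            ResultSetMetaData md = rs.getMetaData();
            // Column indexes are 1-based, running from 1 to getColumnCount().
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.println(i + ": " + md.getColumnName(i));
            }
        }
    }
}

Note that for aliased columns many drivers return the underlying column name from getColumnName(int) and the alias from getColumnLabel(int), so the two can differ; the StreamSets example below reads both for that reason.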
From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java
public LinkedHashMap<String, Field> resultSetToFields(ResultSet rs, int maxClobSize, int maxBlobSize,
        Map<String, DataType> columnsToTypes, ErrorRecordHandler errorRecordHandler,
        UnknownTypeAction unknownTypeAction, Set<String> recordHeader, boolean timestampToString)
        throws SQLException, StageException {
    ResultSetMetaData md = rs.getMetaData();
    LinkedHashMap<String, Field> fields = new LinkedHashMap<>(md.getColumnCount());
    for (int i = 1; i <= md.getColumnCount(); i++) {
        try {
            if (recordHeader == null || !recordHeader.contains(md.getColumnName(i))) {
                DataType dataType = columnsToTypes.get(md.getColumnName(i));
                Field field = resultToField(md, rs, i, maxClobSize, maxBlobSize,
                        dataType == null ? DataType.USE_COLUMN_TYPE : dataType,
                        unknownTypeAction, timestampToString);
                fields.put(md.getColumnLabel(i), field);
            }
        } catch (SQLException e) {
            errorRecordHandler.onError(JdbcErrors.JDBC_13, e.getMessage(), e);
        } catch (IOException e) {
            errorRecordHandler.onError(JdbcErrors.JDBC_03, md.getColumnName(i), rs.getObject(i), e);
        }
    }
    return fields;
}
From source file:org.cloudgraph.rdb.service.JDBCSupport.java
protected Map<String, PropertyPair> fetchRowMap(PlasmaType type, StringBuilder sql, Connection con) {
    Map<String, PropertyPair> result = new HashMap<String, PropertyPair>();
    PreparedStatement statement = null;
    ResultSet rs = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("fetch: " + sql.toString());
        }
        statement = con.prepareStatement(sql.toString(), ResultSet.TYPE_FORWARD_ONLY,
                /* ResultSet.TYPE_SCROLL_INSENSITIVE, */
                ResultSet.CONCUR_READ_ONLY);
        statement.execute();
        rs = statement.getResultSet();
        ResultSetMetaData rsMeta = rs.getMetaData();
        int numcols = rsMeta.getColumnCount();
        int count = 0;
        while (rs.next()) {
            for (int i = 1; i <= numcols; i++) {
                String columnName = rsMeta.getColumnName(i);
                int columnType = rsMeta.getColumnType(i);
                PlasmaProperty prop = (PlasmaProperty) type.getProperty(columnName);
                PlasmaProperty valueProp = prop;
                while (!valueProp.getType().isDataType()) {
                    valueProp = getOppositePriKeyProperty(valueProp);
                }
                Object value = converter.fromJDBCDataType(rs, i, columnType, valueProp);
                if (value != null) {
                    PropertyPair pair = new PropertyPair((PlasmaProperty) prop, value);
                    if (!valueProp.equals(prop))
                        pair.setValueProp(valueProp);
                    result.put(prop.getName(), pair);
                }
            }
            count++;
        }
        if (log.isDebugEnabled())
            log.debug("returned " + count + " results");
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
    }
    return result;
}
From source file:i5.las2peer.services.loadStoreGraphService.LoadStoreGraphService.java
/**
 * loadGraph
 *
 * @param id a String
 *
 * @return HttpResponse
 */
@GET
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.TEXT_PLAIN)
@ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "graphNotFound"),
        @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "graphLoaded"),
        @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "internalError") })
@ApiOperation(value = "loadGraph", notes = "")
public HttpResponse loadGraph(@PathParam("id") String id) {
    String result = "";
    String columnName = "";
    String selectquery = "";
    int columnCount = 0;
    Connection conn = null;
    PreparedStatement stmnt = null;
    ResultSet rs = null;
    ResultSetMetaData rsmd = null;
    try {
        // get connection from connection pool
        conn = dbm.getConnection();
        selectquery = "SELECT * FROM graphs WHERE graphId = " + id + " ;";
        // prepare statement
        stmnt = conn.prepareStatement(selectquery);
        // retrieve result set
        rs = stmnt.executeQuery();
        rsmd = (ResultSetMetaData) rs.getMetaData();
        columnCount = rsmd.getColumnCount();
        // process result set
        if (rs.next()) {
            JSONObject ro = new JSONObject();
            for (int i = 1; i <= columnCount; i++) {
                result = rs.getString(i);
                columnName = rsmd.getColumnName(i);
                // setup resulting JSON Object
                ro.put(columnName, result);
            }
            HttpResponse graphLoaded = new HttpResponse(ro.toJSONString(), HttpURLConnection.HTTP_OK);
            return graphLoaded;
        } else {
            // return HTTP Response on error
            String er = "No result for graph with id " + id;
            HttpResponse graphNotFound = new HttpResponse(er, HttpURLConnection.HTTP_NOT_FOUND);
            return graphNotFound;
        }
    } catch (Exception e) {
        String er = "Internal error: " + e.getMessage();
        HttpResponse internalError = new HttpResponse(er, HttpURLConnection.HTTP_INTERNAL_ERROR);
        return internalError;
    } finally {
        // free resources
        if (rs != null) {
            try {
                rs.close();
            } catch (Exception e) {
                Context.logError(this, e.getMessage());
                String er = "Internal error: " + e.getMessage();
                HttpResponse internalError = new HttpResponse(er, HttpURLConnection.HTTP_INTERNAL_ERROR);
                return internalError;
            }
        }
        if (stmnt != null) {
            try {
                stmnt.close();
            } catch (Exception e) {
                Context.logError(this, e.getMessage());
                String er = "Internal error: " + e.getMessage();
                HttpResponse internalError = new HttpResponse(er, HttpURLConnection.HTTP_INTERNAL_ERROR);
                return internalError;
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception e) {
                Context.logError(this, e.getMessage());
                String er = "Internal error: " + e.getMessage();
                HttpResponse internalError = new HttpResponse(er, HttpURLConnection.HTTP_INTERNAL_ERROR);
                return internalError;
            }
        }
    }
}
From source file:com.chiorichan.database.DatabaseEngine.java
public List<String> getTableFieldNames(String table) throws SQLException {
    List<String> rtn = Lists.newArrayList();
    ResultSet rs = query("SELECT * FROM " + table);
    ResultSetMetaData rsmd = rs.getMetaData();
    int numColumns = rsmd.getColumnCount();
    for (int i = 1; i < numColumns + 1; i++) {
        rtn.add(rsmd.getColumnName(i));
    }
    return rtn;
}
From source file:com.mmnaseri.dragonfly.fluent.impl.AbstractSelectQueryFinalizer.java
private <H> List<Map<Mapping, Object>> execute(SelectQueryExecution<E, H> selection) {
    final Connection connection = session.getConnection();
    final PreparedStatement preparedStatement;
    try {
        final String sql = selection.getSql() + ";";
        LogFactory.getLog(Statement.class).info("Preparing statement: " + sql);
        preparedStatement = connection.prepareStatement(sql);
    } catch (SQLException e) {
        throw new DatabaseNegotiationException("Failed to get a prepared statement from the database", e);
    }
    for (ParameterDescriptor descriptor : selection.getParameters()) {
        try {
            if (descriptor.getValue() == null) {
                preparedStatement.setNull(descriptor.getIndex(), descriptor.getSqlType());
            } else {
                preparedStatement.setObject(descriptor.getIndex(), descriptor.getValue());
            }
        } catch (SQLException e) {
            throw new StatementPreparationException(
                    "Failed to prepare statement for parameter " + descriptor.getIndex(), e);
        }
    }
    final ResultSet resultSet;
    final ResultSetMetaData metaData;
    try {
        resultSet = preparedStatement.executeQuery();
    } catch (SQLException e) {
        throw new DatabaseNegotiationException("Failed to retrieve the results from the data source", e);
    }
    try {
        metaData = resultSet.getMetaData();
    } catch (SQLException e) {
        throw new DatabaseNegotiationException("Failed to get result set metadata for query", e);
    }
    final ArrayList<Map<Mapping, Object>> result = new ArrayList<Map<Mapping, Object>>();
    while (true) {
        try {
            if (!resultSet.next()) {
                break;
            }
            final HashMap<Mapping, Object> map = new HashMap<Mapping, Object>();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                map.put(new ImmutableMapping(metaData.getTableName(i), metaData.getColumnName(i),
                        metaData.getColumnLabel(i)), resultSet.getObject(i));
            }
            result.add(map);
        } catch (SQLException e) {
            throw new DatabaseNegotiationException("Failed to get the next row", e);
        }
    }
    return result;
}
From source file:org.apache.hadoop.hive.jdbc.TestJdbcDriver.java
/**
 * Validate the Metadata for the result set of a metadata getColumns call.
 */
public void testMetaDataGetColumnsMetaData() throws SQLException {
    ResultSet rs = con.getMetaData().getColumns(null, null, "testhivejdbcdriver\\_table", null);
    ResultSetMetaData rsmd = rs.getMetaData();

    assertEquals("TABLE_CAT", rsmd.getColumnName(1));
    assertEquals(Types.VARCHAR, rsmd.getColumnType(1));
    assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(1));

    assertEquals("ORDINAL_POSITION", rsmd.getColumnName(17));
    assertEquals(Types.INTEGER, rsmd.getColumnType(17));
    assertEquals(11, rsmd.getColumnDisplaySize(17));
}
From source file:org.batoo.jpa.core.impl.criteria.QueryImpl.java
private void prepareLabels(final ResultSetMetaData md) throws SQLException {
    this.labels = new String[md.getColumnCount()];
    for (int i = 0; i < this.labels.length; i++) {
        String label = md.getColumnName(i + 1) + " (" + md.getColumnTypeName(i + 1) + ")";
        label = StringUtils.abbreviate(label, QueryImpl.MAX_COL_LENGTH);
        this.labels[i] = label;
    }
}
From source file:org.ensembl.healthcheck.util.DBUtils.java
/**
 * Compare a particular column in two ResultSets.
 *
 * @param rs1
 *          The first ResultSet to compare.
 * @param rs2
 *          The second ResultSet to compare.
 * @param i
 *          The index of the column to compare.
 * @return True if the type and value of the columns match.
 */
public static boolean compareColumns(ResultSet rs1, ResultSet rs2, int i, boolean warnNull) {
    try {
        ResultSetMetaData rsmd = rs1.getMetaData();
        Connection con1 = rs1.getStatement().getConnection();
        Connection con2 = rs2.getStatement().getConnection();
        if (rs1.getObject(i) == null) {
            if (warnNull) {
                logger.fine("Column " + rsmd.getColumnName(i) + " is null in table " + rsmd.getTableName(i)
                        + " in " + DBUtils.getShortDatabaseName(con1));
            }
            return (rs2.getObject(i) == null); // true if both are null
        }
        if (rs2.getObject(i) == null) {
            if (warnNull) {
                logger.fine("Column " + rsmd.getColumnName(i) + " is null in table " + rsmd.getTableName(i)
                        + " in " + DBUtils.getShortDatabaseName(con2));
            }
            return (rs1.getObject(i) == null); // true if both are null
        }
        // Note deliberate early returns for performance reasons
        switch (rsmd.getColumnType(i)) {
        case Types.INTEGER:
            return rs1.getInt(i) == rs2.getInt(i);
        case Types.SMALLINT:
            return rs1.getInt(i) == rs2.getInt(i);
        case Types.TINYINT:
            return rs1.getInt(i) == rs2.getInt(i);
        case Types.VARCHAR:
            String s1 = rs1.getString(i);
            String s2 = rs2.getString(i);
            // ignore "AUTO_INCREMENT=" part in final part of table definition
            s1 = s1.replaceAll("AUTO_INCREMENT=[0-9]+ ", "");
            s2 = s2.replaceAll("AUTO_INCREMENT=[0-9]+ ", "");
            return s1.equals(s2);
        case Types.FLOAT:
            return rs1.getFloat(i) == rs2.getFloat(i);
        case Types.DOUBLE:
            return rs1.getDouble(i) == rs2.getDouble(i);
        case Types.TIMESTAMP:
            return rs1.getTimestamp(i).equals(rs2.getTimestamp(i));
        default:
            // treat everything else as a String (should deal with ENUM and TEXT)
            if (rs1.getString(i) == null || rs2.getString(i) == null) {
                return true; // ????
            } else {
                return rs1.getString(i).equals(rs2.getString(i));
            }
        } // switch
    } catch (SQLException se) {
        throw new SqlUncheckedException("Could not compare two columns sets", se);
    }
}
From source file:com.viettel.ttbankplus.servicegw.hibernate.dao.transaction.TransCPDAO.java
public List<Transaction> getTrans(String where, HashMap param) {
    List lst = new ArrayList();
    try {
        String sql = "select cp.trans_id transId, cp.content_provider_id contentproviderid, to_char(cp.request_date, 'dd/MM/yyyy hh24:mi:ss') requestDate, "
                + "(case nvl(bp.error_code,'null')\n"
                + "when '00' then TO_CHAR(2)\n"
                + "when '32' then \n"
                + "(case nvl(bp.correct_code,'null')\n"
                + "when '00' then TO_CHAR(2)\n"
                + "when 'null' then TO_CHAR(1)\n"
                + "else TO_CHAR(4) end)\n"
                + "when 'null' then\n"
                + "(case nvl(bp.correct_code,'null')\n"
                + "when '00' then TO_CHAR(2)\n"
                + "when 'null' then TO_CHAR(1)\n"
                + "else TO_CHAR(4) end\n"
                + ")\n"
                + "when 'null' then\n"
                + "(case nvl(bp.correct_code,'null')\n"
                + "when '00' then TO_CHAR(2)\n"
                + "when 'null' then TO_CHAR(1)\n"
                + "else TO_CHAR(4) end\n"
                + ")\n"
                + "else TO_CHAR(4)\n"
                + "end\n"
                + ")"
                + " transStatus, "
                + "cp.confirm_status confirmStatus, cp.billing_code billingCode, cp.order_id orderId, cp.amount, "
                + "cp.order_info orderInfo, cp.trans_type transType, bp.bank_code bankCode, bp.cp_code cpCode, bp.msisdn, p.cp_name cpName, bp.customer_name customerName, bp.error_code || bp.correct_code errorCode from trans_cp cp join trans_bankplus bp on cp.trans_cp_id = bp.trans_cp_id "
                + " join content_provider p on p.content_provider_id = cp.content_provider_id ";
        // sql += where;
        // sql += " order by cp.request_date desc";
        // log.debug(sql);

        // Rebuild where param
        int numparam = StringUtils.countMatches(where, ":");
        // log.info("NUM PARAM: " + numparam);
        ArrayList<Object> arrParam = new ArrayList<Object>();
        arrParam.add(0, "");
        for (int i = 0; i < numparam; i++) {
            for (Object object : param.keySet()) {
                String key = object.toString();
                Object val = param.get(key);
                int index = where.indexOf(":");
                int indexCheck = where.indexOf(":" + key);
                if (index == indexCheck) {
                    if (val instanceof ArrayList) {
                        ArrayList arr = (ArrayList) val;
                        String add = "";
                        for (int j = 0; j < arr.size(); j++) {
                            arrParam.add(arr.get(j));
                            add += ",?";
                        }
                        add = add.substring(1);
                        where = where.substring(0, index) + add + where.substring(index + (":" + key).length());
                    } else if (val instanceof Date) {
                        Date d = (Date) val;
                        String date = new SimpleDateFormat("dd/MM/yyyy HH-mm-ss").format(d);
                        arrParam.add(date);
                        where = where.substring(0, index) + "to_date(?,'dd/MM/yyyy hh24-mi-ss')"
                                + where.substring(index + (":" + key).length());
                    } else {
                        arrParam.add(val);
                        where = where.substring(0, index) + "?" + where.substring(index + (":" + key).length());
                    }
                    // arrParam.add(val);
                    // where = where.substring(0, index) + "?"
                    //         + where.substring(index + (":" + key).length());
                    break;
                }
            }
        }
        numparam = arrParam.size() - 1;
        sql += where;
        sql += " order by cp.request_date desc";
        // log.debug(sql);
        log.info("WHERE CLAUSE: " + where);
        log.info("LIST PARAM VALUE: " + arrParam);
        Session sess = DAOFactory.getNewSession();
        // SQLQuery query = sess.createSQLQuery(sql);
        Connection conn = sess.connection();
        PreparedStatement pstm = null;
        ResultSet rs = null;
        try {
            pstm = conn.prepareStatement(sql);
            for (int i = 0; i < numparam; i++) {
                Object objVal = arrParam.get(i + 1);
                if (objVal instanceof ArrayList) {
                    ArrayList arrlist = (ArrayList) objVal;
                    java.sql.Array sqlArray = null;
                    // oracle.jdbc.OracleConnection oracleConnection = conn.unwrap(OracleConnection.class);
                    if (arrlist.get(0) instanceof String) {
                        // sqlArray = oracleConnection.createArrayOf("VARCHAR", arrlist.toArray());
                        // sqlArray = conn.createArrayOf("VARCHAR", arrlist.toArray());
                    } else {
                        // sqlArray = conn.createArrayOf("NUMERIC", arrlist.toArray());
                        // sqlArray = oracleConnection.createArrayOf("INTEGER", arrlist.toArray());
                    }
                    pstm.setArray(i + 1, sqlArray);
                } else if (objVal instanceof String) {
                    pstm.setString(i + 1, objVal.toString());
                    // java.sql.Date sqlDate = new java.sql.Date(utilDate.getTime());
                    // log.info("set string: " + (i + 1) + " - " + objVal.toString());
                } else if (objVal instanceof Date) {
                    Date d = (Date) objVal;
                    // String date = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(d);
                    java.sql.Timestamp sqlDate = new java.sql.Timestamp(d.getTime());
                    // log.info("set date: " + (i + 1) + " - " + sqlDate);
                    pstm.setTimestamp(i + 1, sqlDate);
                    // pstm.setString(i + 1, date);
                } else {
                    pstm.setLong(i + 1, Long.parseLong(objVal.toString()));
                    // log.info("set long: " + (i + 1) + " - " + Long.parseLong(objVal.toString()));
                }
            }
            // log.info("PREP: " + pstm.toString());
            rs = pstm.executeQuery();
            ResultSetMetaData rsmd = rs.getMetaData();
            int numCol = rsmd.getColumnCount();
            String[] arrCol = new String[numCol];
            String coltemp = "";
            for (int i = 0; i < numCol; i++) {
                arrCol[i] = rsmd.getColumnName(i + 1);
                coltemp += rsmd.getColumnName(i + 1) + "#";
            }
            // log.info("CCCCCC:" + coltemp);
            while (rs.next()) {
                // log.info("AAAAAAAAAAAAA:" + rs.getString(1));
                Transaction trans = new Transaction();
                for (int i = 0; i < numCol; i++) {
                    // String data = rs.getString(arrCol[i]);
                    Object data = rs.getObject(arrCol[i]);
                    if (data != null) {
                        callSetFunction(trans, "set" + arrCol[i], data);
                    }
                }
                lst.add(trans);
            }
        } catch (Exception ex) {
            log.error("", ex);
        } finally {
            // log.info("================>finally");
            if (rs != null) {
                rs.close();
            }
            if (pstm != null) {
                pstm.close();
            }
            if (sess != null) {
                sess.close();
            }
        }
        // query.setResultTransformer(Transformers.aliasToBean(Transaction.class));
        // for (Object object : param.keySet()) {
        //     String key = object.toString();
        //     Object val = param.get(key);
        //     if (val instanceof ArrayList) { // For select in
        //         query.setParameterList(key, (ArrayList) val);
        //     } else {
        //         query.setParameter(key, param.get(key));
        //     }
        // }
        // log.info(query.toString());
        // lst = query.list();
        // ScrollableResults resultset = query.scroll(ScrollMode.FORWARD_ONLY);
        // resultset.beforeFirst();
        // while (resultset.next()) {
        //     Object[] objres = resultset.get();
        //     log.info(objres);
        // }
        // resultset.close();
    } catch (Exception ex) {
        log.error("getTrans: ", ex);
    } finally {
        // DAOFactory.commitCurrentSessions();
    }
    return lst;
}
From source file:org.apache.hadoop.hive.jdbc.TestJdbcDriver.java
public void testMetaDataGetColumns() throws SQLException {
    Map<String[], Integer> tests = new HashMap<String[], Integer>();
    tests.put(new String[] { "testhivejdbcdriver\\_table", null }, 2);
    tests.put(new String[] { "testhivejdbc%", null }, 7);
    tests.put(new String[] { "testhiveJDBC%", null }, 7);
    tests.put(new String[] { "testhiveJDB\\C%", null }, 0);
    tests.put(new String[] { "%jdbcdriver\\_table", null }, 2);
    tests.put(new String[] { "%jdbcdriver\\_table%", "under\\_col" }, 1);
    tests.put(new String[] { "%jdbcdriver\\_table%", "under\\_COL" }, 1);
    tests.put(new String[] { "%jdbcdriver\\_table%", "under\\_co_" }, 1);
    tests.put(new String[] { "%jdbcdriver\\_table%", "under_col" }, 1);
    tests.put(new String[] { "%jdbcdriver\\_table%", "und%" }, 1);
    tests.put(new String[] { "%jdbcdriver\\_table%", "%" }, 2);
    tests.put(new String[] { "%jdbcdriver\\_table%", "_%" }, 2);

    for (String[] checkPattern : tests.keySet()) {
        ResultSet rs = con.getMetaData().getColumns(null, null, checkPattern[0], checkPattern[1]);

        // validate the metadata for the getColumns result set
        ResultSetMetaData rsmd = rs.getMetaData();
        assertEquals("TABLE_CAT", rsmd.getColumnName(1));

        int cnt = 0;
        while (rs.next()) {
            String columnname = rs.getString("COLUMN_NAME");
            int ordinalPos = rs.getInt("ORDINAL_POSITION");
            switch (cnt) {
            case 0:
                assertEquals("Wrong column name found", "under_col", columnname);
                assertEquals("Wrong ordinal position found", ordinalPos, 1);
                break;
            case 1:
                assertEquals("Wrong column name found", "value", columnname);
                assertEquals("Wrong ordinal position found", ordinalPos, 2);
                break;
            default:
                break;
            }
            cnt++;
        }
        rs.close();
        assertEquals("Found less columns then we test for.", tests.get(checkPattern).intValue(), cnt);
    }
}