List of usage examples for java.sql.ResultSet#getMetaData

    ResultSetMetaData getMetaData() throws SQLException;

Retrieves the number, types, and properties of this ResultSet object's columns.

From source file: com.fmguler.ven.QueryMapper.java
private void enumerateColumns(Set columns, ResultSet rs) throws SQLException { if (!columns.isEmpty()) return;// ww w .j a v a 2 s .c o m for (int i = 1; i < rs.getMetaData().getColumnCount() + 1; i++) { columns.add(rs.getMetaData().getColumnName(i)); } }
From source file:com.github.tosdan.utils.sql.BasicRowProcessorMod.java
/** * Convert a <code>ResultSet</code> row into an <code>Object[]</code>. * This implementation copies column values into the array in the same * order they're returned from the <code>ResultSet</code>. Array elements * will be set to <code>null</code> if the column was SQL NULL. * * @see org.apache.commons.dbutils.RowProcessor#toArray(java.sql.ResultSet) * @param rs ResultSet that supplies the array data * @throws SQLException if a database access error occurs * @return the newly created array/*from w ww . j a v a 2 s . c om*/ */ @Override public Object[] toArray(ResultSet rs) throws SQLException { ResultSetMetaData meta = rs.getMetaData(); int cols = meta.getColumnCount(); Object[] result = new Object[cols]; for (int i = 0; i < cols; i++) { result[i] = rs.getObject(i + 1); } return result; }
From source file:com.bethecoder.ascii_table.impl.JDBCASCIITableAware.java
private void init(ResultSet resultSet) throws SQLException { //Populate header int colCount = resultSet.getMetaData().getColumnCount(); headers = new ArrayList<ASCIITableHeader>(colCount); for (int i = 0; i < colCount; i++) { headers.add(new ASCIITableHeader(resultSet.getMetaData().getColumnLabel(i + 1).toUpperCase())); }/*from w w w . j a v a2 s .com*/ //Populate data data = new ArrayList<List<Object>>(); List<Object> rowData = null; List<Object> tempData; while (resultSet.next()) { boolean isAnyColumnMultiline = false; boolean[] columnHasMultiline = new boolean[colCount]; rowData = new ArrayList<Object>(); // figure out if any of the column values need to be split across // multiple lines for (int i = 0; i < colCount; i++) { Object object = resultSet.getObject(i + 1); String val = String.valueOf(object); if (val.contains("\n") || val.length() > maxColumnWidth) { columnHasMultiline[i] = true; isAnyColumnMultiline = true; } rowData.add(object); } if (isAnyColumnMultiline) { // create extra as many extra rows as needed to format // long strings and multiline string int maxRows = 2; Object[][] columns = new Object[colCount][]; for (int i = 0; i < colCount; i++) { if (!columnHasMultiline[i]) continue; String val = String.valueOf(rowData.get(i)); String[] vals = null; if (val.contains("\n")) { vals = val.split("\n"); } else if (val.length() > maxColumnWidth) { String wrap = WordUtils.wrap(val, maxColumnWidth); vals = wrap.split("\n"); } columns[i] = vals; maxRows = Math.max(maxRows, vals.length); } for (int i = 0; i < colCount; i++) { if (columns[i] == null) { columns[i] = padedColumn(rowData.get(i), maxRows); } else if (columns[i].length < maxRows) { Object[] padedArray = new Object[maxRows]; for (int j = 0; j < maxRows; j++) { Object val = j < columns[i].length ? 
columns[i][j] : ""; padedArray[j] = val; } columns[i] = padedArray; } } for (int r = 0; r < maxRows; r++) { rowData.clear(); for (int c = 0; c < colCount; c++) { rowData.add(columns[c][r]); } data.add(rowData); } } else { data.add(rowData); } } //iterate rows }
From source file:uk.ac.kcl.rowmappers.DocumentRowMapper.java
private void mapDBFields(Document doc, ResultSet rs) throws SQLException, IOException { //add additional query fields for ES export ResultSetMetaData meta = rs.getMetaData(); int colCount = meta.getColumnCount(); for (int col = 1; col <= colCount; col++) { Object value = rs.getObject(col); if (value != null) { String colLabel = meta.getColumnLabel(col).toLowerCase(); if (!fieldsToIgnore.contains(colLabel)) { DateTime dateTime;//from www . j a v a 2 s . c o m //map correct SQL time types switch (meta.getColumnType(col)) { case 91: Date dt = (Date) value; dateTime = new DateTime(dt.getTime()); doc.getAssociativeArray().put(meta.getColumnLabel(col).toLowerCase(), eSCompatibleDateTimeFormatter.print(dateTime)); break; case 93: Timestamp ts = (Timestamp) value; dateTime = new DateTime(ts.getTime()); doc.getAssociativeArray().put(meta.getColumnLabel(col).toLowerCase(), eSCompatibleDateTimeFormatter.print(dateTime)); break; default: doc.getAssociativeArray().put(meta.getColumnLabel(col).toLowerCase(), rs.getString(col)); break; } } } //map binary content from FS or database if required (as per docman reader) if (value != null && meta.getColumnLabel(col).equalsIgnoreCase(binaryContentFieldName)) { switch (binaryContentSource) { case "database": doc.setBinaryContent(rs.getBytes(col)); break; case "fileSystemWithDBPath": Resource resource = context.getResource(pathPrefix + rs.getString(col)); doc.setBinaryContent(IOUtils.toByteArray(resource.getInputStream())); break; default: break; } } } }
From source file:eu.udig.catalog.teradata.TeradataLookUpSchemaRunnable.java
/**
 * Probes the declared column type of a geometry column (via a zero-row query)
 * and reports whether it is NOT one of the recognised geometry type names.
 *
 * @return true if the column type matches none of the accepted names;
 *         false on a match, or if the probe query itself fails (best effort)
 * @throws SQLException if creating the statement fails
 */
private boolean isBroken(Connection connection, String table, String schema, String geom, String type)
        throws SQLException {
    Statement statement = connection.createStatement();
    try {
        // "limit 0" fetches metadata only — no row data is transferred.
        String sql = "select " + geom + " from " + schema + "." + table + " limit 0";
        ResultSet results = statement.executeQuery(sql);
        String columnType = results.getMetaData().getColumnTypeName(1);
        // Accepted type names; a case-insensitive match means "not broken".
        String[] accepted = { type, "geometry", "geometry[]", "point", "point[]",
                "line", "line[]", "polygon", "polygon[]" };
        for (String candidate : accepted) {
            if (columnType.equalsIgnoreCase(candidate)) {
                return false;
            }
        }
        return true;
    } catch (SQLException e) {
        // Deliberate best-effort: a failed probe is not reported as broken.
        return false;
    } finally {
        // Closing the Statement also closes its ResultSet (JDBC contract).
        statement.close();
    }
}
From source file:com.adito.jdbc.DBDumper.java
/**
 * Dumps a single result set row as an SQL INSERT statement.
 *
 * @param writer    destination for the generated statement
 * @param resultSet positioned on the row to dump
 * @throws SQLException if a database access error occurs
 */
public void dumpRow(PrintWriter writer, ResultSet resultSet) throws SQLException {
    // Fetch metadata once for both the table name and the column count
    // (the original called getMetaData() twice).
    ResultSetMetaData meta = resultSet.getMetaData();
    String tableName = meta.getTableName(1);
    int columnCount = meta.getColumnCount();
    writer.print("INSERT INTO " + tableName + " VALUES (");
    for (int j = 0; j < columnCount; j++) {
        if (j > 0) {
            writer.print(", ");
        }
        Object value = resultSet.getObject(j + 1);
        if (value == null) {
            writer.print("NULL");
        } else if (value instanceof Number) {
            // Numbers are emitted unquoted.
            writer.print(value.toString());
        } else {
            // TODO: this quote escaping currently only works for HSQLDB;
            // it needs to be moved up into the engine.
            String outputValue = value.toString().replaceAll("'", "''");
            writer.print("'" + outputValue + "'");
        }
    }
    writer.println(");");
}
From source file:de.iritgo.aktario.jdbc.LoadObject.java
/**
 * Load an object.
 *
 * @param dataSource The data source to load from.
 * @param typeId The type of the object to load (also used as the table name).
 * @param uniqueId The unique id of the object to load.
 * @return The loaded object (already registered with the base registry), or null
 *         if no row was found or the type could not be instantiated.
 */
private DataObject load(final DataSource dataSource, final String typeId, long uniqueId) {
    DataObject object = null;
    try {
        QueryRunner query = new QueryRunner(dataSource);
        // NOTE(review): typeId/uniqueId are concatenated into the SQL; this is
        // only safe if both are trusted, internally-generated values — confirm.
        object = (DataObject) query.query("select * from " + typeId + " where id=" + uniqueId,
                new ResultSetHandler() {
                    public Object handle(ResultSet rs) throws SQLException {
                        // Result of getMetaData() is discarded; presumably a
                        // leftover call — it has no visible effect here.
                        rs.getMetaData();
                        if (rs.next()) {
                            try {
                                // Create an empty instance of the type, then copy
                                // each attribute from the row into it.
                                DataObject object = (DataObject) Engine.instance().getIObjectFactory()
                                        .newInstance(typeId);
                                object.setUniqueId(rs.getLong("id"));
                                for (Iterator i = object.getAttributes().entrySet().iterator(); i.hasNext();) {
                                    Map.Entry attribute = (Map.Entry) i.next();
                                    if (attribute.getValue() instanceof IObjectList) {
                                        // List attributes are loaded via a separate query.
                                        loadList(dataSource, object,
                                                object.getIObjectListAttribute((String) attribute.getKey()));
                                    } else {
                                        try {
                                            // Diagnostic: warn when the column's Java type
                                            // differs from the attribute's declared type.
                                            if (!object.getAttribute((String) attribute.getKey()).getClass()
                                                    .equals(rs.getObject((String) attribute.getKey())
                                                            .getClass())) {
                                                System.out.println(
                                                        "********* Datastruct is not compatible with dataobject:"
                                                                + object.getTypeId() + ":" + attribute.getKey()
                                                                + " Types:"
                                                                + object.getAttribute(
                                                                        (String) attribute.getKey()).getClass()
                                                                + "!="
                                                                + rs.getObject((String) attribute.getKey())
                                                                        .getClass());
                                            }
                                            object.setAttribute((String) attribute.getKey(),
                                                    rs.getObject((String) attribute.getKey()));
                                        } catch (NullPointerException x) {
                                            // A null attribute or column value trips the
                                            // getClass() comparison above; log and skip.
                                            System.out.println("LoadObject error: " + attribute.getKey());
                                        }
                                    }
                                }
                                return object;
                            } catch (NoSuchIObjectException ignored) {
                                // Unknown type id: logged, falls through to return null.
                                Log.logError("persist", "LoadObject", "NoSuchIObjectException");
                            }
                        } else {
                        }
                        return null;
                    }
                });
        if (object != null) {
            Log.logVerbose("persist", "LoadObject", "Successfully loaded object " + typeId + ":" + uniqueId);
        } else {
            Log.logError("persist", "LoadObject",
                    "Unable to find object " + typeId + ":" + uniqueId);
        }
    } catch (SQLException x) {
        Log.logError("persist", "LoadObject",
                "Error while loading the object " + typeId + ":" + uniqueId + ": " + x);
    }
    return object;
}
From source file:DatabaseBrowser.java
/**
 * Builds the table model's headers and data from the given result set.
 * Column names become the headers; each result row becomes one Vector of cells.
 *
 * @param rset the result set to read (fully consumed by this constructor)
 * @throws SQLException if a database access error occurs
 */
public ResultSetTableModel(ResultSet rset) throws SQLException {
    ResultSetMetaData rsmd = rset.getMetaData();
    int count = rsmd.getColumnCount();
    // Header row: one entry per column name.
    columnHeaders = new Vector(count);
    for (int col = 1; col <= count; col++) {
        columnHeaders.addElement(rsmd.getColumnName(col));
    }
    // Body: one Vector per row, one element per column.
    tableData = new Vector();
    while (rset.next()) {
        Vector currentRow = new Vector(count);
        for (int col = 1; col <= count; col++) {
            currentRow.addElement(rset.getObject(col));
        }
        tableData.addElement(currentRow);
    }
}
From source file:com.graphaware.importer.data.access.QueueDbDataReader.java
/**
 * {@inheritDoc}
 *
 * Runs the query on a background thread and streams each row into the
 * blocking queue as a column-label -> string-value map. "alter" statements
 * are executed synchronously instead. Callers consume from {@code records}
 * until {@code noMoreRecords} is set.
 */
@Override
public final void read(final String query, final String hint) {
    // A non-null queue means a previous read is still in progress.
    if (records != null) {
        throw new IllegalStateException("Previous reader hasn't been closed");
    }
    LOG.info("Start query: \n" + query);
    // DDL shortcut: "alter" statements produce no rows; run and finish.
    if (query.startsWith("alter")) {
        jdbcTemplate.execute(query);
        noMoreRecords = true;
        return;
    }
    records = new ArrayBlockingQueue<>(queueCapacity());
    new Thread(new Runnable() {
        @Override
        public void run() {
            Date d1 = Calendar.getInstance().getTime();
            try {
                jdbcTemplate.query(query, new ResultSetExtractor<Void>() {
                    @Override
                    public Void extractData(ResultSet rs) throws SQLException, DataAccessException {
                        // Metadata fetched once, outside the row loop.
                        ResultSetMetaData metaData = rs.getMetaData();
                        int colCount = metaData.getColumnCount();
                        while (rs.next()) {
                            // One map per row: column label -> string value.
                            Map<String, String> columns = new HashMap<>();
                            for (int i = 1; i <= colCount; i++) {
                                columns.put(metaData.getColumnLabel(i), rs.getString(i));
                            }
                            columns.put(ROW, String.valueOf(rs.getRow()));
                            try {
                                // Block up to 1 hour for queue space; drop the row after that.
                                records.offer(columns, 1, TimeUnit.HOURS);
                            } catch (InterruptedException e) {
                                LOG.warn(
                                        "Was waiting for more than 1 hour to insert a record for processing, had to drop it");
                            }
                        }
                        return null;
                    }
                });
            } finally {
                // Always signal completion, even if the query throws,
                // so consumers don't wait forever.
                noMoreRecords = true;
            }
            long diffInSeconds = TimeUnit.MILLISECONDS
                    .toSeconds(Calendar.getInstance().getTime().getTime() - d1.getTime());
            LOG.info("Finished querying for " + hint + " in " + diffInSeconds + " seconds");
        }
    }, "DB READER - " + hint).start();
}
From source file:db.migration.V023__UpdateOrganisationToimipisteKoodi.java
/**
 * Migration entry point: reads every organisation row, caches the VARCHAR
 * columns per oid in {@code _organisations}, then generates and stores an
 * initial "toimipistekoodi" for each organisation detected as a Toimipiste.
 *
 * @param jdbcTemplate template used for all reads and updates
 * @throws Exception propagated from the underlying JDBC operations
 */
public void migrate(JdbcTemplate jdbcTemplate) throws Exception {
    LOG.info("migrate()...");
    // Get all organisations
    List<Map> resultSet = jdbcTemplate.query("SELECT * FROM organisaatio o", new RowMapper<Map>() {
        @Override
        public Map mapRow(ResultSet rs, int rowNum) throws SQLException {
            Map r = new HashMap<String, Object>();
            ResultSetMetaData metadata = rs.getMetaData();
            for (int i = 1; i <= metadata.getColumnCount(); i++) {
                String cname = metadata.getColumnName(i);
                int ctype = metadata.getColumnType(i);
                // Only VARCHAR columns are copied; every other type is skipped.
                switch (ctype) {
                case Types.VARCHAR:
                    r.put(cname, rs.getString(cname));
                    break;
                default:
                    break;
                }
            }
            LOG.debug("  read from db : org = {}", r);
            // Side effect: cache each row by its oid for later lookups.
            _organisations.put((String) r.get("oid"), r);
            return r;
        }
    });
    // Generate and update initial values for toimipistekoodis
    for (Map org : resultSet) {
        if (isToimipiste(org, jdbcTemplate)) {
            String tpKoodi = calculateToimipisteKoodi(org, jdbcTemplate);
            updateToimipisteKoodi(org, tpKoodi, jdbcTemplate);
        }
    }
    LOG.info("  Processed {} organisations, updated {} Opetuspistes", _organisations.size(), _numUpdated);
    LOG.info("migrate()... done.");
}