List of usage examples for java.sql ResultSet getMetaData
ResultSetMetaData getMetaData() throws SQLException;
Returns a ResultSetMetaData object describing the types and properties of the
ResultSet object's columns. From source file: MainClass.java
public MainClass() { try {/* www .j av a 2s .c om*/ Class.forName("COM.cloudscape.core.RmiJdbcDriver"); Connection connection = DriverManager.getConnection("jdbc:cloudscape:rmi:books"); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SELECT * FROM authors"); ResultSetMetaData metaData = resultSet.getMetaData(); int numberOfColumns = metaData.getColumnCount(); for (int i = 1; i <= numberOfColumns; i++) { System.out.println(metaData.getColumnName(i) + "\t"); } while (resultSet.next()) { for (int i = 1; i <= numberOfColumns; i++) { System.out.println(resultSet.getObject(i) + "\t"); } System.out.println("\n"); } statement.close(); connection.close(); } catch (SQLException sqlException) { System.out.println(sqlException.getMessage()); } catch (ClassNotFoundException classNotFound) { System.out.println("Driver Not Found"); System.exit(1); } }
From source file:io.github.benas.jql.shell.PrintStreamRowCallbackHandler.java
/**
 * Prints a single result-set row as a " | "-separated line on the configured
 * print stream.
 *
 * @param resultSet the row to render; the cursor is not advanced
 * @throws SQLException if the metadata or a column value cannot be read
 */
@Override
public void processRow(ResultSet resultSet) throws SQLException {
    int columnCount = resultSet.getMetaData().getColumnCount();
    StringBuilder line = new StringBuilder();
    for (int column = 1; column <= columnCount; column++) {
        if (column > 1) {
            line.append(" | ");
        }
        line.append(resultSet.getObject(column));
    }
    printStream.println(line.toString());
}
From source file:Main.java
public Main() throws Exception { ArrayList columnNames = new ArrayList(); ArrayList data = new ArrayList(); String url = "jdbc:mysql://localhost:3306/yourdb"; String userid = "root"; String password = "sesame"; String sql = "SELECT * FROM animals"; Connection connection = DriverManager.getConnection(url, userid, password); Statement stmt = connection.createStatement(); ResultSet rs = stmt.executeQuery(sql); ResultSetMetaData md = rs.getMetaData(); int columns = md.getColumnCount(); for (int i = 1; i <= columns; i++) { columnNames.add(md.getColumnName(i)); }/*from w ww .j ava 2 s .c om*/ while (rs.next()) { ArrayList row = new ArrayList(columns); for (int i = 1; i <= columns; i++) { row.add(rs.getObject(i)); } data.add(row); } Vector columnNamesVector = new Vector(); Vector dataVector = new Vector(); for (int i = 0; i < data.size(); i++) { ArrayList subArray = (ArrayList) data.get(i); Vector subVector = new Vector(); for (int j = 0; j < subArray.size(); j++) { subVector.add(subArray.get(j)); } dataVector.add(subVector); } for (int i = 0; i < columnNames.size(); i++) columnNamesVector.add(columnNames.get(i)); JTable table = new JTable(dataVector, columnNamesVector) { public Class getColumnClass(int column) { for (int row = 0; row < getRowCount(); row++) { Object o = getValueAt(row, column); if (o != null) { return o.getClass(); } } return Object.class; } }; JScrollPane scrollPane = new JScrollPane(table); getContentPane().add(scrollPane); JPanel buttonPanel = new JPanel(); getContentPane().add(buttonPanel, BorderLayout.SOUTH); }
From source file:com.wantscart.jade.core.mapper.AbstractCollectionRowMapper.java
@Override public Object mapRow(ResultSet rs, int rowNum) throws SQLException { int columnSize = rs.getMetaData().getColumnCount(); Collection<Object> collection = createCollection(columnSize); // columnIndex1 for (int columnIndex = 1; columnIndex <= columnSize; columnIndex++) { collection.add(JdbcUtils.getResultSetValue(rs, columnIndex, elementType)); }// w w w.jav a2 s .c o m return collection; }
From source file:model.SQLiteModel.java
private static List<Map<String, String>> select(String query) { //query = StringEscapeUtils.escapeJavaScript(query); //System.out.println(query); ResultSet rs = null; Statement stmt = null;//from w w w . j av a 2 s . c om int first = 1; List<String> columnNames = new ArrayList<String>(); List<Map<String, String>> data = new ArrayList<Map<String, String>>(); try { stmt = c.createStatement(); rs = stmt.executeQuery(query); while (rs.next()) { ResultSetMetaData rsmd = rs.getMetaData(); int count = rsmd.getColumnCount(); if (first == 1) { for (int i = 1; i <= count; i++) { columnNames.add(rsmd.getColumnName(i)); } } Map<String, String> curr = new HashMap<String, String>(); for (int i = 1; i <= count; i++) { curr.put(columnNames.get(i - 1), rs.getString(i)); } data.add(curr); first++; } stmt.close(); } catch (Exception e) { System.err.println(e.getClass().getName() + ": " + e.getMessage()); System.out.println("Unsuccessful select query: " + query); writeLineToLog("Unsuccessful select query: " + query); } return data; }
From source file:com.sinosoft.one.data.jade.rowmapper.AbstractCollectionRowMapper.java
@SuppressWarnings("unchecked") public Object mapRow(ResultSet rs, int rowNum) throws SQLException { int columnSize = rs.getMetaData().getColumnCount(); Collection collection = createCollection(columnSize); // columnIndex1 for (int columnIndex = 1; columnIndex <= columnSize; columnIndex++) { collection.add(JdbcUtils.getResultSetValue(rs, columnIndex, elementType)); }/*from w w w.j a va2s. c om*/ return collection; }
From source file:shell.framework.dao.support.ListExtractor4Map.java
public Object extractData(ResultSet rs) throws SQLException, DataAccessException { ResultSetMetaData rsmd = rs.getMetaData(); int columnNum = rsmd.getColumnCount(); List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(); while (rs.next()) { Map<String, Object> map4Row = new HashMap<String, Object>(); for (int i = 1; i <= columnNum; i++) { map4Row.put(rsmd.getColumnName(i), rs.getObject(i)); }//from w w w . j a v a 2s .c om result.add(map4Row); } return result; }
From source file:com.gzj.tulip.jade.rowmapper.AbstractCollectionRowMapper.java
@Override @SuppressWarnings("unchecked") public Object mapRow(ResultSet rs, int rowNum) throws SQLException { int columnSize = rs.getMetaData().getColumnCount(); Collection collection = createCollection(columnSize); // columnIndex1 for (int columnIndex = 1; columnIndex <= columnSize; columnIndex++) { collection.add(JdbcUtils.getResultSetValue(rs, columnIndex, elementType)); }//from ww w . j a va2 s . com return collection; }
From source file:com.laxser.blitz.lama.core.mapper.AbstractCollectionRowMapper.java
/**
 * Maps the current row of the result set to a collection containing one
 * converted value per column.
 *
 * @param rs     the result set, positioned on the row to map
 * @param rowNum the row number (not used by this mapper)
 * @return a collection (created by {@code createCollection}) holding each
 *         column value converted to {@code elementType}
 * @throws SQLException if a column value cannot be read
 */
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
    int columnSize = rs.getMetaData().getColumnCount();
    Collection collection = createCollection(columnSize);
    // JDBC column indexes start at 1, hence the 1-based loop below.
    for (int columnIndex = 1; columnIndex <= columnSize; columnIndex++) {
        collection.add(JdbcUtils.getResultSetValue(rs, columnIndex, elementType));
    }
    return collection;
}
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException { final Schema schema = createSchema(rs, recordName, convertNames); final GenericRecord rec = new GenericData.Record(schema); final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema); try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(schema, outStream); final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); long nrOfRows = 0; while (rs.next()) { if (callback != null) { callback.processRow(rs); }/*from www .j a va 2 s. c o m*/ for (int i = 1; i <= nrOfColumns; i++) { final int javaSqlType = meta.getColumnType(i); // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement if (javaSqlType == CLOB) { Clob clob = rs.getClob(i); if (clob != null) { long numChars = clob.length(); char[] buffer = new char[(int) numChars]; InputStream is = clob.getAsciiStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (char) c; c = is.read(); } rec.put(i - 1, new String(buffer)); clob.free(); } else { rec.put(i - 1, null); } continue; } if (javaSqlType == BLOB) { Blob blob = rs.getBlob(i); if (blob != null) { long numChars = blob.length(); byte[] buffer = new byte[(int) numChars]; InputStream is = blob.getBinaryStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (byte) c; c = is.read(); } ByteBuffer bb = ByteBuffer.wrap(buffer); rec.put(i - 1, bb); blob.free(); } else { rec.put(i - 1, null); } continue; } final Object value = rs.getObject(i); if (value == null) { rec.put(i - 1, null); } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) { // bytes requires little bit different handling 
byte[] bytes = rs.getBytes(i); ByteBuffer bb = ByteBuffer.wrap(bytes); rec.put(i - 1, bb); } else if (value instanceof Byte) { // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT // But value is returned by JDBC as java.lang.Byte // (at least H2 JDBC works this way) // direct put to avro record results: // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte rec.put(i - 1, ((Byte) value).intValue()); } else if (value instanceof Short) { //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand. rec.put(i - 1, ((Short) value).intValue()); } else if (value instanceof BigDecimal) { // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38" try { int scale = meta.getScale(i); BigDecimal bigDecimal = ((BigDecimal) value); if (scale == 0) { if (meta.getPrecision(i) < 10) { rec.put(i - 1, bigDecimal.intValue()); } else { rec.put(i - 1, bigDecimal.longValue()); } } else { rec.put(i - 1, bigDecimal.doubleValue()); } } catch (Exception e) { rec.put(i - 1, value.toString()); } } else if (value instanceof BigInteger) { // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // It the SQL type is BIGINT and the precision is between 0 and 19 (inclusive); if so, the BigInteger is likely a // long (and the schema says it will be), so try to get its value as a long. // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string. 
if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { try { rec.put(i - 1, ((BigInteger) value).longValueExact()); } catch (ArithmeticException ae) { // Since the value won't fit in a long, convert it to a string rec.put(i - 1, value.toString()); } } } else { rec.put(i - 1, value.toString()); } } else if (value instanceof Number || value instanceof Boolean) { if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { rec.put(i - 1, value); } } else { rec.put(i - 1, value); } } else { // The different types that we support are numbers (int, long, double, float), // as well as boolean values and Strings. Since Avro doesn't provide // timestamp types, we want to convert those to Strings. So we will cast anything other // than numbers or booleans to strings by using the toString() method. rec.put(i - 1, value.toString()); } } dataFileWriter.append(rec); nrOfRows += 1; if (maxRows > 0 && nrOfRows == maxRows) break; } return nrOfRows; } }