List of usage examples for java.sql ResultSetMetaData getColumnCount
int getColumnCount() throws SQLException;
Returns the number of columns in this ResultSet object.
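Before the collected examples, here is a minimal, self-contained sketch of the pattern they all share: obtain the ResultSetMetaData, call getColumnCount(), and walk the columns by their 1-based index. The in-memory H2 URL and the animals table are placeholder assumptions for illustration only; they are not taken from any of the source files below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnCountExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection settings (assumes an H2 driver on the classpath); replace with your own database.
        String url = "jdbc:h2:mem:demo";
        try (Connection connection = DriverManager.getConnection(url);
                Statement stmt = connection.createStatement()) {
            stmt.execute("CREATE TABLE animals (id INT, name VARCHAR(32))");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM animals")) {
                ResultSetMetaData metaData = rs.getMetaData();
                // Number of columns in this ResultSet
                int columnCount = metaData.getColumnCount();
                // JDBC column indexes are 1-based
                for (int i = 1; i <= columnCount; i++) {
                    System.out.println(i + ": " + metaData.getColumnLabel(i)
                            + " (" + metaData.getColumnTypeName(i) + ")");
                }
            }
        }
    }
}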
From source file:io.pivotal.dataflow.task.app.jdbcgemfire.common.JdbcColumnToPojoRowMapper.java
@Override
public Map<String, Object> mapRow(ResultSet rs, int arg1) throws SQLException {
    Map<String, Object> builder = new HashMap<>();
    Map<String, Object> processed = new HashMap<>();
    ColumnStandardMapper mapper = new ColumnStandardMapper();
    ResultSetMetaData metaData = rs.getMetaData();
    for (int i = 1; i <= metaData.getColumnCount(); i++) {
        builder.put(JdbcUtils.lookupColumnName(metaData, i), JdbcUtils.getResultSetValue(rs, i));
    }
    // send map to groovyColumnProcessor.process()
    processed = mapper.process(builder);
    return processed;
}
From source file:com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java
public static RawMatrixStreamExecRes streamExecutionItemToByteArray(IExecutionItem item, long nbLinesExpectedLeft)
        throws IOException, SQLException {
    RawMatrixStreamExecRes res = new RawMatrixStreamExecRes();
    long metter_start = System.currentTimeMillis();
    ByteArrayOutputStream baout = new ByteArrayOutputStream();
    Output kout = new Output(baout);
    Kryo kryo = new Kryo();
    kryo.setRegistrationRequired(true);
    kryo.setReferences(false);

    ResultSet result = item.getResultSet();
    IJDBCDataFormatter formatter = item.getDataFormatter();
    // if(logger.isDebugEnabled()){logger.debug(("Formatter ="+formatter.getClass().toString()));}
    ResultSetMetaData metadata = result.getMetaData();
    int nbColumns = metadata.getColumnCount();

    IVendorSupport vendorSpecific = VendorSupportRegistry.INSTANCE.getVendorSupport(item.getDatabase());
    int[] colTypes = vendorSpecific.getVendorMetadataSupport().normalizeColumnType(result);

    // get columns #, type and names
    String[] colNames = new String[nbColumns];
    int i = 0;
    while (i < nbColumns) {
        colNames[i] = metadata.getColumnLabel(i + 1);
        i++;
    }

    // register
    // Class mappings have to be registered before we start writing
    HashMap<String, Integer> registration = new HashMap<String, Integer>();
    for (int val : colTypes) {
        if (!isPrimitiveType(val)) {
            String className = getJavaDatatype(val);
            try {
                if (registration.get(className) == null) {
                    registration.put(className, kryo.register(Class.forName(className)).getId());
                }
            } catch (ClassNotFoundException e0) {
                logger.info("Class " + className + " not found");
            } catch (NullPointerException e1) {
                logger.info("Class " + className + " not found");
            }
        }
    }
    // Register Hadoop type
    // registration.put("org.apache.hadoop.io.Text", kryo.register(org.apache.hadoop.io.Text.class).getId());
    // registration.put("byte[]", kryo.register(byte[].class).getId());

    // start writing!
    // registration
    kout.writeInt(registration.keySet().size());
    for (String s : registration.keySet()) {
        kout.writeString(s);
        kout.writeInt(registration.get(s));
        // logger.info(s + " " + registration.get(s));
    }
    // version
    int version = VERSION;
    if (version >= 1) {
        kout.writeInt(-1); // this is for V0 compatibility, which misses version information
        kout.writeInt(version);
    }
    // Redis cache type
    kout.writeInt(RedisCacheType.RAW_MATRIX.ordinal());
    // nb of columns
    kout.writeInt(nbColumns);
    // column names
    for (String n : colNames)
        kout.writeString(n);
    // column types
    for (Integer t : colTypes)
        kout.writeInt(t);

    // rows
    // we need a different dictionary to check for first occurrences
    HashMap<String, Integer> tempsDict = new HashMap<String, Integer>();
    int count = 0;
    int index = 0;
    boolean moreData = false;
    boolean maxSizeReached = false;
    while ((!maxSizeReached) && (moreData = result.next())) {
        i = 0;
        kout.writeBoolean(true);
        while (i < nbColumns) {
            Object value = result.getObject(i + 1);
            Object unbox = formatter.unboxJDBCObject(value, colTypes[i]);
            // if(logger.isDebugEnabled()){logger.debug(("unbox value is "+unbox));}
            if (unbox instanceof String) {
                String stringVal = (String) unbox;
                // System.out.println(stringVal);
                Integer ref = tempsDict.get(stringVal);
                if (ref != null) {
                    kout.write(MEMBER_REFERENCE); // 4
                    kout.writeInt(ref); // 5
                } else {
                    kout.write(MEMBER_DEFINITION); // 4
                    kout.writeString(stringVal);
                    tempsDict.put(stringVal, new Integer(index));
                    index++;
                }
            } else {
                kout.write(MEMBER_VALUE); // 4
                // if(logger.isDebugEnabled()){logger.debug(("member unbox " + unbox.toString()));}
                // if(logger.isDebugEnabled()){logger.debug(("member value " + value.toString()));}
                kryo.writeClassAndObject(kout, unbox);
            }
            i++;
        }
        count++;
        // stats: display time for first 100 rows
        if (count == 100) {
            long intermediate = new Date().getTime();
            // logger.info("SQLQuery#" + item.getID() + " proceeded first 100 items in " + (intermediate - metter_start) + " ms");
            logger.info("task=RawMatrix" + " method=streamExecutionItemToByteArray" + " duration="
                    + ((intermediate - metter_start)) + " error=false status=running queryid=" + item.getID());
        }
        // if max chunk size of 50MB reached, stop
        if (count % 100 == 0) {
            float size = Math.round(baout.size() / 1048576);
            if (size >= maxChunkSizeInMB) {
                logger.info("Max size of " + maxChunkSizeInMB + "MB for one chunk reached");
                maxSizeReached = true;
            }
        }
        // DEBUG CODE TO CREATE SMALLER CHUNKS
        /*
         * if (count == 250) {
         *     maxSizeReached = true;
         *     // logger.info("Max debug size of 250 items reached");
         * }
         */
    }
    // WRITE moredata
    kout.writeBoolean(false); // close the data stream, begin metadata

    // we stop either if maxSize was reached or if there was no more data to read
    boolean moreToRead = false;    // we did not reach the end of the resultset
    boolean moreThanLimit = false; // in case of a limit query, did we reach the limit
    if (maxSizeReached) {
        // we stopped because we reached the hard memory limit for one chunk
        if ((nbLinesExpectedLeft > -1) && (!(count < nbLinesExpectedLeft))) {
            // we read exactly as many lines as the limit
            moreThanLimit = true;
        } else {
            moreThanLimit = true;
            moreToRead = true;
        }
    } else {
        if (!moreData) {
            // no more lines to read
            if (nbLinesExpectedLeft > -1) {
                // limit
                if (!(count < nbLinesExpectedLeft)) {
                    // we read as many lines as the limit
                    moreThanLimit = true;
                }
            }
        }
    }
    kout.writeBoolean(moreThanLimit);
    // -- V1 only
    if (version >= 1) {
        kout.writeLong(item.getExecutionDate().getTime()); // the computeDate
    }

    // stats: display total
    long metter_finish = new Date().getTime();
    // logger.info("SQLQuery#" + item.getID() + " serialized " + (count - 1) + " row(s) in " + (metter_finish - metter_start) + " ms, compressed resulset size is " + size + " Mbytes");
    /*
     * logger.info("task=RawMatrix" + " method=streamExecutionItemToByteArray" + " duration="
     *         + (metter_finish - metter_start) / 1000 + " error=false status=done driver="
     *         + item.getDatabase().getName() + " queryid=" + item.getID() + " size= " + size + " SQLQuery#"
     *         + item.getID() + " serialized " + (count - 1) + " row(s) in " + (metter_finish - metter_start)
     *         + " ms, compressed resulset size is " + size + " Mbytes");
     */
    // TODO Get project
    SQLStats queryLog = new SQLStats(Integer.toString(item.getID()), "streamExecutionItemToByteArray", "",
            (metter_finish - metter_start), item.getDatabase().getName());
    queryLog.setError(false);
    PerfDB.INSTANCE.save(queryLog);

    kout.close();
    res.setHasMore(moreToRead);
    res.setExecutionTime(metter_finish - metter_start);
    res.setNbLines(count);
    res.setStreamedMatrix(baout.toByteArray());
    return res;
}
From source file:Main.java
public Main() throws Exception {
    ArrayList columnNames = new ArrayList();
    ArrayList data = new ArrayList();
    String url = "jdbc:mysql://localhost:3306/yourdb";
    String userid = "root";
    String password = "sesame";
    String sql = "SELECT * FROM animals";
    Connection connection = DriverManager.getConnection(url, userid, password);
    Statement stmt = connection.createStatement();
    ResultSet rs = stmt.executeQuery(sql);
    ResultSetMetaData md = rs.getMetaData();
    int columns = md.getColumnCount();
    for (int i = 1; i <= columns; i++) {
        columnNames.add(md.getColumnName(i));
    }
    while (rs.next()) {
        ArrayList row = new ArrayList(columns);
        for (int i = 1; i <= columns; i++) {
            row.add(rs.getObject(i));
        }
        data.add(row);
    }
    Vector columnNamesVector = new Vector();
    Vector dataVector = new Vector();
    for (int i = 0; i < data.size(); i++) {
        ArrayList subArray = (ArrayList) data.get(i);
        Vector subVector = new Vector();
        for (int j = 0; j < subArray.size(); j++) {
            subVector.add(subArray.get(j));
        }
        dataVector.add(subVector);
    }
    for (int i = 0; i < columnNames.size(); i++)
        columnNamesVector.add(columnNames.get(i));
    JTable table = new JTable(dataVector, columnNamesVector) {
        public Class getColumnClass(int column) {
            for (int row = 0; row < getRowCount(); row++) {
                Object o = getValueAt(row, column);
                if (o != null) {
                    return o.getClass();
                }
            }
            return Object.class;
        }
    };
    JScrollPane scrollPane = new JScrollPane(table);
    getContentPane().add(scrollPane);
    JPanel buttonPanel = new JPanel();
    getContentPane().add(buttonPanel, BorderLayout.SOUTH);
}
From source file:io.lightlink.spring.StreamingMapper.java
private void readMetadata(ResultSet resultSet) throws SQLException {
    ResultSetMetaData metaData = resultSet.getMetaData();
    colCount = metaData.getColumnCount();
    colNames = new String[colCount];
    for (int i = 0; i < colNames.length; i++) {
        colNames[i] = metaData.getColumnLabel(i + 1);
    }
}
From source file:com.krawler.workflow.module.dao.DataObjectRowMapper.java
public Object mapRow(ResultSet rs, int rowNumber) throws SQLException {
    Map<String, Object> mappedObject = new HashMap<String, Object>();
    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    for (int index = 1; index <= columnCount; index++) {
        String column = columnMap.get(index);
        if (column == null) {
            column = JdbcUtils.lookupColumnName(rsmd, index).toLowerCase();
            columnMap.put(index, column);
        }
        Object value = rs.getObject(index);
        if (formatter != null && value != null && value instanceof Date)
            value = formatter.format(value);
        mappedObject.put(column, value);
    }
    return mappedObject;
}
From source file:net.orpiske.ssps.common.db.CountRsHandler.java
@Override
public Integer handle(ResultSet rs) throws SQLException {
    Integer dto = null;

    // No records to handle :O
    if (!rs.next()) {
        return null;
    }

    ResultSetMetaData meta = rs.getMetaData();

    for (int i = 1; i <= meta.getColumnCount(); i++) {
        Object value = rs.getObject(i);

        try {
            if (value instanceof Integer) {
                dto = (Integer) value;
            }
        } catch (Exception e) {
            throw new SQLException("Unable to set/retrieve count value", e);
        }
    }

    return dto;
}
From source file:com.chiorichan.database.DatabaseEngine.java
public static Map<String, Object> convertRow(ResultSet rs) throws SQLException {
    Map<String, Object> result = Maps.newLinkedHashMap();
    ResultSetMetaData rsmd = rs.getMetaData();
    int numColumns = rsmd.getColumnCount();

    for (int i = 1; i < numColumns + 1; i++) {
        String columnName = rsmd.getColumnName(i);
        // Loader.getLogger().info( "Column: " + columnName + " <-> " + rsmd.getColumnTypeName( i ) );

        if (rsmd.getColumnType(i) == java.sql.Types.ARRAY) {
            result.put(columnName, rs.getArray(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.BIGINT) {
            result.put(columnName, rs.getInt(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.TINYINT) {
            result.put(columnName, rs.getInt(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.BIT) { // Sometimes tinyints are read as bits
            result.put(columnName, rs.getInt(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.BOOLEAN) {
            result.put(columnName, rs.getBoolean(columnName));
        } else if (rsmd.getColumnTypeName(i).contains("BLOB")
                || rsmd.getColumnType(i) == java.sql.Types.BINARY) {
            // BLOB = Max Length 65,535. Recommended that you use a LONGBLOB.
            byte[] bytes = rs.getBytes(columnName);
            result.put(columnName, bytes);
            /*
             * try
             * {
             *     result.put( columnName, new String( bytes, "ISO-8859-1" ) );
             * }
             * catch ( UnsupportedEncodingException e )
             * {
             *     e.printStackTrace();
             * }
             */
        } else if (rsmd.getColumnType(i) == java.sql.Types.DOUBLE) {
            result.put(columnName, rs.getDouble(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.FLOAT) {
            result.put(columnName, rs.getFloat(columnName));
        } else if (rsmd.getColumnTypeName(i).equals("INT")) {
            result.put(columnName, rs.getInt(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.NVARCHAR) {
            result.put(columnName, rs.getNString(columnName));
        } else if (rsmd.getColumnTypeName(i).equals("VARCHAR")) {
            result.put(columnName, rs.getString(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.SMALLINT) {
            result.put(columnName, rs.getInt(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.DATE) {
            result.put(columnName, rs.getDate(columnName));
        } else if (rsmd.getColumnType(i) == java.sql.Types.TIMESTAMP) {
            result.put(columnName, rs.getTimestamp(columnName));
        } else {
            result.put(columnName, rs.getObject(columnName));
        }
    }
    return result;
}
From source file:net.mindengine.oculus.frontend.db.jdbc.BeanMapper.java
@Override
public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
    ResultSetMetaData metaData = rs.getMetaData();
    int columns = metaData.getColumnCount();
    try {
        Class<?> clazz = Class.forName(className);
        Object obj = clazz.newInstance();
        for (int i = 1; i <= columns; i++) {
            String column = metaData.getColumnName(i);
            FieldMapper fm = fields.get(column);
            if (fm != null) {
                Method method = fm.getMethod();
                String typeName = fm.getType().getSimpleName();
                typeName = StringUtils.capitalize(typeName);
                if (typeName.endsWith("[]")) {
                    typeName = typeName.substring(0, typeName.length() - 2) + "s";
                }
                if (typeName.equals("Date")) {
                    typeName = "Timestamp";
                }
                if (typeName.equals("Integer")) {
                    typeName = "Int";
                }
                Method rsMethod = ResultSet.class.getMethod("get" + typeName, String.class);
                Object value = rsMethod.invoke(rs, column);
                if (value instanceof Timestamp) {
                    Timestamp timestamp = (Timestamp) value;
                    value = new Date(timestamp.getTime());
                }
                method.invoke(obj, value);
            }
        }
        return obj;
    } catch (Exception e) {
        throw new SQLException(e);
    }
}
From source file:cc.osint.graphd.db.SQLDB.java
private JSONObject jsonizeResultSet(ResultSet rs) throws Exception {
    List<JSONObject> results = new ArrayList<JSONObject>();
    ResultSetMetaData md = rs.getMetaData();
    int colmax = md.getColumnCount();
    int i;
    for (; rs.next();) {
        JSONObject result = new JSONObject();
        for (i = 1; i <= colmax; i++) {
            String colName = md.getColumnName(i).toLowerCase();
            String colClassName = md.getColumnClassName(i);
            String colType = md.getColumnTypeName(i);
            Object obj = rs.getObject(i);
            result.put(colName, obj);
            log.info(colName + ": " + colClassName + ": " + colType + ": " + obj.toString());
        }
        results.add(result);
    }
    JSONObject result = new JSONObject();
    result.put("results", results);
    return result;
}
From source file:net.orpiske.ssps.common.db.SimpleRsHandler.java
@Override
public T handle(ResultSet rs) throws SQLException {
    // No records to handle :O
    if (!rs.next()) {
        return null;
    }

    ResultSetMetaData meta = rs.getMetaData();

    for (int i = 1; i <= meta.getColumnCount(); i++) {
        Object value = rs.getObject(i);
        String name = meta.getColumnName(i);

        try {
            /*
             * We convert the column name to a more appropriate and java like name
             * because some columns are usually named as some_thing whereas Java
             * properties are named someThing. This call does this conversion.
             */
            String javaProperty = NameConverter.sqlToProperty(name);
            PropertyUtils.setSimpleProperty(dto, javaProperty, value);
        } catch (Exception e) {
            throw new SQLException("Unable to set property " + name + " for bean" + dto.getClass(), e);
        }
    }

    return dto;
}