List of usage examples for java.sql ResultSet getMetaData
ResultSetMetaData getMetaData() throws SQLException;
ResultSet
object's columns. From source file: com.zimbra.cs.mailbox.util.MetadataDump.java
private static List<Row> getRevisionRows(DbConnection conn, int groupId, int mboxId, int itemId, boolean fromDumpster) throws ServiceException { PreparedStatement stmt = null; ResultSet rs = null; try {/*from w w w . java2s . co m*/ String sql = "SELECT * FROM " + DbMailItem.getRevisionTableName(groupId, fromDumpster) + " WHERE mailbox_id = " + mboxId + " AND item_id = " + itemId + " ORDER BY mailbox_id, item_id, version DESC"; stmt = conn.prepareStatement(sql); rs = stmt.executeQuery(); List<Row> rows = new ArrayList<Row>(); while (rs.next()) { Row row = new Row(); ResultSetMetaData rsMeta = rs.getMetaData(); int cols = rsMeta.getColumnCount(); for (int i = 1; i <= cols; i++) { String colName = rsMeta.getColumnName(i); String colValue = rs.getString(i); if (rs.wasNull()) colValue = null; row.addColumn(colName, colValue); } rows.add(row); } return rows; } catch (SQLException e) { throw ServiceException.INVALID_REQUEST("No such item: mbox=" + mboxId + ", item=" + itemId, e); } finally { DbPool.closeResults(rs); DbPool.closeStatement(stmt); } }
From source file:dbs_project.util.Utils.java
public static String resultSetToHtmlTable(java.sql.ResultSet rs) throws SQLException { int rowCount = 0; final StringBuilder result = new StringBuilder(); result.append("<P ALIGN='center'>\n<TABLE BORDER=1>\n"); ResultSetMetaData rsmd = rs.getMetaData(); int columnCount = rsmd.getColumnCount(); //header/* w w w .j av a 2 s . c o m*/ result.append("\t<TR>\n"); for (int i = 0; i < columnCount; ++i) { result.append("\t\t<TH>").append(rsmd.getColumnLabel(i + 1)).append("</TH>\n"); } result.append("\t</TR>\n"); //data while (rs.next()) { ++rowCount; result.append("\t<TR>\n"); for (int i = 0; i < columnCount; ++i) { String value = rs.getString(i + 1); if (rs.wasNull()) { value = "<null>"; } result.append("\t\t<TD>").append(value).append("</TD>\n"); } result.append("\t</TR>\n"); } result.append("</TABLE>\n</P>\n"); return result.toString(); }
From source file:com.viettel.ws.client.JDBCUtil.java
/** * Create document using DOM api/* w w w . j ava 2 s . c om*/ * * @param rs a result set * @return A document of a result set * @throws ParserConfigurationException - If error when parse string * @throws SQLException - If error when read data from database */ public static Document toDocument(ResultSet rs) throws ParserConfigurationException, SQLException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setFeature(FEATURE_GENERAL_ENTITIES, false); factory.setFeature(FEATURE_PARAMETER_ENTITIES, false); factory.setXIncludeAware(false); factory.setExpandEntityReferences(false); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.newDocument(); Element results = doc.createElement("Results"); doc.appendChild(results); ResultSetMetaData rsmd = rs.getMetaData(); int colCount = rsmd.getColumnCount(); while (rs.next()) { Element row = doc.createElement("Row"); results.appendChild(row); for (int i = 1; i <= colCount; i++) { String columnName = rsmd.getColumnName(i); Object value = rs.getObject(i); Element node = doc.createElement(columnName); node.appendChild(doc.createTextNode(value.toString())); row.appendChild(node); } } return doc; }
From source file:dbconverter.dao.util.ToolKit.java
/** * Given a ResultSet, writes the contained data as JSON to a target file, * with the expectation that said file will be used in an Elasticsearch * bulk index operation.//from www. j a v a2 s. c o m * This method supports arbitrary-sized ResultSets, provided interval is set low enough * @param resultSet The ResultSet to save to a file * @param obj A QueryObject which must contain the index and type of the target * @param interval Determines how many documents should be stored within Java at a time * If you run out of heap space, try decreasing this value * @param fileName The name of the file to write to * @author hightowe */ public static void writeResultSetToJson(ResultSet resultSet, QueryObject obj, int interval, String fileName) { assert resultSet != null : "ResultSet cannont be null!"; List<String> resultsList = new ArrayList<>(); try { ResultSetMetaData rsMetaData = resultSet.getMetaData(); int columnNumbers = rsMetaData.getColumnCount(); int count = 0; int prev = 0; while (resultSet.next()) { Map<String, Object> dataMap = new HashMap<>(); // add all column names to the map key-set for (int i = 1; i <= columnNumbers; i++) { dataMap.put(rsMetaData.getColumnLabel(i), resultSet.getObject(i)); } dataMap.put(TIME_STAMP, getISOTime(TIME_STAMP_FORMAT)); // Add the data to List of Maps String json = ToolKit.convertMapToJson(dataMap); resultsList.add(json); count++; // write to file after every (interval)th run, then clear // resultsList to avoid heap space errors if (count % interval == 0) { writeJsonStringsToFile(resultsList, fileName, obj, prev); prev += interval; resultsList.clear(); } } writeJsonStringsToFile(resultsList, fileName, obj, prev); } catch (SQLException e) { logger.error(e); } }
From source file:com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java
public static RawMatrix readExecutionItem(IExecutionItem item, long maxRecords) throws SQLException, ScopeException { long metter_start = System.currentTimeMillis(); try {//from w ww. j ava2 s . c o m RawMatrix matrix = new RawMatrix(); ResultSet result = item.getResultSet(); IJDBCDataFormatter formatter = item.getDataFormatter(); // ResultSetMetaData metadata = result.getMetaData(); int nbColumns = metadata.getColumnCount(); IVendorSupport vendorSpecific; vendorSpecific = VendorSupportRegistry.INSTANCE.getVendorSupport(item.getDatabase()); int[] normalizedTypes = vendorSpecific.getVendorMetadataSupport().normalizeColumnType(result); int i = 0; while (i < nbColumns) { matrix.colTypes.add(normalizedTypes[i]); matrix.colNames.add(metadata.getColumnName(i + 1)); i++; } int count = 0; matrix.moreData = false; // while ((count++ < maxRecords || maxRecords < 0) && (matrix.moreData = result.next())) { Object[] rawrow = new Object[nbColumns]; i = 0; while (i < nbColumns) { Object value = result.getObject(i + 1); Object unbox = formatter.unboxJDBCObject(value, matrix.colTypes.get(i)); if (unbox instanceof String) { String stringVal = (String) unbox; rawrow[i] = DimensionValuesDictionary.INSTANCE.getRef(stringVal); } else { rawrow[i] = unbox; } i++; } matrix.addRow(new RawRow(rawrow)); count++; } long metter_finish = new Date().getTime(); if (logger.isDebugEnabled()) { logger.debug(("SQLQuery#" + item.getID() + "read " + (count - 1) + " row(s) in " + (metter_finish - metter_start) + " ms.")); } return matrix; } finally { item.close(); } }
From source file:dbconverter.dao.util.ToolKit.java
/** * Indexes every document within a ResultSet object * @param resultSet The ResultSet containing all documents to be indexed * @param bl Determines where to index the data * @param uploadInterval Determines how frequently to clear local memory * @return The number of documents indexed * @author hightowe// ww w . j a v a 2s. co m */ public static int bulkIndexResultSet(ResultSet resultSet, BulkLoader bl, int uploadInterval) { assert resultSet != null : PARAMETER_ERROR; assert uploadInterval > 0 : PARAMETER_ERROR; assert bl != null && bl.isConfigured() : PARAMETER_ERROR; int count = 0; try { ResultSetMetaData rsMetaData = resultSet.getMetaData(); int columnNumbers = rsMetaData.getColumnCount(); List<Map> docsList = new ArrayList<>(); while (resultSet.next()) { Map<String, Object> dataMap = new HashMap<>(); for (int i = 1; i <= columnNumbers; i++) { dataMap.put(rsMetaData.getColumnLabel(i), resultSet.getString(i)); } // append a timestamp of when this document was created dataMap.put(TIME_STAMP, getISOTime(TIME_STAMP_FORMAT)); docsList.add(dataMap); count++; if (count % uploadInterval == 0) { bl.bulkIndex(docsList); logger.info("Indexed " + count + " documents " + getISOTime(TIME_STAMP_FORMAT)); docsList.clear(); } } if (docsList.size() > 0) { bl.bulkIndex(docsList); logger.info("Indexed " + count + " documents " + getISOTime(TIME_STAMP_FORMAT)); } } catch (SQLException ex) { logger.error(ex); } logger.info("Total documents indexed: " + count + ", " + getISOTime(TIME_STAMP_FORMAT)); return count; }
From source file:br.com.cobranca.util.Util.java
public static <T> T atribuirValores(Class<T> classe, ResultSet rs) throws Exception { T obj = classe.newInstance();/*from ww w . j a v a2 s. com*/ //Percorre lista de colunas for (int j = 0; j < rs.getMetaData().getColumnCount(); j++) { String nomeColuna = rs.getMetaData().getColumnName(j + 1); Field field = obj.getClass().getDeclaredField(nomeColuna); field.setAccessible(true); String tipoColuna = field.getType().getSimpleName(); if (tipoColuna.toUpperCase().contains("INT")) { tipoColuna = "Int"; } else { tipoColuna = StringPrimeiraLetraMaiuscula(tipoColuna); } // rs . get + tipo da coluna, passando o nome da coluna como parmetro Method met = rs.getClass().getMethod("get" + tipoColuna, String.class); if (tipoColuna.equals("Int")) { Integer valor = (Integer) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Integer.class); met.invoke(obj, valor); } else if (tipoColuna.equals("String")) { String valor = (String) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), String.class); met.invoke(obj, valor); } else if (tipoColuna.equals("Double")) { Double valor = (Double) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Double.class); met.invoke(obj, valor); } else if (tipoColuna.equals("Float")) { Float valor = (Float) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Float.class); met.invoke(obj, valor); } else if (tipoColuna.equals("Long")) { Long valor = (Long) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Long.class); met.invoke(obj, valor); } else if (tipoColuna.equals("Boolean")) { Boolean valor = (Boolean) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Boolean.class); met.invoke(obj, valor); } else if 
(tipoColuna.equals("Date")) { Date valor = (Date) met.invoke(rs, nomeColuna); met = obj.getClass().getMethod("set" + StringPrimeiraLetraMaiuscula(nomeColuna), Date.class); met.invoke(obj, valor); } else { break; } } return obj; }
From source file:com.sinosoft.one.data.jade.rowmapper.ArrayRowMapper.java
public Object mapRow(ResultSet rs, int rowNum) throws SQLException { int columnSize = rs.getMetaData().getColumnCount(); Object array = Array.newInstance(componentType, columnSize); for (int i = 0; i < columnSize; i++) { Object value = JdbcUtils.getResultSetValue(rs, (i + 1), componentType); Array.set(array, i, value); }//from w ww . jav a 2 s .c om return array; }
From source file:com.gzj.tulip.jade.rowmapper.ArrayRowMapper.java
@Override public Object mapRow(ResultSet rs, int rowNum) throws SQLException { int columnSize = rs.getMetaData().getColumnCount(); Object array = Array.newInstance(componentType, columnSize); for (int i = 0; i < columnSize; i++) { Object value = JdbcUtils.getResultSetValue(rs, (i + 1), componentType); Array.set(array, i, value); }// w w w.j a v a 2 s. c o m return array; }
From source file:com.hangum.tadpole.engine.sql.util.resultset.ResultSetUtils.java
/** * ResultSet to List//from ww w. j av a 2s . c om * * * * @param isShowRowNum ? ?. * @param rs ResultSet * @param limitCount * @param intLastIndex * @return * @throws SQLException */ public static TadpoleResultSet getResultToList(boolean isShowRowNum, final ResultSet rs, final int limitCount, int intLastIndex) throws SQLException { TadpoleResultSet returnRS = new TadpoleResultSet(); Map<Integer, Object> tmpRow = null; // ?? ? . int rowCnt = intLastIndex; while (rs.next()) { tmpRow = new HashMap<Integer, Object>(); int intStartIndex = 0; if (isShowRowNum) { intStartIndex++; tmpRow.put(0, rowCnt + 1); } for (int i = 0; i < rs.getMetaData().getColumnCount(); i++) { final int intColIndex = i + 1; final int intShowColIndex = i + intStartIndex; Object obj = null; try { obj = rs.getObject(intColIndex); // int type = rs.getMetaData().getColumnType(intColIndex); // if(obj instanceof oracle.sql.STRUCT) { // tmpRow.put(intShowColIndex, rs.getObject(intColIndex)); // } else if (obj == null) { tmpRow.put(intShowColIndex, PublicTadpoleDefine.DEFINE_NULL_VALUE); } else { String type = obj.getClass().getSimpleName(); if (type.toUpperCase().contains("LOB")) { tmpRow.put(intShowColIndex, rs.getObject(intColIndex)); } else { tmpRow.put(intShowColIndex, rs.getString(intColIndex)); } // if(logger.isDebugEnabled()) { // logger.debug("======[jdbc type ]===> " + rs.getMetaData().getColumnType(intColIndex) + ", class type is " + obj.getClass().getName()); // } } } catch (Exception e) { logger.error("ResutSet fetch error", e); //$NON-NLS-1$ tmpRow.put(i + intStartIndex, ""); //$NON-NLS-1$ } } returnRS.getData().add(tmpRow); // ? ? . (hive driver getRow ? ) --;; 2013.08.19, hangum if (limitCount == (rowCnt + 1)) { returnRS.setEndOfRead(false); break; } rowCnt++; } return returnRS; }