List of usage examples for java.sql ResultSetMetaData getColumnLabel
String getColumnLabel(int column) throws SQLException;
From source file:net.sf.jdbcwrappers.trim.TrimmingResultSetInvocationHandler.java
private void fetchCharColumns() throws SQLException { if (charColumns == null) { ResultSetMetaData metadata = target.getMetaData(); int columnCount = metadata.getColumnCount(); charColumns = new HashSet<String>(); isCharColumn = new boolean[columnCount]; for (int i = 1; i <= columnCount; i++) { if (metadata.getColumnType(i) == Types.CHAR) { charColumns.add(metadata.getColumnLabel(i).toUpperCase()); isCharColumn[i - 1] = true; }//w ww .j av a 2s .co m } if (LOG.isDebugEnabled()) { LOG.debug("CHAR columns: " + charColumns); } } }
From source file:com.netspective.axiom.sql.ResultSetUtils.java
/**
 * Copies the first row of the given result set into a map whose keys are the
 * lower-cased column labels (when {@code useLabelAsKey}) or column names.
 *
 * @param rs            result set positioned before its first row
 * @param useLabelAsKey when true, key by {@code getColumnLabel}; otherwise by
 *                      {@code getColumnName}
 * @return a map of column key to value for the first row, or {@code null}
 *         when the result set has no rows
 * @throws SQLException on any JDBC error
 */
public Map getResultSetSingleRowAsMap(ResultSet rs, boolean useLabelAsKey) throws SQLException {
    if (!rs.next()) {
        return null; // empty result set
    }
    Map row = new HashMap();
    ResultSetMetaData md = rs.getMetaData();
    int columnCount = md.getColumnCount();
    for (int col = 1; col <= columnCount; col++) { // JDBC columns are 1-based
        String key = useLabelAsKey ? md.getColumnLabel(col) : md.getColumnName(col);
        row.put(key.toLowerCase(), rs.getObject(col));
    }
    return row;
}
From source file:ViewDB.java
/**
 * Constructs the data panel: a two-column grid with one labeled text field
 * per column of the row set. Fields whose column class is not
 * {@code java.lang.String} are made read-only.
 *
 * @param rs the result set whose contents this panel displays
 * @throws SQLException if reading the result set metadata fails
 */
public DataPanel(RowSet rs) throws SQLException {
    fields = new ArrayList<JTextField>();
    setLayout(new GridBagLayout());
    GridBagConstraints gbc = new GridBagConstraints();
    gbc.gridwidth = 1;
    gbc.gridheight = 1;
    ResultSetMetaData md = rs.getMetaData();
    for (int col = 1; col <= md.getColumnCount(); col++) { // JDBC columns are 1-based
        gbc.gridy = col - 1;

        // Left cell: the column label.
        gbc.gridx = 0;
        gbc.anchor = GridBagConstraints.EAST;
        add(new JLabel(md.getColumnLabel(col)), gbc);

        // Right cell: an input field sized to the column's display width.
        JTextField field = new JTextField(md.getColumnDisplaySize(col));
        if (!md.getColumnClassName(col).equals("java.lang.String")) {
            field.setEditable(false); // only String columns are editable
        }
        fields.add(field);
        gbc.gridx = 1;
        gbc.anchor = GridBagConstraints.WEST;
        add(field, gbc);
    }
}
From source file:org.hyperic.hq.plugin.postgresql.DataBaseCollector.java
private void extartMetrics(String query, Connection conn, boolean getDBNames) throws SQLException { Statement stmt = null;//from www. jav a 2s . c o m ResultSet rs = null; try { log.debug("[extartMetrics] query:'" + query + "'"); stmt = conn.createStatement(); rs = stmt.executeQuery(query); ResultSetMetaData md = rs.getMetaData(); while (rs.next()) { String dbName = rs.getString("datname"); if (getDBNames) { db_cache.add(dbName); } for (int c = 1; c <= md.getColumnCount(); c++) { setValue(dbName + "." + md.getColumnLabel(c), rs.getString(c)); } } } finally { DBUtil.closeJDBCObjects(log, null, stmt, rs); } }
From source file:org.gsoft.admin.ScriptRunner.java
/** * Runs an SQL script (read in using the Reader parameter) using the * connection passed in//from www . j ava 2 s . c o m * * @param conn * - the connection to use for the script * @param reader * - the source of the script * @throws SQLException * if any SQL errors occur * @throws IOException * if there is an error reading from the Reader */ private void runScript(Connection conn, Reader reader) throws IOException, SQLException { StringBuffer command = null; try { LineNumberReader lineReader = new LineNumberReader(reader); String line = null; while ((line = lineReader.readLine()) != null) { if (command == null) { command = new StringBuffer(); } String trimmedLine = line.trim(); if (trimmedLine.startsWith("--")) { println(trimmedLine); } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { // Do nothing } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) { // Do nothing } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { command.append(line.substring(0, line.lastIndexOf(getDelimiter()))); command.append(" "); Statement statement = conn.createStatement(); println(command); boolean hasResults = false; if (stopOnError) { hasResults = statement.execute(command.toString()); } else { try { statement.execute(command.toString()); } catch (SQLException e) { e.fillInStackTrace(); printlnError("Error executing: " + command); printlnError(e); } } if (autoCommit && !conn.getAutoCommit()) { conn.commit(); } ResultSet rs = statement.getResultSet(); if (hasResults && rs != null) { ResultSetMetaData md = rs.getMetaData(); int cols = md.getColumnCount(); for (int i = 0; i < cols; i++) { String name = md.getColumnLabel(i); print(name + "\t"); } println(""); while (rs.next()) { for (int i = 0; i < cols; i++) { String value = rs.getString(i); print(value + "\t"); } println(""); } } command = null; try { statement.close(); } catch (Exception e) { // Ignore to workaround a 
bug in Jakarta DBCP } Thread.yield(); } else { command.append(line); command.append(" "); } } if (!autoCommit) { conn.commit(); } } catch (SQLException e) { e.fillInStackTrace(); printlnError("Error executing: " + command); printlnError(e); throw e; } catch (IOException e) { e.fillInStackTrace(); printlnError("Error executing: " + command); printlnError(e); throw e; } finally { conn.rollback(); flush(); } }
From source file:org.apache.ibatis.jdbc.SqlRunner.java
private List<Map<String, Object>> getResults(ResultSet rs) throws SQLException { try {//from w w w . j a va 2s . c o m List<Map<String, Object>> list = new ArrayList<Map<String, Object>>(); List<String> columns = new ArrayList<String>(); List<TypeHandler<?>> typeHandlers = new ArrayList<TypeHandler<?>>(); ResultSetMetaData rsmd = rs.getMetaData(); for (int i = 0, n = rsmd.getColumnCount(); i < n; i++) { columns.add(rsmd.getColumnLabel(i + 1)); try { Class<?> type = Resources.classForName(rsmd.getColumnClassName(i + 1)); TypeHandler<?> typeHandler = typeHandlerRegistry.getTypeHandler(type); if (typeHandler == null) { typeHandler = typeHandlerRegistry.getTypeHandler(Object.class); } typeHandlers.add(typeHandler); } catch (Exception e) { typeHandlers.add(typeHandlerRegistry.getTypeHandler(Object.class)); } } while (rs.next()) { Map<String, Object> row = new HashMap<String, Object>(); for (int i = 0, n = columns.size(); i < n; i++) { String name = columns.get(i); TypeHandler<?> handler = typeHandlers.get(i); row.put(name.toUpperCase(Locale.ENGLISH), handler.getResult(rs, name)); } list.add(row); } return list; } finally { try { if (rs != null) { rs.close(); } } catch (Exception e) { // ignore } } }
From source file:com.squid.kraken.v4.caching.redis.datastruct.RawMatrix.java
/**
 * Serializes (part of) an execution item's result set into a Kryo-encoded
 * byte chunk suitable for the Redis cache.
 *
 * Wire layout written to the output, in order: non-primitive class
 * registrations (count, then name/id pairs), a version marker (-1 followed by
 * VERSION for post-V0 streams), the Redis cache type ordinal, column count,
 * column labels, column types, then one boolean-prefixed record per row, a
 * terminating {@code false}, the "more than limit" flag, and (V1+) the
 * execution date. Readers must consume fields in exactly this order.
 *
 * Rows are emitted until the result set is exhausted or the serialized size
 * crosses {@code maxChunkSizeInMB} (checked every 100 rows).
 *
 * @param item                execution item supplying the result set,
 *                            formatter and database
 * @param nbLinesExpectedLeft number of rows still expected (a query limit),
 *                            or -1 when there is no limit
 * @return the serialized chunk plus row count, timing, and a has-more flag
 * @throws IOException  on serialization failure
 * @throws SQLException on any JDBC error
 */
public static RawMatrixStreamExecRes streamExecutionItemToByteArray(IExecutionItem item, long nbLinesExpectedLeft)
        throws IOException, SQLException {
    RawMatrixStreamExecRes res = new RawMatrixStreamExecRes();
    long metter_start = System.currentTimeMillis();
    ByteArrayOutputStream baout = new ByteArrayOutputStream();
    Output kout = new Output(baout);
    Kryo kryo = new Kryo();
    kryo.setRegistrationRequired(true);
    kryo.setReferences(false);
    ResultSet result = item.getResultSet();
    IJDBCDataFormatter formatter = item.getDataFormatter();
    ResultSetMetaData metadata = result.getMetaData();
    int nbColumns = metadata.getColumnCount();
    IVendorSupport vendorSpecific = VendorSupportRegistry.INSTANCE.getVendorSupport(item.getDatabase());
    int[] colTypes = vendorSpecific.getVendorMetadataSupport().normalizeColumnType(result);
    // Collect column labels (JDBC columns are 1-based).
    String[] colNames = new String[nbColumns];
    int i = 0;
    while (i < nbColumns) {
        colNames[i] = metadata.getColumnLabel(i + 1);
        i++;
    }
    // Register the Java classes backing non-primitive column types with Kryo.
    // Class mappings have to be registered before we start writing.
    HashMap<String, Integer> registration = new HashMap<String, Integer>();
    for (int val : colTypes) {
        if (!isPrimitiveType(val)) {
            String className = getJavaDatatype(val);
            try {
                if (registration.get(className) == null) {
                    registration.put(className, kryo.register(Class.forName(className)).getId());
                }
            } catch (ClassNotFoundException e0) {
                logger.info("Class " + className + " not found");
            } catch (NullPointerException e1) {
                logger.info("Class " + className + " not found");
            }
        }
    }
    // --- start writing ---
    // Registrations: count, then (class name, kryo id) pairs.
    kout.writeInt(registration.keySet().size());
    for (String s : registration.keySet()) {
        kout.writeString(s);
        kout.writeInt(registration.get(s));
    }
    // Version marker: -1 sentinel (for V0 compatibility, which lacked version
    // information) followed by the actual version.
    int version = VERSION;
    if (version >= 1) {
        kout.writeInt(-1);
        kout.writeInt(version);
    }
    // Redis cache type.
    kout.writeInt(RedisCacheType.RAW_MATRIX.ordinal());
    // Number of columns, column names, column types.
    kout.writeInt(nbColumns);
    for (String n : colNames)
        kout.writeString(n);
    for (Integer t : colTypes)
        kout.writeInt(t);
    // Rows. Strings are dictionary-encoded per chunk: first occurrence is
    // written inline (MEMBER_DEFINITION), repeats as an index reference
    // (MEMBER_REFERENCE); tempsDict tracks first occurrences.
    HashMap<String, Integer> tempsDict = new HashMap<String, Integer>();
    int count = 0;
    int index = 0;
    boolean moreData = false;
    boolean maxSizeReached = false;
    while ((!maxSizeReached) && (moreData = result.next())) {
        i = 0;
        kout.writeBoolean(true); // row marker: one more record follows
        while (i < nbColumns) {
            Object value = result.getObject(i + 1);
            Object unbox = formatter.unboxJDBCObject(value, colTypes[i]);
            if (unbox instanceof String) {
                String stringVal = (String) unbox;
                Integer ref = tempsDict.get(stringVal);
                if (ref != null) {
                    kout.write(MEMBER_REFERENCE);
                    kout.writeInt(ref);
                } else {
                    kout.write(MEMBER_DEFINITION);
                    kout.writeString(stringVal);
                    tempsDict.put(stringVal, new Integer(index));
                    index++;
                }
            } else {
                // Non-string value: serialized with its class tag.
                kout.write(MEMBER_VALUE);
                kryo.writeClassAndObject(kout, unbox);
            }
            i++;
        }
        count++;
        // Stats: log the time taken for the first 100 rows.
        if (count == 100) {
            long intermediate = new Date().getTime();
            logger.info("task=RawMatrix"
                    + " method=streamExecutionItemToByteArray" + " duration="
                    + ((intermediate - metter_start)) + " error=false status=running queryid="
                    + item.getID());
        }
        // Stop when the chunk reaches the maximum size (checked every 100 rows).
        if (count % 100 == 0) {
            float size = Math.round(baout.size() / 1048576);
            if (size >= maxChunkSizeInMB) {
                logger.info("Max size of " + maxChunkSizeInMB + "MB for one chunk reached");
                maxSizeReached = true;
            }
        }
    }
    kout.writeBoolean(false); // close the data stream, begin metadata
    // We stopped either because maxSize was reached or there was no more data.
    boolean moreToRead = false; // we did not reach the end of the resultset
    boolean moreThanLimit = false; // in case of a limit query, did we reach the limit
    if (maxSizeReached) {
        // We stopped because we hit the hard memory limit for one chunk.
        // NOTE(review): both branches below set moreThanLimit = true; the
        // comments suggest the first branch ("read exactly as many lines as
        // the limit") may have been intended to differ — confirm.
        if ((nbLinesExpectedLeft > -1) && (!(count < nbLinesExpectedLeft))) {
            // We read exactly as many lines as the limit.
            moreThanLimit = true;
        } else {
            moreThanLimit = true;
            moreToRead = true;
        }
    } else {
        if (!moreData) {
            // No more lines to read.
            if (nbLinesExpectedLeft > -1) {
                // Limit query.
                if (!(count < nbLinesExpectedLeft)) {
                    // We read as many lines as the limit.
                    moreThanLimit = true;
                }
            }
        }
    }
    kout.writeBoolean(moreThanLimit);
    // -- V1 only: the compute date.
    if (version >= 1) {
        kout.writeLong(item.getExecutionDate().getTime());
    }
    // Stats: record total serialization time.
    long metter_finish = new Date().getTime();
    // TODO Get project
    SQLStats queryLog = new SQLStats(Integer.toString(item.getID()), "streamExecutionItemToByteArray", "",
            (metter_finish - metter_start), item.getDatabase().getName());
    queryLog.setError(false);
    PerfDB.INSTANCE.save(queryLog);
    kout.close();
    res.setHasMore(moreToRead);
    res.setExecutionTime(metter_finish - metter_start);
    res.setNbLines(count);
    res.setStreamedMatrix(baout.toByteArray());
    return res;
}
From source file:org.apache.hadoop.sqoop.util.ResultSetPrinter.java
/** * Format the contents of the ResultSet into something that could be printed * neatly; the results are appended to the supplied StringBuilder. */// w w w .ja v a 2s . c om public final void printResultSet(OutputStream os, ResultSet results) throws IOException { try { StringBuilder sbNames = new StringBuilder(); int cols = results.getMetaData().getColumnCount(); int[] colWidths = new int[cols]; ResultSetMetaData metadata = results.getMetaData(); for (int i = 1; i < cols + 1; i++) { String colName = metadata.getColumnName(i); colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i), MAX_COL_WIDTH); if (colName == null || colName.equals("")) { colName = metadata.getColumnLabel(i) + "*"; } printPadded(sbNames, colName, colWidths[i - 1]); sbNames.append(COL_SEPARATOR); } sbNames.append('\n'); StringBuilder sbPad = new StringBuilder(); for (int i = 0; i < cols; i++) { for (int j = 0; j < COL_SEPARATOR.length() + colWidths[i]; j++) { sbPad.append('-'); } } sbPad.append('\n'); sendToStream(sbPad, os); sendToStream(sbNames, os); sendToStream(sbPad, os); while (results.next()) { StringBuilder sb = new StringBuilder(); for (int i = 1; i < cols + 1; i++) { printPadded(sb, results.getString(i), colWidths[i - 1]); sb.append(COL_SEPARATOR); } sb.append('\n'); sendToStream(sb, os); } sendToStream(sbPad, os); } catch (SQLException sqlException) { LOG.error("Error reading from database: " + sqlException.toString()); } }
From source file:com.haulmont.yarg.loaders.impl.SqlDataLoader.java
@Override public List<Map<String, Object>> loadData(ReportQuery reportQuery, BandData parentBand, Map<String, Object> params) { List resList;//w w w. ja v a2 s. c o m final List<OutputValue> outputValues = new ArrayList<OutputValue>(); String query = reportQuery.getScript(); if (StringUtils.isBlank(query)) { return Collections.emptyList(); } try { if (Boolean.TRUE.equals(reportQuery.getProcessTemplate())) { query = processQueryTemplate(query, parentBand, params); } final QueryPack pack = prepareQuery(query, parentBand, params); ArrayList<Object> resultingParams = new ArrayList<Object>(); QueryParameter[] queryParameters = pack.getParams(); for (QueryParameter queryParameter : queryParameters) { if (queryParameter.isSingleValue()) { resultingParams.add(queryParameter.getValue()); } else { resultingParams.addAll(queryParameter.getMultipleValues()); } } resList = runQuery(reportQuery, pack.getQuery(), resultingParams.toArray(), new ResultSetHandler<List>() { @Override public List handle(ResultSet rs) throws SQLException { List<Object[]> resList = new ArrayList<Object[]>(); while (rs.next()) { ResultSetMetaData metaData = rs.getMetaData(); if (outputValues.size() == 0) { for (int columnIndex = 1; columnIndex <= metaData .getColumnCount(); columnIndex++) { String columnName = metaData.getColumnLabel(columnIndex); OutputValue outputValue = new OutputValue(columnName); setCaseSensitiveSynonym(columnName, outputValue); outputValues.add(outputValue); } } Object[] values = new Object[metaData.getColumnCount()]; for (int columnIndex = 0; columnIndex < metaData.getColumnCount(); columnIndex++) { values[columnIndex] = convertOutputValue(rs.getObject(columnIndex + 1)); } resList.add(values); } return resList; } private void setCaseSensitiveSynonym(String columnName, OutputValue outputValue) { Matcher matcher = Pattern.compile("(?i)as\\s*(" + columnName + ")") .matcher(pack.getQuery()); if (matcher.find()) { outputValue.setSynonym(matcher.group(1)); } } }); } catch 
(DataLoadingException e) { throw e; } catch (Throwable e) { throw new DataLoadingException( String.format("An error occurred while loading data for data set [%s]", reportQuery.getName()), e); } return fillOutputData(resList, outputValues); }
From source file:com.cloudera.sqoop.util.ResultSetPrinter.java
/** * Format the contents of the ResultSet into something that could be printed * neatly; the results are appended to the supplied StringBuilder. */// ww w . j a v a 2 s . co m public final void printResultSet(PrintWriter pw, ResultSet results) throws IOException { try { StringBuilder sbNames = new StringBuilder(); int cols = results.getMetaData().getColumnCount(); int[] colWidths = new int[cols]; ResultSetMetaData metadata = results.getMetaData(); sbNames.append(LEFT_BORDER); for (int i = 1; i < cols + 1; i++) { String colName = metadata.getColumnName(i); colWidths[i - 1] = Math.min(metadata.getColumnDisplaySize(i), MAX_COL_WIDTH); if (colName == null || colName.equals("")) { colName = metadata.getColumnLabel(i) + "*"; } printPadded(sbNames, colName, colWidths[i - 1]); sbNames.append(COL_SEPARATOR); } sbNames.append('\n'); StringBuilder sbPad = new StringBuilder(); for (int i = 0; i < cols; i++) { for (int j = 0; j < COL_SEPARATOR.length() + colWidths[i]; j++) { sbPad.append('-'); } } sbPad.append('-'); sbPad.append('\n'); pw.print(sbPad.toString()); pw.print(sbNames.toString()); pw.print(sbPad.toString()); while (results.next()) { StringBuilder sb = new StringBuilder(); sb.append(LEFT_BORDER); for (int i = 1; i < cols + 1; i++) { printPadded(sb, results.getString(i), colWidths[i - 1]); sb.append(COL_SEPARATOR); } sb.append('\n'); pw.print(sb.toString()); } pw.print(sbPad.toString()); } catch (SQLException sqlException) { LOG.error("Error reading from database: " + StringUtils.stringifyException(sqlException)); } }