Usage examples for java.sql.ResultSetMetaData.getColumnLabel
String getColumnLabel(int column) throws SQLException;
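Before the project excerpts below, here is a minimal, self-contained sketch of the usual pattern: getColumnLabel(int) returns the column's suggested display title (the SQL AS alias when one was given), while getColumnName(int) returns the underlying column name; many drivers return the same string for both when no alias is used. The H2 in-memory database, table name, and alias in this sketch are illustrative assumptions only and are not taken from any of the excerpts.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnLabelDemo {
    public static void main(String[] args) throws SQLException {
        // Assumes an H2 in-memory database on the classpath; any JDBC driver behaves the same way.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = con.createStatement()) {
            st.execute("CREATE TABLE users (id INT, name VARCHAR(32))");
            try (ResultSet rs = st.executeQuery("SELECT id AS user_id, name FROM users")) {
                ResultSetMetaData md = rs.getMetaData();
                for (int i = 1; i <= md.getColumnCount(); i++) {
                    // getColumnLabel(i) returns the SELECT-list alias ("USER_ID" for column 1 in H2),
                    // while getColumnName(i) returns the underlying column name ("ID").
                    System.out.println(md.getColumnLabel(i) + " / " + md.getColumnName(i));
                }
            }
        }
    }
}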
From source file: cz.lbenda.dataman.db.DbStructureFactory.java
private void writeColumnNames(String columnsFrom, ResultSetMetaData metaData) throws SQLException {
    if (Constants.IS_IN_DEVELOP_MODE) {
        if (!columnsFromWriten.contains(columnsFrom)) {
            LOG.debug("Write column names: " + columnsFrom);
            columnsFromWriten.add(columnsFrom);
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                LOG.debug("Column: " + metaData.getColumnName(i) + " : " + metaData.getColumnLabel(i));
            }
        }
    }
}
From source file: org.apache.nifi.processors.standard.GenerateTableFetch.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory)
        throws ProcessException {
    ProcessSession session = sessionFactory.createSession();
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        if (fileToProcess == null) {
            // Incoming connection with no flow file available, do no work (see capability description)
            return;
        }
    }

    final ComponentLog logger = getLogger();

    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(fileToProcess).getValue();
    final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue();
    final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES)
            .evaluateAttributeExpressions(fileToProcess).getValue();
    final int partitionSize = context.getProperty(PARTITION_SIZE).evaluateAttributeExpressions(fileToProcess)
            .asInteger();

    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;

    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
    } catch (final IOException ioe) {
        logger.error("Failed to retrieve observed maximum values from the State Manager. Will not perform "
                + "query until this is accomplished.", ioe);
        context.yield();
        return;
    }

    try {
        // Make a mutable copy of the current state property map. This will be updated by the result row callback, and eventually
        // set as the current state map (after the session has been committed)
        final Map<String, String> statePropertyMap = new HashMap<>(stateMap.toMap());

        // Build a WHERE clause with maximum-value columns (if they exist), and a list of column names that will contain MAX(<column>) aliases. The
        // executed SQL query will retrieve the count of all records after the filter(s) have been applied, as well as the new maximum values for the
        // specified columns. This allows the processor to generate the correctly partitioned SQL statements as well as to update the state with the
        // latest observed maximum values.
        String whereClause = null;
        List<String> maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames)
                ? new ArrayList<>(0)
                : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));
        List<String> maxValueClauses = new ArrayList<>(maxValueColumnNameList.size());

        String columnsClause = null;
        List<String> maxValueSelectColumns = new ArrayList<>(maxValueColumnNameList.size() + 1);
        maxValueSelectColumns.add("COUNT(*)");

        // For each maximum-value column, get a WHERE filter and a MAX(column) alias
        IntStream.range(0, maxValueColumnNameList.size()).forEach((index) -> {
            String colName = maxValueColumnNameList.get(index);
            maxValueSelectColumns.add("MAX(" + colName + ") " + colName);
            final String fullyQualifiedStateKey = getStateKey(tableName, colName);
            String maxValue = statePropertyMap.get(fullyQualifiedStateKey);
            if (StringUtils.isEmpty(maxValue) && !isDynamicTableName) {
                // If the table name is static and the fully-qualified key was not found, try just the column name
                maxValue = statePropertyMap.get(getStateKey(null, colName));
            }
            if (!StringUtils.isEmpty(maxValue)) {
                Integer type = columnTypeMap.get(fullyQualifiedStateKey);
                if (type == null && !isDynamicTableName) {
                    // If the table name is static and the fully-qualified key was not found, try just the column name
                    type = columnTypeMap.get(getStateKey(null, colName));
                }
                if (type == null) {
                    // This shouldn't happen as we are populating columnTypeMap when the processor is scheduled or when the first maximum is observed
                    throw new IllegalArgumentException("No column type found for: " + colName);
                }
                // Add a condition for the WHERE clause
                maxValueClauses.add(colName + (index == 0 ? " > " : " >= ")
                        + getLiteralByType(type, maxValue, dbAdapter.getName()));
            }
        });

        whereClause = StringUtils.join(maxValueClauses, " AND ");
        columnsClause = StringUtils.join(maxValueSelectColumns, ", ");

        // Build a SELECT query with maximum-value columns (if present)
        final String selectQuery = dbAdapter.getSelectStatement(tableName, columnsClause, whereClause, null, null, null);
        long rowCount = 0;

        try (final Connection con = dbcpService.getConnection(); final Statement st = con.createStatement()) {

            final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT)
                    .evaluateAttributeExpressions(fileToProcess).asTimePeriod(TimeUnit.SECONDS).intValue();
            st.setQueryTimeout(queryTimeout); // timeout in seconds

            logger.debug("Executing {}", new Object[] { selectQuery });
            ResultSet resultSet;

            resultSet = st.executeQuery(selectQuery);

            if (resultSet.next()) {
                // Total row count is in the first column
                rowCount = resultSet.getLong(1);

                // Update the state map with the newly-observed maximum values
                ResultSetMetaData rsmd = resultSet.getMetaData();
                for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                    // Some JDBC drivers consider the columns name and label to be very different things.
                    // Since this column has been aliased lets check the label first,
                    // if there is no label we'll use the column name.
                    String resultColumnName = (StringUtils.isNotEmpty(rsmd.getColumnLabel(i))
                            ? rsmd.getColumnLabel(i)
                            : rsmd.getColumnName(i)).toLowerCase();
                    String fullyQualifiedStateKey = getStateKey(tableName, resultColumnName);
                    String resultColumnCurrentMax = statePropertyMap.get(fullyQualifiedStateKey);
                    if (StringUtils.isEmpty(resultColumnCurrentMax) && !isDynamicTableName) {
                        // If we can't find the value at the fully-qualified key name and the table name is static, it is possible (under a previous scheme)
                        // the value has been stored under a key that is only the column name. Fall back to check the column name; either way, when a new
                        // maximum value is observed, it will be stored under the fully-qualified key from then on.
                        resultColumnCurrentMax = statePropertyMap.get(resultColumnName);
                    }

                    int type = rsmd.getColumnType(i);
                    if (isDynamicTableName) {
                        // We haven't pre-populated the column type map if the table name is dynamic, so do it here
                        columnTypeMap.put(fullyQualifiedStateKey, type);
                    }
                    try {
                        String newMaxValue = getMaxValueFromRow(resultSet, i, type, resultColumnCurrentMax,
                                dbAdapter.getName());
                        if (newMaxValue != null) {
                            statePropertyMap.put(fullyQualifiedStateKey, newMaxValue);
                        }
                    } catch (ParseException | IOException pie) {
                        // Fail the whole thing here before we start creating flow files and such
                        throw new ProcessException(pie);
                    }
                }
            } else {
                // Something is very wrong here, one row (even if count is zero) should be returned
                throw new SQLException("No rows returned from metadata query: " + selectQuery);
            }

            final long numberOfFetches = (partitionSize == 0) ? rowCount
                    : (rowCount / partitionSize) + (rowCount % partitionSize == 0 ? 0 : 1);

            // Generate SQL statements to read "pages" of data
            for (long i = 0; i < numberOfFetches; i++) {
                long limit = partitionSize == 0 ? null : partitionSize;
                long offset = partitionSize == 0 ? null : i * partitionSize;
                final String query = dbAdapter.getSelectStatement(tableName, columnNames, whereClause,
                        StringUtils.join(maxValueColumnNameList, ", "), limit, offset);
                FlowFile sqlFlowFile = (fileToProcess == null) ? session.create() : session.create(fileToProcess);
                sqlFlowFile = session.write(sqlFlowFile, out -> out.write(query.getBytes()));
                session.transfer(sqlFlowFile, REL_SUCCESS);
            }

            if (fileToProcess != null) {
                session.remove(fileToProcess);
            }
        } catch (SQLException e) {
            if (fileToProcess != null) {
                logger.error("Unable to execute SQL select query {} due to {}, routing {} to failure",
                        new Object[] { selectQuery, e, fileToProcess });
                fileToProcess = session.putAttribute(fileToProcess, "generatetablefetch.sql.error", e.getMessage());
                session.transfer(fileToProcess, REL_FAILURE);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}", new Object[] { selectQuery, e });
                throw new ProcessException(e);
            }
        }

        session.commit();
        try {
            // Update the state
            stateManager.setState(statePropertyMap, Scope.CLUSTER);
        } catch (IOException ioe) {
            logger.error("{} failed to update State Manager, observed maximum values will not be recorded. "
                    + "Also, any generated SQL statements may be duplicated.", new Object[] { this, ioe });
        }
    } catch (final ProcessException pe) {
        // Log the cause of the ProcessException if it is available
        Throwable t = (pe.getCause() == null ? pe : pe.getCause());
        logger.error("Error during processing: {}", new Object[] { t.getMessage() }, t);
        session.rollback();
        context.yield();
    }
}
From source file: org.sonar.db.AbstractDbTester.java
private static List<Map<String, Object>> getHashMap(ResultSet resultSet) throws Exception {
    ResultSetMetaData metaData = resultSet.getMetaData();
    int colCount = metaData.getColumnCount();
    List<Map<String, Object>> rows = newArrayList();
    while (resultSet.next()) {
        Map<String, Object> columns = newHashMap();
        for (int i = 1; i <= colCount; i++) {
            Object value = resultSet.getObject(i);
            if (value instanceof Clob) {
                Clob clob = (Clob) value;
                value = IOUtils.toString((clob.getAsciiStream()));
                doClobFree(clob);
            } else if (value instanceof BigDecimal) {
                // In Oracle, INTEGER types are mapped as BigDecimal
                BigDecimal bgValue = ((BigDecimal) value);
                if (bgValue.scale() == 0) {
                    value = bgValue.longValue();
                } else {
                    value = bgValue.doubleValue();
                }
            } else if (value instanceof Integer) {
                // To be consistent, all INTEGER types are mapped as Long
                value = ((Integer) value).longValue();
            } else if (value instanceof Timestamp) {
                value = new Date(((Timestamp) value).getTime());
            }
            columns.put(metaData.getColumnLabel(i), value);
        }
        rows.add(columns);
    }
    return rows;
}
From source file: com.nway.spring.jdbc.bean.AsmBeanProcessor.java
/**
 * The positions in the returned array represent column numbers. The values stored at each
 * position represent the index in the <code>PropertyDescriptor[]</code> for the bean property
 * that matches the column name. If no bean property was found for a column, the position is set
 * to <code>PROPERTY_NOT_FOUND</code>.
 *
 * @param rsmd The <code>ResultSetMetaData</code> containing column information.
 *
 * @param props The bean property descriptors.
 *
 * @throws SQLException if a database access error occurs
 *
 * @return An int[] with column index to property index mappings. The 0th element is meaningless
 *         because JDBC column indexing starts at 1.
 */
private int[] mapColumnsToProperties(ResultSetMetaData rsmd, PropertyDescriptor[] props) throws SQLException {
    int cols = rsmd.getColumnCount();
    int[] columnToProperty = new int[cols + 1];
    Arrays.fill(columnToProperty, PROPERTY_NOT_FOUND);
    for (int col = 1; col <= cols; col++) {
        String columnName = rsmd.getColumnLabel(col);
        for (int i = 0; i < props.length; i++) {
            Column columnAnnotation = props[i].getReadMethod().getAnnotation(Column.class);
            if (columnAnnotation == null) {
                // No @Column annotation: compare the column label (ignoring '_') with the property name
                if (columnName.replace("_", "").equalsIgnoreCase(props[i].getName())) {
                    columnToProperty[col] = i;
                    break;
                }
            } else if (columnName.equalsIgnoreCase(columnAnnotation.value())
                    || columnName.equalsIgnoreCase(columnAnnotation.name())) {
                columnToProperty[col] = i;
                break;
            }
        }
    }
    return columnToProperty;
}
From source file: org.fastcatsearch.datasource.reader.DBReader.java
@Override
public void init() throws IRException {
    isClosed = false;
    BULK_SIZE = getConfigInt("bulkSize");
    useBlobFile = getConfigBoolean("useBlobFile");
    tmpFile = new ArrayList<File>();
    dataSet = new Map[BULK_SIZE];

    String jdbcSourceId = getConfigString("jdbcSourceId");
    JDBCSourceInfo jdbcSourceInfo = null;
    IRService irService = ServiceManager.getInstance().getService(IRService.class);
    List<JDBCSourceInfo> jdbcSourceInfoList = irService.getJDBCSourceConfig().getJdbcSourceInfoList();
    for (JDBCSourceInfo info : jdbcSourceInfoList) {
        if (info.getId().equals(jdbcSourceId)) {
            jdbcSourceInfo = info;
            break;
        }
    }

    try {
        con = getConnection(jdbcSourceInfo);
        doBeforeQuery();

        String deleteIdSQL = getConfigString("deleteIdSQL");
        if (deleteIdSQL != null && deleteIdSQL.length() > 0) {
            PreparedStatement idPstmt = null;
            ResultSet rs = null;
            ResultSetMetaData rm = null;
            try {
                idPstmt = con.prepareStatement(q(deleteIdSQL));
                rs = idPstmt.executeQuery();
                rm = rs.getMetaData();
                while (rs.next()) {
                    String[] rid = new String[rm.getColumnCount()];
                    for (int inx = 0; inx < rid.length; inx++) {
                        rid[inx] = rs.getString(inx + 1);
                    }
                    deleteIdList.add(rid);
                }
            } finally {
                if (idPstmt != null) {
                    try {
                        idPstmt.close();
                    } catch (Exception e) {
                    }
                }
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (Exception e) {
                    }
                }
            }
        }

        String dataSQL = getConfigString("dataSQL");
        if (dataSQL == null || dataSQL.length() == 0) {
            throw new IRException("Data query sql is empty!");
        }
        if (logger.isTraceEnabled()) {
            logger.trace("real query = {}", q(dataSQL));
        } else {
            logger.debug("Data query = {}", dataSQL);
        }

        int fetchSize = getConfigInt("fetchSize");
        if (fetchSize < 0) {
            // in mysql, fetch data row by row
            pstmt = con.prepareStatement(q(dataSQL), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            pstmt.setFetchSize(Integer.MIN_VALUE);
        } else {
            pstmt = con.prepareStatement(q(dataSQL));
            if (fetchSize > 0) {
                pstmt.setFetchSize(fetchSize);
            }
        }
        if (maxRows > 0) {
            pstmt.setMaxRows(maxRows);
        }

        r = pstmt.executeQuery();
        ResultSetMetaData rsMetadata = r.getMetaData();
        columnCount = rsMetadata.getColumnCount();
        columnName = new String[columnCount];
        for (int i = 0; i < columnCount; i++) {
            columnName[i] = rsMetadata.getColumnLabel(i + 1).toUpperCase();
            String typeName = rsMetadata.getColumnTypeName(i + 1);
            logger.info("Column-{} [{}]:[{}]", new Object[] { i + 1, columnName[i], typeName });
        }
    } catch (Exception e) {
        closeConnection();
        throw new IRException(e);
    }
}
From source file: org.connid.bundles.soap.wssample.ProvisioningImpl.java
@Override
public List<WSUser> query(Operand query) {
    LOG.debug("Query request received");

    List<WSUser> results = new ArrayList<WSUser>();

    Connection conn = null;
    try {
        String queryString = "SELECT * FROM user" + (query == null ? "" : " WHERE " + query.toString());
        queryString = queryString.replaceAll("__NAME__", "userId").replaceAll("__UID__", "userId")
                .replaceAll("__PASSWORD__", "password");
        LOG.debug("Execute query: {}", queryString);

        if (queryString == null || queryString.length() == 0) {
            throw new SQLException("Invalid query [" + queryString + "]");
        }

        conn = connect();
        Statement statement = conn.createStatement();
        ResultSet rs = statement.executeQuery(queryString);
        ResultSetMetaData metaData = rs.getMetaData();

        LOG.debug("Metadata: {}", metaData);

        while (rs.next()) {
            WSUser user = new WSUser();
            for (int i = 0; i < metaData.getColumnCount(); i++) {
                WSAttributeValue attr = new WSAttributeValue();
                attr.setName(metaData.getColumnLabel(i + 1));
                if (StringUtils.isNotBlank(rs.getString(i + 1))) {
                    attr.setValues(Collections.singletonList(rs.getString(i + 1)));
                }
                if ("userId".equalsIgnoreCase(metaData.getColumnName(i + 1))) {
                    attr.setKey(true);
                    user.setAccountid(rs.getString(i + 1));
                }
                user.addAttribute(attr);
            }
            results.add(user);
        }

        LOG.debug("Retrieved users: {}", results);
    } catch (SQLException e) {
        LOG.error("Search operation failed", e);
    } finally {
        if (conn != null) {
            try {
                close(conn);
            } catch (SQLException ignore) {
                // ignore exception
            }
        }
    }

    return results;
}
From source file: org.vivoweb.harvester.fetch.JDBCFetch.java
/**
 * Get the fields from a result set
 * @param rs the resultset
 * @return the list of fields
 * @throws SQLException error reading resultset
 */
private List<String> getResultSetFields(ResultSet rs) throws SQLException {
    ResultSetMetaData rsmd = rs.getMetaData();
    int count = rsmd.getColumnCount();
    List<String> fields = new ArrayList<String>(count);
    for (int x = 1; x <= count; x++) {
        fields.add(rsmd.getColumnLabel(x));
    }
    return fields;
}
From source file: net.tirasa.connid.bundles.soap.wssample.ProvisioningImpl.java
@Override
public List<WSUser> query(Operand query) {
    LOG.debug("Query request received");

    List<WSUser> results = new ArrayList<WSUser>();

    Connection conn = null;
    try {
        String queryString = "SELECT * FROM user" + (query == null ? "" : " WHERE " + query.toString());
        queryString = queryString.replaceAll("__NAME__", "userId").replaceAll("__UID__", "userId")
                .replaceAll("__PASSWORD__", "password");
        LOG.debug("Execute query: {}", queryString);

        if (queryString == null || queryString.length() == 0) {
            throw new SQLException("Invalid query [" + queryString + "]");
        }

        conn = connect();
        Statement statement = conn.createStatement();
        ResultSet rs = statement.executeQuery(queryString);
        ResultSetMetaData metaData = rs.getMetaData();

        LOG.debug("Metadata: {}", metaData);

        while (rs.next()) {
            WSUser user = new WSUser();
            for (int i = 0; i < metaData.getColumnCount(); i++) {
                WSAttributeValue attr = new WSAttributeValue();
                attr.setName(metaData.getColumnLabel(i + 1));
                if (StringUtils.isNotBlank(rs.getString(i + 1))) {
                    attr.addValue(rs.getString(i + 1));
                }
                if ("userId".equalsIgnoreCase(metaData.getColumnName(i + 1))) {
                    attr.setKey(true);
                    user.setAccountid(rs.getString(i + 1));
                }
                user.addAttribute(attr);
            }
            results.add(user);
        }

        LOG.debug("Retrieved users: {}", results);
    } catch (SQLException e) {
        LOG.error("Search operation failed", e);
    } finally {
        if (conn != null) {
            try {
                close(conn);
            } catch (SQLException ignore) {
                // ignore exception
            }
        }
    }

    return results;
}
From source file: org.apache.jmeter.protocol.jdbc.AbstractJDBCTestElement.java
/**
 * Gets a Data object from a ResultSet.
 *
 * @param rs
 *            ResultSet passed in from a database query
 * @return a Data object
 * @throws java.sql.SQLException
 * @throws UnsupportedEncodingException
 */
private String getStringFromResultSet(ResultSet rs) throws SQLException, UnsupportedEncodingException {
    ResultSetMetaData meta = rs.getMetaData();

    StringBuilder sb = new StringBuilder();

    int numColumns = meta.getColumnCount();
    for (int i = 1; i <= numColumns; i++) {
        sb.append(meta.getColumnLabel(i));
        if (i == numColumns) {
            sb.append('\n');
        } else {
            sb.append('\t');
        }
    }

    JMeterVariables jmvars = getThreadContext().getVariables();
    String[] varNames = getVariableNames().split(COMMA);
    String resultVariable = getResultVariable().trim();
    List<Map<String, Object>> results = null;
    if (resultVariable.length() > 0) {
        results = new ArrayList<>();
        jmvars.putObject(resultVariable, results);
    }
    int j = 0;
    while (rs.next()) {
        Map<String, Object> row = null;
        j++;
        for (int i = 1; i <= numColumns; i++) {
            Object o = rs.getObject(i);
            if (results != null) {
                if (row == null) {
                    row = new HashMap<>(numColumns);
                    results.add(row);
                }
                row.put(meta.getColumnLabel(i), o);
            }
            if (o instanceof byte[]) {
                o = new String((byte[]) o, ENCODING);
            }
            sb.append(o);
            if (i == numColumns) {
                sb.append('\n');
            } else {
                sb.append('\t');
            }
            if (i <= varNames.length) { // i starts at 1
                String name = varNames[i - 1].trim();
                if (name.length() > 0) { // Save the value in the variable if present
                    jmvars.put(name + UNDERSCORE + j, o == null ? null : o.toString());
                }
            }
        }
    }
    // Remove any additional values from previous sample
    for (String varName : varNames) {
        String name = varName.trim();
        if (name.length() > 0 && jmvars != null) {
            final String varCount = name + "_#"; // $NON-NLS-1$
            // Get the previous count
            String prevCount = jmvars.get(varCount);
            if (prevCount != null) {
                int prev = Integer.parseInt(prevCount);
                for (int n = j + 1; n <= prev; n++) {
                    jmvars.remove(name + UNDERSCORE + n);
                }
            }
            jmvars.put(varCount, Integer.toString(j)); // save the current count
        }
    }
    return sb.toString();
}
From source file: com.kylinolap.rest.service.QueryService.java
/**
 * @param sql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String sql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;

    List<List<String>> results = new LinkedList<List<String>>();
    List<SelectedColumnMeta> columnMetas = new LinkedList<SelectedColumnMeta>();

    try {
        conn = getOLAPDataSource(sqlRequest.getProject()).getConnection();

        if (sqlRequest instanceof PrepareSqlRequest) {
            PreparedStatement preparedState = conn.prepareStatement(sql);

            for (int i = 0; i < ((PrepareSqlRequest) sqlRequest).getParams().length; i++) {
                setParam(preparedState, i + 1, ((PrepareSqlRequest) sqlRequest).getParams()[i]);
            }

            resultSet = preparedState.executeQuery();
        } else {
            stat = conn.createStatement();
            resultSet = stat.executeQuery(sql);
        }

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i),
                    metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
                    metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i),
                    metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
                    metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i),
                    metaData.isDefinitelyWritable(i)));
        }

        List<String> oneRow = new LinkedList<String>();

        // fill in results
        while (resultSet.next()) {
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(new LinkedList<String>(oneRow));
            oneRow.clear();
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        close(resultSet, stat, conn);
    }

    boolean isPartialResult = false;
    String cube = "";
    long totalScanCount = 0;
    for (OLAPContext ctx : OLAPContext.getThreadLocalContexts()) {
        isPartialResult |= ctx.storageContext.isPartialResultReturned();
        cube = ctx.cubeInstance.getName();
        totalScanCount += ctx.storageContext.getTotalScanCount();
    }

    SQLResponse response = new SQLResponse(columnMetas, results, cube, 0, false, null, isPartialResult);
    response.setTotalScanCount(totalScanCount);

    return response;
}