Usage examples for java.sql.ResultSetMetaData.getColumnType(int)
int getColumnType(int column) throws SQLException;
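getColumnType returns the designated column's SQL type as a constant from java.sql.Types. Before the real-world examples below, here is a minimal sketch that dumps the type of every column of a table; the helper name, the table argument, and the "WHERE 1 = 0" metadata-only query are illustrative assumptions, not taken from any of the quoted projects.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class ColumnTypeDump {

    /** Prints each column's JDBC type constant and driver-specific type name. */
    public static void dumpColumnTypes(Connection conn, String table) throws SQLException {
        // "WHERE 1 = 0" returns no rows, so only the metadata is fetched
        // (the same trick the Sqoop example below uses).
        try (Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT * FROM " + table + " WHERE 1 = 0")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                int type = meta.getColumnType(i); // a java.sql.Types constant, e.g. Types.VARCHAR
                boolean dateLike = (type == Types.DATE || type == Types.TIMESTAMP);
                System.out.println(meta.getColumnName(i) + " -> " + type + " ("
                        + meta.getColumnTypeName(i) + ")" + (dateLike ? " [date-like]" : ""));
            }
        }
    }
}

Note that getColumnType returns the generic JDBC type, while getColumnTypeName returns the driver-specific type name; several of the examples below check one or the other (or both) when deciding how to read a column.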
From source file:com.streamsets.pipeline.lib.jdbc.multithread.CDCJdbcRunnable.java
@Override
public void generateSchemaChanges(BatchContext batchContext) throws SQLException {
    Map<String, Integer> source = new HashMap<>();
    ResultSet rs = tableReadContext.getMoreResultSet();
    String schemaName = "";
    String tableName = "";
    String captureInstanceName = "";

    if (rs != null && rs.next()) {
        ResultSetMetaData data = rs.getMetaData();

        for (int i = 1; i <= data.getColumnCount(); i++) {
            String label = data.getColumnLabel(i);
            if (label.equals(MSQueryUtil.CDC_SOURCE_SCHEMA_NAME)) {
                schemaName = rs.getString(label);
            } else if (label.equals(MSQueryUtil.CDC_SOURCE_TABLE_NAME)) {
                tableName = rs.getString(label);
            } else if (label.equals(MSQueryUtil.CDC_CAPTURE_INSTANCE_NAME)) {
                captureInstanceName = rs.getString(label);
            } else {
                int type = data.getColumnType(i);
                source.put(label, type);
            }
        }

        boolean schemaChanges = getDiff(captureInstanceName, source,
                tableRuntimeContext.getSourceTableContext().getColumnToType());

        if (schemaChanges) {
            JdbcEvents.SCHEMA_CHANGE.create(context, batchContext)
                    .with("source-table-schema-name", schemaName)
                    .with("source-table-name", tableName)
                    .with("capture-instance-name", captureInstanceName)
                    .createAndSend();
            context.processBatch(batchContext);
        }
    }
}
From source file:org.apache.syncope.core.util.ImportExport.java
private void doExportTable(final TransformerHandler handler, final Connection conn, final String tableName)
        throws SQLException, SAXException {

    AttributesImpl attrs = new AttributesImpl();

    PreparedStatement stmt = null;
    ResultSet rs = null;
    ResultSet pkeyRS = null;
    try {
        // ------------------------------------
        // retrieve primary keys to perform an ordered select
        final DatabaseMetaData meta = conn.getMetaData();
        pkeyRS = meta.getPrimaryKeys(null, null, tableName);

        final StringBuilder orderBy = new StringBuilder();
        while (pkeyRS.next()) {
            final String columnName = pkeyRS.getString("COLUMN_NAME");
            if (columnName != null) {
                if (orderBy.length() > 0) {
                    orderBy.append(",");
                }
                orderBy.append(columnName);
            }
        }
        // ------------------------------------

        stmt = conn.prepareStatement(
                "SELECT * FROM " + tableName + " a" + (orderBy.length() > 0 ? " ORDER BY " + orderBy : ""));

        rs = stmt.executeQuery();
        for (int rowNo = 0; rs.next(); rowNo++) {
            attrs.clear();

            final ResultSetMetaData rsMeta = rs.getMetaData();
            for (int i = 0; i < rsMeta.getColumnCount(); i++) {
                final String columnName = rsMeta.getColumnName(i + 1);
                final Integer columnType = rsMeta.getColumnType(i + 1);

                // Retrieve value taking care of binary values.
                String value = getValues(rs, columnName, columnType);
                if (value != null) {
                    attrs.addAttribute("", "", columnName, "CDATA", value);
                }
            }

            handler.startElement("", "", tableName, attrs);
            handler.endElement("", "", tableName);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (pkeyRS != null) {
            try {
                pkeyRS.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
    }
}
From source file:org.apache.sqoop.connector.hbase.HbaseToInitializer.java
@Override
public Schema getSchema(InitializerContext context, LinkConfiguration linkConfig, ToJobConfiguration toJobConfig) {
    executor = new HbaseExecutor(linkConfig.linkConfig);

    String schemaName = toJobConfig.toJobConfig.tableName;
    if (schemaName == null) {
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0019,
                "Table name extraction not supported yet.");
    }

    if (toJobConfig.toJobConfig.schemaName != null) {
        schemaName = toJobConfig.toJobConfig.schemaName + "." + schemaName;
    }

    Schema schema = new Schema(schemaName);
    ResultSet rs = null;
    ResultSetMetaData rsmt = null;
    try {
        rs = executor.executeQuery("SELECT * FROM " + schemaName + " WHERE 1 = 0");

        rsmt = rs.getMetaData();
        for (int i = 1; i <= rsmt.getColumnCount(); i++) {
            String columnName = rsmt.getColumnName(i);

            if (StringUtils.isEmpty(columnName)) {
                columnName = rsmt.getColumnLabel(i);
                if (StringUtils.isEmpty(columnName)) {
                    columnName = "Column " + i;
                }
            }

            Column column = SqlTypesUtils.sqlTypeToSchemaType(rsmt.getColumnType(i), columnName,
                    rsmt.getPrecision(i), rsmt.getScale(i));
            schema.addColumn(column);
        }

        return schema;
    } catch (SQLException e) {
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0016, e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.info("Ignoring exception while closing ResultSet", e);
            }
        }
    }
}
From source file:com.netspective.sparx.form.DialogContextUtils.java
public void populateFieldValuesFromResultSet(DialogContext dc, ResultSet rs) throws SQLException {
    if (rs.next()) {
        ResultSetMetaData rsmd = rs.getMetaData();
        int colsCount = rsmd.getColumnCount();

        DialogFieldStates fieldStates = dc.getFieldStates();
        for (int i = 1; i <= colsCount; i++) {
            String fieldName = rsmd.getColumnName(i).toLowerCase();
            DialogField.State state = fieldStates.getState(fieldName, null);
            if (state != null) {
                // for columns that are Date objects, use the object setter instead of the text setter
                // because we don't need to do unnecessary formatting/parsing
                if (rsmd.getColumnType(i) == Types.DATE)
                    state.getValue().setValue(rs.getDate(i));
                else
                    state.getValue().setTextValue(rs.getString(i));
            }
        }
    }
}
From source file:org.cloudgraph.rdb.filter.RDBStatementExecutor.java
@Override
public List<PropertyPair> fetchRow(PlasmaType type, StringBuilder sql) {
    List<PropertyPair> result = new ArrayList<PropertyPair>();
    PreparedStatement statement = null;
    ResultSet rs = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("fetch: " + sql.toString());
        }
        statement = con.prepareStatement(sql.toString(), ResultSet.TYPE_FORWARD_ONLY,
                /* ResultSet.TYPE_SCROLL_INSENSITIVE, */
                ResultSet.CONCUR_READ_ONLY);
        statement.execute();
        rs = statement.getResultSet();
        ResultSetMetaData rsMeta = rs.getMetaData();
        int numcols = rsMeta.getColumnCount();
        int count = 0;
        while (rs.next()) {
            for (int i = 1; i <= numcols; i++) {
                String columnName = rsMeta.getColumnName(i);
                int columnType = rsMeta.getColumnType(i);
                PlasmaProperty prop = (PlasmaProperty) type.getProperty(columnName);
                PlasmaProperty valueProp = prop;
                while (!valueProp.getType().isDataType()) {
                    valueProp = this.statementUtil.getOppositePriKeyProperty(valueProp);
                }
                Object value = converter.fromJDBCDataType(rs, i, columnType, valueProp);
                if (value != null) {
                    PropertyPair pair = new PropertyPair((PlasmaProperty) prop, value);
                    if (!valueProp.equals(prop))
                        pair.setValueProp(valueProp);
                    result.add(pair);
                }
            }
            count++;
        }
        if (log.isDebugEnabled())
            log.debug("returned " + count + " results");
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
    }
    return result;
}
From source file:org.cloudgraph.rdb.filter.RDBStatementExecutor.java
@Override
public Map<String, PropertyPair> fetchRowMap(PlasmaType type, StringBuilder sql) {
    Map<String, PropertyPair> result = new HashMap<String, PropertyPair>();
    PreparedStatement statement = null;
    ResultSet rs = null;
    try {
        if (log.isDebugEnabled()) {
            log.debug("fetch: " + sql.toString());
        }
        statement = con.prepareStatement(sql.toString(), ResultSet.TYPE_FORWARD_ONLY,
                /* ResultSet.TYPE_SCROLL_INSENSITIVE, */
                ResultSet.CONCUR_READ_ONLY);
        statement.execute();
        rs = statement.getResultSet();
        ResultSetMetaData rsMeta = rs.getMetaData();
        int numcols = rsMeta.getColumnCount();
        int count = 0;
        while (rs.next()) {
            for (int i = 1; i <= numcols; i++) {
                String columnName = rsMeta.getColumnName(i);
                int columnType = rsMeta.getColumnType(i);
                PlasmaProperty prop = (PlasmaProperty) type.getProperty(columnName);
                PlasmaProperty valueProp = prop;
                while (!valueProp.getType().isDataType()) {
                    valueProp = this.statementUtil.getOppositePriKeyProperty(valueProp);
                }
                Object value = converter.fromJDBCDataType(rs, i, columnType, valueProp);
                if (value != null) {
                    PropertyPair pair = new PropertyPair((PlasmaProperty) prop, value);
                    if (!valueProp.equals(prop))
                        pair.setValueProp(valueProp);
                    result.put(prop.getName(), pair);
                }
            }
            count++;
        }
        if (log.isDebugEnabled())
            log.debug("returned " + count + " results");
    } catch (Throwable t) {
        throw new DataAccessException(t);
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (statement != null)
                statement.close();
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        }
    }
    return result;
}
From source file:org.openecomp.sdnc.sli.resource.sql.SqlResource.java
public void saveCachedRowSetToCtx(CachedRowSet results, SvcLogicContext ctx, String prefix, DbLibService dblibSvc)
        throws SQLException {
    if (ctx != null) {
        if ((prefix != null) && prefix.endsWith("[]")) {
            // Return an array.
            String pfx = prefix.substring(0, prefix.length() - 2);
            int idx = 0;
            do {
                ResultSetMetaData rsMeta = results.getMetaData();
                int numCols = rsMeta.getColumnCount();

                for (int i = 0; i < numCols; i++) {
                    String colValue = null;
                    String tableName = rsMeta.getTableName(i + 1);
                    if (rsMeta.getColumnType(i + 1) == java.sql.Types.VARBINARY) {
                        colValue = decryptColumn(tableName, rsMeta.getColumnName(i + 1), results.getBytes(i + 1),
                                dblibSvc);
                    } else {
                        colValue = results.getString(i + 1);
                    }

                    LOG.debug("Setting " + pfx + "[" + idx + "]."
                            + rsMeta.getColumnLabel(i + 1).replaceAll("_", "-") + " = " + colValue);
                    ctx.setAttribute(pfx + "[" + idx + "]." + rsMeta.getColumnLabel(i + 1).replaceAll("_", "-"),
                            colValue);
                }
                idx++;
            } while (results.next());

            LOG.debug("Setting " + pfx + "_length = " + idx);
            ctx.setAttribute(pfx + "_length", "" + idx);
        } else {
            ResultSetMetaData rsMeta = results.getMetaData();
            int numCols = rsMeta.getColumnCount();

            for (int i = 0; i < numCols; i++) {
                String colValue = null;
                String tableName = rsMeta.getTableName(i + 1);
                if ("VARBINARY".equalsIgnoreCase(rsMeta.getColumnTypeName(i + 1))) {
                    colValue = decryptColumn(tableName, rsMeta.getColumnName(i + 1), results.getBytes(i + 1),
                            dblibSvc);
                } else {
                    colValue = results.getString(i + 1);
                }

                if (prefix != null) {
                    LOG.debug("Setting " + prefix + "." + rsMeta.getColumnLabel(i + 1).replaceAll("_", "-")
                            + " = " + colValue);
                    ctx.setAttribute(prefix + "." + rsMeta.getColumnLabel(i + 1).replaceAll("_", "-"), colValue);
                } else {
                    LOG.debug("Setting " + rsMeta.getColumnLabel(i + 1).replaceAll("_", "-") + " = " + colValue);
                    ctx.setAttribute(rsMeta.getColumnLabel(i + 1).replaceAll("_", "-"), colValue);
                }
            }
        }
    }
}
From source file:org.jboss.dashboard.dataset.sql.SQLDataSet.java
public void load() throws Exception {
    DataSource targetDS = CoreServices.lookup().getDataSourceManager().getDataSource(dataSource);
    if (targetDS == null)
        return;

    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    CodeBlockTrace trace = null;
    try {
        // Get the connection.
        conn = targetDS.getConnection();

        // Execute the query.
        lastExecutedStmt = createSQLStatament();
        trace = new SQLStatementTrace(lastExecutedStmt.getSQLSentence()).begin();
        trace.addRuntimeConstraint(new DataSetLoadConstraints(this));
        log.debug("Load data set from datasource=" + dataSource + " SQL=" + lastExecutedStmt.getSQLSentence());
        stmt = lastExecutedStmt.getPreparedStatement(conn);
        rs = stmt.executeQuery();

        // Get the properties definition.
        ResultSetMetaData meta = rs.getMetaData();
        int propsSize = meta.getColumnCount();
        SQLDataSet.this.setPropertySize(propsSize);
        for (int i = 0; i < propsSize; i++) {
            SQLDataProperty dp = createSQLProperty();
            String propId = StringUtils.isNotBlank(meta.getColumnLabel(i + 1)) ? meta.getColumnLabel(i + 1)
                    : meta.getColumnName(i + 1);
            dp.setPropertyId(propId.toLowerCase());
            // dp.setPropertyId(meta.getColumnName(i + 1).toLowerCase());
            dp.setType(meta.getColumnType(i + 1));
            dp.setTableName(meta.getTableName(i + 1));
            dp.setColumnName(meta.getColumnName(i + 1));
            addProperty(dp, i);
        }

        // Get rows and populate the data set values.
        int index = 0;
        while (rs.next()) {
            Object[] row = new Object[propsSize];
            for (int i = 0; i < propsSize; i++)
                row[i] = rs.getObject(i + 1);
            SQLDataSet.this.addRowValues(row);

            // Check load constraints (every 10,000 rows)
            if (++index == 10000) {
                trace.checkRuntimeConstraints();
                index = 0;
            }
        }

        // Once we got the dataset initialized then calculate the domain for each property.
        for (int i = 0; i < properties.length; i++) {
            SQLDataProperty property = (SQLDataProperty) properties[i];
            property.calculateDomain();
        }
    } catch (Exception e) {
        if (lastExecutedStmt != null) {
            log.error("Error in load() SQLDataset. SQL = " + lastExecutedStmt.getSQLSentence(), e);
        }
        throw e;
    } finally {
        try {
            if (rs != null)
                rs.close();
        } catch (Exception e) {
            log.warn("Error closing ResultSet: ", e);
        }
        try {
            if (stmt != null)
                stmt.close();
        } catch (Exception e) {
            log.warn("Error closing PreparedStatement: ", e);
        }
        if (conn != null) {
            conn.close();
        }
        if (trace != null) {
            trace.end();
        }
    }
}
From source file:org.h2gis.drivers.geojson.GeoJsonWriteDriver.java
/**
 * Cache the column name and its index.
 *
 * @param resultSetMetaData
 * @throws SQLException
 */
private void cacheMetadata(ResultSetMetaData resultSetMetaData) throws SQLException {
    cachedColumnNames = new HashMap<String, Integer>();
    for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
        final String fieldTypeName = resultSetMetaData.getColumnTypeName(i);
        if (!fieldTypeName.equalsIgnoreCase("geometry")
                && isSupportedPropertyType(resultSetMetaData.getColumnType(i), fieldTypeName)) {
            cachedColumnNames.put(resultSetMetaData.getColumnName(i).toUpperCase(), i);
            columnCountProperties++;
        }
    }
}
From source file:jeeves.resources.dbms.Dbms.java
private Element buildResponse(ResultSet rs, Hashtable<String, String> formats) throws SQLException {
    ResultSetMetaData md = rs.getMetaData();
    int colNum = md.getColumnCount();

    // --- retrieve name and type of fields
    Vector<String> vHeaders = new Vector<String>();
    Vector<Integer> vTypes = new Vector<Integer>();

    for (int i = 0; i < colNum; i++) {
        vHeaders.add(md.getColumnLabel(i + 1).toLowerCase());
        vTypes.add(new Integer(md.getColumnType(i + 1)));
    }

    // --- build the jdom tree
    Element root = new Element(Jeeves.Elem.RESPONSE);

    while (rs.next()) {
        Element record = new Element(Jeeves.Elem.RECORD);

        for (int i = 0; i < colNum; i++) {
            String name = vHeaders.get(i).toString();
            int type = ((Integer) vTypes.get(i)).intValue();
            record.addContent(buildElement(rs, i, name, type, formats));
        }

        root.addContent(record);
    }

    return root;
}