Usage examples for java.sql.ResultSet.getDouble
double getDouble(String columnLabel) throws SQLException;
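The companion overload that addresses the column by its 1-based index is also part of the java.sql.ResultSet API, and several of the examples below use it:

double getDouble(int columnIndex) throws SQLException;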
Retrieves the value of the designated column in the current row of this ResultSet object as a double in the Java programming language. If the value is SQL NULL, the value returned is 0; call wasNull() after the read to distinguish a stored zero from NULL.
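Below is a minimal self-contained sketch of the typical call pattern, shown before the real-world examples. The table name measurements and column name reading are illustrative assumptions, not taken from any of the source files listed here. Because getDouble returns 0 for SQL NULL, wasNull() is consulted immediately after each read.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GetDoubleSketch {
    // Sums a nullable DOUBLE column; "measurements" and "reading" are hypothetical names.
    static double sumReadings(Connection conn) throws SQLException {
        double sum = 0.0;
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT reading FROM measurements")) {
            while (rs.next()) {
                double reading = rs.getDouble("reading"); // or rs.getDouble(1) by 1-based index
                if (rs.wasNull()) {
                    continue; // SQL NULL came back as 0; skip it rather than summing a bogus zero
                }
                sum += reading;
            }
        }
        return sum;
    }
}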
From source file: edu.ku.brc.specify.tasks.subpane.lm.LifeMapperPane.java

/**
 * @param pStmt
 * @param ceID
 * @param pmList
 * @throws SQLException
 */
private void addMarkerFromCE(final PreparedStatement pStmt, final int ceID,
        final ArrayList<LatLonPlacemarkIFace> pmList) throws SQLException {
    pStmt.setInt(1, ceID);
    ResultSet rs = pStmt.executeQuery();
    if (rs.next()) {
        LatLonPlacemark llp = new LatLonPlacemark(markerImg, rs.getDouble(1), rs.getDouble(2));
        pmList.add(llp);
    }
    rs.close();
}
From source file: com.itemanalysis.jmetrik.stats.ranking.RankingAnalysis.java

private ResizableDoubleArray getData() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;
    ResizableDoubleArray data = new ResizableDoubleArray((int) (maxProgress / 2.0));
    try {
        //connect to table to create data set to be ranked
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        select.addColumn(sqlTable, variable.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());
        String vNameDb = variable.getName().nameForDatabase();
        double x = Double.NaN;
        int dbIndex = 0; //row position index for all records in db
        while (rs.next()) {
            x = rs.getDouble(vNameDb);
            if (!rs.wasNull()) {
                if (ascending) {
                    data.addElement(x); //ascending order
                } else {
                    data.addElement(-x); //descending order
                }
            } else {
                missingIndex.add(dbIndex);
            }
            dbIndex++;
            updateProgress();
        }
        return data;
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }
}
From source file: com.act.lcms.db.model.FeedingLCMSWell.java

@Override
protected List<FeedingLCMSWell> fromResultSet(ResultSet resultSet) throws SQLException {
    List<FeedingLCMSWell> results = new ArrayList<>();
    while (resultSet.next()) {
        Integer id = resultSet.getInt(DB_FIELD.ID.getOffset());
        Integer plateId = resultSet.getInt(DB_FIELD.PLATE_ID.getOffset());
        Integer plateRow = resultSet.getInt(DB_FIELD.PLATE_ROW.getOffset());
        Integer plateColumn = resultSet.getInt(DB_FIELD.PLATE_COLUMN.getOffset());
        String msid = resultSet.getString(DB_FIELD.MSID.getOffset());
        String composition = resultSet.getString(DB_FIELD.COMPOSITION.getOffset());
        String extract = resultSet.getString(DB_FIELD.EXTRACT.getOffset());
        String chemical = resultSet.getString(DB_FIELD.CHEMICAL.getOffset());
        Double concentration = resultSet.getDouble(DB_FIELD.CONCENTRATION.getOffset());
        if (resultSet.wasNull()) {
            // getDouble() returns 0 for SQL NULL, so wasNull() is used to restore the null here.
            concentration = null;
        }
        String note = resultSet.getString(DB_FIELD.NOTE.getOffset());
        results.add(new FeedingLCMSWell(id, plateId, plateRow, plateColumn, msid, composition, extract,
                chemical, concentration, note));
    }
    return results;
}
From source file: info.raack.appliancelabeler.data.JDBCDatabase.java

@Override
public Map<Appliance, Double> getApplianceEnergyConsumptionAverages(Date startDate, int detectionAlgorithmId,
        EnergyMonitor ignoredMonitor) {
    final Map<Appliance, Double> applianceMap = new HashMap<Appliance, Double>();
    jdbcTemplate.query(queryForApplianceEnergyConsumptionAverages,
            new Object[] { detectionAlgorithmId, startDate.getTime(), ignoredMonitor.getId() },
            new RowMapper<Entry<Appliance, Double>>() {
                @Override
                public Entry<Appliance, Double> mapRow(ResultSet rs, int arg1) throws SQLException {
                    // just put entries into the map
                    applianceMap.put(getApplianceById(rs.getInt("appliance_id")),
                            rs.getDouble("average_energy_consumed"));
                    return null;
                }
            });
    return applianceMap;
}
From source file: org.jfree.data.jdbc.JDBCPieDataset.java

/**
 * ExecuteQuery will attempt to execute the query passed to it against the
 * existing database connection. If no connection exists then no action
 * is taken.
 * The results from the query are extracted and cached locally, thus
 * applying an upper limit on how many rows can be retrieved successfully.
 *
 * @param query  the query to be executed
 * @param con  the connection the query is to be executed against
 *
 * @throws SQLException if there is a problem executing the query.
 */
public void executeQuery(Connection con, String query) throws SQLException {
    Statement statement = null;
    ResultSet resultSet = null;
    try {
        statement = con.createStatement();
        resultSet = statement.executeQuery(query);
        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();
        if (columnCount != 2) {
            throw new SQLException("Invalid sql generated. PieDataSet requires 2 columns only");
        }
        int columnType = metaData.getColumnType(2);
        double value = Double.NaN;
        while (resultSet.next()) {
            Comparable key = resultSet.getString(1);
            switch (columnType) {
            case Types.NUMERIC:
            case Types.REAL:
            case Types.INTEGER:
            case Types.DOUBLE:
            case Types.FLOAT:
            case Types.DECIMAL:
            case Types.BIGINT:
                value = resultSet.getDouble(2);
                setValue(key, value);
                break;
            case Types.DATE:
            case Types.TIME:
            case Types.TIMESTAMP:
                Timestamp date = resultSet.getTimestamp(2);
                value = date.getTime();
                setValue(key, value);
                break;
            default:
                System.err.println("JDBCPieDataset - unknown data type");
                break;
            }
        }
        fireDatasetChanged(new DatasetChangeInfo()); //TODO: fill in real change info
    } finally {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (Exception e) {
                System.err.println("JDBCPieDataset: swallowing exception.");
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (Exception e) {
                System.err.println("JDBCPieDataset: swallowing exception.");
            }
        }
    }
}
From source file: uta.ak.usttmp.common.dao.mapper.MiningTaskRowMapper.java

@Override
public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
    try {
        MiningTask mt = new MiningTask();
        mt.setId(rs.getLong("mme_eid"));
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        mt.setStartTime(formatter.parse(rs.getString("starttime")));
        mt.setEndTime(formatter.parse(rs.getString("endtime")));
        mt.setKeywordNum(rs.getInt("keyword_num"));
        mt.setMiningInterval(rs.getInt("mininginterval"));
        mt.setName(rs.getString("name"));
        mt.setStatus(rs.getInt("status"));
        mt.setTag(rs.getString("tag"));
        mt.setTopicNum(rs.getInt("topic_num"));
        mt.setQrtzJobName(rs.getString("qrtz_job_name"));
        mt.setQrtzJobExecCount(rs.getInt("qrtz_job_exec_count"));
        mt.setQrtzJobTotalCount(rs.getInt("qrtz_job_total_count"));
        mt.setPreprocessComponent(rs.getString("preprocess_component"));
        mt.setMiningComponent(rs.getString("mining_component"));
        mt.setTrackingComponent(rs.getString("tracking_component"));
        mt.setAlpha(rs.getDouble("alpha"));
        mt.setBeta(rs.getDouble("beta"));
        return mt;
    } catch (ParseException ex) {
        Logger.getLogger(MiningTaskRowMapper.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
From source file: edu.ku.brc.specify.tasks.subpane.lm.LifeMapperPane.java

/**
 * @param genusSpecies
 */
public int addLocalData(final String genusSpecies) {
    int numFnd = 0;
    Connection conn = null;
    Statement stmt = null;
    try {
        String sql = "SELECT ce.CollectingEventID, l.Latitude1, l.Longitude1 FROM taxon t INNER JOIN determination d ON t.TaxonID = d.TaxonID "
                + "INNER JOIN collectionobject co ON d.CollectionObjectID = co.CollectionObjectID "
                + "INNER JOIN collectingevent ce ON co.CollectingEventID = ce.CollectingEventID "
                + "INNER JOIN locality l ON ce.LocalityID = l.LocalityID WHERE co.CollectionMemberID = COLMEMID AND t.FullName LIKE '"
                + genusSpecies + "%'";
        sql = QueryAdjusterForDomain.getInstance().adjustSQL(sql);
        conn = DBConnection.getInstance().createConnection();
        stmt = conn.createStatement();
        ArrayList<LatLonPlacemarkIFace> coPoints = new ArrayList<LatLonPlacemarkIFace>();
        ResultSet rs = stmt.executeQuery(sql);
        while (rs.next()) {
            // Columns 2 and 3 are Latitude1 and Longitude1, read by 1-based column index.
            LatLonPlacemark llp = new LatLonPlacemark(markerImg, rs.getDouble(2), rs.getDouble(3));
            coPoints.add(llp);
        }
        numFnd = coPoints.size();
        if (numFnd > 0) {
            BasicMarkerAttributes bmAttrs = new BasicMarkerAttributes(Material.GREEN, BasicMarkerShape.CONE,
                    1d, 3, 3);
            wwPanel.placeMarkers(coPoints, true, false, 0, bmAttrs, false);
        }
    } catch (SQLException ex) {
        ex.printStackTrace();
    } finally {
        try {
            if (stmt != null)
                stmt.close();
            if (conn != null)
                conn.close();
        } catch (Exception ex) {
        }
    }
    return numFnd;
}
From source file: Data.java

private JTable getTbleData(Statement stmt) throws SQLException, ClassNotFoundException {
    ResultSet rs;
    String sql = "SELECT pro_name, pro_description, COUNT(sto_uid) - (SUM(sto_inout) * 2) AS NbProduit "
            + "FROM t_produit, t_stock WHERE t_produit.id_produit = t_stock.id_produit GROUP BY t_produit.id_produit";
    rs = stmt.executeQuery(sql);
    rs.last();
    Object rowData[][] = new Object[rs.getRow()][3];
    rs.beforeFirst();
    while (rs.next()) {
        rowData[rs.getRow() - 1][0] = rs.getString(1);
        rowData[rs.getRow() - 1][1] = rs.getString(2);
        rowData[rs.getRow() - 1][2] = rs.getInt(3);
    }
    sql = "SELECT tmp_temperature, tmp_humidity FROM t_temphum ORDER BY tmp_date DESC LIMIT 1";
    rs = stmt.executeQuery(sql);
    rs.first();
    // Compare the unit with equals(), not ==.
    if ("C".equals(tempUnit)) {
        tempRealTime.setText("Température : " + rs.getDouble(1) + "°" + tempUnit);
    } else {
        tempRealTime.setText("Température : " + celsiusToFahrenheit(rs.getString(1)) + "°" + tempUnit);
    }
    humRealTime.setText("Humidité : " + rs.getDouble(2) + "%");
    lastTemp = rs.getDouble(1);
    lastHum = rs.getDouble(2);
    Object columnNames[] = { "Produit", "Description", "Quantité" };
    JTable table = new JTable(rowData, columnNames);
    return table;
}
From source file: edu.uga.cs.fluxbuster.db.PostgresDBInterface.java

/**
 * Get the features needed for cluster classification.
 *
 * @param logdate the run date of the cluster
 * @param clusterId the cluster's id
 * @return the cluster features
 * @throws SQLException
 */
private List<Double> getClusterFeatures(Date logdate, int clusterId) throws SQLException {
    List<Double> retval = new ArrayList<Double>();
    String tabDateStr = dateFormatTable.format(logdate);
    String query = "SELECT network_cardinality, ip_diversity, domains_per_network, "
            + "number_of_domains, ttl_per_domain, ip_growth_ratio FROM "
            + "cluster_feature_vectors_" + tabDateStr + " WHERE cluster_id = " + clusterId;
    ResultSet rs = this.executeQueryWithResult(query);
    try {
        if (rs.next()) {
            for (int i = 1; i <= 6; i++) {
                retval.add(rs.getDouble(i));
            }
        }
    } catch (SQLException e) {
        if (rs != null && !rs.isClosed()) {
            rs.close();
        }
        throw e;
    }
    return retval;
}
From source file: com.sqewd.open.dal.core.persistence.db.EntityHelper.java

@SuppressWarnings({ "unchecked", "rawtypes" })
public static void setColumnValue(final ResultSet rs, final StructAttributeReflect attr,
        final AbstractEntity entity, final AbstractJoinGraph gr, final Stack<KeyValuePair<Class<?>>> path)
        throws Exception {
    KeyValuePair<String> alias = gr.getAliasFor(path, attr.Column, 0);
    String tabprefix = alias.getKey();
    if (EnumPrimitives.isPrimitiveType(attr.Field.getType())) {
        EnumPrimitives prim = EnumPrimitives.type(attr.Field.getType());
        switch (prim) {
        case ECharacter:
            String sv = rs.getString(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), sv.charAt(0));
            }
            break;
        case EShort:
            short shv = rs.getShort(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), shv);
            }
            break;
        case EInteger:
            int iv = rs.getInt(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), iv);
            }
            break;
        case ELong:
            long lv = rs.getLong(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), lv);
            }
            break;
        case EFloat:
            float fv = rs.getFloat(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), fv);
            }
            break;
        case EDouble:
            double dv = rs.getDouble(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), dv);
            }
            break;
        default:
            throw new Exception("Unsupported Data type [" + prim.name() + "]");
        }
    } else if (attr.Convertor != null) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            attr.Convertor.load(entity, attr.Column, value);
        }
    } else if (attr.Field.getType().equals(String.class)) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), value);
        }
    } else if (attr.Field.getType().equals(Date.class)) {
        long value = rs.getLong(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Date dt = new Date(value);
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), dt);
        }
    } else if (attr.Field.getType().isEnum()) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Class ecls = attr.Field.getType();
            Object evalue = Enum.valueOf(ecls, value);
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), evalue);
        }
    } else if (attr.Reference != null) {
        Class<?> rt = Class.forName(attr.Reference.Class);
        Object obj = rt.newInstance();
        if (!(obj instanceof AbstractEntity))
            throw new Exception("Unsupported Entity type [" + rt.getCanonicalName() + "]");
        AbstractEntity rentity = (AbstractEntity) obj;
        if (path.size() > 0) {
            path.peek().setKey(attr.Column);
        }
        KeyValuePair<Class<?>> cls = new KeyValuePair<Class<?>>();
        cls.setValue(rentity.getClass());
        path.push(cls);
        setEntity(rentity, rs, gr, path);
        PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), rentity);
        path.pop();
    }
}