Usage examples for java.sql.ResultSetMetaData.getColumnCount()
int getColumnCount() throws SQLException;
Returns the number of columns in this ResultSet object.
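A minimal sketch of the basic call pattern, for orientation before the real-world examples below (the in-memory H2 URL and the table name some_table are placeholders, not taken from any example): obtain the ResultSetMetaData, read getColumnCount(), and loop over the 1-based column indexes.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnCountSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; replace with a real JDBC URL, user, and password.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT * FROM some_table")) {
            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();
            // JDBC column indexes are 1-based.
            for (int i = 1; i <= columnCount; i++) {
                System.out.print(meta.getColumnLabel(i) + (i < columnCount ? ", " : "\n"));
            }
            while (rs.next()) {
                for (int i = 1; i <= columnCount; i++) {
                    Object value = rs.getObject(i);
                    System.out.print(value + (i < columnCount ? ", " : "\n"));
                }
            }
        }
    }
}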
From source file:at.stefanproell.ResultSetVerification.ResultSetVerificationAPI.java
public String calculateResultSetHashShort(ResultSet rs, String concatenatedColumns) {
    //@todo hashing
    this.logger.info("Resulset row count: " + this.getResultSetRowCount(rs));
    String resultSetHash = concatenatedColumns;
    String concatenatedIdentifiers = "";
    String currentHash = "";
    String previousKey = "";
    String compositeHash = "";
    int hashCounter = 0;
    long startTime = System.currentTimeMillis();
    //int hashCounter =0;
    try {
        ResultSetMetaData rsmd = rs.getMetaData();
        int columnsNumber = rsmd.getColumnCount();
        this.logger.info("There are " + columnsNumber + " columns in the result set");
        long meanTimeStart = System.currentTimeMillis();
        rs.setFetchSize(10000);
        while (rs.next()) {
            hashCounter++;
            if (hashCounter % 1000 == 0) {
                long meanTimeStop = System.currentTimeMillis();
                this.logger.warning("Calculated " + hashCounter + " hashes so far. This batch took "
                        + (double) ((meanTimeStop - meanTimeStart) / 1000) + " seconds");
                meanTimeStart = System.currentTimeMillis();
            }
            concatenatedIdentifiers += rs.getString(1);
            //this.logger.info("At row " + hashCounter + " the hash currently has the length of: " + concatenatedIdentifiers.length());
            System.gc();
        }
        resultSetHash += concatenatedIdentifiers;
        this.logger.info("The hash has the length of: " + concatenatedIdentifiers.length());
        resultSetHash = this.calculateHashFromString(resultSetHash);
    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    long endTime = System.currentTimeMillis();
    long totalTime = endTime - startTime;
    double elapsedTime = (double) (totalTime / 1000);
    this.logger.info("Calculated " + hashCounter + " hash values in " + elapsedTime + " sec");
    this.logger.info("Hash is " + resultSetHash);
    return resultSetHash;
}
From source file:at.alladin.rmbt.statisticServer.export.ExportResource.java
@Get public Representation request(final String entity) { //Before doing anything => check if a cached file already exists and is new enough String property = System.getProperty("java.io.tmpdir"); final String filename_zip; final String filename_csv; //allow filtering by month/year int year = -1; int month = -1; int hours = -1; boolean hoursExport = false; boolean dateExport = false; if (getRequest().getAttributes().containsKey("hours")) { // export by hours try {//from w w w . ja va2 s . co m hours = Integer.parseInt(getRequest().getAttributes().get("hours").toString()); } catch (NumberFormatException ex) { //Nothing -> just fall back } if (hours <= 7 * 24 && hours >= 1) { //limit to 1 week (avoid DoS) hoursExport = true; } } else if (!hoursExport && getRequest().getAttributes().containsKey("year")) { // export by month/year try { year = Integer.parseInt(getRequest().getAttributes().get("year").toString()); month = Integer.parseInt(getRequest().getAttributes().get("month").toString()); } catch (NumberFormatException ex) { //Nothing -> just fall back } if (year < 2099 && month > 0 && month <= 12 && year > 2000) { dateExport = true; } } if (hoursExport) { filename_zip = FILENAME_ZIP_HOURS.replace("%HOURS%", String.format("%03d", hours)); filename_csv = FILENAME_CSV_HOURS.replace("%HOURS%", String.format("%03d", hours)); cacheThresholdMs = 5 * 60 * 1000; //5 minutes } else if (dateExport) { filename_zip = FILENAME_ZIP.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%", String.format("%02d", month)); filename_csv = FILENAME_CSV.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%", String.format("%02d", month)); cacheThresholdMs = 23 * 60 * 60 * 1000; //23 hours } else { filename_zip = FILENAME_ZIP_CURRENT; filename_csv = FILENAME_CSV_CURRENT; cacheThresholdMs = 3 * 60 * 60 * 1000; //3 hours } final File cachedFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv)); final File generatingFile = new File( property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp"); if (cachedFile.exists()) { //check if file has been recently created OR a file is currently being created if (((cachedFile.lastModified() + cacheThresholdMs) > (new Date()).getTime()) || (generatingFile.exists() && (generatingFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())) { //if so, return the cached file instead of a cost-intensive new one final OutputRepresentation result = new OutputRepresentation( zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) { @Override public void write(OutputStream out) throws IOException { InputStream is = new FileInputStream(cachedFile); IOUtils.copy(is, out); out.close(); } }; if (zip) { final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT); disposition.setFilename(filename_zip); result.setDisposition(disposition); } return result; } } final String timeClause; if (dateExport) timeClause = " AND (EXTRACT (month FROM t.time AT TIME ZONE 'UTC') = " + month + ") AND (EXTRACT (year FROM t.time AT TIME ZONE 'UTC') = " + year + ") "; else if (hoursExport) timeClause = " AND time > now() - interval '" + hours + " hours' "; else timeClause = " AND time > current_date - interval '31 days' "; final String sql = "SELECT" + " ('P' || t.open_uuid) open_uuid," + " ('O' || t.open_test_uuid) open_test_uuid," + " to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') time_utc," + " nt.group_name cat_technology," + " nt.name network_type," + " (CASE WHEN (t.geo_accuracy < ?) 
AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN" + " t.geo_lat" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_lat*1111)/1111" + " ELSE null" + " END) lat," + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN" + " t.geo_long" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_long*741)/741 " + " ELSE null" + " END) long," + " (CASE WHEN ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN" + " 'rastered'" + //make raster transparent " ELSE t.geo_provider" + " END) loc_src," + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') " + " THEN round(t.geo_accuracy::float * 10)/10 " + " WHEN (t.geo_accuracy < 100) AND ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN 100" + // limit accuracy to 100m " WHEN (t.geo_accuracy < ?) THEN round(t.geo_accuracy::float * 10)/10" + " ELSE null END) loc_accuracy, " + " (CASE WHEN (t.zip_code < 1000 OR t.zip_code > 9999) THEN null ELSE t.zip_code END) zip_code," + " t.gkz gkz," + " t.country_location country_location," + " t.speed_download download_kbit," + " t.speed_upload upload_kbit," + " round(t.ping_median::float / 100000)/10 ping_ms," + " t.lte_rsrp," + " t.lte_rsrq," + " ts.name server_name," + " duration test_duration," + " num_threads," + " t.plattform platform," + " COALESCE(adm.fullname, t.model) model," + " client_software_version client_version," + " network_operator network_mcc_mnc," + " network_operator_name network_name," + " network_sim_operator sim_mcc_mnc," + " nat_type," + " public_ip_asn asn," + " client_public_ip_anonymized ip_anonym," + " (ndt.s2cspd*1000)::int ndt_download_kbit," + " (ndt.c2sspd*1000)::int ndt_upload_kbit," + " COALESCE(t.implausible, false) implausible," + " t.signal_strength" + " FROM test t" + " LEFT JOIN network_type nt ON nt.uid=t.network_type" + " LEFT JOIN device_map adm ON adm.codename=t.model" + " LEFT JOIN test_server ts ON ts.uid=t.server_id" + " LEFT JOIN test_ndt ndt ON t.uid=ndt.test_id" + " WHERE " + " t.deleted = false" + timeClause + " AND status = 'FINISHED'" + " ORDER BY t.uid"; final String[] columns; final List<String[]> data = new ArrayList<>(); PreparedStatement ps = null; ResultSet rs = null; try { ps = conn.prepareStatement(sql); //insert filter for accuracy double accuracy = Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT")); ps.setDouble(1, accuracy); ps.setDouble(2, accuracy); ps.setDouble(3, accuracy); ps.setDouble(4, accuracy); ps.setDouble(5, accuracy); ps.setDouble(6, accuracy); if (!ps.execute()) return null; rs = ps.getResultSet(); final ResultSetMetaData meta = rs.getMetaData(); final int colCnt = meta.getColumnCount(); columns = new String[colCnt]; for (int i = 0; i < colCnt; i++) columns[i] = meta.getColumnName(i + 1); while (rs.next()) { final String[] line = new String[colCnt]; for (int i = 0; i < colCnt; i++) { final Object obj = rs.getObject(i + 1); line[i] = obj == null ? null : obj.toString(); } data.add(line); } } catch (final SQLException e) { e.printStackTrace(); return null; } finally { try { if (rs != null) rs.close(); if (ps != null) ps.close(); } catch (final SQLException e) { e.printStackTrace(); } } final OutputRepresentation result = new OutputRepresentation( zip ? 
MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) { @Override public void write(OutputStream out) throws IOException { //cache in file => create temporary temporary file (to // handle errors while fulfilling a request) String property = System.getProperty("java.io.tmpdir"); final File cachedFile = new File( property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp"); OutputStream outf = new FileOutputStream(cachedFile); if (zip) { final ZipOutputStream zos = new ZipOutputStream(outf); final ZipEntry zeLicense = new ZipEntry("LIZENZ.txt"); zos.putNextEntry(zeLicense); final InputStream licenseIS = getClass().getResourceAsStream("DATA_LICENSE.txt"); IOUtils.copy(licenseIS, zos); licenseIS.close(); final ZipEntry zeCsv = new ZipEntry(filename_csv); zos.putNextEntry(zeCsv); outf = zos; } final OutputStreamWriter osw = new OutputStreamWriter(outf); final CSVPrinter csvPrinter = new CSVPrinter(osw, csvFormat); for (final String c : columns) csvPrinter.print(c); csvPrinter.println(); for (final String[] line : data) { for (final String f : line) csvPrinter.print(f); csvPrinter.println(); } csvPrinter.flush(); if (zip) outf.close(); //if we reach this code, the data is now cached in a temporary tmp-file //so, rename the file for "production use2 //concurrency issues should be solved by the operating system File newCacheFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv)); Files.move(cachedFile.toPath(), newCacheFile.toPath(), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING); FileInputStream fis = new FileInputStream(newCacheFile); IOUtils.copy(fis, out); fis.close(); out.close(); } }; if (zip) { final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT); disposition.setFilename(filename_zip); result.setDisposition(disposition); } return result; }
From source file:at.stefanproell.ResultSetVerification.ResultSetVerificationAPI.java
/**
 * Calculate hash on DB
 *
 * @return
 */
public String retrieveFullResultSet(ResultSet rs) {
    this.logger.info("Resulset row count: " + this.getResultSetRowCount(rs));
    String resultSetHash = "";
    String currentHash = "";
    String previousKey = "";
    String compositeHash = "";
    int hashCounter = 0;
    long startTime = System.currentTimeMillis();
    //int hashCounter =0;
    try {
        ResultSetMetaData rsmd = rs.getMetaData();
        int columnsNumber = rsmd.getColumnCount();
        this.logger.info("There are " + columnsNumber + " columns in the result set");
        String newResultSetHash = null;
        long meanTimeStart = System.currentTimeMillis();
        rs.setFetchSize(1000);
        while (rs.next()) {
            hashCounter++;
            if (hashCounter % 1000 == 0) {
                long meanTimeStop = System.currentTimeMillis();
                this.logger.warning("Calculated " + hashCounter + " hashes so far. This batch took "
                        + (double) ((meanTimeStop - meanTimeStart) / 1000) + " seconds");
                meanTimeStart = System.currentTimeMillis();
            }
            for (int i = 1; i < columnsNumber; i++) {
                currentHash += rs.getString(i);
            }
            if (rs.isFirst()) {
                resultSetHash = this.calculateHashFromString(currentHash);
            } else {
                compositeHash = (resultSetHash + currentHash);
                // reset the variables in order to reduce overhead
                resultSetHash = null;
                currentHash = null;
                newResultSetHash = this.calculateHashFromString(compositeHash);
                //this.logger.info("[resultSetHash] " + resultSetHash + "[currentHash] " + currentHash + " -> [newResultSetHash]" + newResultSetHash);
                resultSetHash = newResultSetHash;
            }
            System.gc();
        }
    } catch (SQLException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    long endTime = System.currentTimeMillis();
    long totalTime = endTime - startTime;
    double elapsedTime = (double) (totalTime / 1000);
    this.logger.info("Calculated " + hashCounter + " hash values in " + elapsedTime + " sec");
    this.logger.info("Hash is " + resultSetHash);
    return resultSetHash;
}
From source file:com.gdcn.modules.db.jdbc.processor.CamelBeanProcessor.java
/**
 * The positions in the returned array represent column numbers. The
 * values stored at each position represent the index in the
 * <code>PropertyDescriptor[]</code> for the bean property that matches
 * the column name. If no bean property was found for a column, the
 * position is set to <code>PROPERTY_NOT_FOUND</code>.
 *
 * @param rsmd The <code>ResultSetMetaData</code> containing column
 * information.
 *
 * @param props The bean property descriptors.
 *
 * @throws SQLException if a database access error occurs
 *
 * @return An int[] with column index to property index mappings. The 0th
 * element is meaningless because JDBC column indexing starts at 1.
 */
protected int[] mapColumnsToProperties(ResultSetMetaData rsmd, PropertyDescriptor[] props) throws SQLException {
    int cols = rsmd.getColumnCount();
    int[] columnToProperty = new int[cols + 1];
    Arrays.fill(columnToProperty, PROPERTY_NOT_FOUND);
    for (int col = 1; col <= cols; col++) {
        String columnName = rsmd.getColumnLabel(col);
        if (null == columnName || 0 == columnName.length()) {
            columnName = rsmd.getColumnName(col);
        }
        columnName = columnName.toLowerCase();
        String propertyName = columnToPropertyOverrides.get(columnName);
        if (propertyName == null) {
            propertyName = EncodeUtils.underline2camel(columnName);
        }
        for (int i = 0; i < props.length; i++) {
            String prop = props[i].getName();
            if (propertyName.equalsIgnoreCase(prop)) {
                columnToProperty[col] = i;
                break;
            }
        }
    }
    return columnToProperty;
}
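For context, a hedged sketch of how a mapping array like the one returned above might be consumed; the populateBean helper and its generic signature are hypothetical and not part of CamelBeanProcessor (it assumes PROPERTY_NOT_FOUND and the same PropertyDescriptor array are in scope, plus java.lang.reflect.Method and the java.sql types are imported).

// Hypothetical consumer of a column-to-property mapping; not taken from the source file above.
private <T> T populateBean(ResultSet rs, T bean, PropertyDescriptor[] props, int[] columnToProperty)
        throws SQLException, ReflectiveOperationException {
    ResultSetMetaData rsmd = rs.getMetaData();
    for (int col = 1; col <= rsmd.getColumnCount(); col++) {
        int propIndex = columnToProperty[col];
        if (propIndex == PROPERTY_NOT_FOUND) {
            continue; // no matching bean property for this column
        }
        Object value = rs.getObject(col);
        // Invoke the setter discovered during the mapping step, if the property is writable.
        Method setter = props[propIndex].getWriteMethod();
        if (setter != null) {
            setter.invoke(bean, value);
        }
    }
    return bean;
}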
From source file:at.ac.tuwien.inso.subcat.reporter.Reporter.java
private void exportWordStats(final ExporterConfig config, Project project, int commitDictId, int bugDictId,
        Settings settings, final ReportWriter formatter, String outputPath, Map<String, Object> vars)
        throws SQLException, Exception {
    formatter.init(project, settings, outputPath);

    model.rawForeach(config.getQuery(), vars, new ResultCallback() {
        @Override
        public void processResult(ResultSet res) throws SemanticException, SQLException, Exception {
            ResultSetMetaData meta = res.getMetaData();
            String[] titles = new String[meta.getColumnCount()];
            if (titles.length != 2) {
                throw new SemanticException(
                        "semantic error: invalid column count, expected: (<string>, <string>)",
                        config.getStart(), config.getEnd());
            }
            if (meta.getColumnType(1) != Types.VARCHAR || meta.getColumnType(2) != Types.VARCHAR) {
                throw new SemanticException(
                        "semantic error: invalid column type, expected: (<string>, <string>), got "
                                + "(<" + meta.getColumnTypeName(1) + ">, <" + meta.getColumnTypeName(2) + ">)",
                        config.getStart(), config.getEnd());
            }

            Map<String, Map<String, Integer>> data = new HashMap<String, Map<String, Integer>>();
            Lemmatizer lemmatiser = new Lemmatizer();
            Set<String> categoryNames = new HashSet<String>();

            while (res.next()) {
                String category = res.getString(1);
                categoryNames.add(category);

                List<String> lemma = lemmatiser.lemmatize(res.getString(2));
                for (String word : lemma) {
                    Map<String, Integer> counter = data.get(word);
                    if (counter == null) {
                        counter = new HashMap<String, Integer>();
                        data.put(word, counter);
                    }

                    Integer wordCount = counter.get(category);
                    if (wordCount == null) {
                        wordCount = 0;
                    }
                    counter.put(category, wordCount + 1);
                }
            }

            String[] header = new String[categoryNames.size() + 1];
            header[0] = "word";
            int i = 1;
            for (String catName : categoryNames) {
                header[i] = catName;
                i++;
            }

            formatter.writeHeader(header);

            for (Entry<String, Map<String, Integer>> entry : data.entrySet()) {
                Map<String, Integer> scores = entry.getValue();
                String[] row = new String[header.length];
                row[0] = entry.getKey();

                i = 1;
                for (String cat : categoryNames) {
                    Integer score = scores.get(cat);
                    if (score == null) {
                        score = 0;
                    }
                    row[i] = score.toString();
                    i++;
                }

                formatter.writeSet(row);
            }

            formatter.writeFooter(header);
        }
    });
}
From source file:com.mirth.connect.connectors.jdbc.DatabaseConnectorServlet.java
@Override public SortedSet<Table> getTables(String channelId, String channelName, String driver, String url, String username, String password, Set<String> tableNamePatterns, String selectLimit, Set<String> resourceIds) { CustomDriver customDriver = null;/* w w w . j a v a2 s .co m*/ Connection connection = null; try { url = replacer.replaceValues(url, channelId, channelName); username = replacer.replaceValues(username, channelId, channelName); password = replacer.replaceValues(password, channelId, channelName); String schema = null; try { MirthContextFactory contextFactory = contextFactoryController.getContextFactory(resourceIds); try { ClassLoader isolatedClassLoader = contextFactory.getIsolatedClassLoader(); if (isolatedClassLoader != null) { customDriver = new CustomDriver(isolatedClassLoader, driver); logger.debug("Custom driver created: " + customDriver.toString() + ", Version " + customDriver.getMajorVersion() + "." + customDriver.getMinorVersion()); } else { logger.debug("Custom classloader is not being used, defaulting to DriverManager."); } } catch (Exception e) { logger.debug("Error creating custom driver, defaulting to DriverManager.", e); } } catch (Exception e) { logger.debug("Error retrieving context factory, defaulting to DriverManager.", e); } if (customDriver == null) { Class.forName(driver); } int oldLoginTimeout = DriverManager.getLoginTimeout(); DriverManager.setLoginTimeout(30); if (customDriver != null) { connection = customDriver.connect(url, username, password); } else { connection = DriverManager.getConnection(url, username, password); } DriverManager.setLoginTimeout(oldLoginTimeout); DatabaseMetaData dbMetaData = connection.getMetaData(); // the sorted set to hold the table information SortedSet<Table> tableInfoList = new TreeSet<Table>(); // Use a schema if the user name matches one of the schemas. // Fix for Oracle: MIRTH-1045 ResultSet schemasResult = null; try { schemasResult = dbMetaData.getSchemas(); while (schemasResult.next()) { String schemaResult = schemasResult.getString(1); if (username.equalsIgnoreCase(schemaResult)) { schema = schemaResult; } } } finally { if (schemasResult != null) { schemasResult.close(); } } // based on the table name pattern, attempt to retrieve the table information tableNamePatterns = translateTableNamePatterns(tableNamePatterns); List<String> tableNameList = new ArrayList<String>(); // go through each possible table name patterns and query for the tables for (String tableNamePattern : tableNamePatterns) { ResultSet rs = null; try { rs = dbMetaData.getTables(null, schema, tableNamePattern, TABLE_TYPES); // based on the result set, loop through to store the table name so it can be used to // retrieve the table's column information while (rs.next()) { tableNameList.add(rs.getString("TABLE_NAME")); } } finally { if (rs != null) { rs.close(); } } } // for each table, grab their column information for (String tableName : tableNameList) { ResultSet rs = null; ResultSet backupRs = null; boolean fallback = false; try { // apparently it's much more efficient to use ResultSetMetaData to retrieve // column information. So each driver is defined with their own unique SELECT // statement to query the table columns and use ResultSetMetaData to retrieve // the column information. 
If driver is not defined with the select statement // then we'll define to the generic method of getting column information, but // this could be extremely slow List<Column> columnList = new ArrayList<Column>(); if (StringUtils.isEmpty(selectLimit)) { logger.debug("No select limit is defined, using generic method"); rs = dbMetaData.getColumns(null, null, tableName, null); // retrieve all relevant column information for (int i = 0; rs.next(); i++) { Column column = new Column(rs.getString("COLUMN_NAME"), rs.getString("TYPE_NAME"), rs.getInt("COLUMN_SIZE")); columnList.add(column); } } else { logger.debug( "Select limit is defined, using specific select query : '" + selectLimit + "'"); // replace the '?' with the appropriate schema.table name, and use ResultSetMetaData to // retrieve column information final String schemaTableName = StringUtils.isNotEmpty(schema) ? "\"" + schema + "\".\"" + tableName + "\"" : "\"" + tableName + "\""; final String queryString = selectLimit.trim().replaceAll("\\?", Matcher.quoteReplacement(schemaTableName)); Statement statement = connection.createStatement(); try { rs = statement.executeQuery(queryString); ResultSetMetaData rsmd = rs.getMetaData(); // retrieve all relevant column information for (int i = 1; i < rsmd.getColumnCount() + 1; i++) { Column column = new Column(rsmd.getColumnName(i), rsmd.getColumnTypeName(i), rsmd.getPrecision(i)); columnList.add(column); } } catch (SQLException sqle) { logger.info("Failed to execute '" + queryString + "', fall back to generic approach to retrieve column information"); fallback = true; } finally { if (statement != null) { statement.close(); } } // failed to use selectLimit method, so we need to fall back to generic // if this generic approach fails, then there's nothing we can do if (fallback) { // Re-initialize in case some columns were added before failing columnList = new ArrayList<Column>(); logger.debug("Using fallback method for retrieving columns"); backupRs = dbMetaData.getColumns(null, null, tableName.replace("/", "//"), null); // retrieve all relevant column information while (backupRs.next()) { Column column = new Column(backupRs.getString("COLUMN_NAME"), backupRs.getString("TYPE_NAME"), backupRs.getInt("COLUMN_SIZE")); columnList.add(column); } } } // create table object and add to the list of table definitions Table table = new Table(tableName, columnList); tableInfoList.add(table); } finally { if (rs != null) { rs.close(); } if (backupRs != null) { backupRs.close(); } } } return tableInfoList; } catch (Exception e) { throw new MirthApiException(new Exception("Could not retrieve database tables and columns.", e)); } finally { if (connection != null) { try { connection.close(); } catch (SQLException e) { } } } }
From source file:com.ToResultSet.java
@Path("/input") @GET/* w w w . j a va 2s .c o m*/ @Produces(MediaType.APPLICATION_JSON) public String toResultSet(@QueryParam("budget") String x, @QueryParam("maxbudget") String y, @QueryParam("location") String location, @QueryParam("date") String minDateString) throws ParserConfigurationException, TransformerException, ParseException { try { Class.forName("com.mysql.jdbc.Driver"); } catch (ClassNotFoundException e) { System.out.println("Fehler bei MySQL-JDBC-Bridge" + e); } //SQL Query wird hier erzeugt je nach dem Daten, die angegeben werden try { int minBudget = 0; int maxBudget = 0; try { minBudget = Integer.valueOf(x); } catch (NumberFormatException e) { if (x.length() > 0) { return "Ihre Budget soll aus Zahlen bestehen"; } } try { maxBudget = Integer.valueOf(y); } catch (NumberFormatException e) { if (y.length() > 0) { return "Ihre Budget soll aus Zahlen bestehen"; } } try { test = Integer.valueOf(location); if (test >= 0) { return "Location soll aus String bestehen"; } } catch (Exception e) { } try { if (minDateString.substring(2, 3).contains("-") && minDateString.length() > 0) { return "Date Format soll yyyy-MM-dd"; } else { java.util.Date date1 = sdf.parse(minDateString); minDate = new java.sql.Date(date1.getTime()); } } catch (Exception e) { if (minDateString.length() > 0) return "Date Format soll yyyy-MM-dd"; } //Connection mit dem SQL wird erzeugt String url = "jdbc:mysql://" + host + "/" + dbName; connection = DriverManager.getConnection( "jdbc:mysql://localhost:3306/location?autoReconnect=true&useSSL=false", "root", "dreamhigh"); statement = connection.createStatement(); String sqlQuery = "Select * FROM " + dbTable; List<String> whereClause = new ArrayList<>(); if (minBudget > 0) { whereClause.add("Budget >= " + minBudget); } if (maxBudget > 0) { whereClause.add("Budget <= " + maxBudget); } if (minBudget > maxBudget && maxBudget > 0 && minBudget > 0) { return "Minimal Budget soll kleiner als Maximal Budget"; } if (minDate != null) { whereClause.add("Date >= '" + minDate + "'"); } if (location != null && !location.isEmpty()) { whereClause.add("Location = '" + location + "'"); } //Die Daten werden nach dem Budget absteigend sortiert if (whereClause.size() > 0) { sqlQuery += " WHERE " + StringUtils.join(whereClause, " AND ") + " ORDER BY Budget DESC"; } if (whereClause.size() == 0) { sqlQuery += " ORDER BY Budget DESC"; } resultSet = statement.executeQuery(sqlQuery); int spalten = resultSet.getMetaData().getColumnCount(); System.out.println("Anzahl Spalten: " + spalten); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.newDocument(); Element results = doc.createElement("Results"); doc.appendChild(results); connection = DriverManager.getConnection( "jdbc:mysql://localhost:3306/location?autoReconnect=true&useSSL=false", "root", "dreamhigh"); ResultSetMetaData rsmd = resultSet.getMetaData(); int colCount = rsmd.getColumnCount(); while (resultSet.next()) { String[] str = new String[8]; for (int k = 1; k <= spalten; k++) { str[k - 1] = resultSet.getString(k); result.add(resultSet.getString(k)); } //Result wird hier als XML zuerst erzeugt und dann Json Element row = doc.createElement("Row"); results.appendChild(row); for (int ii = 1; ii <= colCount; ii++) { String columnName = rsmd.getColumnName(ii); Object value = resultSet.getObject(ii); Element node = doc.createElement(columnName); if (value != null) { node.appendChild(doc.createTextNode(value.toString())); row.appendChild(node); } 
} } System.out.println(getDocumentAsXml(doc)); try { xmlJSONObj = XML.toJSONObject(getDocumentAsXml(doc)); outPut = xmlJSONObj.toString(4); System.out.println(outPut); return outPut; } catch (JSONException je) { } } catch (SQLException e) { System.out.println("Fehler bei Tabellenabfrage: " + e); } return outPut; }
From source file:Database.Handler.java
@SuppressWarnings("unchecked")
private List<T> mapRersultSetToObject(ResultSet rs, Class outputClass) {
    List<T> outputList = null;
    try {
        if (rs != null) {
            if (outputClass.isAnnotationPresent(Entity.class)) {
                ResultSetMetaData rsmd = rs.getMetaData();
                Field[] fields = outputClass.getDeclaredFields();
                while (rs.next()) {
                    T bean = (T) outputClass.newInstance();
                    //System.out.println("rsmd = " + rsmd.getColumnCount());
                    for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                        String columnName = rsmd.getColumnName(i);
                        Object columnValue = rs.getObject(i);
                        for (Field field : fields) {
                            if (field.isAnnotationPresent(Column.class)) {
                                Column column = field.getAnnotation(Column.class);
                                if (column.name().equalsIgnoreCase(columnName) && columnValue != null) {
                                    //System.out.println(field.getName() + "=====>" + columnValue);
                                    BeanUtils.setProperty(bean, field.getName(), columnValue);
                                    break;
                                }
                            }
                        }
                    }
                    if (outputList == null) {
                        outputList = new ArrayList<T>();
                    }
                    outputList.add(bean);
                }
            } else {
                // throw some error
                System.out.println("output class is not annotationPresented");
            }
        } else {
            return null;
        }
    } catch (SQLException ex) {
        Logger.getLogger(Handler.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        Logger.getLogger(Handler.class.getName()).log(Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        Logger.getLogger(Handler.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        Logger.getLogger(Handler.class.getName()).log(Level.SEVERE, null, ex);
    } catch (InvocationTargetException ex) {
        Logger.getLogger(Handler.class.getName()).log(Level.SEVERE, null, ex);
    }
    return outputList;
}
From source file:com.streamsets.pipeline.stage.processor.parser.sql.SqlParserProcessor.java
private void resolveSchema(SchemaAndTable schemaAndTable) throws StageException {
    Map<String, Integer> columns = new HashMap<>();
    String schema = schemaAndTable.getSchema();
    String table = schemaAndTable.getTable();
    try (Statement s = connection.createStatement()) {
        ResultSetMetaData md = s
                .executeQuery(Utils.format("SELECT * FROM {}{} WHERE 1 = 0",
                        StringUtils.isNotEmpty(schema) ? "\"" + schema + "\"." : "", "\"" + table + "\""))
                .getMetaData();
        int colCount = md.getColumnCount();
        for (int i = 1; i <= colCount; i++) {
            int colType = md.getColumnType(i);
            String colName = md.getColumnName(i);
            if (!configBean.caseSensitive) {
                colName = colName.toUpperCase();
            }
            if (colType == Types.DATE || colType == Types.TIME || colType == Types.TIMESTAMP) {
                dateTimeColumns.computeIfAbsent(schemaAndTable, k -> new HashMap<>());
                dateTimeColumns.get(schemaAndTable).put(colName, md.getColumnTypeName(i));
            }
            if (colType == Types.DECIMAL || colType == Types.NUMERIC) {
                decimalColumns.computeIfAbsent(schemaAndTable, k -> new HashMap<>()).put(colName,
                        new PrecisionAndScale(md.getPrecision(i), md.getScale(i)));
            }
            columns.put(md.getColumnName(i), md.getColumnType(i));
        }
        tableSchemas.put(schemaAndTable, columns);
    } catch (SQLException ex) {
        throw new StageException(JDBC_00, configBean.hikariConfigBean.connectionString);
    }
}
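The SELECT * ... WHERE 1 = 0 query above is a common zero-row probe for reading column metadata without fetching any data. A stripped-down sketch of just that pattern, with the StreamSets-specific types removed (the method name and identifier quoting are placeholders; java.sql and java.util imports are assumed):

// Hypothetical standalone version of the zero-row metadata probe; not taken from the source file above.
static Map<String, Integer> probeColumnTypes(Connection connection, String table) throws SQLException {
    Map<String, Integer> columns = new LinkedHashMap<>();
    try (Statement s = connection.createStatement();
            ResultSet rs = s.executeQuery("SELECT * FROM \"" + table + "\" WHERE 1 = 0")) {
        ResultSetMetaData md = rs.getMetaData();
        for (int i = 1; i <= md.getColumnCount(); i++) {
            // Map each column name to its java.sql.Types constant.
            columns.put(md.getColumnName(i), md.getColumnType(i));
        }
    }
    return columns;
}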
From source file:com.ToResultSet.java
@Path("/tabelle") @GET//from w w w. j a va 2 s . c o m public String toResultSetTabelle(@QueryParam("budget") String x, @QueryParam("maxbudget") String y, @QueryParam("location") String location, @QueryParam("date") String minDateString) throws ParserConfigurationException, TransformerException, ParseException { ArrayList<String[]> result1 = new ArrayList<String[]>(); try { Class.forName("com.mysql.jdbc.Driver"); } catch (ClassNotFoundException e) { System.out.println("Fehler bei MySQL-JDBC-Bridge" + e); } //SQL Query wird hier erzeugt je nach dem Daten, die angegeben werden try { int minBudget = 0; int maxBudget = 0; try { minBudget = Integer.valueOf(x); } catch (NumberFormatException e) { if (x.length() > 0) { return "Ihre Budget soll aus Zahlen bestehen"; } } try { maxBudget = Integer.valueOf(y); } catch (NumberFormatException e) { if (y.length() > 0) { return "Ihre Budget soll aus Zahlen bestehen"; } } try { test = Integer.valueOf(location); if (test >= 0) { return "Location soll aus String bestehen"; } } catch (Exception e) { } try { if (minDateString.substring(2, 3).contains("-") && minDateString.length() > 0) { return "Date Format soll yyyy-MM-dd"; } else { java.util.Date date1 = sdf.parse(minDateString); minDate = new java.sql.Date(date1.getTime()); } } catch (Exception e) { if (minDateString.length() > 0) return "Date Format soll yyyy-MM-dd"; } //Connection mit dem SQL wird erzeugt String url = "jdbc:mysql://" + host + "/" + dbName; connection = DriverManager.getConnection( "jdbc:mysql://localhost:3306/location?autoReconnect=true&useSSL=false", "root", "dreamhigh"); statement = connection.createStatement(); String sqlQuery = "Select * FROM " + dbTable; List<String> whereClause = new ArrayList<>(); if (minBudget > 0) { whereClause.add("Budget >= " + minBudget); } if (maxBudget > 0) { whereClause.add("Budget <= " + maxBudget); } if (minBudget > maxBudget && maxBudget > 0 && minBudget > 0) { return "Minimal Budget soll kleiner als Maximal Budget"; } if (minDate != null) { whereClause.add("Date >= '" + minDate + "'"); } if (location != null && !location.isEmpty()) { whereClause.add("Location = '" + location + "'"); } //Die Daten werden nach dem Budget absteigend sortiert if (whereClause.size() > 0) { sqlQuery += " WHERE " + StringUtils.join(whereClause, " AND ") + " ORDER BY Budget DESC"; } if (whereClause.size() == 0) { sqlQuery += " ORDER BY Budget DESC"; } resultSet = statement.executeQuery(sqlQuery); int spalten = resultSet.getMetaData().getColumnCount(); System.out.println("Anzahl Spalten: " + spalten); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); doc = builder.newDocument(); Element results = doc.createElement("Results"); doc.appendChild(results); connection = DriverManager.getConnection( "jdbc:mysql://localhost:3306/location?autoReconnect=true&useSSL=false", "root", "dreamhigh"); ResultSetMetaData rsmd = resultSet.getMetaData(); int colCount = rsmd.getColumnCount(); while (resultSet.next()) { String[] str = new String[6]; for (int k = 1; k <= spalten; k++) { str[k - 1] = resultSet.getString(k); if (result1.contains(str) == false) { result1.add(str); } } } //HTML Datei wird mit Hilfe von String Builder erzeugt sb.append("<html><body>"); for (Iterator iter = result1.iterator(); iter.hasNext();) { String[] str = (String[]) iter.next(); sb.append("<br>"); sb.append("<br>"); for (int i = 0; i < str.length; i++) { sb.append(System.lineSeparator()); if (i == 0) { sb.append("Link : <a href=" + str[i] + 
">" + str[i] + "</a><br>"); } if (i == 1) { sb.append("Budget : " + str[i] + "<br>"); } if (i == 2) { sb.append("Location : " + str[i] + "<br>"); } if (i == 3) { sb.append("Qm : " + str[i] + "<br>"); } if (i == 4) { sb.append("Room : " + str[i] + "<br>"); } if (i == 5) { sb.append("Date : " + str[i] + "<br>"); } } } sb.append("</body>"); sb.append("</html>"); outPut = sb.toString(); } catch (SQLException e) { System.out.println("Fehler bei Tabellenabfrage: " + e); } return outPut = sb.toString(); }