List of usage examples for java.sql.PreparedStatement.setDouble:

    void setDouble(int parameterIndex, double x) throws SQLException;

Sets the designated parameter to the given Java double value.

From source file: org.globus.workspace.persistence.PersistenceAdapterImpl.java
public void addSpotPriceHistory(Calendar timeStamp, Double newPrice) throws WorkspaceDatabaseException { if (this.dbTrace) { logger.trace("addSpotPriceHistory(): timeStamp = " + timeStamp + ", spot price = " + newPrice); }//from w ww . j a v a2 s .c o m Connection c = null; PreparedStatement pstmt = null; try { c = getConnection(); pstmt = c.prepareStatement(SQL_INSERT_SPOT_PRICE); if (timeStamp != null) { pstmt.setLong(1, new Long(timeStamp.getTimeInMillis())); } else { pstmt.setInt(1, 0); } pstmt.setDouble(2, newPrice); final int updated = pstmt.executeUpdate(); if (this.dbTrace) { logger.trace("addSpotPriceHistory(): updated " + updated + " rows"); } } catch (SQLException e) { logger.error("", e); throw new WorkspaceDatabaseException(e); } finally { try { if (pstmt != null) { pstmt.close(); } if (c != null) { returnConnection(c); } } catch (SQLException sql) { logger.error("SQLException in finally cleanup", sql); } } }
From source file:at.alladin.rmbt.mapServer.MarkerResource.java
@Post("json") public String request(final String entity) { addAllowOrigin();/* w w w . j a v a2s. c o m*/ final MapServerOptions mso = MapServerOptions.getInstance(); final Classification classification = Classification.getInstance(); JSONObject request = null; final JSONObject answer = new JSONObject(); if (entity != null && !entity.isEmpty()) // try parse the string to a JSON object try { request = new JSONObject(entity); String lang = request.optString("language"); // Load Language Files for Client final List<String> langs = Arrays .asList(settings.getString("RMBT_SUPPORTED_LANGUAGES").split(",\\s*")); if (langs.contains(lang)) labels = ResourceManager.getSysMsgBundle(new Locale(lang)); else lang = settings.getString("RMBT_DEFAULT_LANGUAGE"); // System.out.println(request.toString(4)); final JSONObject coords = request.getJSONObject("coords"); final int zoom; double geo_x = 0; double geo_y = 0; int size = 0; boolean useXY = false; boolean useLatLon = false; if (coords.has("x") && coords.has("y")) useXY = true; else if (coords.has("lat") && coords.has("lon")) useLatLon = true; if (coords.has("z") && (useXY || useLatLon)) { zoom = coords.optInt("z"); if (useXY) { geo_x = coords.optDouble("x"); geo_y = coords.optDouble("y"); } else if (useLatLon) { final double tmpLat = coords.optDouble("lat"); final double tmpLon = coords.optDouble("lon"); geo_x = GeoCalc.lonToMeters(tmpLon); geo_y = GeoCalc.latToMeters(tmpLat); // System.out.println(String.format("using %f/%f", geo_x, geo_y)); } if (coords.has("size")) size = coords.getInt("size"); if (zoom != 0 && geo_x != 0 && geo_y != 0) { double radius = 0; if (size > 0) radius = size * GeoCalc.getResFromZoom(256, zoom); // TODO use real tile size else radius = CLICK_RADIUS * GeoCalc.getResFromZoom(256, zoom); // TODO use real tile size final double geo_x_min = geo_x - radius; final double geo_x_max = geo_x + radius; final double geo_y_min = geo_y - radius; final double geo_y_max = geo_y + radius; String hightlightUUIDString 
= null; UUID highlightUUID = null; final JSONObject mapOptionsObj = request.getJSONObject("options"); String optionStr = mapOptionsObj.optString("map_options"); if (optionStr == null || optionStr.length() == 0) // set // default optionStr = "mobile/download"; final MapOption mo = mso.getMapOptionMap().get(optionStr); final List<SQLFilter> filters = new ArrayList<>(mso.getDefaultMapFilters()); filters.add(mso.getAccuracyMapFilter()); final JSONObject mapFilterObj = request.getJSONObject("filter"); final Iterator<?> keys = mapFilterObj.keys(); while (keys.hasNext()) { final String key = (String) keys.next(); if (mapFilterObj.get(key) instanceof Object) if (key.equals("highlight")) hightlightUUIDString = mapFilterObj.getString(key); else { final MapFilter mapFilter = mso.getMapFilterMap().get(key); if (mapFilter != null) filters.add(mapFilter.getFilter(mapFilterObj.getString(key))); } } if (hightlightUUIDString != null) try { highlightUUID = UUID.fromString(hightlightUUIDString); } catch (final Exception e) { highlightUUID = null; } if (conn != null) { PreparedStatement ps = null; ResultSet rs = null; final StringBuilder whereSQL = new StringBuilder(mo.sqlFilter); for (final SQLFilter sf : filters) whereSQL.append(" AND ").append(sf.where); final String sql = String.format("SELECT" + (useLatLon ? " geo_lat lat, geo_long lon" : " ST_X(t.location) x, ST_Y(t.location) y") + ", t.time, t.timezone, t.speed_download, t.speed_upload, t.ping_median, t.network_type," + " t.signal_strength, t.lte_rsrp, t.wifi_ssid, t.network_operator_name, t.network_operator," + " t.network_sim_operator, t.roaming_type, t.public_ip_as_name, " //TODO: sim_operator obsoled by sim_name + " pMob.shortname mobile_provider_name," // TODO: obsoleted by mobile_network_name + " prov.shortname provider_text, t.open_test_uuid," + " COALESCE(mnwk.shortname,mnwk.name) mobile_network_name," + " COALESCE(msim.shortname,msim.name) mobile_sim_name" + (highlightUUID == null ? 
"" : " , c.uid, c.uuid") + " FROM v_test t" + " LEFT JOIN mccmnc2name mnwk ON t.mobile_network_id=mnwk.uid" + " LEFT JOIN mccmnc2name msim ON t.mobile_sim_id=msim.uid" + " LEFT JOIN provider prov" + " ON t.provider_id=prov.uid" + " LEFT JOIN provider pMob" + " ON t.mobile_provider_id=pMob.uid" + (highlightUUID == null ? "" : " LEFT JOIN client c ON (t.client_id=c.uid AND c.uuid=?)") + " WHERE" + " %s" + " AND location && ST_SetSRID(ST_MakeBox2D(ST_Point(?,?), ST_Point(?,?)), 900913)" + " ORDER BY" + (highlightUUID == null ? "" : " c.uid ASC,") + " t.uid DESC" + " LIMIT 5", whereSQL); // System.out.println("SQL: " + sql); ps = conn.prepareStatement(sql); int i = 1; if (highlightUUID != null) ps.setObject(i++, highlightUUID); for (final SQLFilter sf : filters) i = sf.fillParams(i, ps); ps.setDouble(i++, geo_x_min); ps.setDouble(i++, geo_y_min); ps.setDouble(i++, geo_x_max); ps.setDouble(i++, geo_y_max); // System.out.println("SQL: " + ps.toString()); if (ps.execute()) { final Locale locale = new Locale(lang); final Format format = new SignificantFormat(2, locale); final JSONArray resultList = new JSONArray(); rs = ps.getResultSet(); while (rs.next()) { final JSONObject jsonItem = new JSONObject(); JSONArray jsonItemList = new JSONArray(); // RMBTClient Info if (highlightUUID != null && rs.getString("uuid") != null) jsonItem.put("highlight", true); final double res_x = rs.getDouble(1); final double res_y = rs.getDouble(2); final String openTestUUID = rs.getObject("open_test_uuid").toString(); jsonItem.put("lat", res_x); jsonItem.put("lon", res_y); jsonItem.put("open_test_uuid", "O" + openTestUUID); // marker.put("uid", uid); final Date date = rs.getTimestamp("time"); final String tzString = rs.getString("timezone"); final TimeZone tz = TimeZone.getTimeZone(tzString); final DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM, locale); dateFormat.setTimeZone(tz); jsonItem.put("time_string", dateFormat.format(date)); final int 
fieldDown = rs.getInt("speed_download"); JSONObject singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_DOWNLOAD")); final String downloadString = String.format("%s %s", format.format(fieldDown / 1000d), labels.getString("RESULT_DOWNLOAD_UNIT")); singleItem.put("value", downloadString); singleItem.put("classification", Classification.classify(classification.THRESHOLD_DOWNLOAD, fieldDown)); // singleItem.put("help", "www.rtr.at"); jsonItemList.put(singleItem); final int fieldUp = rs.getInt("speed_upload"); singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_UPLOAD")); final String uploadString = String.format("%s %s", format.format(fieldUp / 1000d), labels.getString("RESULT_UPLOAD_UNIT")); singleItem.put("value", uploadString); singleItem.put("classification", Classification.classify(classification.THRESHOLD_UPLOAD, fieldUp)); // singleItem.put("help", "www.rtr.at"); jsonItemList.put(singleItem); final long fieldPing = rs.getLong("ping_median"); final int pingValue = (int) Math.round(rs.getDouble("ping_median") / 1000000d); singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_PING")); final String pingString = String.format("%s %s", format.format(pingValue), labels.getString("RESULT_PING_UNIT")); singleItem.put("value", pingString); singleItem.put("classification", Classification.classify(classification.THRESHOLD_PING, fieldPing)); // singleItem.put("help", "www.rtr.at"); jsonItemList.put(singleItem); final int networkType = rs.getInt("network_type"); final String signalField = rs.getString("signal_strength"); if (signalField != null && signalField.length() != 0) { final int signalValue = rs.getInt("signal_strength"); final int[] threshold = networkType == 99 || networkType == 0 ? 
classification.THRESHOLD_SIGNAL_WIFI : classification.THRESHOLD_SIGNAL_MOBILE; singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_SIGNAL")); singleItem.put("value", signalValue + " " + labels.getString("RESULT_SIGNAL_UNIT")); singleItem.put("classification", Classification.classify(threshold, signalValue)); jsonItemList.put(singleItem); } final String lteRsrpField = rs.getString("lte_rsrp"); if (lteRsrpField != null && lteRsrpField.length() != 0) { final int lteRsrpValue = rs.getInt("lte_rsrp"); final int[] threshold = classification.THRESHOLD_SIGNAL_RSRP; singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_LTE_RSRP")); singleItem.put("value", lteRsrpValue + " " + labels.getString("RESULT_LTE_RSRP_UNIT")); singleItem.put("classification", Classification.classify(threshold, lteRsrpValue)); jsonItemList.put(singleItem); } jsonItem.put("measurement", jsonItemList); jsonItemList = new JSONArray(); singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_NETWORK_TYPE")); singleItem.put("value", Helperfunctions.getNetworkTypeName(networkType)); jsonItemList.put(singleItem); if (networkType == 98 || networkType == 99) // mobile wifi or browser { String providerText = MoreObjects.firstNonNull( rs.getString("provider_text"), rs.getString("public_ip_as_name")); if (!Strings.isNullOrEmpty(providerText)) { if (providerText.length() > (MAX_PROVIDER_LENGTH + 3)) { providerText = providerText.substring(0, MAX_PROVIDER_LENGTH) + "..."; } singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_PROVIDER")); singleItem.put("value", providerText); jsonItemList.put(singleItem); } if (networkType == 99) // mobile wifi { if (highlightUUID != null && rs.getString("uuid") != null) // own test { final String ssid = rs.getString("wifi_ssid"); if (ssid != null && ssid.length() != 0) { singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_WIFI_SSID")); 
singleItem.put("value", ssid.toString()); jsonItemList.put(singleItem); } } } } else // mobile { String networkOperator = rs.getString("network_operator"); String mobileNetworkName = rs.getString("mobile_network_name"); String simOperator = rs.getString("network_sim_operator"); String mobileSimName = rs.getString("mobile_sim_name"); final int roamingType = rs.getInt("roaming_type"); //network if (!Strings.isNullOrEmpty(networkOperator)) { final String mobileNetworkString; if (roamingType != 2) { //not international roaming - display name of home network if (Strings.isNullOrEmpty(mobileSimName)) mobileNetworkString = networkOperator; else mobileNetworkString = String.format("%s (%s)", mobileSimName, networkOperator); } else { //international roaming - display name of network if (Strings.isNullOrEmpty(mobileSimName)) mobileNetworkString = networkOperator; else mobileNetworkString = String.format("%s (%s)", mobileNetworkName, networkOperator); } singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_MOBILE_NETWORK")); singleItem.put("value", mobileNetworkString); jsonItemList.put(singleItem); } //home network (sim) else if (!Strings.isNullOrEmpty(simOperator)) { final String mobileNetworkString; if (Strings.isNullOrEmpty(mobileSimName)) mobileNetworkString = simOperator; else mobileNetworkString = String.format("%s (%s)", mobileSimName, simOperator); /* if (!Strings.isNullOrEmpty(mobileProviderName)) { mobileNetworkString = mobileProviderName; } else { mobileNetworkString = simOperator; } */ singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_HOME_NETWORK")); singleItem.put("value", mobileNetworkString); jsonItemList.put(singleItem); } if (roamingType > 0) { singleItem = new JSONObject(); singleItem.put("title", labels.getString("RESULT_ROAMING")); singleItem.put("value", Helperfunctions.getRoamingType(labels, roamingType)); jsonItemList.put(singleItem); } } jsonItem.put("net", jsonItemList); 
resultList.put(jsonItem); if (resultList.length() == 0) System.out.println("Error getting Results."); // errorList.addError(MessageFormat.format(labels.getString("ERROR_DB_GET_CLIENT"), // new Object[] {uuid})); } answer.put("measurements", resultList); } else System.out.println("Error executing SQL."); } else System.out.println("No Database Connection."); } } else System.out.println("Expected request is missing."); } catch (final JSONException e) { System.out.println("Error parsing JSDON Data " + e.toString()); } catch (final SQLException e) { e.printStackTrace(); } else System.out.println("No Request."); return answer.toString(); }
From source file:org.dllearner.scripts.evaluation.EnrichmentEvaluation.java
/**
 * Aggregates the enrichment-evaluation results stored in the database and prints/writes
 * two LaTeX tables: per-algorithm statistics (suggestions, runtime, timeouts, scores)
 * into evaluation/table1.tex, and per-axiom-type recall/additional-axiom counts into
 * evaluation/table2.tex.
 *
 * NOTE(review): PreparedStatements and ResultSets are reassigned without being closed
 * (resource leak), and numberOfEntitiesWithEmptyResult is computed but never used;
 * both left unchanged since this update only adds documentation.
 *
 * @throws Exception on any SQL or I/O error
 */
public void printResultsLaTeX() throws Exception {
    List<Class<? extends LearningAlgorithm>> algorithms = new ArrayList<Class<? extends LearningAlgorithm>>();
    algorithms.addAll(classAlgorithms);
    algorithms.addAll(objectPropertyAlgorithms);
    algorithms.addAll(dataPropertyAlgorithms);
    //create view which contains only entries without TIMEOUT and NULL
    PreparedStatement ps = conn.prepareStatement(
            "CREATE OR REPLACE VIEW evaluation_cleaned AS (SELECT * FROM evaluation WHERE axiom != ? AND axiom != ?)");
    ps.setString(1, "NULL");
    ps.setString(2, "TIMEOUT");
    ps.execute();
    StringBuilder table1 = new StringBuilder();
    table1.append("\\begin{tabulary}{\\textwidth}{LRRRRR}\\toprule\n");
    table1.append(
            " algorithm & Avg. \\#suggestions & Avg. runtime in ms & timeout in \\% & Avg. score & Avg. maximum score\\\\\\midrule\n");
    // One table row per learning algorithm.
    for (Class<? extends LearningAlgorithm> algo : algorithms) {
        String algoName = algo.getAnnotation(ComponentAnn.class).name();
        //get number of entities
        ps = conn.prepareStatement("SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=?");
        ps.setString(1, algoName);
        java.sql.ResultSet rs = ps.executeQuery();
        rs.next();
        int overallNumberOfEntities = rs.getInt(1);
        //get number of entities with empty result
        ps = conn.prepareStatement(
                "SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=? AND axiom=?");
        ps.setString(1, algoName);
        ps.setString(2, "NULL");
        rs = ps.executeQuery();
        rs.next();
        int numberOfEntitiesWithEmptyResult = rs.getInt(1);
        //get number of entities with timout
        ps = conn.prepareStatement(
                "SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=? AND axiom=?");
        ps.setString(1, algoName);
        ps.setString(2, "TIMEOUT");
        rs = ps.executeQuery();
        rs.next();
        int numberOfEntitiesWithTimeout = rs.getInt(1);
        //compute average number of suggestions above threshold
        ps = conn.prepareStatement(
                "SELECT AVG(cnt) FROM (SELECT entity, COUNT(DISTINCT axiom) AS cnt FROM (SELECT * FROM evaluation WHERE algorithm=? AND score >=?) AS A GROUP BY entity) AS B");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgSuggestionsAboveThreshold = round(rs.getDouble(1));
        //compute average runtime
        ps = conn.prepareStatement(
                "SELECT AVG(runtime) FROM (SELECT MAX(runtime_ms) AS runtime FROM evaluation WHERE algorithm=?) AS A");
        ps.setString(1, algoName);
        rs = ps.executeQuery();
        rs.next();
        double avgRuntimeInMilliseconds = rs.getDouble(1);
        //compute ratio for complete timeouts
        double timeoutRatio = round((double) numberOfEntitiesWithTimeout / overallNumberOfEntities);
        //compute avg. score
        ps = conn.prepareStatement(
                "SELECT AVG(avg) FROM (SELECT AVG(score) AS avg FROM evaluation_cleaned WHERE algorithm=? AND score >= ? GROUP BY entity) AS A");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgScore = round(rs.getDouble(1));
        //compute avg. max. score
        ps = conn.prepareStatement(
                "SELECT AVG(max) FROM (SELECT MAX(score) AS max FROM evaluation_cleaned WHERE algorithm=? AND score>=? GROUP BY entity) AS A");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgMaxScore = round(rs.getDouble(1));
        // Emit the LaTeX row for this algorithm.
        table1.append(algoName.replace("axiom learner", "").trim()).append(" & ")
                .append(avgSuggestionsAboveThreshold).append(" & ").append(avgRuntimeInMilliseconds)
                .append(" & ").append(timeoutRatio).append(" & ").append(avgScore).append(" & ")
                .append(avgMaxScore).append("\\\\\n");
    }
    table1.append("\\bottomrule\n\\end{tabulary}");
    System.out.println(table1.toString());
    write2Disk(table1.toString(), "evaluation/table1.tex");
    //second part of evaluation
    StringBuilder table2 = new StringBuilder();
    table2.append("\\begin{tabulary}{\\textwidth}{LCCCCC}\\toprule\n");
    table2.append("& & & \\multicolumn{3}{c}{Estimated precision} \\\\\n");
    table2.append(" axiom type & recall & additional axioms & no & maybe & yes \\\\\\midrule\n");
    //get all axiomtypes and corresponding algorithm
    Map<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> axiomType2Algorithm = getAxiomTypesWithLearningAlgorithms();
    // get all entities in database because we compute recall only for axioms of entities which we have tested
    // we use only entities for which triples in the endpoint are contained
    java.sql.ResultSet rs = conn
            .prepareStatement("SELECT DISTINCT entity FROM evaluation WHERE axiom != 'NULL'").executeQuery();
    Set<OWLEntity> allEntities = new HashSet<OWLEntity>();
    Set<OWLEntity> classes = new HashSet<OWLEntity>();
    Set<OWLEntity> objectProperties = new HashSet<OWLEntity>();
    Set<OWLEntity> dataProperties = new HashSet<OWLEntity>();
    IRI iri;
    // Classify each entity by looking it up in the DBpedia reference ontology.
    // NOTE(review): substring(4) presumably strips a fixed entity prefix — confirm against writer.
    while (rs.next()) {
        iri = IRI.create("http://dbpedia.org/ontology/" + rs.getString(1).substring(4));
        if (dbPediaOntology.containsClassInSignature(iri)) {
            allEntities.add(factory.getOWLClass(iri));
            classes.add(factory.getOWLClass(iri));
        } else if (dbPediaOntology.containsObjectPropertyInSignature(iri)) {
            allEntities.add(factory.getOWLObjectProperty(iri));
            objectProperties.add(factory.getOWLObjectProperty(iri));
        } else if (dbPediaOntology.containsDataPropertyInSignature(iri)) {
            allEntities.add(factory.getOWLDataProperty(iri));
            dataProperties.add(factory.getOWLDataProperty(iri));
        }
    }
    //compute recall for each axiom type
    ps = conn.prepareStatement("SELECT axiom, entailed, score FROM evaluation WHERE algorithm=? AND score>=?");
    Set<OWLEntity> entities = null;
    for (Entry<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> entry : axiomType2Algorithm
            .entrySet()) {
        AxiomType<? extends OWLAxiom> type = entry.getKey();
        algorithms = entry.getValue();
        // Select the entity set matching the algorithm family of this axiom type.
        if (classAlgorithms.containsAll(algorithms)) {
            entities = classes;
        } else if (objectPropertyAlgorithms.containsAll(algorithms)) {
            entities = objectProperties;
        } else if (dataPropertyAlgorithms.containsAll(algorithms)) {
            entities = dataProperties;
        }
        if (entities != null) {
            ps.setString(1, algorithms.get(0).getAnnotation(ComponentAnn.class).name());
            ps.setDouble(2, threshold);
            //get all found axioms for specific axiom type
            Set<String> foundAxioms = new TreeSet<String>();
            Map<String, Double> foundAndNotEntailedAxioms = new TreeMap<String, Double>();
            rs = ps.executeQuery();
            String axiom;
            boolean entailed;
            double score;
            while (rs.next()) {
                axiom = rs.getString(1);
                entailed = rs.getBoolean(2);
                score = rs.getDouble(3);
                foundAxioms.add(axiom);
                if (!entailed) {
                    foundAndNotEntailedAxioms.put(axiom, score);
                }
            }
            //get all axioms in the reference ontology for a specific axiom type
            Set<String> relevantAxioms = getRelevantAxioms2(type, entities);
            //compute the axioms which are in the reference ontology, but not be computed by the learning algorithm
            Set<String> missedAxioms = org.mindswap.pellet.utils.SetUtils.difference(relevantAxioms,
                    foundAxioms);
            //compute the additional found axioms which were not entailed
            for (String relAxiom : relevantAxioms) {
                foundAndNotEntailedAxioms.remove(relAxiom);
            }
            Set<String> additionalAxioms = foundAndNotEntailedAxioms.keySet();
            int total = relevantAxioms.size();
            int found = total - missedAxioms.size();
            table2.append(type.getName()).append(" & ").append(found + "/" + total).append(" & ")
                    .append(additionalAxioms.size()).append(" & & & \\\\\n");
            System.out.println(type.getName() + ": " + found + "/" + total);
            //write additional axioms with score into file
            writeToDisk(type, foundAndNotEntailedAxioms);
            //write missed axioms into file
            writeToDisk(type, missedAxioms);
        }
    }
    table2.append("\\end{tabulary}");
    System.out.println(table2.toString());
    write2Disk(table2.toString(), "evaluation/table2.tex");
}
From source file:com.flexive.core.storage.GenericDivisionImporter.java
/**
 * Import flat storages to the hierarchical storage.
 * <p>
 * First reads the flatstorage metadata XML from the zip (column counts per storage
 * table and the assignment-to-storage/level/column mapping), then streams the flat
 * data XML through a SAX handler that inserts one row into the hierarchical
 * content-data table per flat column value, choosing the prepared statement that
 * matches the column's data type.
 *
 * @param con an open and valid connection to store imported data
 * @param zip zip file containing the data
 * @throws Exception on errors
 */
protected void importFlatStoragesHierarchical(Connection con, ZipFile zip) throws Exception {
    //mapping: storage->level->columnname->assignment id
    final Map<String, Map<Integer, Map<String, Long>>> flatAssignmentMapping = new HashMap<String, Map<Integer, Map<String, Long>>>(
            5);
    //mapping: assignment id->position index
    final Map<Long, Integer> assignmentPositions = new HashMap<Long, Integer>(100);
    //mapping: flatstorage->column sizes [string,bigint,double,select,text]
    final Map<String, Integer[]> flatstoragesColumns = new HashMap<String, Integer[]>(5);
    ZipEntry zeMeta = getZipEntry(zip, FILE_FLATSTORAGE_META);
    DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
    Document document = builder.parse(zip.getInputStream(zeMeta));
    XPath xPath = XPathFactory.newInstance().newXPath();
    //calculate column sizes
    NodeList nodes = (NodeList) xPath.evaluate("/flatstorageMeta/storageMeta", document,
            XPathConstants.NODESET);
    Node currNode;
    for (int i = 0; i < nodes.getLength(); i++) {
        currNode = nodes.item(i);
        int cbigInt = Integer.parseInt(currNode.getAttributes().getNamedItem("bigInt").getNodeValue());
        int cdouble = Integer.parseInt(currNode.getAttributes().getNamedItem("double").getNodeValue());
        int cselect = Integer.parseInt(currNode.getAttributes().getNamedItem("select").getNodeValue());
        int cstring = Integer.parseInt(currNode.getAttributes().getNamedItem("string").getNodeValue());
        int ctext = Integer.parseInt(currNode.getAttributes().getNamedItem("text").getNodeValue());
        String tableName = null;
        // The storage's table name is carried in a child <name> element.
        if (currNode.hasChildNodes()) {
            for (int j = 0; j < currNode.getChildNodes().getLength(); j++)
                if (currNode.getChildNodes().item(j).getNodeName().equals("name")) {
                    tableName = currNode.getChildNodes().item(j).getTextContent();
                }
        }
        if (tableName != null) {
            flatstoragesColumns.put(tableName, new Integer[] { cstring, cbigInt, cdouble, cselect, ctext });
        }
    }
    //parse mappings
    nodes = (NodeList) xPath.evaluate("/flatstorageMeta/mapping", document, XPathConstants.NODESET);
    for (int i = 0; i < nodes.getLength(); i++) {
        currNode = nodes.item(i);
        long assignment = Long.valueOf(currNode.getAttributes().getNamedItem("assid").getNodeValue());
        int level = Integer.valueOf(currNode.getAttributes().getNamedItem("lvl").getNodeValue());
        String storage = null;
        String columnname = null;
        final NodeList childNodes = currNode.getChildNodes();
        for (int c = 0; c < childNodes.getLength(); c++) {
            Node child = childNodes.item(c);
            if ("tblname".equals(child.getNodeName()))
                storage = child.getTextContent();
            else if ("colname".equals(child.getNodeName()))
                columnname = child.getTextContent();
        }
        if (storage == null || columnname == null)
            throw new Exception("Invalid flatstorage export: could not read storage or column name!");
        if (!flatAssignmentMapping.containsKey(storage))
            flatAssignmentMapping.put(storage, new HashMap<Integer, Map<String, Long>>(20));
        Map<Integer, Map<String, Long>> levelMap = flatAssignmentMapping.get(storage);
        if (!levelMap.containsKey(level))
            levelMap.put(level, new HashMap<String, Long>(30));
        Map<String, Long> columnMap = levelMap.get(level);
        if (!columnMap.containsKey(columnname))
            columnMap.put(columnname, assignment);
        //calculate position
        assignmentPositions.put(assignment,
                getAssignmentPosition(flatstoragesColumns.get(storage), columnname));
    }
    if (flatAssignmentMapping.size() == 0) {
        LOG.warn("No flatstorage assignments found to process!");
        return;
    }
    ZipEntry zeData = getZipEntry(zip, FILE_DATA_FLAT);
    final String xpathStorage = "flatstorages/storage";
    final String xpathData = "flatstorages/storage/data";
    final PreparedStatement psGetAssInfo = con.prepareStatement(
            "SELECT DISTINCT a.APROPERTY,a.XALIAS,p.DATATYPE FROM " + DatabaseConst.TBL_STRUCT_ASSIGNMENTS
                    + " a, " + DatabaseConst.TBL_STRUCT_PROPERTIES + " p WHERE a.ID=? AND p.ID=a.APROPERTY");
    final Map<Long, Object[]> assignmentPropAlias = new HashMap<Long, Object[]>(assignmentPositions.size());
    // Shared INSERT prefix; the numbers in the comment map literal columns to
    // the parameter indices used when the statements are filled below.
    final String insert1 = "INSERT INTO " + DatabaseConst.TBL_CONTENT_DATA +
    //1 2 3 4 5 6 =1 =1 =1 =1 7 8 9
            "(ID,VER,POS,LANG,TPROP,ASSIGN,XDEPTH,XMULT,XINDEX,PARENTXMULT,ISMAX_VER,ISLIVE_VER,ISMLDEF,";
    final String insert2 = "(?,?,?,?,1,?,?,1,1,1,?,?,?,";
    // One prepared statement per target data type; value columns differ.
    final PreparedStatement psString = con
            .prepareStatement(insert1 + "FTEXT1024,UFTEXT1024,FSELECT,FINT)VALUES" + insert2 + "?,?,0,?)");
    final PreparedStatement psText = con
            .prepareStatement(insert1 + "FCLOB,UFCLOB,FSELECT,FINT)VALUES" + insert2 + "?,?,0,?)");
    final PreparedStatement psDouble = con
            .prepareStatement(insert1 + "FDOUBLE,FSELECT,FINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psNumber = con
            .prepareStatement(insert1 + "FINT,FSELECT,FBIGINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psLargeNumber = con
            .prepareStatement(insert1 + "FBIGINT,FSELECT,FINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psFloat = con
            .prepareStatement(insert1 + "FFLOAT,FSELECT,FINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psBoolean = con
            .prepareStatement(insert1 + "FBOOL,FSELECT,FINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psReference = con
            .prepareStatement(insert1 + "FREF,FSELECT,FINT)VALUES" + insert2 + "?,0,?)");
    final PreparedStatement psSelectOne = con
            .prepareStatement(insert1 + "FSELECT,FINT)VALUES" + insert2 + "?,?)");
    try {
        final SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
        final DefaultHandler handler = new DefaultHandler() {
            // Name of the element currently being read (inside a <data> tag).
            private String currentElement = null;
            // Name of the flatstorage table currently being imported.
            private String currentStorage = null;
            // Attribute/element values collected for the current <data> entry.
            private Map<String, String> data = new HashMap<String, String>(10);
            // Character buffer for the current element's text content.
            private StringBuilder sbData = new StringBuilder(10000);
            boolean inTag = false;
            boolean inElement = false;
            // Element-name stack and its "a/b/c" rendering, used to match xpathStorage/xpathData.
            List<String> path = new ArrayList<String>(10);
            StringBuilder currPath = new StringBuilder(100);
            int insertCount = 0;

            /**
             * {@inheritDoc}
             */
            @Override
            public void startDocument() throws SAXException {
                inTag = false;
                inElement = false;
                path.clear();
                currPath.setLength(0);
                sbData.setLength(0);
                data.clear();
                currentElement = null;
                currentStorage = null;
                insertCount = 0;
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void endDocument() throws SAXException {
                LOG.info("Imported [" + insertCount + "] flatstorage entries into the hierarchical storage");
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void startElement(String uri, String localName, String qName, Attributes attributes)
                    throws SAXException {
                pushPath(qName, attributes);
                if (currPath.toString().equals(xpathData)) {
                    // New <data> entry: capture all attributes as key/value pairs.
                    inTag = true;
                    data.clear();
                    for (int i = 0; i < attributes.getLength(); i++) {
                        String name = attributes.getLocalName(i);
                        if (StringUtils.isEmpty(name))
                            name = attributes.getQName(i);
                        data.put(name, attributes.getValue(i));
                    }
                } else if (currPath.toString().equals(xpathStorage)) {
                    currentStorage = attributes.getValue("name");
                    LOG.info("Processing storage: " + currentStorage);
                } else {
                    currentElement = qName;
                }
                inElement = true;
                sbData.setLength(0);
            }

            /**
             * Push a path element from the stack
             *
             * @param qName element name to push
             * @param att attributes
             */
            @SuppressWarnings({ "UnusedDeclaration" })
            private void pushPath(String qName, Attributes att) {
                path.add(qName);
                buildPath();
            }

            /**
             * Pop the top path element from the stack
             */
            private void popPath() {
                path.remove(path.size() - 1);
                buildPath();
            }

            /**
             * Rebuild the current path
             */
            private synchronized void buildPath() {
                currPath.setLength(0);
                for (String s : path)
                    currPath.append(s).append('/');
                if (currPath.length() > 1)
                    currPath.delete(currPath.length() - 1, currPath.length());
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void endElement(String uri, String localName, String qName) throws SAXException {
                if (currPath.toString().equals(xpathData)) {
                    // A full <data> entry is complete: write its columns to the DB.
                    inTag = false;
                    processData();
                    // (A large commented-out legacy insert/update phase from the
                    // original implementation was removed here.)
                } else {
                    if (inTag) {
                        // Child element of <data>: store its text content under the element name.
                        data.put(currentElement, sbData.toString());
                    }
                    currentElement = null;
                }
                popPath();
                inElement = false;
                sbData.setLength(0);
            }

            /**
             * Insert every recognized value column of the current entry;
             * "_mld" keys are per-column flags, not values, and are skipped.
             */
            void processData() {
                final String[] cols = { "string", "bigint", "double", "select", "text" };
                for (String column : data.keySet()) {
                    if (column.endsWith("_mld"))
                        continue;
                    for (String check : cols) {
                        if (column.startsWith(check)) {
                            if ("select".equals(check) && "0".equals(data.get(column)))
                                continue; //dont insert 0-referencing selects
                            try {
                                insertData(column);
                            } catch (SQLException e) {
                                //noinspection ThrowableInstanceNeverThrown
                                throw new FxDbException(e, "ex.db.sqlError", e.getMessage())
                                        .asRuntimeException();
                            }
                        }
                    }
                }
            }

            /**
             * Insert one flat column value as a hierarchical content-data row,
             * dispatching to the prepared statement matching the property's data type.
             *
             * @param column flatstorage column name holding the value
             * @throws SQLException on insert errors
             */
            private void insertData(String column) throws SQLException {
                final int level = Integer.parseInt(data.get("lvl"));
                long assignment = flatAssignmentMapping.get(currentStorage).get(level)
                        .get(column.toUpperCase());
                int pos = FxArrayUtils.getIntElementAt(data.get("positions"), ',',
                        assignmentPositions.get(assignment));
                String _valueData = data.get("valuedata");
                // valuedata is a comma-separated list of hex ints, indexed by assignment position.
                Integer valueData = _valueData == null ? null
                        : FxArrayUtils.getHexIntElementAt(data.get("valuedata"), ',',
                                assignmentPositions.get(assignment));
                Object[] propXP = getPropertyXPathDataType(assignment);
                long prop = (Long) propXP[0];
                String xpath = (String) propXP[1];
                FxDataType dataType;
                try {
                    dataType = FxDataType.getById((Long) propXP[2]);
                } catch (FxNotFoundException e) {
                    throw e.asRuntimeException();
                }
                long id = Long.parseLong(data.get("id"));
                int ver = Integer.parseInt(data.get("ver"));
                long lang = Integer.parseInt(data.get("lang"));
                boolean isMaxVer = "1".equals(data.get("ismax_ver"));
                boolean isLiveVer = "1".equals(data.get("islive_ver"));
                boolean mlDef = "1".equals(data.get(column + "_mld"));
                PreparedStatement ps;
                // vdPos is the parameter index of the value-data column; it differs
                // because string/text statements bind two value parameters (raw + uppercased).
                int vdPos;
                switch (dataType) {
                case String1024:
                    ps = psString;
                    ps.setString(10, data.get(column));
                    ps.setString(11, data.get(column).toUpperCase());
                    vdPos = 12;
                    break;
                case Text:
                case HTML:
                    ps = psText;
                    ps.setString(10, data.get(column));
                    ps.setString(11, data.get(column).toUpperCase());
                    vdPos = 12;
                    break;
                case Number:
                    ps = psNumber;
                    ps.setLong(10, Long.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                case LargeNumber:
                    ps = psLargeNumber;
                    ps.setLong(10, Long.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                case Reference:
                    ps = psReference;
                    ps.setLong(10, Long.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                case Float:
                    ps = psFloat;
                    ps.setFloat(10, Float.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                case Double:
                    ps = psDouble;
                    ps.setDouble(10, Double.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                case Boolean:
                    ps = psBoolean;
                    ps.setBoolean(10, "1".equals(data.get(column)));
                    vdPos = 11;
                    break;
                case SelectOne:
                    ps = psSelectOne;
                    ps.setLong(10, Long.valueOf(data.get(column)));
                    vdPos = 11;
                    break;
                default:
                    //noinspection ThrowableInstanceNeverThrown
                    throw new FxInvalidParameterException("assignment",
                            "ex.structure.flatstorage.datatype.unsupported", dataType.name())
                                    .asRuntimeException();
                }
                // Common columns shared by every statement variant.
                ps.setLong(1, id);
                ps.setInt(2, ver);
                ps.setInt(3, pos);
                ps.setLong(4, lang);
                ps.setLong(5, prop);
                ps.setLong(6, assignment);
                ps.setBoolean(7, isMaxVer);
                ps.setBoolean(8, isLiveVer);
                ps.setBoolean(9, mlDef);
                if (valueData == null)
                    ps.setNull(vdPos, java.sql.Types.NUMERIC);
                else
                    ps.setInt(vdPos, valueData);
                ps.executeUpdate();
                insertCount++;
            }

            /**
             * Get property id, xpath and data type for an assignment
             * (cached in assignmentPropAlias after the first lookup).
             *
             * @param assignment assignment id
             * @return Object[] {propertyId, xpath, datatype}
             */
            private Object[] getPropertyXPathDataType(long assignment) {
                if (assignmentPropAlias.get(assignment) != null)
                    return assignmentPropAlias.get(assignment);
                try {
                    psGetAssInfo.setLong(1, assignment);
                    // NOTE(review): this ResultSet is never closed; left unchanged.
                    ResultSet rs = psGetAssInfo.executeQuery();
                    if (rs != null && rs.next()) {
                        Object[] data = new Object[] { rs.getLong(1), rs.getString(2), rs.getLong(3) };
                        assignmentPropAlias.put(assignment, data);
                        return data;
                    }
                } catch (SQLException e) {
                    throw new IllegalArgumentException(
                            "Could not load data for assignment " + assignment + ": " + e.getMessage());
                }
                throw new IllegalArgumentException("Could not load data for assignment " + assignment + "!");
            }

            /**
             * {@inheritDoc}
             */
            @Override
            public void characters(char[] ch, int start, int length) throws SAXException {
                if (inElement)
                    sbData.append(ch, start, length);
            }
        };
        parser.parse(zip.getInputStream(zeData), handler);
    } finally {
        // Close all prepared statements regardless of parse outcome.
        Database.closeObjects(GenericDivisionImporter.class, psGetAssInfo, psString, psBoolean, psDouble,
                psFloat, psLargeNumber, psNumber, psReference, psSelectOne, psText);
    }
}
From source file:org.dllearner.scripts.evaluation.EnrichmentEvaluationMultithreaded.java
/**
 * Builds two LaTeX result tables from the "evaluation" database table and writes
 * them to "evaluation/table1.tex" and "evaluation/table2.tex".
 * <p>
 * Table 1: per algorithm — avg. #suggestions above {@code threshold}, avg. runtime,
 * timeout ratio, avg. score and avg. maximum score.
 * Table 2: per axiom type — recall against the reference ontology and the number of
 * additional (found but not entailed) axioms; learned axioms are also saved as TTL
 * under "evaluation/ontologies/".
 * <p>
 * NOTE(review): PreparedStatements and ResultSets are reassigned and never closed;
 * acceptable for a one-shot evaluation script, but they leak until GC.
 */
public void printResultsLaTeX() throws Exception {
    // Evaluate all three algorithm categories in one pass.
    List<Class<? extends LearningAlgorithm>> algorithms = new ArrayList<Class<? extends LearningAlgorithm>>();
    algorithms.addAll(classAlgorithms);
    algorithms.addAll(objectPropertyAlgorithms);
    algorithms.addAll(dataPropertyAlgorithms);

    // Create a view which contains only entries without TIMEOUT and NULL axioms.
    PreparedStatement ps = conn.prepareStatement(
            "CREATE OR REPLACE VIEW evaluation_cleaned AS (SELECT * FROM evaluation WHERE axiom != ? AND axiom != ?)");
    ps.setString(1, "NULL");
    ps.setString(2, "TIMEOUT");
    ps.execute();

    StringBuilder table1 = new StringBuilder();
    table1.append("\\begin{tabulary}{\\textwidth}{LRRRRR}\\toprule\n");
    table1.append(
            " algorithm & Avg. \\#suggestions & Avg. runtime in ms & timeout in \\% & Avg. score & Avg. maximum score\\\\\\midrule\n");
    for (Class<? extends LearningAlgorithm> algo : algorithms) {
        String algoName = algo.getAnnotation(ComponentAnn.class).name();

        // get number of entities
        ps = conn.prepareStatement("SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=?");
        ps.setString(1, algoName);
        java.sql.ResultSet rs = ps.executeQuery();
        rs.next();
        int overallNumberOfEntities = rs.getInt(1);

        // get number of entities with empty result
        // NOTE(review): computed but never used below.
        ps = conn.prepareStatement(
                "SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=? AND axiom=?");
        ps.setString(1, algoName);
        ps.setString(2, "NULL");
        rs = ps.executeQuery();
        rs.next();
        int numberOfEntitiesWithEmptyResult = rs.getInt(1);

        // get number of entities with timeout
        ps = conn.prepareStatement(
                "SELECT COUNT(DISTINCT entity) FROM evaluation WHERE algorithm=? AND axiom=?");
        ps.setString(1, algoName);
        ps.setString(2, "TIMEOUT");
        rs = ps.executeQuery();
        rs.next();
        int numberOfEntitiesWithTimeout = rs.getInt(1);

        // compute average number of suggestions above threshold
        ps = conn.prepareStatement(
                "SELECT AVG(cnt) FROM (SELECT entity, COUNT(DISTINCT axiom) AS cnt FROM (SELECT * FROM evaluation WHERE algorithm=? AND score >=?) AS A GROUP BY entity) AS B");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgSuggestionsAboveThreshold = round(rs.getDouble(1));

        // compute average runtime
        // NOTE(review): inner SELECT has no GROUP BY, so MAX is over all rows and
        // AVG averages a single value — possibly a missing "GROUP BY entity"; confirm.
        ps = conn.prepareStatement(
                "SELECT AVG(runtime) FROM (SELECT MAX(runtime_ms) AS runtime FROM evaluation WHERE algorithm=?) AS A");
        ps.setString(1, algoName);
        rs = ps.executeQuery();
        rs.next();
        double avgRuntimeInMilliseconds = rs.getDouble(1);

        // compute ratio for complete timeouts
        double timeoutRatio = round((double) numberOfEntitiesWithTimeout / overallNumberOfEntities);

        // compute avg. score (per entity, above threshold, on the cleaned view)
        ps = conn.prepareStatement(
                "SELECT AVG(avg) FROM (SELECT AVG(score) AS avg FROM evaluation_cleaned WHERE algorithm=? AND score >= ? GROUP BY entity) AS A");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgScore = round(rs.getDouble(1));

        // compute avg. max. score (per entity, above threshold, on the cleaned view)
        ps = conn.prepareStatement(
                "SELECT AVG(max) FROM (SELECT MAX(score) AS max FROM evaluation_cleaned WHERE algorithm=? AND score>=? GROUP BY entity) AS A");
        ps.setString(1, algoName);
        ps.setDouble(2, threshold);
        rs = ps.executeQuery();
        rs.next();
        double avgMaxScore = round(rs.getDouble(1));

        // one LaTeX table row per algorithm
        table1.append(algoName.replace("axiom learner", "").trim()).append(" & ")
                .append(avgSuggestionsAboveThreshold).append(" & ").append(avgRuntimeInMilliseconds)
                .append(" & ").append(timeoutRatio).append(" & ").append(avgScore).append(" & ")
                .append(avgMaxScore).append("\\\\\n");
    }
    table1.append("\\bottomrule\n\\end{tabulary}");
    System.out.println(table1.toString());
    write2Disk(table1.toString(), "evaluation/table1.tex");

    // second part of evaluation: recall / additional axioms per axiom type
    StringBuilder table2 = new StringBuilder();
    table2.append("\\begin{tabulary}{\\textwidth}{LCCCCC}\\toprule\n");
    table2.append("& & & \\multicolumn{3}{c}{Estimated precision} \\\\\n");
    table2.append(" axiom type & recall & additional axioms & no & maybe & yes \\\\\\midrule\n");

    // get all axiom types and the corresponding learning algorithms
    Map<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> axiomType2Algorithm = getAxiomTypesWithLearningAlgorithms();

    // get all entities in database because we compute recall only for axioms of entities which we have tested
    // we use only entities for which triples in the endpoint are contained
    java.sql.ResultSet rs = conn
            .prepareStatement("SELECT DISTINCT entity FROM evaluation WHERE axiom != 'EMPTY_ENTITY'")
            .executeQuery();
    Set<OWLEntity> allEntities = new HashSet<OWLEntity>();
    Set<OWLEntity> classes = new HashSet<OWLEntity>();
    Set<OWLEntity> objectProperties = new HashSet<OWLEntity>();
    Set<OWLEntity> dataProperties = new HashSet<OWLEntity>();
    IRI iri;
    while (rs.next()) {
        // substring(4) strips a 4-char prefix from the stored entity name
        // (presumably "dbo:" — TODO confirm against the writer side)
        iri = IRI.create("http://dbpedia.org/ontology/" + rs.getString(1).substring(4));
        if (dbPediaOntology.containsClassInSignature(iri)) {
            allEntities.add(factory.getOWLClass(iri));
            classes.add(factory.getOWLClass(iri));
        } else if (dbPediaOntology.containsObjectPropertyInSignature(iri)) {
            allEntities.add(factory.getOWLObjectProperty(iri));
            objectProperties.add(factory.getOWLObjectProperty(iri));
        } else if (dbPediaOntology.containsDataPropertyInSignature(iri)) {
            allEntities.add(factory.getOWLDataProperty(iri));
            dataProperties.add(factory.getOWLDataProperty(iri));
        }
    }

    // compute recall for each axiom type
    ps = conn.prepareStatement(
            "SELECT axiom, entailed, score FROM evaluation WHERE algorithm=? AND score>=0 AND entity=?");
    Set<OWLEntity> entities = null;
    for (Entry<AxiomType<? extends OWLAxiom>, List<Class<? extends LearningAlgorithm>>> entry : axiomType2Algorithm
            .entrySet()) {
        AxiomType<? extends OWLAxiom> type = entry.getKey();
        algorithms = entry.getValue();
        // pick the entity set matching this axiom type's algorithm category
        entities = null;
        if (classAlgorithms.containsAll(algorithms)) {
            entities = classes;
        } else if (objectPropertyAlgorithms.containsAll(algorithms)) {
            entities = objectProperties;
        } else if (dataPropertyAlgorithms.containsAll(algorithms)) {
            entities = dataProperties;
        }
        DefaultPrefixManager pm = new DefaultPrefixManager();
        pm.setPrefix("dbo:", "http://dbpedia.org/ontology/");
        Set<String> missedAxioms = new TreeSet<String>();
        Set<String> additionalAxioms = new TreeSet<String>();
        Map<String, Double> foundAndNotEntailedAxioms = new TreeMap<String, Double>();
        if (entities != null) {
            // write learned axioms in separate TTL file
            new File("evaluation/ontologies").mkdirs();
            OWLOntology ontology = manager
                    .createOntology(IRI.create("http://dl-learner.org/ontologies/" + type.getName() + ".owl"));
            if (algorithm2Ontology.containsKey(algorithms.get(0))) {
                manager.addAxioms(ontology, algorithm2Ontology.get(algorithms.get(0)));
                manager.saveOntology(ontology, new TurtleOntologyFormat(),
                        new FileOutputStream(new File("evaluation/ontologies/" + type.getName() + ".ttl")));
            }
            for (OWLEntity entity : entities) {
                Map<String, Double> axiom2Score = new HashMap<String, Double>();
                ps.setString(1, algorithms.get(0).getAnnotation(ComponentAnn.class).name());
                // ps.setDouble(2, threshold);
                ps.setString(2, pm.getShortForm(entity));
                // get all found axioms for this entity and axiom type
                Set<String> foundAxioms = new TreeSet<String>();
                Map<String, Double> foundAndNotEntailedAxiomsTmp = new TreeMap<String, Double>();
                rs = ps.executeQuery();
                String axiom;
                boolean entailed;
                double score;
                boolean emptyEntity = false;
                while (rs.next()) {
                    axiom = rs.getString(1);
                    if (axiom.equalsIgnoreCase("empty_entity")) {
                        emptyEntity = true;
                    }
                    entailed = rs.getBoolean(2);
                    score = rs.getDouble(3);
                    if (!emptyEntity) {
                        if (score >= threshold) {
                            foundAxioms.add(axiom);
                            if (!entailed) {
                                foundAndNotEntailedAxiomsTmp.put(axiom, score);
                            }
                        } else {
                            // below threshold: remember score so misses can be annotated
                            axiom2Score.put(axiom, score);
                        }
                    }
                }
                // get all axioms in the reference ontology for a specific axiom type
                Set<String> relevantAxioms = getRelevantAxioms2(type, Collections.singleton(entity));
                // compute the axioms which are in the reference ontology, but were not computed by the learning algorithm
                Set<String> missedAxiomsTmp = org.mindswap.pellet.utils.SetUtils.difference(relevantAxioms,
                        foundAxioms);
                // annotate each miss with why it was missed (empty entity / low score)
                Set<String> tmp = new TreeSet<String>();
                for (String ax : missedAxiomsTmp) {
                    if (emptyEntity) {
                        tmp.add(ax + "\t(EMPTY_ENTITY)");
                    } else if (axiom2Score.containsKey(ax)) {
                        tmp.add(ax + "\t(" + axiom2Score.get(ax) + ")");
                    } else {
                        tmp.add(ax);
                    }
                }
                missedAxiomsTmp = tmp;
                missedAxioms.addAll(missedAxiomsTmp);
                // compute the additional found axioms which were not entailed
                for (String relAxiom : relevantAxioms) {
                    foundAndNotEntailedAxiomsTmp.remove(relAxiom);
                }
                Set<String> additionalAxiomsTmp = foundAndNotEntailedAxiomsTmp.keySet();
                additionalAxioms.addAll(additionalAxiomsTmp);
                foundAndNotEntailedAxioms.putAll(foundAndNotEntailedAxiomsTmp);
            }
            int total = getRelevantAxioms2(type, entities).size();
            int found = total - missedAxioms.size();
            table2.append(type.getName()).append(" & ").append(found + "/" + total).append(" & ")
                    .append(additionalAxioms.size()).append(" & & & \\\\\n");
            System.out.println(type.getName() + ": " + found + "/" + total);
            // write additional axioms with score into file
            writeToDisk(type, foundAndNotEntailedAxioms);
            // write missed axioms into file
            writeToDisk(type, missedAxioms);
        }
    }
    table2.append("\\end{tabulary}");
    System.out.println(table2.toString());
    write2Disk(table2.toString(), "evaluation/table2.tex");
}
From source file:com.oltpbenchmark.benchmarks.tpcc.TPCCLoader.java
/**
 * Populates the TPC-C STOCK table with {@code whseKount * itemKount} rows
 * (one row per item per warehouse), either via batched JDBC inserts or,
 * when {@code outputFiles} is set, as a CSV file "stock.csv".
 *
 * @param whseKount number of warehouses (w_id runs 1..whseKount)
 * @param itemKount number of items (i_id runs 1..itemKount)
 * @return the number of rows generated; on SQLException the transaction is
 *         rolled back and the count reached so far is returned
 */
protected int loadStock(int whseKount, int itemKount) {
    int k = 0; // rows generated so far
    int t = 0; // total rows expected
    int randPct = 0;
    int len = 0;
    int startORIGINAL = 0;
    try {
        PreparedStatement stckPrepStmt = getInsertStatement(TPCCConstants.TABLENAME_STOCK);
        now = new java.util.Date();
        t = (whseKount * itemKount);
        LOG.debug("\nStart Stock Load for " + t + " units @ " + now + " ...");
        if (outputFiles == true) {
            out = new PrintWriter(new FileOutputStream(fileLocation + "stock.csv"));
            LOG.debug("\nWriting Stock file to: " + fileLocation + "stock.csv");
        }
        // Single Stock instance reused for every row; fields are overwritten each pass.
        Stock stock = new Stock();
        for (int i = 1; i <= itemKount; i++) {
            for (int w = 1; w <= whseKount; w++) {
                stock.s_i_id = i;
                stock.s_w_id = w;
                stock.s_quantity = TPCCUtil.randomNumber(10, 100, gen);
                stock.s_ytd = 0;
                stock.s_order_cnt = 0;
                stock.s_remote_cnt = 0;
                // s_data
                randPct = TPCCUtil.randomNumber(1, 100, gen);
                len = TPCCUtil.randomNumber(26, 50, gen);
                if (randPct > 10) {
                    // 90% of time s_data is a random string of length [26 .. 50]
                    stock.s_data = TPCCUtil.randomStr(len);
                } else {
                    // 10% of time s_data has "ORIGINAL" crammed somewhere in middle
                    startORIGINAL = TPCCUtil.randomNumber(2, (len - 8), gen);
                    stock.s_data = TPCCUtil.randomStr(startORIGINAL - 1) + "ORIGINAL"
                            + TPCCUtil.randomStr(len - startORIGINAL - 9);
                }
                stock.s_dist_01 = TPCCUtil.randomStr(24);
                stock.s_dist_02 = TPCCUtil.randomStr(24);
                stock.s_dist_03 = TPCCUtil.randomStr(24);
                stock.s_dist_04 = TPCCUtil.randomStr(24);
                stock.s_dist_05 = TPCCUtil.randomStr(24);
                stock.s_dist_06 = TPCCUtil.randomStr(24);
                stock.s_dist_07 = TPCCUtil.randomStr(24);
                stock.s_dist_08 = TPCCUtil.randomStr(24);
                stock.s_dist_09 = TPCCUtil.randomStr(24);
                stock.s_dist_10 = TPCCUtil.randomStr(24);
                k++;
                if (outputFiles == false) {
                    // DB path: batch the insert; commit every configCommitCount rows.
                    // NOTE(review): parameter order here is (w_id, i_id) while the CSV
                    // path writes (i_id, w_id) first — confirm both match their targets.
                    stckPrepStmt.setLong(1, stock.s_w_id);
                    stckPrepStmt.setLong(2, stock.s_i_id);
                    stckPrepStmt.setLong(3, stock.s_quantity);
                    stckPrepStmt.setDouble(4, stock.s_ytd);
                    stckPrepStmt.setLong(5, stock.s_order_cnt);
                    stckPrepStmt.setLong(6, stock.s_remote_cnt);
                    stckPrepStmt.setString(7, stock.s_data);
                    stckPrepStmt.setString(8, stock.s_dist_01);
                    stckPrepStmt.setString(9, stock.s_dist_02);
                    stckPrepStmt.setString(10, stock.s_dist_03);
                    stckPrepStmt.setString(11, stock.s_dist_04);
                    stckPrepStmt.setString(12, stock.s_dist_05);
                    stckPrepStmt.setString(13, stock.s_dist_06);
                    stckPrepStmt.setString(14, stock.s_dist_07);
                    stckPrepStmt.setString(15, stock.s_dist_08);
                    stckPrepStmt.setString(16, stock.s_dist_09);
                    stckPrepStmt.setString(17, stock.s_dist_10);
                    stckPrepStmt.addBatch();
                    if ((k % configCommitCount) == 0) {
                        long tmpTime = new java.util.Date().getTime();
                        // (sic: "Elasped" typo is part of the existing log output)
                        String etStr = " Elasped Time(ms): " + ((tmpTime - lastTimeMS) / 1000.000) + " ";
                        LOG.debug(etStr.substring(0, 30) + " Writing record " + k + " of " + t);
                        lastTimeMS = tmpTime;
                        stckPrepStmt.executeBatch();
                        stckPrepStmt.clearBatch();
                        transCommit();
                    }
                } else {
                    // CSV path: emit one comma-separated line per row.
                    String str = "";
                    str = str + stock.s_i_id + ",";
                    str = str + stock.s_w_id + ",";
                    str = str + stock.s_quantity + ",";
                    str = str + stock.s_ytd + ",";
                    str = str + stock.s_order_cnt + ",";
                    str = str + stock.s_remote_cnt + ",";
                    str = str + stock.s_data + ",";
                    str = str + stock.s_dist_01 + ",";
                    str = str + stock.s_dist_02 + ",";
                    str = str + stock.s_dist_03 + ",";
                    str = str + stock.s_dist_04 + ",";
                    str = str + stock.s_dist_05 + ",";
                    str = str + stock.s_dist_06 + ",";
                    str = str + stock.s_dist_07 + ",";
                    str = str + stock.s_dist_08 + ",";
                    str = str + stock.s_dist_09 + ",";
                    str = str + stock.s_dist_10;
                    out.println(str);
                    if ((k % configCommitCount) == 0) {
                        long tmpTime = new java.util.Date().getTime();
                        String etStr = " Elasped Time(ms): " + ((tmpTime - lastTimeMS) / 1000.000) + " ";
                        LOG.debug(etStr.substring(0, 30) + " Writing record " + k + " of " + t);
                        lastTimeMS = tmpTime;
                    }
                }
            } // end for [w]
        } // end for [i]
        // Flush the final partial batch and commit.
        long tmpTime = new java.util.Date().getTime();
        String etStr = " Elasped Time(ms): " + ((tmpTime - lastTimeMS) / 1000.000) + " ";
        LOG.debug(etStr.substring(0, 30) + " Writing final records " + k + " of " + t);
        lastTimeMS = tmpTime;
        if (outputFiles == false) {
            stckPrepStmt.executeBatch();
        }
        transCommit();
        now = new java.util.Date();
        LOG.debug("End Stock Load @ " + now);
    } catch (SQLException se) {
        LOG.debug(se.getMessage());
        transRollback();
    } catch (Exception e) {
        e.printStackTrace();
        transRollback();
    }
    return (k);
}
From source file:com.gtwm.pb.model.manageSchema.DatabaseDefn.java
/**
 * Update all the existing field values in the database with the default
 * value for that field.
 *
 * Does nothing when the field has no default. The table and column names are
 * taken from the schema objects (not user input); the default value itself is
 * bound as a statement parameter.
 *
 * @param conn  open connection the UPDATE runs on; not closed here
 * @param field the schema field whose default is written into every row
 * @throws CantDoThatException if the field's type has no supported default binding
 */
private void setFieldDefaultDbAction(Connection conn, BaseField field)
        throws SQLException, CantDoThatException, ObjectNotFoundException, CodingErrorException {
    if (!field.hasDefault()) {
        return;
    }
    String internalTableName = field.getTableContainingField().getInternalTableName();
    String internalFieldName = field.getInternalFieldName();
    String SQLCode = "UPDATE " + internalTableName + " SET " + internalFieldName + "=?";
    PreparedStatement statement = conn.prepareStatement(SQLCode);
    // Close in finally: the original leaked the statement whenever an exception
    // (e.g. CantDoThatException for an unsupported field type, or a SQLException
    // from execute()) was thrown before the trailing close().
    try {
        if (field instanceof TextField) {
            String defaultValue = ((TextField) field).getDefault();
            statement.setString(1, defaultValue);
        } else if (field instanceof DecimalField) {
            Double defaultValue = ((DecimalField) field).getDefault();
            statement.setDouble(1, defaultValue);
        } else if (field instanceof IntegerField) {
            Integer defaultValue = ((IntegerField) field).getDefault();
            statement.setInt(1, defaultValue);
        } else if (field instanceof CheckboxField) {
            Boolean defaultValue = ((CheckboxField) field).getDefault();
            statement.setBoolean(1, defaultValue);
        } else if (field instanceof DateField) {
            Calendar defaultValueCalendar = ((DateField) field).getDefault();
            Timestamp defaultValue = new Timestamp(defaultValueCalendar.getTimeInMillis());
            statement.setTimestamp(1, defaultValue);
        } else {
            throw new CantDoThatException(
                    "Unable to set default value for field type " + field.getFieldCategory());
        }
        statement.execute();
    } finally {
        statement.close();
    }
}
From source file:de.ingrid.importer.udk.strategy.v32.IDCStrategy3_2_0.java
private void updateDQAbsPosGenauigkeit() throws Exception { log.info("\nUpdating object_data_quality 'Absolute Positionsgenauigkeit'..."); log.info("Transfer 'Absolute Positionsgenauigkeit' values from DQ table (object_data_quality) to moved " + "fields 'Hhengenauigkeit' (T011_obj_geo.pos_accuracy_vertical) and 'Lagegenauigkeit (m)' (T011_obj_geo.rec_exact) " + "if fields are empty ..."); // NOTICE: We do NOT update search index due to same values. // select all relevant entries in DQ Table String sqlSelectDQTable = "select obj_id, name_of_measure_key, result_value from object_data_quality where dq_element_id = 117"; // select according values in DQ Field PreparedStatement psSelectDQFields = jdbc .prepareStatement("SELECT pos_accuracy_vertical, rec_exact FROM t011_obj_geo WHERE obj_id = ?"); // update according value in DQ Field PreparedStatement psUpdateDQFieldLage = jdbc .prepareStatement("UPDATE t011_obj_geo SET " + "rec_exact = ? " + "WHERE obj_id = ?"); PreparedStatement psUpdateDQFieldHoehe = jdbc .prepareStatement("UPDATE t011_obj_geo SET " + "pos_accuracy_vertical = ? 
" + "WHERE obj_id = ?"); Statement st = jdbc.createStatement(); ResultSet rs = jdbc.executeQuery(sqlSelectDQTable, st); int numProcessed = 0; while (rs.next()) { long objId = rs.getLong("obj_id"); int dqTableMeasureKey = rs.getInt("name_of_measure_key"); String dqTableValue = rs.getString("result_value"); if (dqTableValue != null) { // read according value from field psSelectDQFields.setLong(1, objId); ResultSet rs2 = psSelectDQFields.executeQuery(); if (rs2.next()) { // read field value where to migrate to and check whether was null double lageFieldValue = rs2.getDouble("rec_exact"); boolean lageFieldValueWasNull = rs2.wasNull(); double hoeheFieldValue = rs2.getDouble("pos_accuracy_vertical"); boolean hoeheFieldValueWasNull = rs2.wasNull(); log.debug("Object id=" + objId + " -> read DQ table value: measureKey=" + dqTableMeasureKey + ", value=" + dqTableValue + " / values in fields: Lagegenauigkeit=" + (lageFieldValueWasNull ? null : lageFieldValue) + ", Hhengenauigkeit=" + (hoeheFieldValueWasNull ? null : hoeheFieldValue)); // transfer Lagegenauigkeit from table to field if field is null if (dqTableMeasureKey == syslist7117EntryKeyLagegenauigkeit && lageFieldValueWasNull) { try { psUpdateDQFieldLage.setDouble(1, new Double(dqTableValue)); psUpdateDQFieldLage.setLong(2, objId); psUpdateDQFieldLage.executeUpdate(); numProcessed++;/*from w w w . j a va 2s . c o m*/ log.debug("Transferred 'Lagegenauigkeit' value '" + dqTableValue + "' from DQ table to field (was empty), obj_id:" + objId); } catch (Exception ex) { String msg = "Problems transferring 'Lagegenauigkeit' value '" + dqTableValue + "' from DQ table as DOUBLE to field, value is lost ! 
obj_id:" + objId; log.error(msg, ex); System.out.println(msg); } } // transfer Hhengenauigkeit from table to field if field is null if (dqTableMeasureKey == syslist7117EntryKeyHoehegenauigkeit && hoeheFieldValueWasNull) { try { psUpdateDQFieldHoehe.setDouble(1, new Double(dqTableValue)); psUpdateDQFieldHoehe.setLong(2, objId); psUpdateDQFieldHoehe.executeUpdate(); numProcessed++; log.debug("Transferred 'Hhengenauigkeit' value '" + dqTableValue + "' from DQ table to field (was empty), obj_id:" + objId); } catch (Exception ex) { String msg = "Problems transferring 'Hhengenauigkeit' value '" + dqTableValue + "' from DQ table as DOUBLE to field, value is lost ! obj_id:" + objId; log.error(msg, ex); System.out.println(msg); } } } rs2.close(); } } rs.close(); st.close(); psSelectDQFields.close(); psUpdateDQFieldLage.close(); psUpdateDQFieldHoehe.close(); log.info("Transferred " + numProcessed + " entries... done"); log.info("Delete 'Absoulte Positionsgenauigkeit' values from DQ table (object_data_quality) ..."); sqlStr = "DELETE FROM object_data_quality where dq_element_id = 117"; int numDeleted = jdbc.executeUpdate(sqlStr); log.debug("Deleted " + numDeleted + " entries."); log.info("Updating object_data_quality 'Absolute Positionsgenauigkeit' ... done\n"); }
From source file:org.agnitas.dao.impl.ImportRecipientsDaoImpl.java
private void setPreparedStatmentForCurrentColumn(PreparedStatement ps, int index, CSVColumnState column, ProfileRecipientFields bean, ImportProfile importProfile, ValidatorResults validatorResults) throws SQLException { String value = Toolkit.getValueFromBean(bean, column.getColName()); if (column.getType() == CSVColumnState.TYPE_NUMERIC && column.getColName().equals("gender")) { if (StringUtils.isEmpty(value) || value == null) { ps.setInt(index, 2);//www .j a v a 2s. c o m } else { if (GenericValidator.isInt(value) && Integer.valueOf(value) <= 5 && Integer.valueOf(value) >= 0) { ps.setInt(index, Integer.valueOf(value)); } else { final Integer intValue = importProfile.getGenderMapping().get(value); ps.setInt(index, intValue); } } } else if (column.getType() == CSVColumnState.TYPE_CHAR) { if (value == null) { ps.setNull(index, Types.VARCHAR); } else { String columnName = column.getColName(); if (columnName.equals("email")) { value = value.toLowerCase(); if (validatorResults != null && !ImportUtils.checkIsCurrentFieldValid(validatorResults, "email", "checkRange")) { throw new ImportRecipientsToolongValueException(value); } } else if (importProfile.getKeyColumns().contains(columnName) || (importProfile.getKeyColumns().isEmpty() && columnName.equals(importProfile.getKeyColumn()))) { // range validation for keyColumn if (validatorResults != null && !ImportUtils.checkIsCurrentFieldValid(validatorResults, columnName, "checkRange")) { throw new ImportRecipientsToolongValueException(value); } } if (AgnUtils.isOracleDB()) { ps.setString(index, value); } else if (AgnUtils.isMySQLDB()) { if (column.isNullable() && value.isEmpty()) { ps.setNull(index, Types.VARCHAR); } else { ps.setString(index, value); } } } } else if (column.getType() == CSVColumnState.TYPE_NUMERIC) { if (StringUtils.isEmpty(value) || value == null) { ps.setNull(index, Types.NUMERIC); } else { ps.setDouble(index, Double.valueOf(value)); } } else if (column.getType() == CSVColumnState.TYPE_DATE) { if 
(StringUtils.isEmpty(value) || value == null) { ps.setNull(index, Types.DATE); } else { Date date = ImportUtils.getDateAsString(value, importProfile.getDateFormat()); ps.setTimestamp(index, new Timestamp(date.getTime())); } } }
From source file:org.ramadda.repository.database.DatabaseManager.java
/**
 * Binds each element of {@code values} onto the statement starting at
 * parameter index {@code startIdx}, mapping Java types to JDBC setters:
 * null -> SQL NULL (VARCHAR), Date -> setDate helper, Boolean -> int 0/1,
 * Double -> setDouble with Derby-specific NaN handling, everything else ->
 * setObject.
 *
 * @param statement statement to bind into
 * @param values    values to bind, in parameter order; elements may be null
 * @param startIdx  1-based index of the first parameter to bind
 * @throws Exception any binding failure is propagated to the caller
 */
public void setValues(PreparedStatement statement, Object[] values, int startIdx) throws Exception {
    for (int i = 0; i < values.length; i++) {
        if (values[i] == null) {
            statement.setNull(i + startIdx, java.sql.Types.VARCHAR);
        } else if (values[i] instanceof Date) {
            setDate(statement, i + startIdx, (Date) values[i]);
        } else if (values[i] instanceof Boolean) {
            // booleans are stored as 0/1 integers
            boolean b = ((Boolean) values[i]).booleanValue();
            statement.setInt(i + startIdx, (b ? 1 : 0));
        } else if (values[i] instanceof Double) {
            double d = ((Double) values[i]).doubleValue();
            // Special check for NaNs on Derby: collapse infinities to NaN first,
            // then replace NaN with a sentinel value Derby can store.
            if (d == Double.POSITIVE_INFINITY) {
                d = Double.NaN;
            } else if (d == Double.NEGATIVE_INFINITY) {
                d = Double.NaN;
            }
            if (d != d) { // (d != d) is true exactly when d is NaN
                if (isDatabaseDerby()) {
                    d = -99999999.999; // Derby sentinel for NaN/Infinity
                }
                // non-Derby databases receive the NaN unchanged
            }
            try {
                statement.setDouble(i + startIdx, d);
            } catch (Exception exc) {
                // log the offending value before rethrowing to aid debugging
                System.err.println("d:" + d);
                throw exc;
            }
        } else {
            statement.setObject(i + startIdx, values[i]);
        }
    }
}