List of usage examples for java.io.PrintStream.close()

public void close()

Closes the stream. This is done by flushing the stream and then closing the underlying output stream.
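Before the project examples, a minimal sketch of close() in isolation (the file name and class name are illustrative, not taken from any of the projects below): close() flushes buffered output and closes the wrapped stream, and since PrintStream never throws IOException, write failures have to be detected with checkError().

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseDemo {
    public static void main(String[] args) throws IOException {
        // close() flushes any buffered output and then closes the
        // FileOutputStream that this PrintStream wraps.
        PrintStream out = new PrintStream(new FileOutputStream("report.txt"), false, "UTF-8");
        try {
            out.println("hello");
        } finally {
            out.close();
            // PrintStream swallows IOExceptions internally; checkError()
            // reports whether any write (or the final flush) failed.
            if (out.checkError()) {
                System.err.println("writing report.txt failed");
            }
        }
    }
}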
From source file: com.clavain.munin.MuninNode.java
/** * Will load the plugin list from munin-node */// w w w . j ava 2 s . co m public boolean loadPlugins() { setLoadedPlugins(new CopyOnWriteArrayList<MuninPlugin>()); String l_lastProceeded = ""; try { Socket cs = new Socket(); cs.setKeepAlive(false); cs.setSoLinger(true, 0); cs.setReuseAddress(true); cs.setSoTimeout(com.clavain.muninmxcd.socketTimeout); if (!str_via.equals("unset")) { cs.connect(new InetSocketAddress(this.getStr_via(), this.getPort()), com.clavain.muninmxcd.socketTimeout); } else { cs.connect(new InetSocketAddress(this.getHostname(), this.getPort()), com.clavain.muninmxcd.socketTimeout); } if (p.getProperty("kill.sockets").equals("true")) { SocketCheck sc = new SocketCheck(cs, getUnixtime()); sc.setHostname(this.getHostname()); com.clavain.muninmxcd.v_sockets.add(sc); } PrintStream os = new PrintStream(cs.getOutputStream()); BufferedReader in = new BufferedReader(new InputStreamReader(cs.getInputStream())); String s = in.readLine(); if (s != null) { // Set version os.println("version"); Thread.sleep(150); s = in.readLine(); String version = s.substring(s.indexOf(":") + 1, s.length()).trim(); this.str_muninVersion = version; if (authpw != null) { // if authpw is set, verify if (!authpw.trim().equals("")) { os.println("config muninmxauth"); Thread.sleep(150); String apw = in.readLine(); s = in.readLine(); if (!apw.trim().equals(this.getAuthpw())) { logger.error("Invalid muninmxauth password for host: " + this.getHostname()); cs.close(); return false; } } } // check anyway if muninmxauth plugin is present else { os.println("config muninmxauth"); Thread.sleep(100); String apw = in.readLine(); if (!apw.trim().equals("# Unknown service")) { logger.error( "no auth password given, but muninmxauth plugin present on " + this.getHostname()); cs.close(); return false; } s = in.readLine(); } // get list of available plugins if (str_via.equals("unset")) { os.println("list"); } else { os.println("list " + str_hostname); } Thread.sleep(250); s = in.readLine(); // if response is empty and host is not via, do a list $hostname if (s.trim().equals("") && str_via.equals("unset")) { logger.info("Plugin Response Empty on " + this.getHostname() + " trying to load with list $hostname"); os.println("list " + this.getHostname()); Thread.sleep(250); s = in.readLine(); } String l_tmp; StringTokenizer l_st = new StringTokenizer(s, " "); // create plugin MuninPlugin l_mp = new MuninPlugin(); // negative support ArrayList<String> tmp_negatives = new ArrayList<String>(); while (l_st.hasMoreTokens()) { String l_strPlugin = l_st.nextToken(); // check for track_pkg and muninmx essentials if (l_strPlugin.equals("muninmx_trackpkg")) { this.setTrack_pkg(true); continue; } // got essentials? 
if (l_strPlugin.equals("muninmx_essentials")) { this.setEssentials(true); continue; } if (isPluginIgnored(l_strPlugin.toUpperCase())) { continue; } l_mp.setPluginName(l_strPlugin); os.println("config " + l_strPlugin); // create graphs for plugin int l_iGraphsFound = 0; int l_iTmp = 0; MuninGraph l_mg = new MuninGraph(); l_mg.setQueryInterval(this.getQueryInterval()); while ((l_tmp = in.readLine()) != null) { if (l_tmp.startsWith(".")) { break; } // collect graphs only for plugin String l_strName; String l_strType; String l_strValue; if (!l_tmp.contains("graph_") && !l_tmp.trim().equals("") && !l_tmp.contains("host_name") && !l_tmp.contains("multigraph") && !l_tmp.trim().equals("graph no") && !l_tmp.trim().equals("# Bad exit") && !l_tmp.trim().contains("info Currently our peer") && !l_tmp.trim().startsWith("#") && !l_tmp.trim().contains("Bonding interface errors")) { l_lastProceeded = l_tmp; l_strName = l_tmp.substring(0, l_tmp.indexOf(".")); l_strType = l_tmp.substring(l_tmp.indexOf(".") + 1, l_tmp.indexOf(" ")); l_strValue = l_tmp.substring(l_tmp.indexOf(" ") + 1, l_tmp.length()); //System.err.println("Name: " + l_strName + " Type: " + l_strType + " Value: " + l_strValue); if (l_strType.equals("label")) { l_iTmp++; if (l_iTmp > 1) { l_mp.addGraph(l_mg); l_mg = new MuninGraph(); l_mg.setQueryInterval(this.getQueryInterval()); } l_mg.setGraphName(l_strName); l_mg.setGraphLabel(l_strValue); } else if (l_strType.equals("draw")) { l_mg.setGraphDraw(l_strValue); } else if (l_strType.equals("type")) { l_mg.setGraphType(l_strValue); } else if (l_strType.equals("info")) { l_mg.setGraphInfo(l_strValue); } else if (l_strType.equals("negative")) { // add to temporary negative list to set negatives later tmp_negatives.add(l_strValue); } //System.out.println(l_strName); //System.out.println(l_strType); //System.out.println(l_strValue); } else { // set plugin title if (l_tmp.contains("graph_title")) { l_mp.setPluginTitle(l_tmp.substring(12, l_tmp.length())); } // set plugin info, if any if (l_tmp.contains("graph_info")) { l_mp.setPluginInfo(l_tmp.substring(11, l_tmp.length())); } // set graph category if (l_tmp.contains("graph_category")) { l_mp.setPluginCategory(l_tmp.substring(15, l_tmp.length())); } // set graph vlabel if (l_tmp.contains("graph_vlabel")) { l_mp.setPluginLabel(l_tmp.substring(13, l_tmp.length())); } // set plugin title if (l_tmp.contains("graph_mxdraw")) { l_mp.setStr_LineMode(l_tmp.substring(13, l_tmp.length())); } } } // add to pluginlist l_mp.addGraph(l_mg); Iterator it = l_mp.getGraphs().iterator(); while (it.hasNext()) { MuninGraph l_mpNg = (MuninGraph) it.next(); if (tmp_negatives.contains(l_mpNg.getGraphName())) { l_mpNg.setNegative(true); } } // add plugin if it got valid graphs and add nodeid (req. for alerts) if (l_mp.getGraphs().size() > 0) { l_mp.set_NodeId(this.getNode_id()); getLoadedPlugins().add(l_mp); } // flush temporary negatives tmp_negatives.clear(); l_mp = null; l_mp = new MuninPlugin(); //String l_strGraphTitle = s.substring(s.indexOf("graph_title") + 11,s.length()); //System.out.println(" - " + l_strGraphTitle); } cs.close(); in.close(); os.close(); last_plugin_load = getUnixtime(); //System.out.println(s); } else { cs.close(); in.close(); os.close(); logger.warn("Error loading plugins on " + str_hostname + " (" + this.getNode_id() + "). 
Check connectivity or munin-node"); } /* for (MuninPlugin l_mn : getLoadedPlugins()) { i_GraphCount = i_GraphCount + l_mn.getGraphs().size(); logger.debug(l_mn.getGraphs().size() + " graphs found for plugin: " + l_mn.getPluginName().toUpperCase() + " on node: " + this.getNodename()); }*/ } catch (Exception ex) { logger.error("Error loading plugins on " + str_hostname + " (" + this.getNode_id() + ") : " + ex.getMessage()); ex.printStackTrace(); return false; } return true; }
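The example above closes the socket, the reader, and the PrintStream explicitly on both the success and the failure path, but an exception thrown anywhere in between skips the close() calls entirely (the catch block only logs and returns). A hedged sketch of the same open/talk/close skeleton with try-with-resources (host, port, timeout and the munin commands are placeholders, not MuninNode fields) releases everything even when connect or readLine fails:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.Socket;

public class MuninProbe {
    // Sketch only: host/port/timeoutMs are placeholders.
    static void listPlugins(String host, int port, int timeoutMs) throws IOException {
        try (Socket cs = new Socket()) {
            cs.setSoTimeout(timeoutMs);
            cs.connect(new InetSocketAddress(host, port), timeoutMs);
            try (PrintStream os = new PrintStream(cs.getOutputStream(), true);
                 BufferedReader in = new BufferedReader(new InputStreamReader(cs.getInputStream()))) {
                String banner = in.readLine();   // munin-node greeting
                os.println("list");              // ask for the plugin list
                String plugins = in.readLine();
                System.out.println(banner + " -> " + plugins);
            } // PrintStream and reader are closed here even if readLine throws
        }     // socket is always closed, so nothing leaks on error paths
    }
}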
From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test11() { try {//from w w w . j av a2s .c o m // PrintStream out = new PrintStream("/Users/josephramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test10out.txt"); PrintStream out = System.out; long start = System.currentTimeMillis(); int model = 6; int numVariables = 20; int N = 1000; int numRuns = 1; NumberFormat nf = new DecimalFormat("0.00"); for (int modelIndex = 1; modelIndex <= 14; modelIndex++) { double sumAP1 = 0.0; double sumAR1 = 0.0; double sumAP2 = 0.0; double sumAR2 = 0.0; double sumEP1 = 0.0; double sumER1 = 0.0; double sumEP2 = 0.0; double sumER2 = 0.0; int sumAP1N = 0; int sumAR1N = 0; int sumAP2N = 0; int sumAR2N = 0; int sumEP1N = 0; int sumER1N = 0; int sumEP2N = 0; int sumER2N = 0; for (int r = 0; r < numRuns; r++) { GeneralizedSemIm im = makeTestIm4(numVariables, modelIndex); out.println(im); SemGraph gTrue = im.getGeneralizedSemPm().getGraph(); gTrue.setShowErrorTerms(false); Graph truePattern = SearchGraphUtils.patternForDag(gTrue); DataSet data = im.simulateData(N, false); long start2 = System.currentTimeMillis(); PcStable pc = new PcStable(new IndTestConditionalCorrelation(data, .05)); Graph graph = pc.search(); long stop2 = System.currentTimeMillis(); System.out.println("Elapsed (just CCI) " + (stop2 - start2) / 1000L + " seconds"); // Goes to report. out.println(graph); graph = GraphUtils.replaceNodes(graph, truePattern.getNodes()); int adjFn = adjacenciesComplement(truePattern, graph); int adjFp = adjacenciesComplement(graph, truePattern); int truePosAdj = truePositivesAdj(truePattern, graph); int edgeFn = edgesComplement(truePattern, graph); int edgeFp = edgesComplement(graph, truePattern); int truePosEdges = truePositiveEdges(truePattern, graph); double adjPrecision = truePosAdj / (double) (truePosAdj + adjFp); double adjRecall = truePosAdj / (double) (truePosAdj + adjFn); double edgePrecision = truePosEdges / (double) (truePosEdges + edgeFp); double edgeRecall = truePosEdges / (double) (truePosEdges + edgeFn); if (!Double.isNaN(adjPrecision)) { sumAP1 += adjPrecision; sumAP1N++; } if (!Double.isNaN(adjRecall)) { sumAR1 += adjRecall; sumAR1N++; } if (!Double.isNaN(edgePrecision)) { sumEP1 += edgePrecision; sumEP1N++; } if (!Double.isNaN(edgeRecall)) { sumER1 += edgeRecall; sumER1N++; } out.println("Model # " + modelIndex + " AP (CCI) = " + adjPrecision); out.println("Model # " + modelIndex + " AR (CCI) = " + adjRecall); PcStable pc2 = new PcStable(new IndTestFisherZ(data, 0.05)); Graph graph2 = pc2.search(); // Should go to the report. 
out.println(graph2); graph2 = GraphUtils.replaceNodes(graph2, truePattern.getNodes()); int adjFn2 = adjacenciesComplement(truePattern, graph2); int adjFp2 = adjacenciesComplement(graph2, truePattern); int truePosAdj2 = truePositivesAdj(truePattern, graph2); int edgeFn2 = edgesComplement(truePattern, graph2); int edgeFp2 = edgesComplement(graph2, truePattern); int truePosEdges2 = truePositiveEdges(truePattern, graph2); double adjPrecision2 = truePosAdj2 / (double) (truePosAdj2 + adjFp2); double adjRecall2 = truePosAdj2 / (double) (truePosAdj2 + adjFn2); double edgePrecision2 = truePosEdges2 / (double) (truePosEdges2 + edgeFp2); double edgeRecall2 = truePosEdges2 / (double) (truePosEdges2 + edgeFn2); if (!Double.isNaN(adjPrecision2)) { sumAP2 += adjPrecision2; sumAP2N++; } if (!Double.isNaN(adjRecall2)) { sumAR2 += adjRecall2; sumAR2N++; } if (!Double.isNaN(edgePrecision2)) { sumEP2 += edgePrecision2; sumEP2N++; } if (!Double.isNaN(edgeRecall2)) { sumER2 += edgeRecall2; sumER2N++; } out.println("Model # " + modelIndex + " AP (Fisher Z) = " + adjPrecision2); out.println("Model # " + modelIndex + " AR (Fisher Z) = " + adjRecall2); } out.println("\nAverages"); out.println("Model # " + modelIndex + " Average AP (CCI) = " + nf.format(sumAP1 / sumAP1N)); out.println("Model # " + modelIndex + " Average AR (CCI) = " + nf.format(sumAR1 / sumAR1N)); out.println("Model # " + modelIndex + " Average EP (CCI) = " + nf.format(sumEP1 / sumEP1N)); out.println("Model # " + modelIndex + " Average ER (CCI) = " + nf.format(sumER1 / sumER1N)); out.println("Model # " + modelIndex + " Average AP (Fisher Z) = " + nf.format(sumAP2 / sumAP2N)); out.println("Model # " + modelIndex + " Average AR (Fisher Z) = " + nf.format(sumAR2 / sumAR2N)); out.println("Model # " + modelIndex + " Average EP (Fisher Z) = " + nf.format(sumEP2 / sumEP2N)); out.println("Model # " + modelIndex + " Average ER (Fisher Z) = " + nf.format(sumER2 / sumER2N)); } long stop = System.currentTimeMillis(); System.out.println("Elapsed " + (stop - start) / 1000L + " seconds"); out.close(); } catch (Exception e) { e.printStackTrace(); } }
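In the test above, out may be System.out (the file-based alternatives are commented out), yet out.close() is called unconditionally at the end, which would close standard output for the rest of the JVM. A common guard, shown here as a sketch with illustrative names, is to close only streams the code itself created; the IEInterface example further down uses the same pattern for its taggedOut stream.

import java.io.FileNotFoundException;
import java.io.PrintStream;

public class ReportTarget {
    // Sketch: fall back to System.out when the report file cannot be opened,
    // and close only the stream this method actually created.
    static void writeReport(String path) {
        PrintStream out;
        try {
            out = new PrintStream(path);   // may throw FileNotFoundException
        } catch (FileNotFoundException e) {
            out = System.out;              // fallback target
        }
        out.println("report body ...");
        if (out != System.out) {
            out.close();                   // never close System.out here
        } else {
            out.flush();                   // just make the output visible
        }
    }
}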
From source file: edu.umass.cs.mallet.util.bibsonomy.IEInterface.java
public void offLineEvaluate(File inputFile, boolean sgml, String seperator, int N) { assert (pipe != null); InstanceList instancelist = new InstanceList(pipe); Reader reader;/* w w w .j a v a2s . com*/ try { reader = new FileReader(inputFile); } catch (Exception e) { throw new IllegalArgumentException("Can't read file " + inputFile); } instancelist.add(new LineGroupIterator(reader, Pattern.compile(seperator), true)); String outputFileStr = inputFile.toString() + "_tagged"; System.out.println(inputFile.toString() + " ---> " + outputFileStr); PrintStream taggedOut = null; try { FileOutputStream fos = new FileOutputStream(outputFileStr); taggedOut = new PrintStream(fos); } catch (IOException e) { logger.warn("Couldn't open output file '" + outputFileStr + "'"); } if (taggedOut == null) { taggedOut = System.out; } Alphabet targets = (this.pipe).getTargetAlphabet(); assert (targets != null); System.out.println("target size: " + targets.size()); System.out.print("State labels:"); for (int i = 0; i < targets.size(); i++) System.out.print(" " + targets.lookupObject(i)); System.out.println(""); int numCorrectTokens = 0, totalTokens = 0; int[] numTrueSegments, numPredictedSegments, numCorrectSegments; int[] numCorrectSegmentsInVocabulary, numCorrectSegmentsOOV; int[] numIncorrectSegmentsInVocabulary, numIncorrectSegmentsOOV; int[][] matrixEntry; int numCorrectWholeInstance = 0; numTrueSegments = new int[targets.size()]; numPredictedSegments = new int[targets.size()]; numCorrectSegments = new int[targets.size()]; matrixEntry = new int[targets.size()][targets.size()]; // String PUNT = "[,\\.;:?!()*]"; // Pattern puntPattern = Pattern.compile(PUNT); String viterbiStr = ""; // taggedOut.println("testing instance number: " + instancelist.size() ); for (int i = 0; i < instancelist.size(); i++) { // taggedOut.println("\ntesting instance " + i); // System.out.println("\ntesting instance " + i); Instance instance = instancelist.getInstance(i); //viterbi decoding /* String crfStr = viterbiCRFInstance(instance,sgml); taggedOut.println(seperator); // taggedOut.println("confidence = " + confidence + " instance accuracy= " // + instance_error_num + "/" + instance_size + "=" + instance_accuracy); taggedOut.println(crfStr); viterbiStr += crfStr; */ //N-best tagging String crfStr = viterbiCRFInstance_NBest(instance, sgml, N); // taggedOut.println("N-best result:"); taggedOut.println(seperator); // taggedOut.println("confidence = " + confidence + " instance accuracy= " // + instance_error_num + "/" + instance_size + "=" + instance_accuracy); taggedOut.println(crfStr); viterbiStr += crfStr; boolean wholeInstanceCorrect = true; Sequence trueSequence = (Sequence) instance.getTarget(); assert (trueSequence.size() == viterbiSequence.size()); for (int j = 0; j < trueSequence.size(); j++) { Object predO = viterbiSequence.get(j); Object trueO = trueSequence.get(j); // System.out.println(predO + "/" + trueO); int predIndex = targets.lookupIndex(predO); int trueIndex = targets.lookupIndex(trueO); String tokenStr = tokenSequence.getToken(j).getText(); if (puntPattern.matcher(tokenStr).matches() && ignorePunct) {//ignore punct; continue; } totalTokens++; numTrueSegments[trueIndex]++; numPredictedSegments[predIndex]++; matrixEntry[trueIndex][predIndex]++; if (predIndex == trueIndex) { numCorrectTokens++; numCorrectSegments[trueIndex]++; } else { wholeInstanceCorrect = false; } } if (wholeInstanceCorrect) numCorrectWholeInstance++; } double macro_average_p = 0; double macro_average_r = 0; double macro_average_f = 0; double 
micro_average_p = 0; double micro_average_r = 0; double micro_average_f = 0; int micro_numCorrectSegments = 0; int micro_numPredictedSegments = 0; int micro_numTrueSegments = 0; int classNum = 0; for (int t = 0; t < targets.size(); t++) { double precision = numPredictedSegments[t] == 0 ? 1 : ((double) numCorrectSegments[t]) / numPredictedSegments[t]; double recall = numTrueSegments[t] == 0 ? 1 : ((double) numCorrectSegments[t]) / numTrueSegments[t]; double f1 = recall + precision == 0.0 ? 0.0 : (2.0 * recall * precision) / (recall + precision); double accuracy_individual = (double) (totalTokens - numPredictedSegments[t] - numTrueSegments[t] + 2 * numCorrectSegments[t]) / totalTokens; System.out.println(targets.lookupObject(t) + " precision=" + precision + " recall=" + recall + " f1=" + f1 + " accuracy=" + accuracy_individual); System.out.println( "segments true=" + numTrueSegments[t] + " pred=" + numPredictedSegments[t] + " correct=" + numCorrectSegments[t] + " misses=" + (numTrueSegments[t] - numCorrectSegments[t]) + " alarms=" + (numPredictedSegments[t] - numCorrectSegments[t]) + "\n"); if (!targets.lookupObject(t).equals("O")) { classNum++; macro_average_p += precision; macro_average_r += recall; macro_average_f += f1; micro_numCorrectSegments += numCorrectSegments[t]; micro_numPredictedSegments += numPredictedSegments[t]; micro_numTrueSegments += numTrueSegments[t]; } } micro_average_p = (double) micro_numCorrectSegments / micro_numPredictedSegments; micro_average_r = (double) micro_numCorrectSegments / micro_numTrueSegments; micro_average_f = micro_average_r + micro_average_p == 0.0 ? 0.0 : (2.0 * micro_average_r * micro_average_p) / (micro_average_r + micro_average_p); macro_average_p /= classNum; macro_average_r /= classNum; macro_average_f /= classNum; System.out.println("\n Confusion Matrix (row: true label, col: predicted label)"); System.out.print("\t"); for (int t = 0; t < targets.size(); t++) { System.out.print(targets.lookupObject(t) + "\t"); } System.out.println(); for (int t = 0; t < targets.size(); t++) { System.out.print(targets.lookupObject(t) + "\t"); for (int tt = 0; tt < targets.size(); tt++) { System.out.print(matrixEntry[t][tt] + "\t"); } System.out.println(); } // print out the overall performance double accuracy = (double) numCorrectTokens / totalTokens; System.out.println("\n" + " accuracy=" + numCorrectTokens + "/" + totalTokens + " = " + accuracy); double wholeInstanceAccuracy = (double) numCorrectWholeInstance / instancelist.size(); System.out.println("Whole instance accuracy = " + numCorrectWholeInstance + "/" + instancelist.size() + " = " + wholeInstanceAccuracy); System.out.println("\nMacro Average"); System.out.println("macro precision : " + macro_average_p); System.out.println("macro recall: " + macro_average_r); System.out.println("macro f : " + macro_average_f); System.out.println("\nMicro Average"); System.out.println("micro precision : " + micro_average_p); System.out.println("micro recall: " + micro_average_r); System.out.println("micro f : " + micro_average_f); /* double accuracy = (double)numCorrectTokens/totalTokens; System.out.println ("\n" +" accuracy=" + numCorrectTokens +"/"+ totalTokens + " = " +accuracy); double wholeInstanceAccuracy = (double)numCorrectWholeInstance/instancelist.size(); System.out.println ("Whole instance accuracy = " + numCorrectWholeInstance + "/" + instancelist.size() + " = " + wholeInstanceAccuracy); for(int t=0; t<targets.size(); t++){ double precision = numPredictedSegments[t] == 0 ? 
1 : ((double)numCorrectSegments[t]) / numPredictedSegments[t]; double recall = numTrueSegments[t] == 0 ? 1 : ((double)numCorrectSegments[t]) / numTrueSegments[t]; double f1 = recall+precision == 0.0 ? 0.0 : (2.0 * recall * precision) / (recall + precision); double accuracy_individual = (double)(totalTokens-numPredictedSegments[t]-numTrueSegments[t] + 2*numCorrectSegments[t] )/totalTokens; System.out.println (targets.lookupObject(t) + " precision="+precision+" recall="+recall+" f1="+f1 + " accuracy=" + accuracy_individual); System.out.println ("segments true="+numTrueSegments[t]+" pred="+numPredictedSegments[t]+" correct="+numCorrectSegments[t]+" misses="+(numTrueSegments[t]-numCorrectSegments[t])+" alarms="+(numPredictedSegments[t]-numCorrectSegments[t]) + "\n"); } System.out.println("\n Confusion Matrix (row: true label, col: predicted label)"); System.out.print("\t"); for(int t=0; t<targets.size(); t++){ System.out.print(targets.lookupObject(t) + "\t"); } System.out.println(); for(int t=0; t< targets.size(); t++){ System.out.print(targets.lookupObject(t)+"\t"); for(int tt=0; tt<targets.size(); tt++){ System.out.print(matrixEntry[t][tt] + "\t"); } System.out.println(); } */ if (taggedOut != System.out) { taggedOut.close(); } }
From source file: org.openmrs.module.sync.api.db.hibernate.HibernateSyncDAO.java
public void exportChildDB(String uuidForChild, OutputStream os) throws DAOException { PrintStream out = new PrintStream(os); Set<String> tablesToSkip = new HashSet<String>(); {/* w w w . j av a2 s. co m*/ tablesToSkip.add("hl7_in_archive"); tablesToSkip.add("hl7_in_queue"); tablesToSkip.add("hl7_in_error"); tablesToSkip.add("formentry_archive"); tablesToSkip.add("formentry_queue"); tablesToSkip.add("formentry_error"); tablesToSkip.add("sync_class"); tablesToSkip.add("sync_import"); tablesToSkip.add("sync_record"); tablesToSkip.add("sync_server"); tablesToSkip.add("sync_server_class"); tablesToSkip.add("sync_server_record"); // TODO: figure out which other tables to skip // tablesToSkip.add("obs"); // tablesToSkip.add("concept"); // tablesToSkip.add("patient"); } List<String> tablesToDump = new ArrayList<String>(); Session session = sessionFactory.getCurrentSession(); String schema = (String) session.createSQLQuery("SELECT schema()").uniqueResult(); log.warn("schema: " + schema); // Get all tables that we'll need to dump { Query query = session.createSQLQuery( "SELECT tabs.table_name FROM INFORMATION_SCHEMA.TABLES tabs WHERE tabs.table_schema = '" + schema + "'"); for (Object tn : query.list()) { String tableName = (String) tn; if (!tablesToSkip.contains(tableName.toLowerCase())) tablesToDump.add(tableName); } } log.warn("tables to dump: " + tablesToDump); String thisServerGuid = getGlobalProperty(SyncConstants.PROPERTY_SERVER_UUID); // Write the DDL Header as mysqldump does { out.println("-- ------------------------------------------------------"); out.println("-- Database dump to create an openmrs child server"); out.println("-- Schema: " + schema); out.println("-- Parent GUID: " + thisServerGuid); out.println("-- Parent version: " + OpenmrsConstants.OPENMRS_VERSION); out.println("-- ------------------------------------------------------"); out.println(""); out.println("/*!40101 SET CHARACTER_SET_CLIENT=utf8 */;"); out.println("/*!40101 SET NAMES utf8 */;"); out.println("/*!40103 SET TIME_ZONE='+00:00' */;"); out.println("/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;"); out.println("/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;"); out.println("/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;"); out.println("/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;"); out.println("/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;"); out.println("/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;"); out.println("/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;"); out.println("/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;"); out.println(""); } try { // JDBC way of doing this // Connection conn = // DriverManager.getConnection("jdbc:mysql://localhost/" + schema, // "test", "test"); Connection conn = sessionFactory.getCurrentSession().connection(); try { Statement st = conn.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); // Get the create database statement ResultSet rs = st.executeQuery("SHOW CREATE DATABASE " + schema); for (String tableName : tablesToDump) { out.println(); out.println("--"); out.println("-- Table structure for table `" + tableName + "`"); out.println("--"); out.println("DROP TABLE IF EXISTS `" + tableName + "`;"); out.println("SET @saved_cs_client = @@character_set_client;"); out.println("SET character_set_client = utf8;"); rs = st.executeQuery("SHOW CREATE TABLE " + tableName); while (rs.next()) { 
out.println(rs.getString("Create Table") + ";"); } out.println("SET character_set_client = @saved_cs_client;"); out.println(); { out.println("-- Dumping data for table `" + tableName + "`"); out.println("LOCK TABLES `" + tableName + "` WRITE;"); out.println("/*!40000 ALTER TABLE `" + tableName + "` DISABLE KEYS */;"); boolean first = true; rs = st.executeQuery("select * from " + tableName); ResultSetMetaData md = rs.getMetaData(); int numColumns = md.getColumnCount(); int rowNum = 0; boolean insert = false; while (rs.next()) { if (rowNum == 0) { insert = true; out.print("INSERT INTO `" + tableName + "` VALUES "); } ++rowNum; if (first) { first = false; } else { out.print(", "); } if (rowNum % 20 == 0) { out.println(); } out.print("("); for (int i = 1; i <= numColumns; ++i) { if (i != 1) { out.print(","); } if (rs.getObject(i) == null) { out.print("NULL"); } else { switch (md.getColumnType(i)) { case Types.VARCHAR: case Types.CHAR: case Types.LONGVARCHAR: out.print("'"); out.print( rs.getString(i).replaceAll("\n", "\\\\n").replaceAll("'", "\\\\'")); out.print("'"); break; case Types.BIGINT: case Types.DECIMAL: case Types.NUMERIC: out.print(rs.getBigDecimal(i)); break; case Types.BIT: out.print(rs.getBoolean(i)); break; case Types.INTEGER: case Types.SMALLINT: case Types.TINYINT: out.print(rs.getInt(i)); break; case Types.REAL: case Types.FLOAT: case Types.DOUBLE: out.print(rs.getDouble(i)); break; case Types.BLOB: case Types.VARBINARY: case Types.LONGVARBINARY: Blob blob = rs.getBlob(i); out.print("'"); InputStream in = blob.getBinaryStream(); while (true) { int b = in.read(); if (b < 0) { break; } char c = (char) b; if (c == '\'') { out.print("\'"); } else { out.print(c); } } out.print("'"); break; case Types.CLOB: out.print("'"); out.print( rs.getString(i).replaceAll("\n", "\\\\n").replaceAll("'", "\\\\'")); out.print("'"); break; case Types.DATE: out.print("'" + rs.getDate(i) + "'"); break; case Types.TIMESTAMP: out.print("'" + rs.getTimestamp(i) + "'"); break; default: throw new RuntimeException("TODO: handle type code " + md.getColumnType(i) + " (name " + md.getColumnTypeName(i) + ")"); } } } out.print(")"); } if (insert) { out.println(";"); insert = false; } out.println("/*!40000 ALTER TABLE `" + tableName + "` ENABLE KEYS */;"); out.println("UNLOCK TABLES;"); out.println(); } } } finally { conn.close(); } // Now we mark this as a child out.println("-- Now mark this as a child database"); if (uuidForChild == null) uuidForChild = SyncUtil.generateUuid(); out.println("update global_property set property_value = '" + uuidForChild + "' where property = '" + SyncConstants.PROPERTY_SERVER_UUID + "';"); // Write the footer of the DDL script { out.println("/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;"); out.println("/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;"); out.println("/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;"); out.println("/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;"); out.println("/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;"); out.println("/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;"); out.println("/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;"); out.println("/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;"); } out.flush(); out.close(); } catch (IOException ex) { log.error("IOException", ex); } catch (SQLException ex) { log.error("SQLException", ex); } }
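exportChildDB wraps the OutputStream handed in by the caller and finishes with out.flush() followed by out.close(), so the caller's stream is closed as a side effect. When ownership of the stream should stay with the caller, a hedged alternative (class and method names here are illustrative, not from the OpenMRS module) is to flush the PrintStream and leave the close to whoever opened the underlying stream:

import java.io.OutputStream;
import java.io.PrintStream;

public class DumpWriter {
    // Sketch: write a dump to a stream the caller owns. Only flush here;
    // the caller decides when (and whether) to close the underlying stream.
    static void writeDump(OutputStream os) {
        PrintStream out = new PrintStream(os);
        out.println("-- dump header");
        out.println("-- dump body ...");
        out.flush();   // push everything through without closing os
        // intentionally no out.close(): closing the PrintStream would
        // also close the caller-supplied OutputStream
    }
}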
From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test16() { try {/*from w w w . j a v a 2 s .c o m*/ // PrintStream out = new PrintStream("/Users/josephramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test10out.txt"); PrintStream out = System.out; String _dir = "/Users/josephramsey/Documents/LAB_NOTEBOOK.2012.04.20/2013.11.23/test12final/"; // String _dir = "/home/jdramsey/test12final/"; File dir = new File(_dir); if (!dir.exists()) dir.mkdir(); int numRuns = 20; double alpha = 0.01; double gamma = 0.01; int numModels = 14; // This should be done only once. for (int model = 1; model <= numModels; model++) { System.out.println("================= MODEL " + model + " ================="); int numVariables = 5; int N = 1000; for (int run = 1; run <= numRuns; run++) { File file = new File(dir, "data." + model + "." + run + ".txt"); if (file.exists()) continue; GeneralizedSemIm im = makeTestIm4(numVariables, model); DataSet data = im.simulateData(N, false); PrintWriter out1 = new PrintWriter(file); DataWriter.writeRectangularData(data, out1, '\t'); File file2 = new File(dir, "graph." + model + "." + run + ".txt"); File file3 = new File(dir, "graph.tetrad." + model + "." + run + ".txt"); File file6 = new File(dir, "dag.tetrad." + model + "." + run + ".txt"); File file7 = new File(dir, "model." + model + "." + run + ".txt"); SemGraph dag = im.getSemPm().getGraph(); dag.setShowErrorTerms(false); Graph _dag = GraphUtils.replaceNodes(dag, data.getVariables()); Graph truePattern = SearchGraphUtils.patternForDag(_dag); PrintWriter out2 = new PrintWriter(file2); PrintWriter out3 = new PrintWriter(file3); PrintWriter out6 = new PrintWriter(file6); PrintWriter out7 = new PrintWriter(file7); writePatternAsMatrix(data.getVariables(), truePattern, out2); out3.println(truePattern.toString()); out6.println(dag.toString()); out7.println(im); out1.close(); out2.close(); out3.close(); out6.close(); out7.close(); } } double[][] stats = new double[14][8]; for (int model = 1; model <= 14; model++) { System.out.println("MODEL " + model); NumberFormat nf = new DecimalFormat("0.00"); String[] indTestTypes = new String[] { "fisherz", "drton", "kci", "cci" }; // String[] indTestTypes = new String[]{"fisherz", "drton"}; // String indTestType = "fisherz"; // String indTestType = "cci"; // String indTestType = "kci"; // String indTestType = "drton"; for (int type = 0; type < indTestTypes.length; type++) { // for (String indTestType : indTestTypes) { String indTestType = indTestTypes[type]; double sumAP = 0.0; double sumAR = 0.0; double sumEP = 0.0; double sumER = 0.0; int sumErrors = 0; int sumAPN = 0; int sumARN = 0; int sumEPN = 0; int sumERN = 0; for (int run = 1; run <= numRuns; run++) { System.out.println("\nRun " + run); File file4 = new File(dir, "pattern." + indTestType + "." + model + "." + run + ".txt"); Graph pattern; File file3 = new File(dir, "graph.tetrad." + model + "." + run + ".txt"); Graph truePattern = GraphUtils.loadGraphTxt(file3); if (!file4.exists()) { File file = new File(dir, "data." + model + "." + run + ".txt"); DataReader reader = new DataReader(); reader.setVariablesSupplied(true); reader.setDelimiter(DelimiterType.WHITESPACE); DataSet dataSet = reader.parseTabular(file); // long start2 = System.currentTimeMillis(); double cutoff = indTestType.equals("drton") ? 
gamma : alpha; Cpc pc = new Cpc(getIndependenceTest(indTestType, dataSet, cutoff)); pattern = pc.search(); // pattern = GraphUtils.bidirectedToUndirected(pattern); // long stop2 = System.currentTimeMillis(); // System.out.println("Elapsed (just " + indTestType + ") " + (stop2 - start2) / 1000L + " seconds"); PrintWriter out4 = new PrintWriter(file4); out4.println(pattern); out4.close(); } else { pattern = GraphUtils.loadGraphTxt(file4); } System.out.println("True pattern = " + truePattern); System.out.println("Pattern = " + pattern); pattern = GraphUtils.replaceNodes(pattern, truePattern.getNodes()); // pattern = GraphUtils.bidirectedToUndirected(pattern); int adjFn = adjacenciesComplement(truePattern, pattern); int adjFp = adjacenciesComplement(pattern, truePattern); int truePosAdj = truePositivesAdj(truePattern, pattern); System.out.println("AdjFn = " + adjFn); System.out.println("AdjFp = " + adjFp); System.out.println("TruePosAdj = " + truePosAdj); int edgeFn = edgesComplement2(truePattern, pattern); int edgeFp = edgesComplement2(pattern, truePattern); int truePosEdges = truePositiveEdges(truePattern, pattern); // int edgeFn = arrowsComplement(truePattern, pattern); // int edgeFp = arrowsComplement(pattern, truePattern); // int truePosEdges = truePositiveArrows(truePattern, pattern); double adjPrecision = truePosAdj / (double) (truePosAdj + adjFp); double adjRecall = truePosAdj / (double) (truePosAdj + adjFn); double edgePrecision = truePosEdges / (double) (truePosEdges + edgeFp); double edgeRecall = truePosEdges / (double) (truePosEdges + edgeFn); System.out.println("edge Precision = " + edgePrecision); System.out.println("edge Recall = " + edgeRecall); sumErrors += adjFn + adjFp; if (!Double.isNaN(adjPrecision)) { sumAP += adjPrecision; sumAPN++; } if (!Double.isNaN(adjRecall)) { sumAR += adjRecall; sumARN++; } if (!Double.isNaN(edgePrecision)) { sumEP += edgePrecision; sumEPN++; } if (!Double.isNaN(edgeRecall)) { sumER += edgeRecall; sumERN++; } // out.println("Model # " + modelIndex + " AP (CCI) = " + adjPrecision); // out.println("Model # " + modelIndex + " AR (CCI) = " + adjRecall); } out.println("\nAverages " + indTestType); out.println("Model # " + model + " Average AP = " + nf.format(sumAP / sumAPN)); out.println("Model # " + model + " Average AR = " + nf.format(sumAR / sumARN)); out.println("Model # " + model + " Average EP = " + nf.format(sumEP / sumEPN)); out.println("Model # " + model + " Average ER = " + nf.format(sumER / sumERN)); out.println("Model # " + model + " Average Adj Errors = " + nf.format(sumErrors / (double) numModels)); stats[model - 1][type * 2] = sumEP / sumEPN; stats[model - 1][type * 2 + 1] = sumER / sumERN; } } System.out.println(MatrixUtils.toString(stats)); out.close(); } catch (Exception e) { e.printStackTrace(); } }
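Each run in the test above opens several writers (data, graph, dag and model files) and closes them one by one at the end of the block, so an exception anywhere in between leaks the earlier ones; only the summary PrintStream is kept open across all models and closed once at the very end. A sketch of the per-run bookkeeping with try-with-resources (file names and method signature are placeholders) releases all writers regardless of how the block exits:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;

public class RunFiles {
    // Sketch: one data file and one graph file per run, both closed automatically.
    static void writeRun(File dir, int model, int run, String data, String graph)
            throws FileNotFoundException {
        try (PrintWriter dataOut = new PrintWriter(new File(dir, "data." + model + "." + run + ".txt"));
             PrintWriter graphOut = new PrintWriter(new File(dir, "graph." + model + "." + run + ".txt"))) {
            dataOut.println(data);
            graphOut.println(graph);
        } // both writers are flushed and closed here, even on exceptions
    }
}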
From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test12() { try {/*from w ww. ja va 2 s . c o m*/ // PrintStream out = new PrintStream("/Users/josephramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test10out.txt"); PrintStream out = System.out; String _dir = "/Users/josephramsey/Documents/LAB_NOTEBOOK.2012.04.20/2013.11.23/test12final/"; // String _dir = "/home/jdramsey/test12final/"; File dir = new File(_dir); if (!dir.exists()) dir.mkdir(); int numRuns = 20; double alpha = 0.01; double gamma = 0.01; int numModels = 14; // This should be done only once. for (int model = 1; model <= numModels; model++) { System.out.println("================= MODEL " + model + " ================="); int numVariables = 5; int N = 1000; for (int run = 1; run <= numRuns; run++) { File file = new File(dir, "data." + model + "." + run + ".txt"); if (file.exists()) continue; GeneralizedSemIm im = makeTestIm4(numVariables, model); DataSet data = im.simulateData(N, false); PrintWriter out1 = new PrintWriter(file); DataWriter.writeRectangularData(data, out1, '\t'); File file2 = new File(dir, "graph." + model + "." + run + ".txt"); File file3 = new File(dir, "graph.tetrad." + model + "." + run + ".txt"); File file6 = new File(dir, "dag.tetrad." + model + "." + run + ".txt"); File file7 = new File(dir, "model." + model + "." + run + ".txt"); SemGraph dag = im.getSemPm().getGraph(); dag.setShowErrorTerms(false); Graph _dag = GraphUtils.replaceNodes(dag, data.getVariables()); Graph truePattern = SearchGraphUtils.patternForDag(_dag); PrintWriter out2 = new PrintWriter(file2); PrintWriter out3 = new PrintWriter(file3); PrintWriter out6 = new PrintWriter(file6); PrintWriter out7 = new PrintWriter(file7); writePatternAsMatrix(data.getVariables(), truePattern, out2); out3.println(truePattern.toString()); out6.println(dag.toString()); out7.println(im); out1.close(); out2.close(); out3.close(); out6.close(); out7.close(); } } double[][] stats = new double[14][8]; for (int model = 1; model <= 14; model++) { System.out.println("MODEL " + model); NumberFormat nf = new DecimalFormat("0.00"); String[] indTestTypes = new String[] { "fisherz", "drton", "kci", "cci" }; // String[] indTestTypes = new String[]{"fisherz", "drton"}; // String indTestType = "fisherz"; // String indTestType = "cci"; // String indTestType = "kci"; // String indTestType = "drton"; for (int type = 0; type < indTestTypes.length; type++) { // for (String indTestType : indTestTypes) { String indTestType = indTestTypes[type]; double sumAP = 0.0; double sumAR = 0.0; double sumEP = 0.0; double sumER = 0.0; int sumErrors = 0; int sumAPN = 0; int sumARN = 0; int sumEPN = 0; int sumERN = 0; for (int run = 1; run <= numRuns; run++) { System.out.println("\nRun " + run); File file4 = new File(dir, "pattern." + indTestType + "." + model + "." + run + ".txt"); Graph pattern; File file3 = new File(dir, "dag.tetrad." + model + "." + run + ".txt"); Graph truePattern = GraphUtils.loadGraphTxt(file3); if (!file4.exists()) { File file = new File(dir, "data." + model + "." + run + ".txt"); DataReader reader = new DataReader(); reader.setVariablesSupplied(true); reader.setDelimiter(DelimiterType.WHITESPACE); DataSet dataSet = reader.parseTabular(file); // long start2 = System.currentTimeMillis(); double cutoff = indTestType.equals("drton") ? 
gamma : alpha; Pc pc = new Pc(getIndependenceTest(indTestType, dataSet, cutoff)); pattern = pc.search(); Nlo nlo = new Nlo(dataSet, alpha); pattern = nlo.fullOrient4(pattern); // pattern = GraphUtils.bidirectedToUndirected(pattern); // long stop2 = System.currentTimeMillis(); // System.out.println("Elapsed (just " + indTestType + ") " + (stop2 - start2) / 1000L + " seconds"); PrintWriter out4 = new PrintWriter(file4); out4.println(pattern); out4.close(); } else { pattern = GraphUtils.loadGraphTxt(file4); } System.out.println("True pattern = " + truePattern); System.out.println("Pattern = " + pattern); pattern = GraphUtils.replaceNodes(pattern, truePattern.getNodes()); // pattern = GraphUtils.bidirectedToUndirected(pattern); int adjFn = adjacenciesComplement(truePattern, pattern); int adjFp = adjacenciesComplement(pattern, truePattern); int truePosAdj = truePositivesAdj(truePattern, pattern); System.out.println("AdjFn = " + adjFn); System.out.println("AdjFp = " + adjFp); System.out.println("TruePosAdj = " + truePosAdj); // int edgeFn = edgesComplement(truePattern, pattern); // int edgeFp = edgesComplement(pattern, truePattern); // int truePosEdges = truePositiveEdges(truePattern, pattern); int edgeFn = arrowsComplement(truePattern, pattern); int edgeFp = arrowsComplement(pattern, truePattern); int truePosEdges = truePositiveArrows(truePattern, pattern); // double adjPrecision = truePosAdj / (double) (truePosAdj + adjFp); double adjRecall = truePosAdj / (double) (truePosAdj + adjFn); double edgePrecision = truePosEdges / (double) (truePosEdges + edgeFp); double edgeRecall = truePosEdges / (double) (truePosEdges + edgeFn); System.out.println("edge Precision = " + edgePrecision); System.out.println("edge Recall = " + edgeRecall); sumErrors += adjFn + adjFp; if (!Double.isNaN(adjPrecision)) { sumAP += adjPrecision; sumAPN++; } if (!Double.isNaN(adjRecall)) { sumAR += adjRecall; sumARN++; } if (!Double.isNaN(edgePrecision)) { sumEP += edgePrecision; sumEPN++; } if (!Double.isNaN(edgeRecall)) { sumER += edgeRecall; sumERN++; } // out.println("Model # " + modelIndex + " AP (CCI) = " + adjPrecision); // out.println("Model # " + modelIndex + " AR (CCI) = " + adjRecall); } out.println("\nAverages " + indTestType); out.println("Model # " + model + " Average AP = " + nf.format(sumAP / sumAPN)); out.println("Model # " + model + " Average AR = " + nf.format(sumAR / sumARN)); out.println("Model # " + model + " Average EP = " + nf.format(sumEP / sumEPN)); out.println("Model # " + model + " Average ER = " + nf.format(sumER / sumERN)); out.println("Model # " + model + " Average Adj Errors = " + nf.format(sumErrors / (double) numModels)); stats[model - 1][type * 2] = sumEP / sumEPN; stats[model - 1][type * 2 + 1] = sumER / sumERN; } } System.out.println(MatrixUtils.toString(stats)); out.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test12_2() { try {//from w w w . ja v a 2s.c om // PrintStream out = new PrintStream("/Users/josephramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test10out.txt"); PrintStream out = System.out; // String _dir = "/Users/josephramsey/Documents/LAB_NOTEBOOK.2012.04.20/2013.11.23/test12_2/"; String _dir = "/home/jdramsey/test12-2final/"; File dir = new File(_dir); if (!dir.exists()) dir.mkdir(); int numRuns = 100; double alpha = 0.01; double gamma = 0.01; int numModels = 14; double[][] stats = new double[5][8]; // This should be done only once. for (int sampleSize : new int[] { 100, 250, 400, 550, 700 }) { System.out.println("================= Saple size " + sampleSize + " ================="); int numVariables = 5; int N = sampleSize; for (int run = 1; run <= numRuns; run++) { File file = new File(dir, "data." + sampleSize + "." + run + ".txt"); if (file.exists()) continue; GeneralizedSemIm im = makeTestIm1(); DataSet data = im.simulateData(N, false); PrintWriter out1 = new PrintWriter(file); DataWriter.writeRectangularData(data, out1, '\t'); File file2 = new File(dir, "graph." + sampleSize + "." + run + ".txt"); File file3 = new File(dir, "graph.tetrad." + sampleSize + "." + run + ".txt"); File file6 = new File(dir, "dag.tetrad." + sampleSize + "." + run + ".txt"); File file7 = new File(dir, "model." + sampleSize + "." + run + ".txt"); SemGraph dag = im.getSemPm().getGraph(); dag.setShowErrorTerms(false); Graph _dag = GraphUtils.replaceNodes(dag, data.getVariables()); Graph truePattern = SearchGraphUtils.patternForDag(_dag); PrintWriter out2 = new PrintWriter(file2); PrintWriter out3 = new PrintWriter(file3); PrintWriter out6 = new PrintWriter(file6); PrintWriter out7 = new PrintWriter(file7); writePatternAsMatrix(data.getVariables(), truePattern, out2); out3.println(truePattern.toString()); out6.println(dag.toString()); out7.println(im); out1.close(); out2.close(); out3.close(); out6.close(); out7.close(); } } int[] sizes = { 100, 250, 400, 550, 700 }; for (int s = 0; s < sizes.length; s++) { int sampleSize = sizes[s]; System.out.println("Sample size " + sampleSize); NumberFormat nf = new DecimalFormat("0.00"); String[] indTestTypes = new String[] { "fisherz", "drton", "cci", "kci" }; // String[] indTestTypes = new String[]{"fisherz", "drton"}; // String indTestType = "fisherz"; // String indTestType = "cci"; // String indTestType = "kci"; // String indTestType = "drton"; for (int type = 0; type < indTestTypes.length; type++) { // for (String indTestType : indTestTypes) { String indTestType = indTestTypes[type]; double sumAP = 0.0; double sumAR = 0.0; double sumEP = 0.0; double sumER = 0.0; int sumErrors = 0; int sumAPN = 0; int sumARN = 0; int sumEPN = 0; int sumERN = 0; for (int run = 1; run <= numRuns; run++) { System.out.println("\nRun " + run); File file4 = new File(dir, "pattern." + indTestType + "." + sampleSize + "." + run + ".txt"); Graph pattern; File file3 = new File(dir, "graph.tetrad." + sampleSize + "." + run + ".txt"); Graph truePattern = GraphUtils.loadGraphTxt(file3); if (!file4.exists()) { File file = new File(dir, "data." + sampleSize + "." + run + ".txt"); DataReader reader = new DataReader(); reader.setVariablesSupplied(true); reader.setDelimiter(DelimiterType.WHITESPACE); DataSet dataSet = reader.parseTabular(file); // long start2 = System.currentTimeQMillis(); double cutoff = indTestType.equals("drton") ? 
gamma : alpha; Pc pc = new Pc(getIndependenceTest(indTestType, dataSet, cutoff)); pattern = pc.search(); // pattern = GraphUtils.bidirectedToUndirected(pattern); // long stop2 = System.currentTimeMillis(); // System.out.println("Elapsed (just " + indTestType + ") " + (stop2 - start2) / 1000L + " seconds"); PrintWriter out4 = new PrintWriter(file4); out4.println(pattern); out4.close(); System.out.println("Pattern = " + pattern); } else { pattern = GraphUtils.loadGraphTxt(file4); // pattern = GraphUtils.bidirectedToUndirected(pattern); } pattern = GraphUtils.replaceNodes(pattern, truePattern.getNodes()); // pattern = GraphUtils.bidirectedToUndirected(pattern); int adjFn = adjacenciesComplement(truePattern, pattern); int adjFp = adjacenciesComplement(pattern, truePattern); int truePosAdj = truePositivesAdj(truePattern, pattern); System.out.println("AdjFn = " + adjFn); System.out.println("AdjFp = " + adjFp); System.out.println("TruePosAdj = " + truePosAdj); int edgeFn = edgesComplement(truePattern, pattern); int edgeFp = edgesComplement(pattern, truePattern); int truePosEdges = truePositiveEdges(truePattern, pattern); // int edgeFn = arrowsComplement(truePattern, pattern); // int edgeFp = arrowsComplement(pattern, truePattern); // int truePosEdges = truePositiveArrows(truePattern, pattern); double adjPrecision = truePosAdj / (double) (truePosAdj + adjFp); double adjRecall = truePosAdj / (double) (truePosAdj + adjFn); System.out.println("adjPrecision = " + adjPrecision); System.out.println("adjRecall = " + adjRecall); double edgePrecision = truePosEdges / (double) (truePosEdges + edgeFp); double edgeRecall = truePosEdges / (double) (truePosEdges + edgeFn); sumErrors += adjFn + adjFp; if (!Double.isNaN(adjPrecision)) { sumAP += adjPrecision; sumAPN++; } if (!Double.isNaN(adjRecall)) { sumAR += adjRecall; sumARN++; } if (!Double.isNaN(edgePrecision)) { sumEP += edgePrecision; sumEPN++; } if (!Double.isNaN(edgeRecall)) { sumER += edgeRecall; sumERN++; } // out.println("Model # " + modelIndex + " AP (CCI) = " + adjPrecision); // out.println("Model # " + modelIndex + " AR (CCI) = " + adjRecall); } out.println("\nAverages " + indTestType); out.println("Model # " + sampleSize + " Average AP = " + nf.format(sumAP / sumAPN)); out.println("Model # " + sampleSize + " Average AR = " + nf.format(sumAR / sumARN)); out.println("Model # " + sampleSize + " Average EP = " + nf.format(sumEP / sumEPN)); out.println("Model # " + sampleSize + " Average ER = " + nf.format(sumER / sumERN)); out.println("Model # " + sampleSize + " Average Adj Errors = " + nf.format(sumErrors / (double) numModels)); stats[s][type * 2] = sumEP / sumEPN; stats[s][type * 2 + 1] = sumER / sumERN; } } System.out.println(MatrixUtils.toString(stats)); out.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test12_3() { try {// ww w. j a v a 2 s.c o m // PrintStream out = new PrintStream("/Users/josephramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test11out.txt"); // PrintStream out = new PrintStream("/home/jdramsey/test10out.txt"); PrintStream out = System.out; // String _dir = "/Users/josephramsey/Documents/LAB_NOTEBOOK.2012.04.20/2013.11.23/test12-3final/"; String _dir = "/home/jdramsey/test12-3final/"; File dir = new File(_dir); if (!dir.exists()) dir.mkdir(); int numRuns = 1; double alpha = 0.01; double gamma = 0.01; double[][] stats = new double[5][8]; // This should be done only once. for (int sampleSize : new int[] { 2000 }) { System.out.println("================= Sample size " + sampleSize + " ================="); int numVariables = 200; int N = sampleSize; for (int run = 1; run <= numRuns; run++) { File file = new File(dir, "data." + sampleSize + "." + run + ".txt"); if (file.exists()) continue; GeneralizedSemIm im = makeTestIm4(numVariables, 10); DataSet data = im.simulateData(N, false); PrintWriter out1 = new PrintWriter(file); DataWriter.writeRectangularData(data, out1, '\t'); File file2 = new File(dir, "graph." + sampleSize + "." + run + ".txt"); File file3 = new File(dir, "graph.tetrad." + sampleSize + "." + run + ".txt"); File file6 = new File(dir, "dag.tetrad." + sampleSize + "." + run + ".txt"); File file7 = new File(dir, "model." + sampleSize + "." + run + ".txt"); SemGraph dag = im.getSemPm().getGraph(); dag.setShowErrorTerms(false); Graph _dag = GraphUtils.replaceNodes(dag, data.getVariables()); Graph truePattern = SearchGraphUtils.patternForDag(_dag); PrintWriter out2 = new PrintWriter(file2); PrintWriter out3 = new PrintWriter(file3); PrintWriter out6 = new PrintWriter(file6); PrintWriter out7 = new PrintWriter(file7); writePatternAsMatrix(data.getVariables(), truePattern, out2); out3.println(truePattern.toString()); out6.println(dag.toString()); out7.println(im); out1.close(); out2.close(); out3.close(); out6.close(); out7.close(); } } int[] sizes = { 2000 }; for (int s = 0; s < sizes.length; s++) { int sampleSize = sizes[s]; System.out.println("Sample size " + sampleSize); NumberFormat nf = new DecimalFormat("0.00"); String[] indTestTypes = new String[] { "fisherz", "drton", "cci" }; // String[] indTestTypes = new String[]{"fisherz", "drton"}; // String indTestType = "fisherz"; // String indTestType = "cci"; // String indTestType = "kci"; // String indTestType = "drton"; for (int type = 0; type < indTestTypes.length; type++) { long start = System.currentTimeMillis(); // for (String indTestType : indTestTypes) { String indTestType = indTestTypes[type]; double sumAP = 0.0; double sumAR = 0.0; double sumEP = 0.0; double sumER = 0.0; int sumErrors = 0; int sumAPN = 0; int sumARN = 0; int sumEPN = 0; int sumERN = 0; for (int run = 1; run <= numRuns; run++) { System.out.println("\nRun " + run); File file4 = new File(dir, "pattern." + indTestType + "." + sampleSize + "." + run + ".txt"); Graph pattern; File file3 = new File(dir, "graph.tetrad." + sampleSize + "." + run + ".txt"); Graph truePattern = GraphUtils.loadGraphTxt(file3); if (!file4.exists()) { File file = new File(dir, "data." + sampleSize + "." + run + ".txt"); DataReader reader = new DataReader(); reader.setVariablesSupplied(true); reader.setDelimiter(DelimiterType.WHITESPACE); DataSet dataSet = reader.parseTabular(file); // long start2 = System.currentTimeQMillis(); double cutoff = indTestType.equals("drton") ? 
gamma : alpha; PcStable pc = new PcStable(getIndependenceTest(indTestType, dataSet, cutoff)); pattern = pc.search(); // pattern = GraphUtils.bidirectedToUndirected(pattern); // long stop2 = System.currentTimeMillis(); // System.out.println("Elapsed (just " + indTestType + ") " + (stop2 - start2) / 1000L + " seconds"); PrintWriter out4 = new PrintWriter(file4); out4.println(pattern); out4.close(); System.out.println("Pattern = " + pattern); } else { pattern = GraphUtils.loadGraphTxt(file4); // pattern = GraphUtils.bidirectedToUndirected(pattern); } pattern = GraphUtils.replaceNodes(pattern, truePattern.getNodes()); // pattern = GraphUtils.bidirectedToUndirected(pattern); int adjFn = adjacenciesComplement(truePattern, pattern); int adjFp = adjacenciesComplement(pattern, truePattern); int truePosAdj = truePositivesAdj(truePattern, pattern); System.out.println("AdjFn = " + adjFn); System.out.println("AdjFp = " + adjFp); System.out.println("TruePosAdj = " + truePosAdj); int edgeFn = edgesComplement(truePattern, pattern); int edgeFp = edgesComplement(pattern, truePattern); int truePosEdges = truePositiveEdges(truePattern, pattern); // int edgeFn = arrowsComplement(truePattern, pattern); // int edgeFp = arrowsComplement(pattern, truePattern); // int truePosEdges = truePositiveArrows(truePattern, pattern); double adjPrecision = truePosAdj / (double) (truePosAdj + adjFp); double adjRecall = truePosAdj / (double) (truePosAdj + adjFn); System.out.println("adjPrecision = " + adjPrecision); System.out.println("adjRecall = " + adjRecall); double edgePrecision = truePosEdges / (double) (truePosEdges + edgeFp); double edgeRecall = truePosEdges / (double) (truePosEdges + edgeFn); sumErrors += adjFn + adjFp; if (!Double.isNaN(adjPrecision)) { sumAP += adjPrecision; sumAPN++; } if (!Double.isNaN(adjRecall)) { sumAR += adjRecall; sumARN++; } if (!Double.isNaN(edgePrecision)) { sumEP += edgePrecision; sumEPN++; } if (!Double.isNaN(edgeRecall)) { sumER += edgeRecall; sumERN++; } // out.println("Model # " + modelIndex + " AP (CCI) = " + adjPrecision); // out.println("Model # " + modelIndex + " AR (CCI) = " + adjRecall); } out.println("\nAverages " + indTestType); out.println("Model # " + sampleSize + " Average AP = " + nf.format(sumAP / sumAPN)); out.println("Model # " + sampleSize + " Average AR = " + nf.format(sumAR / sumARN)); out.println("Model # " + sampleSize + " Average EP = " + nf.format(sumEP / sumEPN)); out.println("Model # " + sampleSize + " Average ER = " + nf.format(sumER / sumERN)); // out.println("Model # " + sampleSize + " Average Adj Errors = " + nf.format(sumErrors / (double) numModels)); stats[s][type * 2] = sumEP / sumEPN; stats[s][type * 2 + 1] = sumER / sumERN; long stop = System.currentTimeMillis(); System.out.println("Elapsed time " + (stop - start) / 1000L); } } System.out.println(MatrixUtils.toString(stats)); out.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file: com.fluidops.iwb.deepzoom.CXMLServlet.java
@SuppressWarnings(value = "DM_CONVERT_CASE", justification = "checked") private void renderCollection(Map<URI, Map<URI, Set<Value>>> graph, Vector<URI> predicates, HashMap<URI, Integer> facetCountMap, String collection, String collectionName, PrintStream httpOut, String q, HttpServletRequest req, int maxEntities, int maxFacets) throws FileNotFoundException, IOException { PrintStream resultCache = new PrintStream(new FileOutputStream(getCacheFile("resultCache", hash(q))), false, "UTF-8"); PrintStream out = new PrintStream(new DualOutputStream(httpOut, resultCache), false, "UTF-8"); out.println("<?xml version=\"1.0\" encoding=\"utf-8\"?>"); //out.println("<?xml-stylesheet type=\"text/xsl\" href=\"/int/pivot/pivot.xsl\"?>"); out.println("<Collection Name=\"" + XML.escape(collectionName) + "\" SchemaVersion=\"1.0\""); out.println(" xmlns=\"http://schemas.microsoft.com/collection/metadata/2009\""); out.println(" xmlns:p=\"http://schemas.microsoft.com/livelabs/pivot/collection/2009\""); out.println(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""); out.println(" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\">"); Dimension d = new Dimension(200, 200); Writer collectionWriter = null; try {/*from ww w . jav a 2s. c o m*/ collectionWriter = new OutputStreamWriter( new FileOutputStream(getCacheFile("collectionCache", hash(q))), "UTF-8"); } catch (IOException e) { logger.warn(e.getMessage()); } collectionWriter.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" + "<Collection MaxLevel=\"8\" TileSize=\"256\" Format=\"jpg\" NextItemId=\"1001\" ServerFormat=\"Default\" xmlns=\"http://schemas.microsoft.com/deepzoom/2009\">\n" + "<Items>\n"); // render facet categories out.println("<FacetCategories>"); List<URI> displayedFacets = new LinkedList<URI>(); // We limit the number of displayed facets (maxPivotFacets) // We rank facets by the number of entities that have a value for the given facet // The ranking only needs to be performed if maxFacets>0 (0 disables limitation) // and the number of facets is actually greater than maxFacets if (facetCountMap != null && maxFacets > 0 && facetCountMap.size() > maxFacets) { Set<Entry<URI, Integer>> s = facetCountMap.entrySet(); for (int i = 0; i < maxFacets; i++) { int max = 0; Entry<URI, Integer> maxEntry = null; for (Entry<URI, Integer> e : s) { if (e.getValue() > max) { maxEntry = e; max = e.getValue(); } } if (maxEntry == null) break; displayedFacets.add(maxEntry.getKey()); s.remove(maxEntry); } } else displayedFacets = predicates; Vector<URI> facetNames = new Vector<URI>(); Vector<String> facetLabels = new Vector<String>(); // we may use any sort order here; currently, we sort alphabetically, // with the exception that the type facet is always on top Collections.sort(predicates, new Comparator<URI>() { public int compare(URI o1, URI o2) { if (o1.equals(RDF.TYPE)) return -1; else if (o2.equals(RDF.TYPE)) return 1; else return o1.stringValue().compareTo(o2.stringValue()); } }); for (URI predicate : predicates) { ReadDataManager dm = EndpointImpl.api().getDataManager(); if (dm.getLabel(predicate).equals("name") || facetNames.contains(predicate)) continue; // labels of names are unfortunately not unique, but must not occur as duplicates facetNames.add(predicate); if (displayedFacets.contains(predicate)) { String name = dm.getLabel(predicate); name = escape(name).trim(); // TODO: only quick fix, has to be handled properly // facet labels need to be unique, disregarding capitalization if (!facetLabels.contains(name.toLowerCase())) { 
facetLabels.add(name.toLowerCase()); String type; URI range = predicateTypes.get(predicate); if (range == null) range = XMLSchema.STRING; if (range.equals(XMLSchema.DATETIME) || range.equals(XMLSchema.DATE)) type = "DateTime"; else if (range.equals(XMLSchema.FLOAT) || range.equals(XMLSchema.DOUBLE) || range.equals(XMLSchema.INT) || range.equals(XMLSchema.INTEGER) || range.equals(XMLSchema.LONG)) type = "Number"; else type = "String"; //default out.println("<FacetCategory Name=\"" + XML.escape(name) + "\" Type=\"" + type + "\" p:IsFilterVisible=\"true\" p:IsMetaDataVisible=\"false\" />"); out.println("<FacetCategory Name=\"" + XML.escape(name) + ":\" Type=\"Link\" p:IsFilterVisible=\"false\" p:IsMetaDataVisible=\"true\" />"); } } } out.println( "<FacetCategory Name=\"Home\" Type=\"Link\" p:IsFilterVisible=\"false\" p:IsMetaDataVisible=\"true\" />"); out.println("</FacetCategories>"); // render items in collection String collectionID = "collection" + hash(q) + ".xml"; out.println("<Items ImgBase=\"" + collection + "/" + collectionID + "\">"); int counter = 0; Vector<String> imageVector = new Vector<String>(); // queryImageVector.put(hash(q), imageVector); ImageLoader loader = new ImageLoader(api().getRequestMapper().getInternalUrlWithoutContext(req)); for (Entry<URI, Map<URI, Set<Value>>> entry : graph.entrySet()) { Resource entity = entry.getKey(); if (counter >= maxEntities) break; Map<URI, Set<Value>> facets = entry.getValue(); // We assume that all entities are URIs. Perhaps need to re-evaluate this assumption URI uri = (URI) entity; String img = null; if (IWBCmsUtil.isUploadedFile(uri) && ImageResolver.isImage(uri.stringValue())) img = getImageURLForValue(uri); else { // verify if there is an image in the facets using image properties // given by ImageResolver (e.g. foaf:img) for (URI imageProperty : ImageResolver.getDefaultImageResolver().getImageProperties()) { Set<Value> imgForProperty = facets.get(imageProperty); if (imgForProperty != null && imgForProperty.size() > 0) { img = getImageURLForValue(imgForProperty.iterator().next()); break; // found an image } } // fallback, no image found: indicates that ID card should be generated if (img == null) img = "id:" + entity.stringValue() + ".jpg"; } String filename = ImageLoader.filename(img); if (!imageVector.contains(img)) //only need to load the image once { loader.createImageThreaded(uri, facets, img, collection); imageVector.add(img); int imgID = imageVector.indexOf(img); collectionWriter.write("<I Id=\"" + imgID + "\" N=\"" + imgID + "\" Source=\"dzimages/" + ImageLoader.subdir(filename) + "/" + ((filename.lastIndexOf(".") > -1) ? 
filename.substring(0, filename.lastIndexOf(".")) : filename) + ".xml\">" + "<Size Width=\"" + d.width + "\" Height=\"" + d.height + "\"/></I>"); } int imgID = imageVector.indexOf(img); ReadDataManager dm = EndpointImpl.api().getDataManager(); String label = cleanLabel(dm.getLabel(uri)); if (label.length() == 0) continue; out.println("<Item Img=\"#" + imgID + "\" Id=\"" + counter + "\" Href=\"" + EndpointImpl.api().getRequestMapper().getRequestStringFromValue(uri) + "\" Name=\"" + label + "\">"); String facetString = "";// temp string as we do not know whether there are any facet values for (Entry<URI, Set<Value>> facetEntry : facets.entrySet()) { URI predicate = facetEntry.getKey(); if (predicate.equals(Vocabulary.DBPEDIA_ONT.THUMBNAIL)) continue; String name = dm.getLabel(predicate); if (name.equals("name")) continue; Set<Value> facetValues = facetEntry.getValue(); String values = ""; String exploreValues = ""; int facetValueCounter = 0; for (Value value : facetValues) { if (value == null) continue; if (facetValueCounter++ > 1000) break; //TODO: temp hack to prevent scalability problems, improve logic label = cleanLabel(dm.getLabel(value)); if (label.equals("")) continue; String href = EndpointImpl.api().getRequestMapper().getRequestStringFromValue(value); if ((value instanceof Literal) && !Config.getConfig().getLinkLiterals()) { href = "#"; } if (label.toLowerCase().startsWith("category:")) { label = label.substring(9); } URI range = predicateTypes.get(predicate); if (range == null) values += "<String Value=\"" + label + "\"/>"; else if (range.equals(XMLSchema.FLOAT) || range.equals(XMLSchema.DOUBLE) || range.equals(XMLSchema.INT) || range.equals(XMLSchema.INTEGER) || range.equals(XMLSchema.LONG)) { try { // Need to be compliant with xsd:decimal values += "<Number Value=\"" + (new BigDecimal(Double.parseDouble(label))).toPlainString() + "\"/>"; } catch (Exception e) { logger.warn("Parsing problem with: " + label); } } else if (range.equals(XMLSchema.DATETIME) || range.equals(XMLSchema.DATE)) { Date date = null; // Loop over supported date formats for (DateFormat formatter : formatters) { try { date = (Date) formatter.parse(label); break; } // If date format is not supported.. catch (ParseException e) { logger.debug("Date format not supported: " + label + ". Using today instead."); } } if (date != null) { DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); values += "<DateTime Value=\"" + format.format(date) + "\"/>"; } } else values += "<String Value=\"" + label + "\"/>"; exploreValues += "<Link Href=\"" + href + "\" Name=\"" + label + "\"/>"; } name = escape(name).trim(); // values = values.replace("]", ""); if (values.length() > 0 && displayedFacets.contains(predicate)) facetString += "<Facet Name=\"" + XML.escape(name) + "\">" + values + "</Facet>"; if (exploreValues.length() > 0 && displayedFacets.contains(predicate)) facetString += "<Facet Name=\"" + XML.escape(name) + ":\">" + exploreValues + "</Facet>"; } /* //Query for related elements, currently not used String href = "CONSTRUCT { ?uri <http://dbpedia.org/ontology/thumbnail> ?img . ?uri ?p ?o . } WHERE { { ?uri ?relationship <"+uri.stringValue()+"> } UNION { <"+uri.stringValue()+"> ?relationship ?uri } . ?uri <http://dbpedia.org/ontology/thumbnail> ?img . 
?uri ?p ?o }"; try { href=URLEncoder.encode(href, "UTF-8"); } catch (UnsupportedEncodingException e) { logger.error(e.getMessage(), e); } facetString+="<Facet Name=\"Related\"><Link Href=\"query.cxml?q="+href+"\" Name=\"Explore Related\"/></Facet>"; */ if (!facetString.isEmpty()) out.println("<Facets>" + facetString + "</Facets>"); out.println("</Item>"); counter++; } if (counter == 0) { // should show some image indicating that nothing has been found, for now we just show a text out.println("<Item Img=\"#0\" Id=\"0\" Name=\"Nothing found\">"); out.println("</Item>"); URI uri = ValueFactoryImpl.getInstance().createURI("http://www.fluidops.com/Nothing Found"); // id: indicates that ID card should be generated String img = "id:" + uri.stringValue() + ".jpg"; String filename = ImageLoader.filename(img); Map<URI, Set<Value>> facets = new HashMap<URI, Set<Value>>(); loader.createImageThreaded(uri, facets, img, collection); imageVector.add(img); int imgID = 0; collectionWriter.write("<I Id=\"" + imgID + "\" N=\"" + imgID + "\" Source=\"dzimages/" + ImageLoader.subdir(filename) + "/" + ((filename.lastIndexOf(".") > -1) ? filename.substring(0, filename.lastIndexOf(".")) : filename) + ".xml\">" + "<Size Width=\"" + d.width + "\" Height=\"" + d.height + "\"/></I>"); } try { writeImageListToFile("imageCache", hash(q), imageVector); } catch (IOException e) { logger.warn(e.getMessage(), e); } // We wait until all image loading threads have finished, but at most one minute long wait = 0; while (loader.threadCounter > 0 && wait < 60000) { try { Thread.sleep(10); wait += 10; } catch (InterruptedException e1) { logger.debug(e1.toString()); } } collectionWriter.write("</Items>\n</Collection>"); collectionWriter.close(); out.println("</Items>"); out.println("</Collection>"); out.close(); }
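The servlet tees its output into the HTTP response and a result-cache file through a DualOutputStream and closes the combined PrintStream once the collection is written. DualOutputStream is specific to that code base; a minimal tee sketch built only on JDK classes (the TeeOutputStream class and its field names are made up for illustration) behaves the same way: every write goes to both targets, and closing the PrintStream closes both.

import java.io.IOException;
import java.io.OutputStream;

// Sketch of a tee stream: every byte is forwarded to both wrapped streams.
class TeeOutputStream extends OutputStream {
    private final OutputStream first;
    private final OutputStream second;

    TeeOutputStream(OutputStream first, OutputStream second) {
        this.first = first;
        this.second = second;
    }

    @Override public void write(int b) throws IOException {
        first.write(b);
        second.write(b);
    }

    @Override public void flush() throws IOException {
        first.flush();
        second.flush();
    }

    @Override public void close() throws IOException {
        // closing the PrintStream built on top of this closes both targets
        try { first.close(); } finally { second.close(); }
    }
}

// Usage sketch: PrintStream out = new PrintStream(new TeeOutputStream(httpOut, cacheOut), false, "UTF-8");
// out.close() then flushes and closes both the HTTP response stream and the cache file.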