List of usage examples for java.io PrintStream print
public void print(Object obj)
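Before the longer examples, a minimal sketch of what print(Object) does: it writes String.valueOf(obj) to the stream with no trailing newline, so a null reference prints as the four characters "null". The class name below is illustrative.

    import java.io.PrintStream;

    public class PrintObjectDemo {
        public static void main(String[] args) {
            PrintStream out = System.out;
            Object boxed = Integer.valueOf(42);
            out.print(boxed);         // prints "42", no newline
            out.print(' ');
            out.print((Object) null); // prints "null" -- print(Object) uses String.valueOf
            out.println();            // terminate the line
        }
    }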
From source file:edu.cornell.med.icb.goby.modes.AbstractCommandLineMode.java
    /**
     * Print the usage for a mode in the format appropriate for a
     * <a href="http://www.wikimedia.org/">Wikimedia</a> table.
     *
     * @param jsap The parameters for the mode
     */
    public void printUsageAsWikiTable(final JSAP jsap) {
        final PrintStream stream = System.out;

        // Table definition/header
        stream.println(
                "{| class=\"wikitable\" style=\"margin: 1em auto 1em auto; background:#efefef;\" valign=\"top\" align=\"center\" border=\"1\" cellpadding=\"5\" width=\"80%\"");
        stream.println("|- valign=\"bottom\" align=\"left\" style=\"background:#ffdead;\"");
        stream.println("!width=\"15%\"| Flag");
        stream.println("!width=\"10%\" | Arguments");
        stream.println("!width=\"5%\"| Required");
        stream.println("! Description");
        stream.println();

        // iterate through help entries for the mode
        final IDMap idMap = jsap.getIDMap();
        final Iterator iterator = idMap.idIterator();
        while (iterator.hasNext()) {
            final String id = (String) iterator.next();
            if (!"mode".equals(id) && !isJSAPHelpId(id)) { // skip the mode and help ids
                final Parameter parameter = jsap.getByID(id);
                stream.println("|- valign=\"top\" align=\"left\"");
                stream.print("| <nowiki>");
                if (parameter instanceof Flagged) {
                    final Flagged flagged = (Flagged) parameter;
                    final Character characterFlag = flagged.getShortFlagCharacter();
                    final String longFlag = flagged.getLongFlag();
                    if (characterFlag != null && StringUtils.isNotBlank(longFlag)) {
                        stream.print("(-" + characterFlag + "|--" + longFlag + ")");
                    } else if (characterFlag != null) {
                        stream.print("-" + characterFlag);
                    } else if (StringUtils.isNotBlank(longFlag)) {
                        stream.print("--" + longFlag);
                    }
                } else {
                    stream.print("n/a");
                }
                stream.println("</nowiki>");

                stream.print("| ");
                if (parameter instanceof Switch) {
                    stream.println("n/a");
                } else {
                    stream.println(id);
                }

                final boolean required;
                if (parameter instanceof Option) {
                    final Option option = (Option) parameter;
                    required = option.required();
                } else {
                    required = !(parameter instanceof Switch);
                }
                stream.println("| " + BooleanUtils.toStringYesNo(required));

                stream.println("| " + parameter.getHelp());
                if (parameter.getDefault() != null) {
                    stream.print("Default value: ");
                    for (final String defaultValue : parameter.getDefault()) {
                        stream.print(" ");
                        stream.print(defaultValue);
                    }
                }
                stream.println();
            }
        }

        // table close
        stream.println("|}");
    }
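Because the method above writes straight to System.out, a test can capture the table by temporarily swapping the stream. A minimal sketch of that capture pattern; the class name and the commented call are illustrative, not part of the Goby API:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public class CaptureStdoutSketch {
        public static void main(String[] args) {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            PrintStream original = System.out;
            System.setOut(new PrintStream(buffer, true));
            try {
                // mode.printUsageAsWikiTable(jsap); // hypothetical call; would print into the buffer
                System.out.print("| <nowiki>-h</nowiki>"); // stand-in for the real table output
            } finally {
                System.setOut(original); // always restore the real stream
            }
            System.out.println("captured: " + buffer.toString());
        }
    }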
From source file:org.apache.jackrabbit.jcr2spi.nodetype.NodeTypeManagerImpl.java
    /**
     * {@inheritDoc}
     */
    public void dump(PrintStream ps) {
        ps.println("NodeTypeManager (" + this + ")");
        ps.println();
        ps.println("All NodeTypes:");
        ps.println();
        try {
            NodeTypeIterator iter = this.getAllNodeTypes();
            while (iter.hasNext()) {
                NodeType nt = iter.nextNodeType();
                ps.println(nt.getName());
                NodeType[] supertypes = nt.getSupertypes();
                ps.println("\tSupertypes");
                for (int i = 0; i < supertypes.length; i++) {
                    ps.println("\t\t" + supertypes[i].getName());
                }
                ps.println("\tMixin\t" + nt.isMixin());
                ps.println("\tOrderableChildNodes\t" + nt.hasOrderableChildNodes());
                ps.println("\tPrimaryItemName\t"
                        + (nt.getPrimaryItemName() == null ? "<null>" : nt.getPrimaryItemName()));
                PropertyDefinition[] pd = nt.getPropertyDefinitions();
                for (int i = 0; i < pd.length; i++) {
                    ps.print("\tPropertyDefinition");
                    ps.println(" (declared in " + pd[i].getDeclaringNodeType().getName() + ") ");
                    ps.println("\t\tName\t\t" + (pd[i].getName()));
                    String type = pd[i].getRequiredType() == 0 ? "null"
                            : PropertyType.nameFromValue(pd[i].getRequiredType());
                    ps.println("\t\tRequiredType\t" + type);
                    String[] vca = pd[i].getValueConstraints();
                    StringBuffer constraints = new StringBuffer();
                    if (vca == null) {
                        constraints.append("<null>");
                    } else {
                        for (int n = 0; n < vca.length; n++) {
                            if (constraints.length() > 0) {
                                constraints.append(", ");
                            }
                            constraints.append(vca[n]);
                        }
                    }
                    ps.println("\t\tValueConstraints\t" + constraints.toString());
                    Value[] defVals = pd[i].getDefaultValues();
                    StringBuffer defaultValues = new StringBuffer();
                    if (defVals == null) {
                        defaultValues.append("<null>");
                    } else {
                        for (int n = 0; n < defVals.length; n++) {
                            if (defaultValues.length() > 0) {
                                defaultValues.append(", ");
                            }
                            defaultValues.append(defVals[n].getString());
                        }
                    }
                    ps.println("\t\tDefaultValue\t" + defaultValues.toString());
                    ps.println("\t\tAutoCreated\t" + pd[i].isAutoCreated());
                    ps.println("\t\tMandatory\t" + pd[i].isMandatory());
                    ps.println("\t\tOnVersion\t" + OnParentVersionAction.nameFromValue(pd[i].getOnParentVersion()));
                    ps.println("\t\tProtected\t" + pd[i].isProtected());
                    ps.println("\t\tMultiple\t" + pd[i].isMultiple());
                }
                NodeDefinition[] nd = nt.getChildNodeDefinitions();
                for (int i = 0; i < nd.length; i++) {
                    ps.print("\tNodeDefinition");
                    ps.println(" (declared in " + nd[i].getDeclaringNodeType() + ") ");
                    ps.println("\t\tName\t\t" + nd[i].getName());
                    NodeType[] reqPrimaryTypes = nd[i].getRequiredPrimaryTypes();
                    if (reqPrimaryTypes != null && reqPrimaryTypes.length > 0) {
                        for (int n = 0; n < reqPrimaryTypes.length; n++) {
                            ps.print("\t\tRequiredPrimaryType\t" + reqPrimaryTypes[n].getName());
                        }
                    }
                    NodeType defPrimaryType = nd[i].getDefaultPrimaryType();
                    if (defPrimaryType != null) {
                        ps.print("\n\t\tDefaultPrimaryType\t" + defPrimaryType.getName());
                    }
                    ps.println("\n\t\tAutoCreated\t" + nd[i].isAutoCreated());
                    ps.println("\t\tMandatory\t" + nd[i].isMandatory());
                    ps.println("\t\tOnVersion\t" + OnParentVersionAction.nameFromValue(nd[i].getOnParentVersion()));
                    ps.println("\t\tProtected\t" + nd[i].isProtected());
                    ps.println("\t\tAllowsSameNameSiblings\t" + nd[i].allowsSameNameSiblings());
                }
            }
            ps.println();
        } catch (RepositoryException e) {
            e.printStackTrace(ps);
        }
    }
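A dump(PrintStream) signature like the one above lets the caller pick the destination. A minimal sketch of that pattern; the Dumpable interface is illustrative, not part of Jackrabbit:

    import java.io.IOException;
    import java.io.PrintStream;

    public class DumpTargetSketch {
        interface Dumpable {
            void dump(PrintStream ps);
        }

        public static void main(String[] args) throws IOException {
            Dumpable d = ps -> {
                ps.print("NodeTypeManager ");
                ps.println("(demo)");
            };
            d.dump(System.out); // to the console
            try (PrintStream file = new PrintStream("dump.txt", "UTF-8")) {
                d.dump(file);   // same code, different destination
            }
        }
    }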
From source file:de.juwimm.cms.remote.AdministrationServiceSpringImpl.java
    /**
     * @see de.juwimm.cms.remote.AdministrationServiceSpring#exportXlsPersonData()
     */
    @Override
    protected InputStream handleExportXlsPersonData() throws Exception {
        try {
            if (log.isInfoEnabled())
                log.info("exportXlsPersonData " + AuthenticationHelper.getUserName());
            File fle = File.createTempFile("XlsPersonData", ".xml.gz");
            FileOutputStream fout = new FileOutputStream(fle);
            PrintStream out = new PrintStream(fout, true, "UTF-8");
            UserHbm invoker = getUserHbmDao().load(AuthenticationHelper.getUserName());
            SiteHbm site = invoker.getActiveSite();
            if (log.isDebugEnabled())
                log.debug("Invoker is: " + invoker.getUserId() + " within Site " + site.getName());
            // header
            out.println("Titel,Vorname,Nachname,Adresse,PLZ,Ort,Telefon 1,Telefon 2,Fax,e-Mail,Einrichtung");
            Iterator<UnitHbm> it = getUnitHbmDao().findAll(site.getSiteId()).iterator();
            while (it.hasNext()) {
                UnitHbm currentUnit = it.next();
                Collection<PersonHbm> persons = getPersonHbmDao().findByUnit(currentUnit.getUnitId());
                for (PersonHbm currentPerson : persons) {
                    Iterator<AddressHbm> addressIt = currentPerson.getAddresses().iterator();
                    boolean hasAddress = false;
                    while (addressIt.hasNext()) {
                        hasAddress = true;
                        AddressHbm currentAddress = addressIt.next();
                        out.print(currentPerson.getTitle() == null ? "," : currentPerson.getTitle() + ",");
                        out.print(currentPerson.getFirstname() == null ? "," : currentPerson.getFirstname() + ",");
                        out.print(currentPerson.getLastname() == null ? "," : currentPerson.getLastname() + ",");
                        String street = currentAddress.getStreet();
                        String streetNo = currentAddress.getStreetNr();
                        if (street == null)
                            street = "";
                        if (streetNo == null)
                            streetNo = "";
                        out.print(street + " " + streetNo + ",");
                        out.print(currentAddress.getZipCode() == null ? "," : currentAddress.getZipCode() + ",");
                        out.print(currentAddress.getCity() == null ? "," : currentAddress.getCity() + ",");
                        out.print(currentAddress.getPhone1() == null ? "," : currentAddress.getPhone1() + ",");
                        out.print(currentAddress.getPhone2() == null ? "," : currentAddress.getPhone2() + ",");
                        out.print(currentAddress.getFax() == null ? "," : currentAddress.getFax() + ",");
                        out.print(currentAddress.getEmail() == null ? "," : currentAddress.getEmail() + ",");
                        out.println(currentUnit.getName().trim());
                    }
                    if (!hasAddress) {
                        out.print(currentPerson.getTitle() == null ? "," : currentPerson.getTitle() + ",");
                        out.print(currentPerson.getFirstname() == null ? "," : currentPerson.getFirstname() + ",");
                        out.print(currentPerson.getLastname() == null ? "," : currentPerson.getLastname() + ",,,,,,,,");
                        out.println(currentUnit.getName().trim());
                    }
                }
            }
            if (log.isDebugEnabled())
                log.debug("Finished exportXlsPersonData");
            out.flush();
            out.close();
            out = null;
            return new FileInputStream(fle);
        } catch (Exception e) {
            throw new UserException(e.getMessage());
        }
    }
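The ternary-per-cell pattern above keeps CSV columns aligned when a field is null: the null branch still emits the separator, producing an empty cell. A distilled standalone version of the same idiom; class and values are made up for illustration:

    import java.io.PrintStream;

    public class NullSafeCsvSketch {
        // Print one CSV cell followed by its separator; a null value becomes an empty cell.
        static void cell(PrintStream out, String value) {
            out.print(value == null ? "," : value + ",");
        }

        public static void main(String[] args) {
            PrintStream out = System.out;
            cell(out, "Alice");
            cell(out, null);       // empty middle column
            out.println("Berlin"); // last column, no trailing comma
            // prints: Alice,,Berlin
        }
    }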
From source file:examples.ClassPropertyUsageAnalyzer.java
    /**
     * Prints the terms (label, etc.) of one entity to the given stream. This
     * will lead to several values in the CSV file, which are the same for
     * properties and class items.
     *
     * @param out
     *            the output to write to
     * @param termedDocument
     *            the document that provides the terms to write
     * @param entityIdValue
     *            the entity that the data refers to
     * @param specialLabel
     *            special label to use (rather than the label string in the
     *            document) or null if not using; used by classes, which need to
     *            support disambiguation in their labels
     */
    private void printTerms(PrintStream out, TermedDocument termedDocument, EntityIdValue entityIdValue,
            String specialLabel) {
        String label = specialLabel;
        String description = "-";

        if (termedDocument != null) {
            if (label == null) {
                MonolingualTextValue labelValue = termedDocument.getLabels().get("en");
                if (labelValue != null) {
                    label = csvStringEscape(labelValue.getText());
                }
            }
            MonolingualTextValue descriptionValue = termedDocument.getDescriptions().get("en");
            if (descriptionValue != null) {
                description = csvStringEscape(descriptionValue.getText());
            }
        }

        if (label == null) {
            label = entityIdValue.getId();
        }

        out.print(entityIdValue.getId() + "," + label + "," + description + "," + entityIdValue.getIri());
    }
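The example relies on a csvStringEscape helper defined elsewhere in the class. A plausible minimal version of such a helper, shown only as a guess at the behavior rather than the Wikidata Toolkit code, quotes the value and doubles embedded quotes:

    public class CsvEscapeSketch {
        // Hypothetical stand-in for csvStringEscape: quote the value and double embedded quotes.
        static String csvStringEscape(String raw) {
            return "\"" + raw.replace("\"", "\"\"") + "\"";
        }

        public static void main(String[] args) {
            System.out.print(csvStringEscape("say \"hi\", then leave"));
            // prints: "say ""hi"", then leave"
        }
    }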
From source file:org.apache.hadoop.hive.ql.exec.ExplainTask.java
    private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader, boolean extended,
            boolean jsonOutput, int indent) throws Exception {

        boolean first_el = true;
        boolean nl = false;
        JSONArray outputArray = new JSONArray();

        for (Object o : l) {
            if (isPrintable(o)) {
                String delim = first_el ? " " : ", ";
                if (out != null) {
                    out.print(delim);
                    out.print(o);
                }
                if (jsonOutput) {
                    outputArray.put(o);
                }
                nl = true;
            } else {
                if (first_el && (out != null) && hasHeader) {
                    out.println();
                }
                JSONObject jsonOut = outputPlan(o, out, extended, jsonOutput,
                        jsonOutput ? 0 : (hasHeader ? indent + 2 : indent));
                if (jsonOutput) {
                    outputArray.put(jsonOut);
                }
            }
            first_el = false;
        }

        if (nl && (out != null)) {
            out.println();
        }

        return jsonOutput ? outputArray : null;
    }
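The first_el flag above implements the usual delimiter-first join: the separator is printed before every element except the first, so there is never a trailing one. A minimal standalone version of the idiom, with invented class and data:

    import java.io.PrintStream;
    import java.util.Arrays;
    import java.util.List;

    public class DelimiterJoinSketch {
        public static void main(String[] args) {
            PrintStream out = System.out;
            List<Object> items = Arrays.asList("a", 1, "b");
            boolean first = true;
            for (Object o : items) {
                out.print(first ? " " : ", "); // separator before all but the first element
                out.print(o);                  // print(Object) -> String.valueOf(o)
                first = false;
            }
            out.println(); // prints: " a, 1, b"
        }
    }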
From source file:org.apache.hadoop.hive.ql.exec.ExplainTask.java
    @Override
    public int execute(DriverContext driverContext) {

        PrintStream out = null;
        try {
            Path resFile = work.getResFile();
            OutputStream outS = resFile.getFileSystem(conf).create(resFile);
            out = new PrintStream(outS);

            if (work.isLogical()) {
                JSONObject jsonLogicalPlan = getJSONLogicalPlan(out, work);
                if (work.isFormatted()) {
                    out.print(jsonLogicalPlan);
                }
            } else if (work.isAuthorize()) {
                JSONObject jsonAuth = collectAuthRelatedEntities(out, work);
                if (work.isFormatted()) {
                    out.print(jsonAuth);
                }
            } else if (work.getDependency()) {
                JSONObject jsonDependencies = getJSONDependencies(work);
                out.print(jsonDependencies);
            } else {
                if (work.isUserLevelExplain()) {
                    // Because of the implementation of the JsonParserFactory, we are sure
                    // that we can get a TezJsonParser.
                    JsonParser jsonParser = JsonParserFactory.getParser(conf);
                    work.getConfig().setFormatted(true);
                    JSONObject jsonPlan = getJSONPlan(out, work);
                    if (work.getCboInfo() != null) {
                        jsonPlan.put("cboInfo", work.getCboInfo());
                    }
                    try {
                        jsonParser.print(jsonPlan, out);
                    } catch (Exception e) {
                        // if anything goes wrong, we bail out.
                        LOG.error("Running explain user level has problem: " + e.toString()
                                + ". Falling back to normal explain");
                        work.getConfig().setFormatted(false);
                        work.getConfig().setUserLevelExplain(false);
                        jsonPlan = getJSONPlan(out, work);
                    }
                } else {
                    JSONObject jsonPlan = getJSONPlan(out, work);
                    if (work.isFormatted()) {
                        // use the parser to get the output operators of RS
                        JsonParser jsonParser = JsonParserFactory.getParser(conf);
                        if (jsonParser != null) {
                            jsonParser.print(jsonPlan, null);
                            LOG.info("JsonPlan is augmented to " + jsonPlan.toString());
                        }
                        out.print(jsonPlan);
                    }
                }
            }

            out.close();
            out = null;
            return (0);
        } catch (Exception e) {
            console.printError("Failed with exception " + e.getMessage(),
                    "\n" + StringUtils.stringifyException(e));
            return (1);
        } finally {
            IOUtils.closeStream(out);
        }
    }
From source file:org.apache.hadoop.hdfs.tools.DFSAdmin.java
    int getReconfigurationStatus(String nodeType, String address, PrintStream out, PrintStream err)
            throws IOException {
        if ("datanode".equals(nodeType)) {
            ClientDatanodeProtocol dnProxy = getDataNodeProxy(address);
            try {
                ReconfigurationTaskStatus status = dnProxy.getReconfigurationStatus();
                out.print("Reconfiguring status for DataNode[" + address + "]: ");
                if (!status.hasTask()) {
                    out.println("no task was found.");
                    return 0;
                }
                out.print("started at " + new Date(status.getStartTime()));
                if (!status.stopped()) {
                    out.println(" and is still running.");
                    return 0;
                }
                out.println(" and finished at " + new Date(status.getEndTime()).toString() + ".");
                for (Map.Entry<PropertyChange, Optional<String>> result : status.getStatus().entrySet()) {
                    if (!result.getValue().isPresent()) {
                        out.print("SUCCESS: ");
                    } else {
                        out.print("FAILED: ");
                    }
                    out.printf("Change property %s%n\tFrom: \"%s\"%n\tTo: \"%s\"%n", result.getKey().prop,
                            result.getKey().oldVal, result.getKey().newVal);
                    if (result.getValue().isPresent()) {
                        out.println("\tError: " + result.getValue().get() + ".");
                    }
                }
            } catch (IOException e) {
                err.println("DataNode reloading configuration: " + e + ".");
                return 1;
            }
        } else {
            err.println("Node type " + nodeType + " does not support reconfiguration.");
            return 1;
        }
        return 0;
    }
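The method above mixes print, to build a line incrementally without a newline, with printf for the formatted part. A minimal standalone sketch of that combination; the property and path values are invented:

    import java.io.PrintStream;
    import java.util.Date;

    public class PrintPrintfSketch {
        public static void main(String[] args) {
            PrintStream out = System.out;
            out.print("SUCCESS: "); // prefix without a newline
            out.printf("Change property %s%n\tFrom: \"%s\"%n\tTo: \"%s\"%n",
                    "dfs.datanode.data.dir", "/old", "/new"); // %n is the platform line separator
            out.print("started at " + new Date()); // print(Object) would also accept the Date directly
            out.println(".");
        }
    }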
From source file:idgs.IdgsCliDriver.java
    /**
     * override super method process to handle idgs command
     */
    @Override
    public int processCmd(String cmd) {
        CliSessionState ss = (CliSessionState) SessionState.get();

        String cmd_trimmed = cmd.trim();
        String[] tokens = cmd_trimmed.split("\\s+");
        String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
        int ret = 0;

        if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")
                || tokens[0].equalsIgnoreCase("source") || cmd_trimmed.startsWith("!")
                || tokens[0].toLowerCase().equals("list")) {
            super.processCmd(cmd);
        } else {
            HiveConf hconf = (HiveConf) conf;
            try {
                CommandProcessor proc = CommandProcessorFactory.get(tokens, hconf);
                if (proc != null) {
                    // Spark expects the ClassLoader to be an URLClassLoader.
                    // In case we're using something else here, wrap it into an
                    // URLClassLoader.
                    if ("1".equals(System.getenv("TEST_WITH_ANT"))) { // compare strings with equals(), not ==
                        ClassLoader cl = Thread.currentThread().getContextClassLoader();
                        Thread.currentThread().setContextClassLoader(new URLClassLoader(new URL[1], cl));
                    }

                    if (proc instanceof Driver) {
                        // There is a small overhead here to create a new instance of
                        // SharkDriver for every command. But it saves us the hassle of
                        // hacking CommandProcessorFactory.
                        Driver qp = null;
                        try {
                            // ##### using hive_idgs driver
                            qp = "idgs".equals(IdgsConfVars.getVar(conf, IdgsConfVars.EXEC_MODE)) // equals(), not ==
                                    ? new IdgsDriver(hconf)
                                    : Driver.class.newInstance();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }

                        LOG.info("Execution Mode: " + IdgsConfVars.getVar(conf, IdgsConfVars.EXEC_MODE));

                        qp.init();
                        PrintStream out = ss.out;

                        long start = System.currentTimeMillis();
                        if (ss.getIsVerbose()) {
                            out.println(cmd);
                        }

                        ret = qp.run(cmd).getResponseCode();
                        if (ret != 0) {
                            qp.close();
                            return ret;
                        }

                        boolean isPrint = IdgsConfVars.getBoolVar(conf, IdgsConfVars.PRINT_RESULT);

                        List<Object[]> res = new ArrayList<Object[]>();
                        if (isPrint) {
                            if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)) {
                                // Print the column names.
                                List<FieldSchema> fieldSchemas = qp.getSchema().getFieldSchemas();
                                if (fieldSchemas != null) {
                                    for (FieldSchema fieldSchema : fieldSchemas) {
                                        out.print("header" + fieldSchema.getName() + "\t");
                                    }
                                    out.println();
                                }
                            }
                        }

                        long printTime = 0;
                        int counter = 0;

                        SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
                        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");

                        try {
                            long s = System.currentTimeMillis();
                            while (qp.getResults(res)) {
                                for (Object[] row : res) {
                                    if (isPrint) {
                                        for (Object v : row) {
                                            if (v != null) {
                                                if (v instanceof byte[]) {
                                                    out.print(new String((byte[]) v));
                                                } else if (v instanceof Timestamp) {
                                                    out.print(timestampFormat.format((Timestamp) v));
                                                } else if (v instanceof Date) {
                                                    out.print(dateFormat.format((Date) v));
                                                } else {
                                                    out.print(v);
                                                }
                                            } else {
                                                out.print(v); // print(Object) renders null as "null"
                                            }
                                            out.print("\t");
                                        }
                                        out.println();
                                    }
                                }
                                counter += res.size();
                                res.clear();
                                if (out.checkError()) {
                                    break;
                                }
                            }
                            printTime = System.currentTimeMillis() - s;
                        } catch (IOException e) {
                            console.printError(
                                    "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                                    "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                            ret = 1;
                        }

                        int cret = qp.close();
                        if (ret == 0) {
                            ret = cret;
                        }

                        long end = System.currentTimeMillis();
                        double timeTaken = (end - start) / 1000.0;
                        console.printInfo("Time taken: " + timeTaken + " seconds, Fetched: " + counter + " row(s)");

                        // Destroy the driver to release all the locks.
                        if (qp instanceof IdgsDriver) {
                            LOG.info("Time taken: " + timeTaken + " seconds, Fetched: " + counter + " row(s)");
                            LOG.info("Compile time taken: " + (((IdgsDriver) qp).getCompileTime() / 1000.0) + " seconds");
                            LOG.info("Task run time taken: " + (((IdgsDriver) qp).getTaskRunTime() / 1000.0) + " seconds");
                            LOG.info("Print time taken: " + (printTime / 1000.0) + " seconds");
                            qp.destroy();
                        }
                    } else {
                        if (ss.getIsVerbose()) {
                            ss.out.println(tokens[0] + " " + cmd_1);
                        }
                        ret = proc.run(cmd_1).getResponseCode();
                    }
                }
            } catch (CommandNeedRetryException ex) {
                LOG.error("Execute command " + cmd + " error.", ex);
                console.printInfo("Retry query with a different approach...");
            } catch (Exception ex) {
                LOG.error("Execute command " + cmd + " error.", ex);
                console.printInfo("Execute command error, caused " + ex.getMessage() + ".");
                ret = 1;
            }
        }

        return ret;
    }
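The inner result loop above dispatches on the runtime type of each cell before printing, so raw bytes and timestamps render readably. A condensed standalone version of that dispatch, with the formats copied from the example and an invented row:

    import java.io.PrintStream;
    import java.sql.Timestamp;
    import java.text.SimpleDateFormat;

    public class TypedCellPrintSketch {
        public static void main(String[] args) {
            PrintStream out = System.out;
            SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
            Object[] row = { "id-1".getBytes(), new Timestamp(0L), null, 42 };
            for (Object v : row) {
                if (v instanceof byte[]) {
                    out.print(new String((byte[]) v));                 // decode raw bytes
                } else if (v instanceof Timestamp) {
                    out.print(timestampFormat.format((Timestamp) v)); // format timestamps
                } else {
                    out.print(v); // print(Object): null prints as "null"
                }
                out.print("\t");
            }
            out.println();
        }
    }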
From source file:org.hyperic.hq.product.util.PluginDumper.java
    private void dumpWikiDocs() throws IOException {
        HashMap typeMap = new HashMap();

        File wikiDir = new File("wiki-docs");
        if (!wikiDir.exists()) {
            wikiDir.mkdir();
        }

        for (int n = 0; n < this.pPlugins.length; n++) {
            ProductPlugin pp = this.pPlugins[n];
            TypeInfo[] types = pp.getTypes();
            MeasurementPlugin mp;

            if ((types == null) || (types.length == 0)) {
                continue;
            }

            for (int i = 0; i < types.length; i++) {
                TypeInfo type = types[i];
                String name = type.getName();

                // fold multiple types with the same name.
                if (typeMap.put(name, type) != null) {
                    continue;
                }

                String typeName = TYPES[type.getType()];
                MeasurementInfo[] metrics = null;
                mp = this.ppm.getMeasurementPlugin(name);
                if (mp != null) {
                    metrics = mp.getMeasurements(types[i]);
                }
                if ((metrics == null) || (metrics.length == 0)) {
                    // Skip virtual resources
                    continue;
                }

                String fileName = name + " " + typeName;
                PrintStream ps = openFile(wikiDir, fileName);

                ps.println("h3. " + name + " " + typeName);

                // Print log track info
                ps.print("*Log Track Supported:* ");
                try {
                    this.ltpm.getPlugin(name);
                    ps.println("Yes");
                } catch (PluginNotFoundException e) {
                    ps.println("No");
                }

                // Print config track info
                ps.print("*Config Track Supported:* ");
                try {
                    this.ctpm.getPlugin(name);
                    ps.println("Yes");
                } catch (PluginNotFoundException e) {
                    ps.println("No");
                }

                // Print custom properties
                ps.print("*Custom Properties Supported:* ");
                ConfigSchema c = pp.getCustomPropertiesSchema(name);
                List options = c.getOptions();
                if (options.size() == 0) {
                    ps.println("None");
                } else {
                    ps.println("");
                    ps.println("||Name||Description");
                    for (int j = 0; j < options.size(); j++) {
                        ConfigOption opt = (ConfigOption) options.get(j);
                        ps.println("|" + opt.getName() + "|" + opt.getDescription());
                    }
                    ps.println("||");
                }

                // Print control actions
                ps.print("*Supported Control Actions:* ");
                List actions;
                try {
                    actions = this.cpm.getActions(name);
                    for (int j = 0; j < actions.size(); j++) {
                        String action = (String) actions.get(j);
                        if (j < actions.size() - 1) {
                            ps.print(action + ","); // comma between actions, none after the last
                        } else {
                            ps.println(action);
                        }
                    }
                } catch (PluginNotFoundException e) {
                    ps.println("None");
                }

                ps.println("*Supported Metrics:* ");
                ps.println("||Name||Alias||Units||Category||Default On||Default Interval");
                for (int j = 0; j < metrics.length; j++) {
                    String colorStart;
                    String colorEnd = "{color}";
                    if (metrics[j].isDefaultOn()) {
                        colorStart = "{color:navy}";
                    } else {
                        colorStart = "{color:gray}";
                    }
                    ps.println("|" + colorStart + metrics[j].getName() + colorEnd + "|" + colorStart
                            + metrics[j].getAlias() + colorEnd + "|" + colorStart + metrics[j].getUnits()
                            + colorEnd + "|" + colorStart + metrics[j].getCategory() + colorEnd + "|"
                            + colorStart + new Boolean(metrics[j].isDefaultOn()) + colorEnd + "|" + colorStart
                            + new Long(metrics[j].getInterval() / 60000) + " min" + colorEnd);
                }
                ps.println("||||||");
                ps.println("");

                ps.println("*Configuration help:* ");
                Object help;
                try {
                    help = mpm.getHelp(type, getProperties());
                } catch (PluginNotFoundException e) {
                    ps.println("None");
                    ps.close(); // avoid leaking the stream when no help is available
                    continue;
                }
                if (help != null) {
                    ps.println("{html}");
                    ps.print(help.toString());
                    ps.println("{html}");
                }

                ps.close();
            }
        }
    }
From source file:fr.inrialpes.exmo.align.cli.GroupEval.java
    public void printLATEX(Vector<Vector<Object>> result, PrintStream writer) {
        // variables for computing iterative harmonic means
        int expected = 0; // expected so far
        int foundVect[]; // found so far
        int correctVect[]; // correct so far
        long timeVect[]; // time so far
        Formatter formatter = new Formatter(writer);

        fsize = format.length();
        // JE: the h-means computation should be put out as well
        // Print the header
        writer.println("\\documentclass[11pt]{book}");
        writer.println();
        writer.println("\\begin{document}");
        writer.println("\\date{today}");
        writer.println("");
        writer.println("\n%% Plot generated by GroupEval of alignapi");
        writer.println("\\setlength{\\tabcolsep}{3pt} % May be changed");
        writer.println("\\begin{table}");

        writer.print("\\begin{tabular}{|l||");
        for (int i = size; i > 0; i--) {
            for (int j = fsize; j > 0; j--)
                writer.print("c");
            writer.print("|");
        }
        writer.println("}");
        writer.println("\\hline");
        // For each file do a
        writer.print("algo"); // for each algo <td spancol='2'>name</td>
        for (String m : listAlgo) {
            writer.print(" & \\multicolumn{" + fsize + "}{c|}{" + m + "}");
        }
        writer.println(" \\\\ \\hline");
        writer.print("test");
        // for each algo <td>Prec.</td><td>Rec.</td>
        for (String m : listAlgo) {
            for (int i = 0; i < fsize; i++) {
                writer.print(" & ");
                if (format.charAt(i) == 'p') {
                    writer.print("Prec.");
                } else if (format.charAt(i) == 'f') {
                    writer.print("FMeas.");
                } else if (format.charAt(i) == 'o') {
                    writer.print("Over.");
                } else if (format.charAt(i) == 't') {
                    writer.print("Time");
                } else if (format.charAt(i) == 'r') {
                    writer.print("Rec.");
                }
            }
        }
        writer.println(" \\\\ \\hline");

        foundVect = new int[size];
        correctVect = new int[size];
        timeVect = new long[size];
        for (int k = size - 1; k >= 0; k--) {
            foundVect[k] = 0;
            correctVect[k] = 0;
            timeVect[k] = 0;
        }

        for (Vector<Object> test : result) {
            int nexpected = -1;
            // Print the directory
            writer.print((String) test.get(0));
            // For each record print the values
            Enumeration<Object> f = test.elements();
            f.nextElement();
            for (int k = 0; f.hasMoreElements(); k++) {
                PRecEvaluator eval = (PRecEvaluator) f.nextElement();
                if (eval != null) {
                    // iterative H-means computation
                    if (nexpected == -1) {
                        expected += eval.getExpected();
                        nexpected = 0;
                    }
                    foundVect[k] += eval.getFound();
                    correctVect[k] += eval.getCorrect();
                    timeVect[k] += eval.getTime();

                    for (int i = 0; i < fsize; i++) {
                        writer.print(" & ");
                        if (format.charAt(i) == 'p') {
                            formatter.format("%1.2f", eval.getPrecision());
                        } else if (format.charAt(i) == 'f') {
                            formatter.format("%1.2f", eval.getFmeasure());
                        } else if (format.charAt(i) == 'o') {
                            formatter.format("%1.2f", eval.getOverall());
                        } else if (format.charAt(i) == 't') {
                            if (eval.getTime() == 0) {
                                writer.print("-");
                            } else {
                                formatter.format("%1.2f", (double) eval.getTime()); // %f rejects integral arguments
                            }
                        } else if (format.charAt(i) == 'r') {
                            formatter.format("%1.2f", eval.getRecall());
                        }
                    }
                } else {
                    writer.print(" & \\multicolumn{" + fsize + "}{c|}{n/a}");
                }
            }
            writer.println(" \\\\");
        }

        writer.print("H-mean");
        // Here we are computing a sheer average.
        // While in the column results we print NaN when the returned
        // alignment is empty,
        // here we use the real values, i.e., add 0 to both correctVect and
        // foundVect, so this is OK for computing the average.
        int k = 0; // ???
        for (String m : listAlgo) {
            double precision = (double) correctVect[k] / foundVect[k];
            double recall = (double) correctVect[k] / expected;
            for (int i = 0; i < fsize; i++) {
                writer.print(" & ");
                if (format.charAt(i) == 'p') {
                    formatter.format("%1.2f", precision);
                } else if (format.charAt(i) == 'f') {
                    formatter.format("%1.2f", 2 * precision * recall / (precision + recall));
                } else if (format.charAt(i) == 'o') {
                    formatter.format("%1.2f", recall * (2 - (1 / precision)));
                } else if (format.charAt(i) == 't') {
                    if (timeVect[k] == 0) {
                        writer.print("-");
                    } else {
                        formatter.format("%1.2f", (double) timeVect[k]); // %f rejects integral arguments
                    }
                } else if (format.charAt(i) == 'r') {
                    formatter.format("%1.2f", recall);
                }
            }
            k++;
        }
        writer.println(" \\\\ \\hline");
        writer.println("\\end{tabular}");
        writer.println(
                "\\caption{Plot generated by GroupEval of alignapi \\protect\\footnote{n/a: result alignment not provided or not readable -- NaN: division per zero, likely due to empty alignment.}}");
        writer.println("\\end{table}");
        writer.println("\\end{document}");
    }
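One detail worth pulling out of the example above: java.util.Formatter can wrap a PrintStream directly, since PrintStream implements Appendable, so formatter.format(...) and writer.print(...) interleave into the same output in order. A minimal sketch:

    import java.io.PrintStream;
    import java.util.Formatter;

    public class FormatterOverPrintStream {
        public static void main(String[] args) {
            PrintStream writer = System.out;
            Formatter formatter = new Formatter(writer); // writes into the same stream
            writer.print("precision");
            writer.print(" & ");
            formatter.format("%1.2f", 0.875); // formatted cell
            writer.println(" \\\\");
            formatter.flush(); // flush through to the underlying stream
            // prints: precision & 0.88 \\
        }
    }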