List of usage examples for org.apache.commons.lang3 StringUtils.repeat
public static String repeat(final char ch, final int repeat)
public static String repeat(final String str, final int repeat)
From source file:com.mirth.connect.donkey.server.data.jdbc.JdbcDao.java
@Override public void insertMetaData(ConnectorMessage connectorMessage, List<MetaDataColumn> metaDataColumns) { logger.debug(connectorMessage.getChannelId() + "/" + connectorMessage.getMessageId() + "/" + connectorMessage.getMetaDataId() + ": inserting custom meta data"); PreparedStatement statement = null; try {/* w ww . j a v a 2 s. com*/ List<String> metaDataColumnNames = new ArrayList<String>(); Map<String, Object> metaDataMap = connectorMessage.getMetaDataMap(); for (MetaDataColumn metaDataColumn : metaDataColumns) { Object value = metaDataMap.get(metaDataColumn.getName()); if (value != null) { metaDataColumnNames.add(metaDataColumn.getName()); } } // Don't do anything if all values were null if (!metaDataColumnNames.isEmpty()) { Map<String, Object> values = new HashMap<String, Object>(); values.put("localChannelId", getLocalChannelId(connectorMessage.getChannelId())); values.put("metaDataColumnNames", quoteChar + StringUtils.join(metaDataColumnNames, quoteChar + "," + quoteChar) + quoteChar); values.put("metaDataColumnPlaceholders", "?" 
+ StringUtils.repeat(", ?", metaDataColumnNames.size() - 1)); statement = connection.prepareStatement(querySource.getQuery("insertMetaData", values)); statement.setInt(1, connectorMessage.getMetaDataId()); statement.setLong(2, connectorMessage.getMessageId()); int n = 3; for (MetaDataColumn metaDataColumn : metaDataColumns) { Object value = metaDataMap.get(metaDataColumn.getName()); if (value != null) { // @formatter:off switch (metaDataColumn.getType()) { case STRING: statement.setString(n, (String) value); break; case NUMBER: statement.setBigDecimal(n, (BigDecimal) value); break; case BOOLEAN: statement.setBoolean(n, (Boolean) value); break; case TIMESTAMP: statement.setTimestamp(n, new Timestamp(((Calendar) value).getTimeInMillis())); break; } // @formatter:on n++; } } statement.executeUpdate(); } } catch (Exception e) { throw new DonkeyDaoException("Failed to insert connector message meta data", e); } finally { close(statement); } }
From source file:com.google.dart.java2dart.util.ToFormattedSourceVisitor.java
/**
 * Decreases the current indentation by one step (two spaces) and refreshes
 * the cached indentation string accordingly.
 */
private void indentDec() {
    final int newLevel = indentLevel - 2;
    indentLevel = newLevel;
    indentString = StringUtils.repeat(" ", newLevel);
}
From source file:com.google.dart.java2dart.util.ToFormattedSourceVisitor.java
/**
 * Increases the current indentation by one step (two spaces) and refreshes
 * the cached indentation string accordingly.
 */
private void indentInc() {
    final int newLevel = indentLevel + 2;
    indentLevel = newLevel;
    indentString = StringUtils.repeat(" ", newLevel);
}
From source file:com.google.dart.engine.services.internal.correction.CorrectionUtils.java
/**
 * Returns the default indentation for the given nesting level.
 *
 * @param level the indentation level; any value {@code <= 0} yields the
 *              empty string (same as {@code StringUtils.repeat} for a
 *              non-positive repeat count)
 * @return the indentation string for {@code level}
 */
public String getIndent(int level) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < level; i++) {
        indent.append(" ");
    }
    return indent.toString();
}
From source file:de.tudarmstadt.tk.statistics.report.ReportGenerator.java
/**
 * Creates a report of the statistical evaluation in Latex format.
 *
 * NOTE(review): the evaluation results are read from the {@code evalResults}
 * field (not a parameter), and the significance thresholds come from the
 * {@code significance_low} / {@code significance_medium} /
 * {@code significance_high} fields — confirm these are initialized before
 * this method is called.
 *
 * @param outputFolder
 *            the folder where the report will be written later to store
 *            related images etc. there
 * @return a String representing the report of the statistical evaluation in
 *         Latex format
 */
 public String createLatexReport(File outputFolder) { // Set locale to English globally to make reports independent of the // machine they're created on, e.g. use "." as decimal points on any // machine Locale.setDefault(Locale.ENGLISH); StringBuilder report = new StringBuilder(); Statistics stats = Statistics.getInstance(true); HashMap<String, String> methodsSummary = new HashMap<String, String>(); HashMap<String, HashMap<String, List<String>>> testSummary = new HashMap<String, HashMap<String, List<String>>>(); ArrayList<String[]> figures = new ArrayList<String[]>(); testSummary.put("Parametric", new HashMap<String, List<String>>()); testSummary.put("Non-Parametric", new HashMap<String, List<String>>()); String outputFolderPath = ""; if (outputFolder != null) { outputFolderPath = outputFolder.getAbsolutePath(); } // // Header // // Packages report.append("\\documentclass[a4paper,12pt]{article}\n"); report.append("\\usepackage[english]{babel}\n"); report.append("\\usepackage[utf8]{inputenc}\n"); report.append("\\usepackage{graphicx}\n"); report.append("\\usepackage{titlesec}\n"); report.append("\\usepackage{caption}\n"); report.append("\\usepackage{subcaption}\n"); report.append("\\usepackage{adjustbox}\n"); report.append("\\usepackage{placeins}\n"); report.append("\\usepackage{longtable}\n"); report.append("\\usepackage{morefloats}\n"); // Title definition report.append("\\titleformat*{\\section}{\\large\\bfseries}\n"); report.append("\\titleformat*{\\subsection}{\\normalsize\\bfseries}\n"); 
// Document title/metadata, then the "Evaluation Overview" section describing
// the pipeline type, models and datasets that were evaluated.
report.append("\\titleformat*{\\subsubsection}{\\vspace{-0.3cm}\\normalsize\\bfseries}\n"); report.append("\\title{Statistical Evaluation Report}\n"); report.append("\\date{\\vspace{-10ex}}\n"); report.append("\\begin{document}\n"); report.append("\\maketitle\n"); // // Evaluation Overview // report.append("\\section{Evaluation Overview}"); int nModels = evalResults.getSampleData().getModelMetadata().size(); ArrayList<String> measures = evalResults.getMeasures(); int nSamples = evalResults.getSampleData().getSamples().get(measures.get(0)).get(0).size(); String ref = "tbl:models"; // Separate training/testing datasets List<String> trainingDataList = new ArrayList<String>(); List<String> testingDataList = new ArrayList<String>(); List<Pair<String, String>> datasets = evalResults.getSampleData().getDatasetNames(); Iterator<Pair<String, String>> itp = datasets.iterator(); while (itp.hasNext()) { Pair<String, String> trainTest = itp.next(); trainingDataList.add(trainTest.getKey()); if (trainTest.getValue() != null) { testingDataList.add(trainTest.getValue()); } } Set<String> trainingDataSet = new HashSet<String>(trainingDataList); Set<String> testingDataSet = new HashSet<String>(testingDataList); String pipelineDescription = null; String sampleOrigin = "per CV"; ReportTypes pipelineType = this.evalResults.getSampleData().getPipelineType(); switch (pipelineType) { // One-domain n-fold CV (ReportData=per Fold) case CV: pipelineDescription = String.format("%d-fold cross validation", evalResults.getSampleData().getnFolds()); sampleOrigin = "per fold "; break; case MULTIPLE_CV: pipelineDescription = String.format("%dx%s repeated cross validation", evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds()); break; case CV_DATASET_LVL: pipelineDescription = String.format("%d-fold cross validation over %d datasets", evalResults.getSampleData().getnFolds(), trainingDataSet.size()); break; case MULTIPLE_CV_DATASET_LVL: pipelineDescription = 
String.format("%dx%s repeated cross validation over %d datasets", evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds(), trainingDataSet.size()); sampleOrigin = "per dataset"; break; case TRAIN_TEST_DATASET_LVL: // In the train/test scenario, the number of datasets only includes // distinct ones Set<String> allDataSets = new HashSet<String>(testingDataSet); allDataSets.addAll(trainingDataSet); pipelineDescription = String.format("Train/Test over %d datasets", allDataSets.size()); sampleOrigin = "per dataset"; break; default: pipelineDescription = "!unknown pipeline type!"; sampleOrigin = "!unknown pipeline type!"; break; } boolean isBaselineEvaluation = evalResults.isBaselineEvaluation(); report.append(String.format("The system performed a %s for the %d models in Tbl \\ref{%s}. ", pipelineDescription, nModels, ref)); if (isBaselineEvaluation) { report.append(String.format("The models were compared against the first baseline model. \n", pipelineDescription, nModels, ref)); } else { report.append(String.format("The models were compared against each other. \n", pipelineDescription, nModels, ref)); } String[][] values = new String[nModels][3]; for (int r = 0; r < nModels; r++) { values[r][0] = String.format("M%d", r); // Remove package prefix for algorithms, e.g. shorten "trees.J48" to "J48". String[] algorithm = evalResults.getSampleData().getModelMetadata().get(r).getKey().split("\\."); values[r][1] = escapeLatexCharacters(algorithm[algorithm.length - 1]); values[r][2] = escapeLatexCharacters(evalResults.getSampleData().getModelMetadata().get(r).getValue()); } String table = createLatexTable("Evaluated models with classifier algorithm and feature sets", ref, new String[] { "Index", "Algorithm", "Feature Set" }, "|l|l|p{11cm}|", values); report.append(table); // List test/training datasets. Consider the case when these sets are 
// different (train-only vs. separate train/test datasets).
if (testingDataSet.isEmpty()) { if (trainingDataSet.size() == 1) { report.append( String.format("The models were evaluated on the dataset %s. ", trainingDataList.get(0))); } else { report.append(String.format("The models were evaluated on the datasets %s. ", this.createEnumeration(trainingDataList))); } } else { if (trainingDataSet.size() == 1 && testingDataSet.size() == 1) { report.append( String.format("The models were trained on the dataset %s and tested on the dataset %s. ", trainingDataList.get(0), testingDataList.get(0))); } else if (trainingDataSet.size() > 1 && testingDataSet.size() == 1) { report.append(String.format( "The models were trained on the datasets %s and tested on the dataset %s. ", this.createEnumeration(new ArrayList<String>(trainingDataSet)), testingDataList.get(0))); } else if (trainingDataSet.size() == 1 && testingDataSet.size() > 1) { report.append(String.format( "The models were trained on the dataset %s and tested on the datasets %s. ", trainingDataList.get(0), this.createEnumeration(new ArrayList<String>(testingDataSet)))); } else { report.append( String.format("The models were trained on the datasets %s and tested on the datasets %s. ", this.createEnumeration(new ArrayList<String>(trainingDataSet)), this.createEnumeration(new ArrayList<String>(testingDataSet)))); } } report.append(String.format("Their performance was assessed with the %s", createEnumeration(measures))); report.append( ". In the analysis, the models thus represent levels of the independent variable, while the performance measures are dependent variables.\n"); // // Results (for each measure separately) // report.append("\\FloatBarrier\n"); // All previous floats must be placed // before this point report.append("\\section{Results}\n"); report.append(String.format( "Throughout the report, p-values are annotated if they are significant. 
While {\\footnotesize *} indicates low significance ($p<\\alpha=%.2f$), the annotations {\\footnotesize **} and {\\footnotesize ***} represent medium ($p<\\alpha=%.2f$) and high significance ($p<\\alpha=%.2f$).", significance_low, significance_medium, significance_high)); for (int i = 0; i < measures.size(); i++) { /* * Create table with samples for the current performance measure If * samples are drawn over multiple datasets, transpose table */ String measure = measures.get(i); if (!evalResults.getSampleData().getSamples().containsKey(measure)) { continue; } ArrayList<ArrayList<Double>> measureSamples = evalResults.getSampleData().getSamples().get(measure); ArrayList<Double> averageMeasureSamples = evalResults.getSampleData().getSamplesAverage().get(measure); report.append("\\FloatBarrier\n"); report.append(String.format("\\subsection{%s}\n", measure)); ref = String.format("tbl:%s", measure.replaceAll("\\s", "")); report.append(String.format( "The %s samples drawn from the %s and the %d models are presented in Tbl. \\ref{%s}.\n", measure, pipelineDescription, nModels, ref)); // Plot Box-Whisker-Diagram of samples for the current measure and add the figure to the appendix // Use the min/max sample value as indicators for the box-plots limits String filename = String.format("boxPlot%s", measure.replaceAll("\\s", "")); String path = String.format("%s%s%s", outputFolderPath, File.separator, filename); String pathR = this.fixSlashes(path); String figRef = String.format("fig:boxPlot%s", measure.replaceAll("\\s", "")); String caption = String.format("Box-Whisker-Plot of %s samples. 
Red dots indicate means.", measure); double[][] samples = new double[nModels][]; double minSample = Double.MAX_VALUE; double maxSample = Double.MIN_VALUE; for (int k = 0; k < nModels; k++) { ArrayList<Double> s = measureSamples.get(k); samples[k] = new double[s.size()]; for (int j = 0; j < s.size(); j++) { samples[k][j] = s.get(j); if (minSample > s.get(j)) { minSample = s.get(j); } if (maxSample < s.get(j)) { maxSample = s.get(j); } } } double sampleRange = maxSample - minSample; int lowerLimit = (int) Math.floor(minSample - sampleRange * 0.1); int upperLimit = (int) Math.ceil(maxSample + sampleRange * 0.1); boolean successful = stats.plotBoxWhisker(samples, lowerLimit, upperLimit, pathR, measure); if (successful) { figures.add(new String[] { figRef, caption, filename }); report.append( String.format("See Fig. \\ref{%s} for a Box-Whisker plot of these samples. ", figRef)); } caption = String.format("Samples of the %s drawn from the %s and the %d models", measure, pipelineDescription, nModels); switch (pipelineType) { case CV: case MULTIPLE_CV: values = new String[nModels + 1][nSamples + 2]; for (int r = 0; r <= nModels; r++) { // First line of table = Fold indices if (r == 0) { values[r][0] = ""; values[r][nSamples + 1] = ""; for (int f = 1; f <= nSamples; f++) { values[r][f] = Integer.toString(f); } // Next lines with model indices, samples per fold and // average measure over all samples } else { values[r][0] = String.format("M%d", (r - 1)); //values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1) * 100); values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1)); ArrayList<Double> s = measureSamples.get(r - 1); for (int j = 0; j < s.size(); j++) { //values[r][j + 1] = String.format("%.2f", s.get(j) * 100); values[r][j + 1] = String.format("%.2f", s.get(j)); } } } if (values.length > 58) { table = createLatexLongTable(caption, ref, new String[] { "Classifier", String.format("\\multicolumn{%d}{|c|}{%s %s}", 
nSamples, measure, sampleOrigin), "Average" }, String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values); } else { table = createLatexTable(caption, ref, new String[] { "Classifier", String.format("\\multicolumn{%d}{|c|}{%s %s}", nSamples, measure, sampleOrigin), "Average" }, String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values); } break; case CV_DATASET_LVL: case MULTIPLE_CV_DATASET_LVL: case TRAIN_TEST_DATASET_LVL: values = new String[nSamples + 2][nModels + 1]; // double[][] valuesNumeric = new double[nSamples][nModels]; for (int r = 0; r <= nSamples + 1; r++) { // First line of table = Model indices if (r == 0) { values[r][0] = ""; for (int j = 0; j < nModels; j++) { values[r][j + 1] = String.format("M%d", (j)); } // Last line of table = average sums } else if (r == nSamples + 1) { values[r][0] = "Average"; for (int j = 0; j < nModels; j++) { //values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j) * 100); values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j)); } // Next lines with model indices, samples per fold and // average measure over all samples } else { // Only print both train- and test set if there is more // than one training set Pair<String, String> trainTest = evalResults.getSampleData().getDatasetNames().get(r - 1); if (pipelineType == ReportTypes.TRAIN_TEST_DATASET_LVL) { if (trainingDataSet.size() > 1) { values[r][0] = String.format("%s-%s", trainTest.getKey(), trainTest.getValue()); } else { values[r][0] = trainTest.getValue(); } } else { values[r][0] = trainTest.getKey(); } for (int j = 0; j < nModels; j++) { ArrayList<Double> s = measureSamples.get(j); //values[r][j + 1] = String.format("%.2f", s.get(r - 1) * 100); values[r][j + 1] = String.format("%.2f", s.get(r - 1)); } } } if (values.length > 58) { table = createLatexLongTable(caption, ref, new String[] { "Dataset", String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure, sampleOrigin) }, String.format("|%s", 
StringUtils.repeat("l|", nModels + 1)), values); } else { table = createLatexTable(caption, ref, new String[] { "Dataset", String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure, sampleOrigin) }, String.format("|%s", StringUtils.repeat("l|", nModels + 1)), values); } break; } report.append(table); // // Results - First parametric tests, then non-parametric (2 // iterations) // Print results for alls non-parametric tests except McNemar. // McNemar is not based on the same performance measures but on a // contingency matrix, which is // printed in a separate section. for (String testType : new String[] { "Parametric", "Non-Parametric" }) { report.append(String.format("\\subsubsection{%s Testing}", testType)); Pair<String, AbstractTestResult> result = null; if (testType.equals("Parametric")) { result = evalResults.getParametricTestResults().get(measure); } else { result = evalResults.getNonParametricTestResults().get(measure); } // Use pretty-print method descriptor if specified String method = result.getKey(); if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) { method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method); } methodsSummary.put(testType, method); TestResult r = (TestResult) result.getValue(); report.append( String.format("The system compared the %d models using the \\emph{%s}. ", nModels, method)); if (r != null && !Double.isNaN(r.getpValue())) { // A priori test: assumptions boolean assumptionViolated = false; Iterator<String> it = r.getAssumptions().keySet().iterator(); while (it.hasNext()) { String assumption = it.next(); TestResult at = (TestResult) r.getAssumptions().get(assumption); if (at == null) { report.append(String.format("Testing for %s failed. ", assumption)); assumptionViolated = true; continue; } if (Double.isNaN(at.getpValue())) { report.append( String.format("Testing for %s using %s failed. 
", assumption, at.getMethod())); assumptionViolated = true; continue; } double ap = at.getpValue(); if (ap <= this.significance_low) { assumptionViolated = true; } // Verbalize result according to p value Pair<String, Double> verbalizedP = verbalizeP(ap, true); String testResultRepresentation = getTestResultRepresentation(at, verbalizedP.getValue()); report.append(String.format("%s %s violation of %s (%s). ", at.getMethod(), verbalizedP.getKey(), assumption, testResultRepresentation)); } // Create QQ-Normal diagram to support the analysis of a // normality assumption if (result.getKey().equals("DependentT") && samples.length == 2) { filename = String.format("qqNormPlot%s", measure.replaceAll("\\s", "")); path = String.format("%s%s%s", outputFolderPath, File.separator, filename); pathR = this.fixSlashes(path); figRef = String.format("fig:qqNormPlot%s", measure.replaceAll("\\s", "")); caption = String.format("QQ-Normal plot of pairwise differences between %s samples.", measure); double[] differences = new double[samples[0].length]; for (int j = 0; j < samples[0].length; j++) { differences[j] = samples[0][j] - samples[1][j]; } successful = stats.plotQQNorm(differences, "M0-M1", measure, pathR); if (successful) { figures.add(new String[] { figRef, caption, filename }); report.append(String.format("See Fig. \\ref{%s} for a QQ-Normal plot of the samples. ", figRef)); } } if (assumptionViolated) { report.append( "Given that the assumptions are violated, the following test may be corrupted. 
"); } // A Priori test results // Verbalize result according to p value Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false); String testResultRepresentation = getTestResultRepresentation(r, verbalizedP.getValue()); report.append(String.format( "The %s %s differences between the performances of the models (%s).\\\\ \n\n ", method, verbalizedP.getKey(), testResultRepresentation)); // Store result for summary if (testSummary.get(testType).containsKey(verbalizedP.getKey())) { testSummary.get(testType).get(verbalizedP.getKey()).add(measure); } else { ArrayList<String> list = new ArrayList<String>(); list.add(measure); testSummary.get(testType).put(verbalizedP.getKey(), list); } // Post-hoc test for >2 models (pairwise comparisons) if (evalResults.getSampleData().getModelMetadata().size() > 2) { Pair<String, AbstractTestResult> postHocResult = null; if (testType.equals("Parametric")) { postHocResult = evalResults.getParametricPostHocTestResults().get(measure); } else { postHocResult = evalResults.getNonParametricPostHocTestResults().get(measure); } method = postHocResult.getKey(); if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) { method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method); } methodsSummary.put(String.format("%sPostHoc", testType), method); PairwiseTestResult rPostHoc = (PairwiseTestResult) postHocResult.getValue(); report.append(String.format("The system performed the \\emph{%s} post-hoc. ", method)); if (rPostHoc == null) { report.append("The test failed. "); continue; } // Assumptions boolean assumptionsViolated = false; it = rPostHoc.getAssumptions().keySet().iterator(); while (it.hasNext()) { String assumption = it.next(); PairwiseTestResult at = (PairwiseTestResult) rPostHoc.getAssumptions().get(assumption); if (at == null) { report.append(String.format("Testing for %s failed. 
", assumption)); assumptionsViolated = true; continue; } // Create table with pairwise p-values for // assumption testing double[][] ap = at.getpValue(); Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first // element // is // header, // second // are // values caption = String.format("P-values from the %s for %s", at.getMethod(), measure); ref = String.format("tbl:%s%s", at.getMethod().replaceAll("\\s", ""), measure.replaceAll("\\s", "")); table = createLatexTable(caption, ref, tableData.getKey(), String.format("|%s", StringUtils.repeat("l|", nModels)), tableData.getValue()); double max = getMax(ap); double min = getMin(ap); verbalizedP = verbalizeP(min, true); if ((max > significance_low && min <= significance_low) || (max > significance_medium && min <= significance_medium) || (max > significance_high && min <= significance_high)) { // partly significant to degree as specified by // verbalized p-value report.append(String.format( "%s partly %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n", at.getMethod(), verbalizedP.getKey(), assumption, verbalizedP.getValue(), ref)); } else { report.append(String.format( "%s %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n", at.getMethod(), verbalizedP.getKey(), assumption, verbalizedP.getValue(), ref)); } report.append(table); if (min <= this.significance_low) { assumptionsViolated = true; } } if (assumptionViolated) { report.append( "Given that the assumptions are violated, the following test may be corrupted. 
"); } // Result double[][] ap = rPostHoc.getpValue(); Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first // element // is // header, // second // are // values caption = String.format("P-values from the %s for %s", method, measure); ref = String.format("tbl:%s%s", method.replaceAll("\\s", ""), measure.replaceAll("\\s", "")); String formatting = null; if (!isBaselineEvaluation) { formatting = String.format("|%s", StringUtils.repeat("l|", nModels)); } else { formatting = String.format("|l|l|"); } String tablePNonAdjusted = createLatexTable(caption, ref, tableData.getKey(), formatting, tableData.getValue()); // Already fetch pairwise adjustments here in order to // determine choice of words double max = getMax(ap); double min = getMin(ap); verbalizedP = verbalizeP(min, false); ArrayList<StatsConfigConstants.CORRECTION_VALUES> adjustments = new ArrayList<StatsConfigConstants.CORRECTION_VALUES>( rPostHoc.getpValueCorrections().keySet()); String adjustWord = ""; if (adjustments.size() > 0) { adjustWord = " for non-adjusted p-values"; } if ((max > significance_low && min <= significance_low) || (max > significance_medium && min <= significance_medium) || (max > significance_high && min <= significance_high)) { // partly significant to degree as specified by // verbalized p-value report.append(String.format( "The %s partly %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ", method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref)); } else { report.append(String.format( "The %s %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). 
", method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref)); } // Determine ordering of models HashMap<Integer, TreeSet<Integer>> postHocOrdering = null; int[][] orderingEdgeList = null; if (testType.equals("Parametric")) { postHocOrdering = evalResults.getParameticPostHocOrdering().get(measure); orderingEdgeList = evalResults.getParameticPostHocEdgelist().get(measure); } else { postHocOrdering = evalResults.getNonParameticPostHocOrdering().get(measure); orderingEdgeList = evalResults.getNonParameticPostHocEdgelist().get(measure); } String ordering = getModelOrderingRepresentation(postHocOrdering); report.append(ordering); // Print graphs of ordering for the current measure and // add the figure to the appendix filename = String.format("graphOrdering%s%s", measure.replaceAll("\\s", ""), testType); path = String.format("%s%s%s", outputFolderPath, File.separator, filename); pathR = this.fixSlashes(path); figRef = String.format("fig:graphOrdering%s%s", measure.replaceAll("\\s", ""), testType); caption = String.format( "Directed graph of significant differences for %s, as indicated by the %s post-hoc test.", measure, testType.toLowerCase()); // int nodes[] = new int[nModels]; // for(int j=0; j<nModels;j++){nodes[j]=j;}; successful = stats.plotGraph(orderingEdgeList, nModels, pathR); if (successful) { figures.add(new String[] { figRef, caption, filename }); report.append(String.format("The ordering is visualized in Fig. \\ref{%s}. 
", figRef)); } // Pairwise adjustments String tablePAdjusted = null; if (adjustments.size() > 0) { String[] subcaption = new String[adjustments.size()]; String[] header = null; String[][][] overallValues = new String[adjustments.size()][][]; double[] minAdjustments = new double[adjustments.size()]; double[] maxAdjustments = new double[adjustments.size()]; for (int j = 0; j < adjustments.size(); j++) { StatsConfigConstants.CORRECTION_VALUES adjustmentMethod = adjustments.get(j); subcaption[j] = adjustmentMethod.name(); double[][] correctedP = rPostHoc.getpValueCorrections().get(adjustmentMethod); if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(adjustmentMethod)) { subcaption[j] = StatsConfigConstants.PRETTY_PRINT_METHODS.get(adjustmentMethod); } tableData = getPValueStringArray(correctedP, isBaselineEvaluation); header = tableData.getKey(); overallValues[j] = tableData.getValue(); minAdjustments[j] = getMin(correctedP); maxAdjustments[j] = getMax(correctedP); } caption = String.format("Adjusted p-values from the %s for %s", method, measure); ref = String.format("tbl:%s%sAdjusted", method.replaceAll("\\s", ""), measure.replaceAll("\\s", "")); formatting = null; if (!isBaselineEvaluation) { formatting = String.format("|%s", StringUtils.repeat("l|", nModels)); } else { formatting = String.format("|l|l|"); } tablePAdjusted = createLatexSubTable(caption, subcaption, ref, header, formatting, overallValues); min = getMin(minAdjustments); max = getMax(maxAdjustments); verbalizedP = verbalizeP(min, false); if ((max > significance_low && min <= significance_low) || (max > significance_medium && min <= significance_medium) || (max > significance_high && min <= significance_high)) { // partly significant to degree as specified by // verbalized p-value report.append(String.format( "It partly %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. 
\\ref{%s}).\n\n ", verbalizedP.getKey(), verbalizedP.getValue(), ref)); } else { report.append(String.format( "It %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n\n ", verbalizedP.getKey(), verbalizedP.getValue(), ref)); } } report.append(tablePNonAdjusted); if (tablePAdjusted != null) { report.append(tablePAdjusted); } } } else { report.append(String.format("The %s failed.", method)); } } } // // Contingency table and McNemar results if this test was performed // if (evalResults.getNonParametricTest().equals("McNemar")) { String measure = "Contingency Table"; String testType = "Non-Parametric"; report.append("\\FloatBarrier\n"); report.append("\\subsection{Contingency Table}\n"); String caption = String .format("Contingency table with correctly and incorrectly classified folds for %s", measure); if (evalResults.getSampleData().getPipelineType() == ReportTypes.MULTIPLE_CV) { report.append(String.format( "The contingency table drawn from the %s and the %d models is listed in Tbl. \\ref{%s}. The correctly and incorrectly classified instances per fold were averaged over all repetitions. \n", pipelineDescription, nModels, ref)); caption = String.format( "Averaged contingency table with correctly and incorrectly classified folds for %s", measure); } else { report.append(String.format( "The contingency table drawn from the %s and the %d models is listed in Tbl. 
\\ref{%s}.\n", pipelineDescription, nModels, ref)); } int[][] contingencyMatrix = evalResults.getSampleData().getContingencyMatrix(); ref = "tbl:ContingencyMatrix"; values = new String[][] { { "Wrong", "", "" }, { "Correct", "", "" } }; values[0][1] = String.valueOf(contingencyMatrix[0][0]); values[0][2] = String.valueOf(contingencyMatrix[0][1]); values[1][1] = String.valueOf(contingencyMatrix[1][0]); values[1][2] = String.valueOf(contingencyMatrix[1][1]); table = createLatexTable(caption, ref, new String[] { "M0/M1", "Wrong", "Correct" }, "|l|l|l|", values); report.append(table); // Test results report.append(String.format("\\subsubsection{%s Testing}", testType)); report.append( String.format("The system compared the %d models using the \\emph{McNemar test}. ", nModels)); Pair<String, AbstractTestResult> result = evalResults.getNonParametricTestResults().get(measure); // Use pretty-print method descriptor if specified String method = result.getKey(); if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) { method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method); } methodsSummary.put(testType, method); TestResult r = (TestResult) result.getValue(); if (r != null && !Double.isNaN(r.getpValue())) { StringBuilder parameters = new StringBuilder(); Iterator<String> it = r.getParameter().keySet().iterator(); while (it.hasNext()) { String parameter = it.next(); double value = r.getParameter().get(parameter); parameters.append(String.format("%s=%.3f, ", parameter, value)); } // Verbalize result according to p value Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false); report.append(String.format( "The test %s differences between the performances of the models ($%sp=%.3f, \\alpha=%.2f$).\\\\ \n", verbalizedP.getKey(), parameters.toString(), r.getpValue(), verbalizedP.getValue())); // Store result for summary if (testSummary.get(testType).containsKey(verbalizedP.getKey())) { testSummary.get(testType).get(verbalizedP.getKey()).add(measure); 
} else { ArrayList<String> list = new ArrayList<String>(); list.add(measure); testSummary.get(testType).put(verbalizedP.getKey(), list); } } else { report.append("The test failed.\\\\ \n"); } } // // Summary of results // report.append("\\FloatBarrier\n"); report.append("\\section{Summary}\n"); for (String testType : new String[] { "Parametric", "Non-Parametric" }) { String prefix = ""; if (nModels == 2) { report.append( String.format("The system performed %s testing of the %d models using a %s. The test ", testType.toLowerCase(), nModels, methodsSummary.get(testType))); prefix = "It"; } else { String postHocTesting = String.format("%sPostHoc", testType); report.append(String.format( "The system performed %s testing of the %d models using a %s and a %s post-hoc. The tests ", testType.toLowerCase(), nModels, methodsSummary.get(testType), methodsSummary.get(postHocTesting))); prefix = "They"; } // If all tests failed, there're no results to summarize. HashMap<String, List<String>> summary = testSummary.get(testType); if (summary.keySet().size() == 0) { report.append("failed. "); continue; } Iterator<String> it = summary.keySet().iterator(); boolean usePrefix = false; while (it.hasNext()) { String pVerbalization = it.next(); List<String> affectedMeasures = summary.get(pVerbalization); if (!usePrefix) { report.append(String.format("%s differences in performance for the %s. ", pVerbalization, createEnumeration(affectedMeasures))); } else { report.append(String.format("%s %s differences in performance for the %s. 
", prefix, pVerbalization, createEnumeration(affectedMeasures))); } usePrefix = true; } report.append("\\\\ \n\n"); } // // Appendix // // Add all figures report.append("\\FloatBarrier\n"); report.append("\\section{Appendix}\n"); for (int i = 0; i < figures.size(); i++) { ref = figures.get(i)[0]; String caption = figures.get(i)[1]; String filename = figures.get(i)[2]; report.append("\\begin{figure}\n"); report.append("\\centering\n"); report.append(String.format("\\includegraphics[width=1\\linewidth]{%s}\n", filename)); report.append(String.format("\\caption{%s}\n", caption)); report.append(String.format("\\label{%s}\n", ref)); report.append("\\end{figure}\n\n"); } // Close document report.append("\\end{document}"); return report.toString(); }
From source file:com.gargoylesoftware.htmlunit.javascript.host.css.CSSStyleSheetTest.java
/**
 * Tests that a stylesheet whose response body exceeds the web client's
 * in-memory response threshold is still downloaded and applied: the
 * {@code .someRed} rule must turn the body color red on the second page.
 *
 * @throws Exception if an error occurs
 */
@Test
@Alerts("rgb(255, 0, 0)")
public void veryBig() throws Exception {
    getWebDriver();
    // Read the configured in-memory response limit from the underlying
    // WebClient (reflective access via the test harness); 0 if unavailable.
    int maxInMemory = 0;
    final WebClient webClient = get(this, WebDriverTestCase.class, "webClient_");
    if (webClient != null) {
        maxInMemory = webClient.getOptions().getMaxInMemory();
    }
    final String baseUrl = getDefaultUrl().toExternalForm();
    // First page: links to the stylesheet and to a second page.
    final String html = "<html>\n" + " <head>\n" + " <link href='" + baseUrl
            + "style.css' rel='stylesheet'></link>\n" + " </head>\n" + " <body>\n"
            + " <a href='second.html'>second page</a>\n" + " </body>\n" + "</html>";
    // Second page: same stylesheet, alerts the computed body color.
    final String html2 = "<html>\n" + " <head>\n" + " <link href='" + baseUrl
            + "style.css' rel='stylesheet'></link>\n" + " </head>\n" + " <body class='someRed'>\n"
            + " <script>\n" + " var getStyle = function(e) {\n"
            + " return window.getComputedStyle ? window.getComputedStyle(e,'') : e.currentStyle;\n"
            + " };\n" + " alert(getStyle(document.body).color);\n" + " </script>\n" + " </body>\n"
            + "</html>";
    final MockWebConnection conn = getMockWebConnection();
    final List<NameValuePair> headers2 = new ArrayList<>();
    headers2.add(new NameValuePair("Last-Modified", "Sun, 15 Jul 2007 20:46:27 GMT"));
    // Pad the stylesheet with maxInMemory spaces so the response is larger
    // than the in-memory limit -- presumably forcing it to be spooled to a
    // temp file; TODO confirm against WebResponse caching behavior.
    final String bigContent = ".someRed { color: red; }" + StringUtils.repeat(' ', maxInMemory);
    conn.setResponse(new URL(getDefaultUrl(), "style2.css"), bigContent, 200, "OK", "text/html", headers2);
    conn.setResponse(new URL(getDefaultUrl(), "second.html"), html2);
    // style.css answers with a 302 redirect to the big style2.css.
    final List<NameValuePair> headers1 = new ArrayList<>();
    headers1.add(new NameValuePair("Location", "style2.css"));
    conn.setResponse(new URL(getDefaultUrl(), "style.css"), "", 302, "Moved", "text/html", headers1);
    final WebDriver driver = loadPage2(html, new URL(getDefaultUrl(), "test.html"));
    driver.findElement(By.linkText("second page")).click();
    verifyAlerts(driver, getExpectedAlerts());
}
From source file:info.financialecology.finance.utilities.datastruct.VersatileDataTable.java
/** * Prints a horizontal separator. Used for instance to separate * the header row from the values. The total width of the separator * depends on the column width parameter. * @param sep the separator symbol//from w w w . jav a2 s .co m * @return a sequence of separator strings */ public String printRowSeparator(String sep) { String ts = sep; int nItems = getColumnCount(); ts += StringUtils.repeat(sep, 12); // TODO the value '12' should be a parameter for (int i = 0; i < nItems; i++) { ts += sep + StringUtils.repeat(sep, internalParams.getColumnWidth() + 1); } return ts + sep; }
From source file:info.financialecology.finance.utilities.datastruct.VersatileDataTable.java
/**
 * Prints a header row. The total width of the header row
 * depends on the column width parameter: a 12-character-wide leading
 * label column followed by one left-padded (right-aligned) header cell
 * per data column.
 *
 * @return the header row
 */
public String printHeaders() {
    int nItems = getColumnCount();
    // StringBuilder avoids the O(n^2) allocation of repeated String '+=' in a loop.
    StringBuilder ts = new StringBuilder(" ");
    ts.append(StringUtils.repeat(' ', 12));
    for (int i = 0; i < nItems; i++) {
        String columnKey = (String) getColumnKey(i);
        // Column header text: variable name plus its index decoration.
        String label = extractVariableName(columnKey) + mapIndices(columnKey);
        ts.append(" ").append(StringUtils.leftPad(label, internalParams.getColumnWidth() + 1));
    }
    // The original ended with "return ts += \" \";" -- keep the trailing space.
    return ts.append(" ").toString();
}
From source file:com.mirth.connect.cli.CommandLineInterface.java
private void commandListCodeTemplateLibraries(boolean includeCodeTemplates) throws ClientException { List<CodeTemplateLibrary> libraries = client.getCodeTemplateLibraries(null, includeCodeTemplates); int maxLibraryNameLength = 4; for (CodeTemplateLibrary library : libraries) { if (library.getName().length() > maxLibraryNameLength) { maxLibraryNameLength = library.getName().length(); }// w w w .j a v a 2s . c o m } int maxCodeTemplateNameLength = 4; if (includeCodeTemplates) { for (CodeTemplateLibrary library : libraries) { for (CodeTemplate codeTemplate : library.getCodeTemplates()) { if (codeTemplate.getName().length() > maxCodeTemplateNameLength) { maxCodeTemplateNameLength = codeTemplate.getName().length(); } } } } boolean showLibraryHeader = true; for (CodeTemplateLibrary library : libraries) { if (showLibraryHeader) { out.printf("%-" + maxLibraryNameLength + "s %-36s %-8s %s\n", "Name", "Id", "Revision", "Last Modified"); out.printf("%-" + maxLibraryNameLength + "s %-36s %-8s %s\n", StringUtils.repeat('-', maxLibraryNameLength), StringUtils.repeat('-', 36), StringUtils.repeat('-', 8), StringUtils.repeat('-', 19)); showLibraryHeader = false; } out.printf("%-" + maxLibraryNameLength + "s %-36s %-8d %tF %<tT\n", library.getName(), library.getId(), library.getRevision(), library.getLastModified()); if (includeCodeTemplates && library.getCodeTemplates().size() > 0) { out.println(); listCodeTemplates(library.getCodeTemplates(), true, maxCodeTemplateNameLength); out.println(); showLibraryHeader = true; } } }
From source file:com.google.dart.tools.ui.internal.text.dart.DartAutoIndentStrategy_NEW.java
/**
 * Re-indents pasted text to match the indentation level at the paste
 * location. The paste (plus a "prefix" taken from the reference line) is
 * copied into a temporary document, re-indented there line by line, and the
 * result is written back into {@code command}.
 *
 * @param document the document being pasted into
 * @param command the paste command; its offset/length/text are mutated in
 *          place with the re-indented text on success
 */
private void smartPaste(IDocument document, DocumentCommand command) {
    int newOffset = command.offset;
    int newLength = command.length;
    String newText = command.text;
    try {
        DartHeuristicScanner scanner = new DartHeuristicScanner(document);
        DartIndenter indenter = new DartIndenter(document, scanner, null);
        int offset = newOffset;
        // reference position to get the indent from
        int refOffset = indenter.findReferencePosition(offset);
        if (refOffset == DartHeuristicScanner.NOT_FOUND) {
            return;
        }
        // Also consult the peer position and take the smaller reference offset.
        int peerOffset = getPeerPosition(document, command);
        peerOffset = indenter.findReferencePosition(peerOffset);
        if (peerOffset != DartHeuristicScanner.NOT_FOUND) {
            refOffset = Math.min(refOffset, peerOffset);
        }
        // eat any WS before the insertion to the beginning of the line
        int firstLine = 1; // don't format the first line per default, as it has
        // other content before it
        IRegion line = document.getLineInformationOfOffset(offset);
        String notSelected = document.get(line.getOffset(), offset - line.getOffset());
        if (notSelected.trim().length() == 0) {
            // Only whitespace precedes the paste point, so the first pasted
            // line may be re-indented as well.
            newLength += notSelected.length();
            newOffset = line.getOffset();
            firstLine = 0;
        }
        // prefix: the part we need for formatting but won't paste
        IRegion refLine = document.getLineInformationOfOffset(refOffset);
        String prefix = document.get(refLine.getOffset(), newOffset - refLine.getOffset());
        // I don't see a good solution for pasting cascades.
        // For now, if we paste cascade after other cascade, just force the same indentation.
        String forcedCascadePrefix = computeForcedCascadePrefix(indenter, document, offset, newText);

        // handle the indentation computation inside a temporary document
        Document temp = new Document(prefix + newText);
        DocumentRewriteSession session = temp
                .startRewriteSession(DocumentRewriteSessionType.STRICTLY_SEQUENTIAL);
        scanner = new DartHeuristicScanner(temp);
        indenter = new DartIndenter(temp, scanner, null);
        installDartStuff(temp);

        // indent the first and second line
        // compute the relative indentation difference from the second line
        // (as the first might be partially selected) and use the value to
        // indent all other lines.
        StringBuffer addition = new StringBuffer();
        int insertLength = 0;
        int firstLineOriginalIndent = 0;
        int firstLineIndent = 0;
        int first = document.computeNumberOfLines(prefix) + firstLine; // don't format first line
        int lines = temp.getNumberOfLines();
        int tabLength = getVisualTabLengthPreference();
        boolean isInMultiLineString = false;
        {
            // Initial multi-line-string state comes from line 0 of the
            // temporary document (the prefix line).
            IRegion r = temp.getLineInformation(0);
            isInMultiLineString = hasMultiLineStringQuotes(temp.get(r.getOffset(), r.getLength()));
        }
        for (int l = first; l < lines; l++) {
            // we don't change the number of lines while adding indents
            IRegion r = temp.getLineInformation(l);
            int lineOffset = r.getOffset();
            int lineLength = r.getLength();
            String lineContent = temp.get(lineOffset, lineLength);
            if (lineLength == 0) {
                // empty lines are left untouched
                continue;
            }
            // indent the first pasted line
            String current = getCurrentIndent(temp, l);
            if (l == first) {
                firstLineOriginalIndent = computeVisualLength(current, tabLength);
            }
            // unless it is a line comment
            if (current.startsWith(LINE_COMMENT)) {
                continue;
            }
            StringBuffer correct;
            if (l == first) {
                if (forcedCascadePrefix != null) {
                    correct = new StringBuffer(forcedCascadePrefix);
                } else {
                    correct = indenter.computeIndentation(lineOffset);
                }
                firstLineIndent = computeVisualLength(correct, tabLength);
            } else {
                // Subsequent lines keep their indent relative to the first
                // pasted line.
                correct = new StringBuffer();
                int secondIndent = firstLineIndent + computeVisualLength(current, tabLength)
                        - firstLineOriginalIndent;
                if (secondIndent > 0) {
                    correct.append(StringUtils.repeat(' ', secondIndent));
                }
            }
            if (correct == null) {
                return; // bail out
            }
            insertLength = subtractIndent(correct, current, addition, tabLength);
            // relatively indent all pasted lines
            if (!isInMultiLineString) {
                if (insertLength > 0) {
                    addIndent(temp, l, addition, tabLength);
                } else if (insertLength < 0) {
                    cutIndent(temp, l, -insertLength, tabLength);
                }
            }
            // Lines inside multi-line strings must not be re-indented; toggle
            // the state whenever a line contains multi-line string quotes.
            if (hasMultiLineStringQuotes(lineContent)) {
                isInMultiLineString = !isInMultiLineString;
            }
        }
        removeDartStuff(temp);
        temp.stopRewriteSession(session);
        newText = temp.get(prefix.length(), temp.getLength() - prefix.length());
        // if a tab causes indentation to the current level, allow it to add another level
        if (!(newText.trim().isEmpty() && isRepresentingTab(command.text))) {
            command.offset = newOffset;
            command.length = newLength;
            command.text = newText;
        }
    } catch (Throwable e) {
        // Never let the paste helper crash the editor; just log and leave the
        // command unmodified.
        DartToolsPlugin.log(e);
    }
}