Usage examples for weka.estimators.DiscreteEstimator.getCount

public double getCount(double data)

Gets the count that the estimator has accumulated for a value (data is the index of a discrete symbol).
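Before the full examples below, here is a minimal sketch of the call in isolation, assuming the stock weka.estimators.DiscreteEstimator API (a constructor taking a symbol count and a Laplace flag, plus addValue, getNumSymbols and getSumOfCounts); the class name and the added values are invented for illustration.

import weka.estimators.DiscreteEstimator;

public class DiscreteEstimatorGetCountDemo {
  public static void main(String[] args) {
    // Estimator over 3 symbols (indices 0..2) with Laplace smoothing,
    // so every symbol starts with a count of 1.
    DiscreteEstimator est = new DiscreteEstimator(3, true);

    // Record some observations; the second argument is the instance weight.
    est.addValue(0, 1.0);
    est.addValue(0, 1.0);
    est.addValue(2, 0.5);

    for (int k = 0; k < est.getNumSymbols(); k++) {
      // getCount returns the (smoothed) count accumulated for symbol k
      System.out.println("count(" + k + ") = " + est.getCount(k));
    }
    // expected: count(0) = 3.0, count(1) = 1.0, count(2) = 1.5, sum = 5.5
    System.out.println("sum of counts = " + est.getSumOfCounts());
  }
}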
From source file: br.ufrn.ia.core.clustering.EMIaProject.java
License: Open Source License
public String toString() {
  if (m_displayModelInOldFormat) {
    return toStringOriginal();
  }
  if (m_priors == null) {
    return "No clusterer built yet!";
  }
  StringBuffer temp = new StringBuffer();
  temp.append("\nEM\n==\n");
  if (m_initialNumClusters == -1) {
    temp.append("\nNumber of clusters selected by cross validation: " + m_num_clusters + "\n");
  } else {
    temp.append("\nNumber of clusters: " + m_num_clusters + "\n");
  }

  int maxWidth = 0;
  int maxAttWidth = 0;
  boolean containsKernel = false;

  // set up max widths
  // attributes
  for (int i = 0; i < m_num_attribs; i++) {
    Attribute a = m_theInstances.attribute(i);
    if (a.name().length() > maxAttWidth) {
      maxAttWidth = m_theInstances.attribute(i).name().length();
    }
    if (a.isNominal()) {
      // check values
      for (int j = 0; j < a.numValues(); j++) {
        String val = a.value(j) + " ";
        if (val.length() > maxAttWidth) {
          maxAttWidth = val.length();
        }
      }
    }
  }

  for (int i = 0; i < m_num_clusters; i++) {
    for (int j = 0; j < m_num_attribs; j++) {
      if (m_theInstances.attribute(j).isNumeric()) {
        // check mean and std. dev. against maxWidth
        double mean = Math.log(Math.abs(m_modelNormal[i][j][0])) / Math.log(10.0);
        double stdD = Math.log(Math.abs(m_modelNormal[i][j][1])) / Math.log(10.0);
        double width = (mean > stdD) ? mean : stdD;
        if (width < 0) {
          width = 1;
        }
        // decimal + # decimal places + 1
        width += 6.0;
        if ((int) width > maxWidth) {
          maxWidth = (int) width;
        }
      } else {
        // nominal distributions
        DiscreteEstimator d = (DiscreteEstimator) m_model[i][j];
        for (int k = 0; k < d.getNumSymbols(); k++) {
          String size = Utils.doubleToString(d.getCount(k), maxWidth, 4).trim();
          if (size.length() > maxWidth) {
            maxWidth = size.length();
          }
        }
        int sum = Utils.doubleToString(d.getSumOfCounts(), maxWidth, 4).trim().length();
        if (sum > maxWidth) {
          maxWidth = sum;
        }
      }
    }
  }

  if (maxAttWidth < "Attribute".length()) {
    maxAttWidth = "Attribute".length();
  }
  maxAttWidth += 2;

  temp.append("\n\n");
  temp.append(pad("Cluster", " ", (maxAttWidth + maxWidth + 1) - "Cluster".length(), true));
  temp.append("\n");
  temp.append(pad("Attribute", " ", maxAttWidth - "Attribute".length(), false));

  // cluster #'s
  for (int i = 0; i < m_num_clusters; i++) {
    String classL = "" + i;
    temp.append(pad(classL, " ", maxWidth + 1 - classL.length(), true));
  }
  temp.append("\n");

  // cluster priors
  temp.append(pad("", " ", maxAttWidth, true));
  for (int i = 0; i < m_num_clusters; i++) {
    String priorP = Utils.doubleToString(m_priors[i], maxWidth, 2).trim();
    priorP = "(" + priorP + ")";
    temp.append(pad(priorP, " ", maxWidth + 1 - priorP.length(), true));
  }
  temp.append("\n");
  temp.append(pad("", "=", maxAttWidth + (maxWidth * m_num_clusters) + m_num_clusters + 1, true));
  temp.append("\n");

  for (int i = 0; i < m_num_attribs; i++) {
    String attName = m_theInstances.attribute(i).name();
    temp.append(attName + "\n");

    if (m_theInstances.attribute(i).isNumeric()) {
      String meanL = " mean";
      temp.append(pad(meanL, " ", maxAttWidth + 1 - meanL.length(), false));
      for (int j = 0; j < m_num_clusters; j++) {
        // means
        String mean = Utils.doubleToString(m_modelNormal[j][i][0], maxWidth, 4).trim();
        temp.append(pad(mean, " ", maxWidth + 1 - mean.length(), true));
      }
      temp.append("\n");
      // now do std deviations
      String stdDevL = " std. dev.";
      temp.append(pad(stdDevL, " ", maxAttWidth + 1 - stdDevL.length(), false));
      for (int j = 0; j < m_num_clusters; j++) {
        String stdDev = Utils.doubleToString(m_modelNormal[j][i][1], maxWidth, 4).trim();
        temp.append(pad(stdDev, " ", maxWidth + 1 - stdDev.length(), true));
      }
      temp.append("\n\n");
    } else {
      Attribute a = m_theInstances.attribute(i);
      for (int j = 0; j < a.numValues(); j++) {
        String val = " " + a.value(j);
        temp.append(pad(val, " ", maxAttWidth + 1 - val.length(), false));
        for (int k = 0; k < m_num_clusters; k++) {
          DiscreteEstimator d = (DiscreteEstimator) m_model[k][i];
          String count = Utils.doubleToString(d.getCount(j), maxWidth, 4).trim();
          temp.append(pad(count, " ", maxWidth + 1 - count.length(), true));
        }
        temp.append("\n");
      }
      // do the totals
      String total = " [total]";
      temp.append(pad(total, " ", maxAttWidth + 1 - total.length(), false));
      for (int k = 0; k < m_num_clusters; k++) {
        DiscreteEstimator d = (DiscreteEstimator) m_model[k][i];
        String count = Utils.doubleToString(d.getSumOfCounts(), maxWidth, 4).trim();
        temp.append(pad(count, " ", maxWidth + 1 - count.length(), true));
      }
      temp.append("\n");
    }
  }
  return temp.toString();
}
From source file: cn.edu.xjtu.dbmine.source.NaiveBayes.java
License: Open Source License
/** * Returns a description of the classifier. * * @return a description of the classifier as a string. *//*from w ww. j a v a 2 s . c o m*/ public String toString() { if (m_displayModelInOldFormat) { return toStringOriginal(); } StringBuffer temp = new StringBuffer(); temp.append("Naive Bayes Classifier"); if (m_Instances == null) { temp.append(": No model built yet."); } else { int maxWidth = 0; int maxAttWidth = 0; boolean containsKernel = false; // set up max widths // class values for (int i = 0; i < m_Instances.numClasses(); i++) { if (m_Instances.classAttribute().value(i).length() > maxWidth) { maxWidth = m_Instances.classAttribute().value(i).length(); } } // attributes for (int i = 0; i < m_Instances.numAttributes(); i++) { if (i != m_Instances.classIndex()) { Attribute a = m_Instances.attribute(i); if (a.name().length() > maxAttWidth) { maxAttWidth = m_Instances.attribute(i).name().length(); } if (a.isNominal()) { // check values for (int j = 0; j < a.numValues(); j++) { String val = a.value(j) + " "; if (val.length() > maxAttWidth) { maxAttWidth = val.length(); } } } } } for (int i = 0; i < m_Distributions.length; i++) { for (int j = 0; j < m_Instances.numClasses(); j++) { if (m_Distributions[i][0] instanceof NormalEstimator) { // check mean/precision dev against maxWidth NormalEstimator n = (NormalEstimator) m_Distributions[i][j]; double mean = Math.log(Math.abs(n.getMean())) / Math.log(10.0); double precision = Math.log(Math.abs(n.getPrecision())) / Math.log(10.0); double width = (mean > precision) ? mean : precision; if (width < 0) { width = 1; } // decimal + # decimal places + 1 width += 6.0; if ((int) width > maxWidth) { maxWidth = (int) width; } } else if (m_Distributions[i][0] instanceof KernelEstimator) { containsKernel = true; KernelEstimator ke = (KernelEstimator) m_Distributions[i][j]; int numK = ke.getNumKernels(); String temps = "K" + numK + ": mean (weight)"; if (maxAttWidth < temps.length()) { maxAttWidth = temps.length(); } // check means + weights against maxWidth if (ke.getNumKernels() > 0) { double[] means = ke.getMeans(); double[] weights = ke.getWeights(); for (int k = 0; k < ke.getNumKernels(); k++) { String m = Utils.doubleToString(means[k], maxWidth, 4).trim(); m += " (" + Utils.doubleToString(weights[k], maxWidth, 1).trim() + ")"; if (maxWidth < m.length()) { maxWidth = m.length(); } } } } else if (m_Distributions[i][0] instanceof DiscreteEstimator) { DiscreteEstimator d = (DiscreteEstimator) m_Distributions[i][j]; for (int k = 0; k < d.getNumSymbols(); k++) { String size = "" + d.getCount(k); if (size.length() > maxWidth) { maxWidth = size.length(); } } int sum = ("" + d.getSumOfCounts()).length(); if (sum > maxWidth) { maxWidth = sum; } } } } // Check width of class labels for (int i = 0; i < m_Instances.numClasses(); i++) { String cSize = m_Instances.classAttribute().value(i); if (cSize.length() > maxWidth) { maxWidth = cSize.length(); } } // Check width of class priors for (int i = 0; i < m_Instances.numClasses(); i++) { String priorP = Utils .doubleToString(((DiscreteEstimator) m_ClassDistribution).getProbability(i), maxWidth, 2) .trim(); priorP = "(" + priorP + ")"; if (priorP.length() > maxWidth) { maxWidth = priorP.length(); } } if (maxAttWidth < "Attribute".length()) { maxAttWidth = "Attribute".length(); } if (maxAttWidth < " weight sum".length()) { maxAttWidth = " weight sum".length(); } if (containsKernel) { if (maxAttWidth < " [precision]".length()) { maxAttWidth = " [precision]".length(); } } maxAttWidth += 2; temp.append("\n\n"); 
temp.append(pad("Class", " ", (maxAttWidth + maxWidth + 1) - "Class".length(), true)); temp.append("\n"); temp.append(pad("Attribute", " ", maxAttWidth - "Attribute".length(), false)); // class labels for (int i = 0; i < m_Instances.numClasses(); i++) { String classL = m_Instances.classAttribute().value(i); temp.append(pad(classL, " ", maxWidth + 1 - classL.length(), true)); } temp.append("\n"); // class priors temp.append(pad("", " ", maxAttWidth, true)); for (int i = 0; i < m_Instances.numClasses(); i++) { String priorP = Utils .doubleToString(((DiscreteEstimator) m_ClassDistribution).getProbability(i), maxWidth, 2) .trim(); priorP = "(" + priorP + ")"; temp.append(pad(priorP, " ", maxWidth + 1 - priorP.length(), true)); } temp.append("\n"); temp.append(pad("", "=", maxAttWidth + (maxWidth * m_Instances.numClasses()) + m_Instances.numClasses() + 1, true)); temp.append("\n"); // loop over the attributes int counter = 0; for (int i = 0; i < m_Instances.numAttributes(); i++) { if (i == m_Instances.classIndex()) { continue; } String attName = m_Instances.attribute(i).name(); temp.append(attName + "\n"); if (m_Distributions[counter][0] instanceof NormalEstimator) { String meanL = " mean"; temp.append(pad(meanL, " ", maxAttWidth + 1 - meanL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { // means NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String mean = Utils.doubleToString(n.getMean(), maxWidth, 4).trim(); temp.append(pad(mean, " ", maxWidth + 1 - mean.length(), true)); } temp.append("\n"); // now do std deviations String stdDevL = " std. dev."; temp.append(pad(stdDevL, " ", maxAttWidth + 1 - stdDevL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String stdDev = Utils.doubleToString(n.getStdDev(), maxWidth, 4).trim(); temp.append(pad(stdDev, " ", maxWidth + 1 - stdDev.length(), true)); } temp.append("\n"); // now the weight sums String weightL = " weight sum"; temp.append(pad(weightL, " ", maxAttWidth + 1 - weightL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String weight = Utils.doubleToString(n.getSumOfWeights(), maxWidth, 4).trim(); temp.append(pad(weight, " ", maxWidth + 1 - weight.length(), true)); } temp.append("\n"); // now the precisions String precisionL = " precision"; temp.append(pad(precisionL, " ", maxAttWidth + 1 - precisionL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String precision = Utils.doubleToString(n.getPrecision(), maxWidth, 4).trim(); temp.append(pad(precision, " ", maxWidth + 1 - precision.length(), true)); } temp.append("\n\n"); } else if (m_Distributions[counter][0] instanceof DiscreteEstimator) { Attribute a = m_Instances.attribute(i); for (int j = 0; j < a.numValues(); j++) { String val = " " + a.value(j); temp.append(pad(val, " ", maxAttWidth + 1 - val.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { DiscreteEstimator d = (DiscreteEstimator) m_Distributions[counter][k]; String count = "" + d.getCount(j); temp.append(pad(count, " ", maxWidth + 1 - count.length(), true)); } temp.append("\n"); } // do the totals String total = " [total]"; temp.append(pad(total, " ", maxAttWidth + 1 - total.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { DiscreteEstimator d = (DiscreteEstimator) 
m_Distributions[counter][k]; String count = "" + d.getSumOfCounts(); temp.append(pad(count, " ", maxWidth + 1 - count.length(), true)); } temp.append("\n\n"); } else if (m_Distributions[counter][0] instanceof KernelEstimator) { String kL = " [# kernels]"; temp.append(pad(kL, " ", maxAttWidth + 1 - kL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String nk = "" + ke.getNumKernels(); temp.append(pad(nk, " ", maxWidth + 1 - nk.length(), true)); } temp.append("\n"); // do num kernels, std. devs and precisions String stdDevL = " [std. dev]"; temp.append(pad(stdDevL, " ", maxAttWidth + 1 - stdDevL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String stdD = Utils.doubleToString(ke.getStdDev(), maxWidth, 4).trim(); temp.append(pad(stdD, " ", maxWidth + 1 - stdD.length(), true)); } temp.append("\n"); String precL = " [precision]"; temp.append(pad(precL, " ", maxAttWidth + 1 - precL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String prec = Utils.doubleToString(ke.getPrecision(), maxWidth, 4).trim(); temp.append(pad(prec, " ", maxWidth + 1 - prec.length(), true)); } temp.append("\n"); // first determine max number of kernels accross the classes int maxK = 0; for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; if (ke.getNumKernels() > maxK) { maxK = ke.getNumKernels(); } } for (int j = 0; j < maxK; j++) { // means first String meanL = " K" + (j + 1) + ": mean (weight)"; temp.append(pad(meanL, " ", maxAttWidth + 1 - meanL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; double[] means = ke.getMeans(); double[] weights = ke.getWeights(); String m = "--"; if (ke.getNumKernels() == 0) { m = "" + 0; } else if (j < ke.getNumKernels()) { m = Utils.doubleToString(means[j], maxWidth, 4).trim(); m += " (" + Utils.doubleToString(weights[j], maxWidth, 1).trim() + ")"; } temp.append(pad(m, " ", maxWidth + 1 - m.length(), true)); } temp.append("\n"); } temp.append("\n"); } counter++; } } return temp.toString(); }
From source file: main.NaiveBayes.java
License: Open Source License
/** * Returns a description of the classifier. * //from w w w .ja v a2s.c o m * @return a description of the classifier as a string. */ @Override public String toString() { if (m_displayModelInOldFormat) { return toStringOriginal(); } StringBuffer temp = new StringBuffer(); temp.append("Naive Bayes Classifier"); if (m_Instances == null) { temp.append(": No model built yet."); } else { int maxWidth = 0; int maxAttWidth = 0; boolean containsKernel = false; // set up max widths // class values for (int i = 0; i < m_Instances.numClasses(); i++) { if (m_Instances.classAttribute().value(i).length() > maxWidth) { maxWidth = m_Instances.classAttribute().value(i).length(); } } // attributes for (int i = 0; i < m_Instances.numAttributes(); i++) { if (i != m_Instances.classIndex()) { Attribute a = m_Instances.attribute(i); if (a.name().length() > maxAttWidth) { maxAttWidth = m_Instances.attribute(i).name().length(); } if (a.isNominal()) { // check values for (int j = 0; j < a.numValues(); j++) { String val = a.value(j) + " "; if (val.length() > maxAttWidth) { maxAttWidth = val.length(); } } } } } for (Estimator[] m_Distribution : m_Distributions) { for (int j = 0; j < m_Instances.numClasses(); j++) { if (m_Distribution[0] instanceof NormalEstimator) { // check mean/precision dev against maxWidth NormalEstimator n = (NormalEstimator) m_Distribution[j]; double mean = Math.log(Math.abs(n.getMean())) / Math.log(10.0); double precision = Math.log(Math.abs(n.getPrecision())) / Math.log(10.0); double width = (mean > precision) ? mean : precision; if (width < 0) { width = 1; } // decimal + # decimal places + 1 width += 6.0; if ((int) width > maxWidth) { maxWidth = (int) width; } } else if (m_Distribution[0] instanceof KernelEstimator) { containsKernel = true; KernelEstimator ke = (KernelEstimator) m_Distribution[j]; int numK = ke.getNumKernels(); String temps = "K" + numK + ": mean (weight)"; if (maxAttWidth < temps.length()) { maxAttWidth = temps.length(); } // check means + weights against maxWidth if (ke.getNumKernels() > 0) { double[] means = ke.getMeans(); double[] weights = ke.getWeights(); for (int k = 0; k < ke.getNumKernels(); k++) { String m = Utils.doubleToString(means[k], maxWidth, 4).trim(); m += " (" + Utils.doubleToString(weights[k], maxWidth, 1).trim() + ")"; if (maxWidth < m.length()) { maxWidth = m.length(); } } } } else if (m_Distribution[0] instanceof DiscreteEstimator) { DiscreteEstimator d = (DiscreteEstimator) m_Distribution[j]; for (int k = 0; k < d.getNumSymbols(); k++) { String size = "" + d.getCount(k); if (size.length() > maxWidth) { maxWidth = size.length(); } } int sum = ("" + d.getSumOfCounts()).length(); if (sum > maxWidth) { maxWidth = sum; } } } } // Check width of class labels for (int i = 0; i < m_Instances.numClasses(); i++) { String cSize = m_Instances.classAttribute().value(i); if (cSize.length() > maxWidth) { maxWidth = cSize.length(); } } // Check width of class priors for (int i = 0; i < m_Instances.numClasses(); i++) { String priorP = Utils .doubleToString(((DiscreteEstimator) m_ClassDistribution).getProbability(i), maxWidth, 2) .trim(); priorP = "(" + priorP + ")"; if (priorP.length() > maxWidth) { maxWidth = priorP.length(); } } if (maxAttWidth < "Attribute".length()) { maxAttWidth = "Attribute".length(); } if (maxAttWidth < " weight sum".length()) { maxAttWidth = " weight sum".length(); } if (containsKernel) { if (maxAttWidth < " [precision]".length()) { maxAttWidth = " [precision]".length(); } } maxAttWidth += 2; temp.append("\n\n"); temp.append(pad("Class", " 
", (maxAttWidth + maxWidth + 1) - "Class".length(), true)); temp.append("\n"); temp.append(pad("Attribute", " ", maxAttWidth - "Attribute".length(), false)); // class labels for (int i = 0; i < m_Instances.numClasses(); i++) { String classL = m_Instances.classAttribute().value(i); temp.append(pad(classL, " ", maxWidth + 1 - classL.length(), true)); } temp.append("\n"); // class priors temp.append(pad("", " ", maxAttWidth, true)); for (int i = 0; i < m_Instances.numClasses(); i++) { String priorP = Utils .doubleToString(((DiscreteEstimator) m_ClassDistribution).getProbability(i), maxWidth, 2) .trim(); priorP = "(" + priorP + ")"; temp.append(pad(priorP, " ", maxWidth + 1 - priorP.length(), true)); } temp.append("\n"); temp.append(pad("", "=", maxAttWidth + (maxWidth * m_Instances.numClasses()) + m_Instances.numClasses() + 1, true)); temp.append("\n"); // loop over the attributes int counter = 0; for (int i = 0; i < m_Instances.numAttributes(); i++) { if (i == m_Instances.classIndex()) { continue; } String attName = m_Instances.attribute(i).name(); temp.append(attName + "\n"); if (m_Distributions[counter][0] instanceof NormalEstimator) { String meanL = " mean"; temp.append(pad(meanL, " ", maxAttWidth + 1 - meanL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { // means NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String mean = Utils.doubleToString(n.getMean(), maxWidth, 4).trim(); temp.append(pad(mean, " ", maxWidth + 1 - mean.length(), true)); } temp.append("\n"); // now do std deviations String stdDevL = " std. dev."; temp.append(pad(stdDevL, " ", maxAttWidth + 1 - stdDevL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String stdDev = Utils.doubleToString(n.getStdDev(), maxWidth, 4).trim(); temp.append(pad(stdDev, " ", maxWidth + 1 - stdDev.length(), true)); } temp.append("\n"); // now the weight sums String weightL = " weight sum"; temp.append(pad(weightL, " ", maxAttWidth + 1 - weightL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String weight = Utils.doubleToString(n.getSumOfWeights(), maxWidth, 4).trim(); temp.append(pad(weight, " ", maxWidth + 1 - weight.length(), true)); } temp.append("\n"); // now the precisions String precisionL = " precision"; temp.append(pad(precisionL, " ", maxAttWidth + 1 - precisionL.length(), false)); for (int j = 0; j < m_Instances.numClasses(); j++) { NormalEstimator n = (NormalEstimator) m_Distributions[counter][j]; String precision = Utils.doubleToString(n.getPrecision(), maxWidth, 4).trim(); temp.append(pad(precision, " ", maxWidth + 1 - precision.length(), true)); } temp.append("\n\n"); } else if (m_Distributions[counter][0] instanceof DiscreteEstimator) { Attribute a = m_Instances.attribute(i); for (int j = 0; j < a.numValues(); j++) { String val = " " + a.value(j); temp.append(pad(val, " ", maxAttWidth + 1 - val.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { DiscreteEstimator d = (DiscreteEstimator) m_Distributions[counter][k]; String count = "" + d.getCount(j); temp.append(pad(count, " ", maxWidth + 1 - count.length(), true)); } temp.append("\n"); } // do the totals String total = " [total]"; temp.append(pad(total, " ", maxAttWidth + 1 - total.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { DiscreteEstimator d = (DiscreteEstimator) m_Distributions[counter][k]; String count = 
"" + d.getSumOfCounts(); temp.append(pad(count, " ", maxWidth + 1 - count.length(), true)); } temp.append("\n\n"); } else if (m_Distributions[counter][0] instanceof KernelEstimator) { String kL = " [# kernels]"; temp.append(pad(kL, " ", maxAttWidth + 1 - kL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String nk = "" + ke.getNumKernels(); temp.append(pad(nk, " ", maxWidth + 1 - nk.length(), true)); } temp.append("\n"); // do num kernels, std. devs and precisions String stdDevL = " [std. dev]"; temp.append(pad(stdDevL, " ", maxAttWidth + 1 - stdDevL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String stdD = Utils.doubleToString(ke.getStdDev(), maxWidth, 4).trim(); temp.append(pad(stdD, " ", maxWidth + 1 - stdD.length(), true)); } temp.append("\n"); String precL = " [precision]"; temp.append(pad(precL, " ", maxAttWidth + 1 - precL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String prec = Utils.doubleToString(ke.getPrecision(), maxWidth, 4).trim(); temp.append(pad(prec, " ", maxWidth + 1 - prec.length(), true)); } temp.append("\n"); // first determine max number of kernels accross the classes int maxK = 0; for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; if (ke.getNumKernels() > maxK) { maxK = ke.getNumKernels(); } } for (int j = 0; j < maxK; j++) { // means first String meanL = " K" + (j + 1) + ": mean (weight)"; temp.append(pad(meanL, " ", maxAttWidth + 1 - meanL.length(), false)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; double[] means = ke.getMeans(); double[] weights = ke.getWeights(); String m = "--"; if (ke.getNumKernels() == 0) { m = "" + 0; } else if (j < ke.getNumKernels()) { m = Utils.doubleToString(means[j], maxWidth, 4).trim(); m += " (" + Utils.doubleToString(weights[j], maxWidth, 1).trim() + ")"; } temp.append(pad(m, " ", maxWidth + 1 - m.length(), true)); } temp.append("\n"); } temp.append("\n"); } counter++; } } return temp.toString(); }