List of usage examples for weka.classifiers.Evaluation.errorRate()
public final double errorRate()
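Typical usage pattern, shown as a minimal self-contained sketch before the real-world examples below. The iris.arff file name and the J48 classifier are placeholders chosen for illustration, not taken from the examples: load a dataset, set its class attribute, evaluate a classifier with the Evaluation class, and read the fraction of misclassified instances from errorRate().

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ErrorRateSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical ARFF file; replace with your own data set.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Any weka.classifiers.Classifier can be plugged in here; J48 is just an example.
        J48 tree = new J48();

        // 10-fold cross-validation; errorRate() returns the proportion of
        // incorrectly classified instances over all predictions made so far.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(tree, data, 10, new Random(1));
        System.out.println("Error rate: " + eval.errorRate());
    }
}

The same Evaluation object also exposes evaluateModel(classifier, testInstances) for a plain train/test split, which is the pattern most of the examples below use.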
From source file:Pair.java
License:Open Source License
private double getTestError() throws Exception {
    Evaluation evaluation = new Evaluation(testData);
    evaluation.evaluateModel(this, testData);
    return evaluation.errorRate();
}
From source file:binarizer.LayoutAnalysis.java
public double crossValidation(String arffFile) throws Exception {
    DataSource source = new DataSource(arffFile);
    Instances trainingData = source.getDataSet();
    if (trainingData.classIndex() == -1)
        trainingData.setClassIndex(trainingData.numAttributes() - 1);

    NaiveBayes nb = new NaiveBayes();
    nb.setUseSupervisedDiscretization(true);

    Evaluation evaluation = new Evaluation(trainingData);
    evaluation.crossValidateModel(nb, trainingData, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
    return evaluation.errorRate();
}
From source file:boostingPL.boosting.AdaBoost.java
License:Open Source License
private double weightError(int t) throws Exception {
    // evaluate all instances
    Evaluation eval = new Evaluation(insts);
    eval.evaluateModel(classifiers[t], insts);
    return eval.errorRate();
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
private void runDropsSteped(String algo, Prototyper prototype) {
    try {
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        for (int i = 1; i <= nbPrototypesMax; i++) {
            prototype.setNbPrototypesPerClass(i);
            prototype.setFillPrototypes(false);
            startTime = System.currentTimeMillis();
            prototype.buildClassifier(train);
            endTime = System.currentTimeMillis();
            duration = endTime - startTime;

            int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(prototype.prototypes, train);

            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(prototype, test);
            double testError = eval.errorRate();
            double trainError = Double.NaN;

            out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, (i * train.numClasses()), duration,
                    trainError, testError, Arrays.toString(classDistrib));
            out.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
private void runDrops(String algo, Prototyper prototype) {
    try {
        for (int i = 1; i <= this.train.numInstances(); i++) {
            prototype.setNbPrototypesPerClass(i);
            prototype.setFillPrototypes(false);
            startTime = System.currentTimeMillis();
            prototype.buildClassifier(train);
            endTime = System.currentTimeMillis();
            duration = endTime - startTime;

            int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(prototype.prototypes, train);

            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(prototype, test);
            double testError = eval.errorRate();
            // double trainError = prototype.predictAccuracyXVal(10);
            double trainError = Double.NaN;

            out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, i, duration, trainError, testError,
                    Arrays.toString(classDistrib));
            out.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeans() {
    try {
        // File f = new File(rep + "/" + dataName + "_results.csv");
        // // if somebody is processing it
        // if (f.exists()) {
        //     return;
        // }
        //
        // out = new PrintStream(new FileOutputStream(rep + "/KMeansDTW_" + "all" + "_results.csv", true));
        // out.println("dataset,algorithm,nbPrototypes,testErrorRate,trainErrorRate");

        String algo = "KMEANS";
        System.out.println(algo);
        // PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        if (nbPrototypesMax > 10)
            nbPrototypesMax = 10;

        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierKMeans classifierKMeans = new DTWKNNClassifierKMeans();
                classifierKMeans.setNbPrototypesPerClass(j);
                classifierKMeans.setFillPrototypes(true);
                startTime = System.currentTimeMillis();
                classifierKMeans.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // Duration traintime = Duration.ofMillis(duration);
                // System.out.println(traintime);

                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeans.prototypes, train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeans, test);
                double testError = eval.errorRate();
                System.out.println("TestError:" + testError + "\n");

                // PrototyperUtil.savePrototypes(classifierKMeans.prototypes, rep + "/" + dataName + "_KMEANS[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                // out.flush();
            }
        }
        // outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansEUC() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/KEUC_All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;execTime;trainErrorRate;testErrorRate;prototypesPerClassDistribution");

        String algo = "KMEANSEUC";
        System.out.println(algo);
        // PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>20)
        nbPrototypesMax = 10;
        // if (nbPrototypesMax > 100)
        //     nbPrototypesMax = 100;

        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                EUCKNNClassifierKMeans classifierKMeansEUC = new EUCKNNClassifierKMeans();
                classifierKMeansEUC.setNbPrototypesPerClass(j);
                classifierKMeansEUC.setFillPrototypes(true);
                startTime = System.currentTimeMillis();
                classifierKMeansEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierKMeansEUC.prototypes, train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansEUC, test);
                System.out.println(eval.toSummaryString());
                // Evaluation evaltrain = new Evaluation(train);
                // evaltrain.evaluateModel(classifierKMeansEUC, train);

                double testError = eval.errorRate();
                // double trainError = evaltrain.errorRate();
                // System.out.println("TrainError:" + trainError + "\n");
                System.out.println("TestError:" + testError + "\n");

                // PrototyperUtil.savePrototypes(classifierKMeansEUC.prototypes, rep + "/" + dataName + "_KMEANSEUC[" + j + "]_XP" + n + ".proto");

                out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
        // outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansProbabilistic() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");

        String algo = "KMEANS";

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>2)
        //     nbPrototypesMax=2;

        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWProbabilisticClassifierKMeans classifierKMeansPro = new DTWProbabilisticClassifierKMeans();
                classifierKMeansPro.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierKMeansPro.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeansPro.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansPro, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println(testError + "\n");

                PrototyperUtil.savePrototypes(classifierKMeansPro.getPrototypes(),
                        rep + "/" + dataName + "_KMEANSPro[" + j + "]_XP" + n + ".proto");
                out.format("%s;%s;%d;%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansProbabilisticEUC() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/KPEUC_All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");

        String algo = "KMEANSProbabilisticEUC";
        System.out.println(algo);

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>20)
        nbPrototypesMax = 1;
        // if (nbPrototypesMax > 100)
        //     nbPrototypesMax = 100;

        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                EUCProbabilisticClassifierKMeans classifierKMeansProEUC = new EUCProbabilisticClassifierKMeans();
                classifierKMeansProEUC.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierKMeansProEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierKMeansProEUC.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansProEUC, test);
                Evaluation evaltrain = new Evaluation(train);
                evaltrain.evaluateModel(classifierKMeansProEUC, train);

                double testError = eval.errorRate();
                double trainError = evaltrain.errorRate();
                System.out.println("TrainError:" + trainError + "\n");
                System.out.println("TestError:" + testError + "\n");

                // PrototyperUtil.savePrototypes(classifierKMeansProEUC.getPrototypes(), rep + "/" + dataName + "_KMEANSProEUC[" + j + "]_XP" + n + ".proto");
                // out.format("%s;%s;%d;%.4f;%.4f\n", dataName, algo, (j * train.numClasses()), trainError, testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchNewKMeans() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/GMMDTW_" + dataName + "_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;testErrorRate");

        String algo = "GMM";
        System.out.println(algo);

        // nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>10)
        nbPrototypesMax = 10;

        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierNK classifierGmm = new DTWKNNClassifierNK();
                classifierGmm.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierGmm.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmm, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println("TestError:" + testError + "\n");

                // PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}