List of usage examples for weka.classifiers.Evaluation
public Evaluation(Instances data) throws Exception
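Before the project-specific launchers below, here is a minimal, self-contained sketch of how this constructor is typically used. The dataset path and the J48 classifier are illustrative assumptions, not taken from the examples that follow.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset (the path is a placeholder) and mark the last attribute as the class.
        Instances data = DataSource.read("data/example.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Train any weka classifier; J48 is used here only as an example.
        J48 tree = new J48();
        tree.buildClassifier(data);

        // The Instances passed to the constructor define the header and priors used for the statistics.
        Evaluation eval = new Evaluation(data);

        // Either evaluate on a held-out test set ...
        // eval.evaluateModel(tree, testData);

        // ... or run a 10-fold cross-validation on a fresh copy of the classifier.
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString());
        System.out.println("Error rate: " + eval.errorRate());
    }
}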
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansEUC() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/KEUC_All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;execTime;trainErrorRate;testErrorRate;prototypesPerClassDistribution");
        String algo = "KMEANSEUC";
        System.out.println(algo);
        // PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>20)
        nbPrototypesMax = 10;
        // if (nbPrototypesMax > 100)
        //     nbPrototypesMax = 100;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                EUCKNNClassifierKMeans classifierKMeansEUC = new EUCKNNClassifierKMeans();
                classifierKMeansEUC.setNbPrototypesPerClass(j);
                classifierKMeansEUC.setFillPrototypes(true);
                startTime = System.currentTimeMillis();
                classifierKMeansEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierKMeansEUC.prototypes, train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansEUC, test);
                System.out.println(eval.toSummaryString());
                // Evaluation evaltrain = new Evaluation(train);
                // evaltrain.evaluateModel(classifierKMeansEUC, train);
                double testError = eval.errorRate();
                // double trainError = evaltrain.errorRate();
                // System.out.println("TrainError:"+trainError+"\n");
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierKMeansEUC.prototypes, rep + "/" + dataName + "_KMEANSEUC[" + j + "]_XP" + n + ".proto");
                out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
        // outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansProbabilistic() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");
        String algo = "KMEANS";
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>2)
        //     nbPrototypesMax=2;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWProbabilisticClassifierKMeans classifierKMeansPro = new DTWProbabilisticClassifierKMeans();
                classifierKMeansPro.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierKMeansPro.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeansPro.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansPro, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println(testError + "\n");
                PrototyperUtil.savePrototypes(classifierKMeansPro.getPrototypes(),
                        rep + "/" + dataName + "_KMEANSPro[" + j + "]_XP" + n + ".proto");
                out.format("%s;%s;%d;%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMeansProbabilisticEUC() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/KPEUC_All_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");
        String algo = "KMEANSProbabilisticEUC";
        System.out.println(algo);
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>20)
        nbPrototypesMax = 1;
        // if (nbPrototypesMax > 100)
        //     nbPrototypesMax = 100;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                EUCProbabilisticClassifierKMeans classifierKMeansProEUC = new EUCProbabilisticClassifierKMeans();
                classifierKMeansProEUC.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierKMeansProEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierKMeansProEUC.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeansProEUC, test);
                Evaluation evaltrain = new Evaluation(train);
                evaltrain.evaluateModel(classifierKMeansProEUC, train);
                double testError = eval.errorRate();
                double trainError = evaltrain.errorRate();
                System.out.println("TrainError:" + trainError + "\n");
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierKMeansProEUC.getPrototypes(), rep + "/" + dataName + "_KMEANSProEUC[" + j + "]_XP" + n + ".proto");
                // out.format("%s;%s;%d;%.4f;%.4f\n", dataName, algo, (j * train.numClasses()), trainError, testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchNewKMeans() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/GMMDTW_" + dataName + "_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;testErrorRate");
        String algo = "GMM";
        System.out.println(algo);
        // nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>10)
        nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierNK classifierGmm = new DTWKNNClassifierNK();
                classifierGmm.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierGmm.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmm, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchGmm() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/GMMDTW_" + dataName + "_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;testErrorRate");
        String algo = "GMM";
        System.out.println(algo);
        // nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>10)
        nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierGmm classifierGmm = new DTWKNNClassifierGmm();
                classifierGmm.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierGmm.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmm, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchGmmEUC() {
    try {
        // out = new PrintStream(new FileOutputStream(rep + "/GMMEUC_" + dataName + "_results.csv", true));
        // out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");
        String algo = "GMMEUC";
        System.out.println(algo);
        // nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if (nbPrototypesMax > 10)
        nbPrototypesMax = 3;
        // if (nbPrototypesMax > 100)
        //     nbPrototypesMax = 100;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                EUCKNNClassifierGmm classifierGmmEUC = new EUCKNNClassifierGmm();
                classifierGmmEUC.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierGmmEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierGmmEUC.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmmEUC, test);
                // Evaluation evaltrain = new Evaluation(train);
                // evaltrain.evaluateModel(classifierGmmEUC, train);
                double testError = eval.errorRate();
                // double trainError = evaltrain.errorRate();
                // System.out.println("TrainError:"+trainError+"\n");
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierGmmEUC.getPrototypes(), rep + "/" + dataName + "_GMMEUC[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                // out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchFCM() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/FCMDTW_" + dataName + "_results.csv", true));
        // out.println("dataset,algorithm,nbPrototypes,testErrorRate");
        String algo = "FCM";
        System.out.println(algo);
        // nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if(nbPrototypesMax>10)
        nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierFCM classifierFCM = new DTWKNNClassifierFCM();
                classifierFCM.setNClustersPerClass(j);
                startTime = System.currentTimeMillis();
                classifierFCM.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierFCM, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;
                System.out.println("TestError:" + testError + "\n");
                // PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");
                out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchKMedoids() {
    try {
        File f = new File(rep + "/" + dataName + "_results.csv");
        // if somebody is processing it
        if (f.exists()) {
            return;
        }
        out = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_results.csv", append));
        out.println(
                "dataset;algorithm;nbPrototypes;execTime;trainErrorRate;testErrorRate;prototypesPerClassDistribution");
        String algo = "KMEDOIDS";
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        for (int j = 1; j <= nbPrototypesMax; j++) {
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                DTWKNNClassifierKMedoids classifier = new DTWKNNClassifierKMedoids();
                classifier.setNbPrototypesPerClass(j);
                classifier.setFillPrototypes(true);
                startTime = System.currentTimeMillis();
                classifier.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifier.prototypes, train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifier, test);
                double testError = eval.errorRate();
                double trainError = classifier.predictAccuracyXVal(10);
                PrototyperUtil.savePrototypes(classifier.prototypes,
                        rep + "/" + dataName + "_KMEDOIDS[" + j + "]_XP" + n + ".proto");
                out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, (j * train.numClasses()), duration,
                        trainError, testError, Arrays.toString(classDistrib));
                out.flush();
                // deterministic
                if (j == 1) {
                    break;
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchseq() {
    try {
        nbPrototypesMax = 10;
        int[] bestprototypes = new int[train.numClasses()];
        double lowerror = 1.0;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            int[] nbPrototypesPerClass = new int[train.numClasses()];
            for (int i = 0; i < train.numClasses(); i++) {
                nbPrototypesPerClass[i] = j;
            }
            double errorBefore = 1;
            double errorNow = 1;
            int flag = 0;
            do {
                Unbalancecluster classifierseq = new Unbalancecluster();
                classifierseq.setNbPrototypesPerClass(nbPrototypesPerClass);
                System.out.println(Arrays.toString(nbPrototypesPerClass));
                // classifierseq.buildClassifier(train);
                Evaluation evalcv = new Evaluation(train);
                Random rand = new Random(1);
                evalcv.crossValidateModel(classifierseq, train, 10, rand);
                // errorNow = classifierseq.predictAccuracyXVal(10);
                errorNow = evalcv.errorRate();
                System.out.println("errorBefore " + errorBefore);
                System.out.println("errorNow " + errorNow);
                if (errorNow < errorBefore) {
                    nbPrototypesPerClass[flag]++;
                    errorBefore = errorNow;
                } else {
                    nbPrototypesPerClass[flag]--;
                    flag++;
                    if (flag >= nbPrototypesPerClass.length)
                        break;
                    nbPrototypesPerClass[flag]++;
                }
            } while (flag < nbPrototypesPerClass.length);
            // System.out.println("\nbest nbPrototypesPerClass " + Arrays.toString(nbPrototypesPerClass));
            double testError = 0;
            for (int n = 0; n < nbExp; n++) {
                Unbalancecluster classifier = new Unbalancecluster();
                classifier.setNbPrototypesPerClass(nbPrototypesPerClass);
                classifier.buildClassifier(train);
                Evaluation evaltest = new Evaluation(train);
                evaltest.evaluateModel(classifier, test);
                testError += evaltest.errorRate();
            }
            double avgTestError = testError / nbExp;
            System.out.println(avgTestError);
            if (avgTestError < lowerror) {
                bestprototypes = nbPrototypesPerClass;
                lowerror = avgTestError;
            }
        }
        System.out.println("Best prototypes:" + Arrays.toString(bestprototypes) + "\n");
        System.out.println("Best errorRate:" + lowerror + "\n");
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchFSKMeans() {
    try {
        // File f = new File(rep + "/" + dataName + "_results.csv");
        // // if somebody is processing it
        // if (f.exists()) {
        //     return;
        // }
        //
        // out = new PrintStream(new FileOutputStream(rep + "/FastKMeansDTW_" + dataName + "_results.csv", true));
        // out.println("dataset,algorithm,nbPrototypes,testErrorRate,trainErrorRate");
        String algo = "FastKMEANS";
        System.out.println(algo);
        // PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        // if (nbPrototypesMax>10)
        nbPrototypesMax = 50;
        int tmp;
        tmp = nbExp;
        double[] avgerror = new double[5];
        double[] avgf1 = new double[5];
        // double[] trainrctmp = new double[5];
        // double[] testrctmp = new double[5];
        // double[] cvrctmp = new double[5];
        // boolean stopflag=false;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            // double[] trainrc = new double[5];
            // double[] testrc = new double[5];
            // double[] cvrc = new double[5];
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                // System.out.println("This is the "+n+" time.");
                DTWKNNClassifierKMeansCached classifierKMeans = new DTWKNNClassifierKMeansCached();
                classifierKMeans.setNbPrototypesPerClass(j);
                classifierKMeans.setFillPrototypes(true);
                startTime = System.currentTimeMillis();
                classifierKMeans.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeans.prototypes, train);
                Evaluation evaltest = new Evaluation(train);
                evaltest.evaluateModel(classifierKMeans, test);
                avgerror[n] = evaltest.errorRate();
                avgf1[n] = evaltest.fMeasure(0);
                // Evaluation evaltrain = new Evaluation(train);
                // evaltrain.evaluateModel(classifierKMeans, train);
                /*DTWKNNClassifierKMeansCached KMeans = new DTWKNNClassifierKMeansCached();
                KMeans.setNbPrototypesPerClass(j);
                KMeans.setFillPrototypes(true);
                Evaluation evalcv = new Evaluation(train);
                Random rand = new Random(1);
                evalcv.crossValidateModel(KMeans, train, 10, rand);
                double CVError = evalcv.errorRate();
                System.out.println("CVError:"+CVError+"\n");*/
                // PrototyperUtil.savePrototypes(classifierKMeans.prototypes, rep + "/" + dataName + "_KMEANS[" + j + "]_XP" + n + ".proto");
                // out.format("%s,%s,%d,%.4f,%.4f,%.4f\n", dataName, algo, (j * train.numClasses()), testError, CVError, trainError);
                // out.flush();
                // trainrc[n]=trainError;
                // testrc[n]=testError;
                // cvrc[n]=CVError;
                // if (n == 4) {
                //     if (j == 1) {
                //         trainrctmp = trainrc;
                //         testrctmp = testrc;
                //         cvrctmp = cvrc;
                //     } else {
                //         if (Arrays.equals(trainrc, trainrctmp) && Arrays.equals(testrc, testrctmp)
                //                 && Arrays.equals(cvrc, cvrctmp)) {
                //             System.out.println("Stable at " + j);
                //             stopflag=true;
                //         } else {
                //             trainrctmp = trainrc;
                //             testrctmp = testrc;
                //             cvrctmp = cvrc;
                //         }
                //     }
                // }
            }
            System.out
                    .println("TestError:" + Utils.mean(avgerror) + "\tF-Measures:" + Utils.mean(avgf1) + "\n");
            // if(stopflag==true)
            //     break;
        }
        // outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}