Example usage for weka.classifiers Evaluation errorRate

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.errorRate().

Prototype

public final double errorRate() 

Document

Returns the estimated error rate or the root mean squared error (if the class is numeric).
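
Before the full source-file examples below, here is a minimal, self-contained sketch of the call. The file names train.arff and test.arff are placeholders, and J48 stands in for any weka.classifiers.Classifier; for a numeric class, errorRate() returns the root mean squared error rather than the misclassification rate.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ErrorRateExample {
    public static void main(String[] args) throws Exception {
        // Load train and test sets (placeholder paths) and mark the last attribute as the class.
        Instances train = DataSource.read("train.arff");
        Instances test = DataSource.read("test.arff");
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        // Train any classifier; J48 is used here only as an example.
        J48 classifier = new J48();
        classifier.buildClassifier(train);

        // Evaluate on the held-out test set and read the error rate.
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(classifier, test);
        System.out.println("Test error rate: " + eval.errorRate());

        // Alternatively, estimate it by 10-fold cross-validation on the training set.
        Evaluation cv = new Evaluation(train);
        cv.crossValidateModel(new J48(), train, 10, new Random(1));
        System.out.println("CV error rate: " + cv.errorRate());
    }
}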

Usage

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchGmm() {
    try {
        //         out = new PrintStream(new FileOutputStream(rep + "/GMMDTW_"+dataName+"_results.csv", true));
        //         out.println("dataset;algorithm;nbPrototypes;testErrorRate");
        String algo = "GMM";
        System.out.println(algo);

        //         nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        //         if(nbPrototypesMax>10)
        nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;

        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierGmm classifierGmm = new DTWKNNClassifierGmm();
                classifierGmm.setNClustersPerClass(j);

                startTime = System.currentTimeMillis();
                classifierGmm.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                //               int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmm, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;

                System.out.println("TestError:" + testError + "\n");
                //               PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");

                //               out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                //               out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchGmmEUC() {
    try {
        //         out = new PrintStream(new FileOutputStream(rep + "/GMMEUC_"+dataName+"_results.csv", true));
        //         out.println("dataset;algorithm;nbPrototypes;trainErrorRate_Now;testErrorRate_Now;trainErrorRate_Before;testErrorRate_Before");
        String algo = "GMMEUC";
        System.out.println(algo);
        //         nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        //         if (nbPrototypesMax > 10)
        nbPrototypesMax = 3;
        //         if (nbPrototypesMax > 100)
        //            nbPrototypesMax = 100;
        int tmp;
        tmp = nbExp;

        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                EUCKNNClassifierGmm classifierGmmEUC = new EUCKNNClassifierGmm();
                classifierGmmEUC.setNClustersPerClass(j);

                startTime = System.currentTimeMillis();
                classifierGmmEUC.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierGmmEUC.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierGmmEUC, test);
                //               Evaluation evaltrain = new Evaluation(train);
                //               evaltrain.evaluateModel(classifierGmmEUC, train);

                double testError = eval.errorRate();
                //               double trainError = evaltrain.errorRate();
                //               System.out.println("TrainError:"+trainError+"\n");
                System.out.println("TestError:" + testError + "\n");
                //               PrototyperUtil.savePrototypes(classifierGmmEUC.getPrototypes(), rep + "/" + dataName + "_GMMEUC[" + j + "]_XP" + n + ".proto");

                //               out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                //               out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchFCM() {
    try {
        out = new PrintStream(new FileOutputStream(rep + "/FCMDTW_" + dataName + "_results.csv", true));
        //         out.println("dataset,algorithm,nbPrototypes,testErrorRate");
        String algo = "FCM";
        System.out.println(algo);

        //         nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        //         if(nbPrototypesMax>10)
        nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;

        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierFCM classifierFCM = new DTWKNNClassifierFCM();
                classifierFCM.setNClustersPerClass(j);

                startTime = System.currentTimeMillis();
                classifierFCM.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                //               int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierGmm.getPrototypes(), train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierFCM, test);
                double testError = eval.errorRate();
                double trainError = Double.NaN;

                System.out.println("TestError:" + testError + "\n");
                //               PrototyperUtil.savePrototypes(classifierGmm.getPrototypes(), rep + "/" + dataName + "_GMM[" + j + "]_XP" + n + ".proto");

                out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                out.flush();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchKMedoids() {
    try {
        File f = new File(rep + "/" + dataName + "_results.csv");
        // if somebody is processing it
        if (f.exists()) {
            return;
        }

        out = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_results.csv", append));
        out.println(
                "dataset;algorithm;nbPrototypes;execTime;trainErrorRate;testErrorRate;prototypesPerClassDistribution");
        String algo = "KMEDOIDS";

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();

        for (int j = 1; j <= nbPrototypesMax; j++) {
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                DTWKNNClassifierKMedoids classifier = new DTWKNNClassifierKMedoids();
                classifier.setNbPrototypesPerClass(j);
                classifier.setFillPrototypes(true);

                startTime = System.currentTimeMillis();
                classifier.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifier.prototypes,
                        train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifier, test);

                double testError = eval.errorRate();
                double trainError = classifier.predictAccuracyXVal(10);

                PrototyperUtil.savePrototypes(classifier.prototypes,
                        rep + "/" + dataName + "_KMEDOIDS[" + j + "]_XP" + n + ".proto");

                out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, (j * train.numClasses()), duration,
                        trainError, testError, Arrays.toString(classDistrib));
                out.flush();
                // deterministic
                if (j == 1) {
                    break;
                }
            }

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchseq() {
    try {
        nbPrototypesMax = 10;
        int[] bestprototypes = new int[train.numClasses()];
        double lowerror = 1.0;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            int[] nbPrototypesPerClass = new int[train.numClasses()];
            for (int i = 0; i < train.numClasses(); i++) {
                nbPrototypesPerClass[i] = j;
            }
            double errorBefore = 1;
            double errorNow = 1;
            int flag = 0;
            do {
                Unbalancecluster classifierseq = new Unbalancecluster();
                classifierseq.setNbPrototypesPerClass(nbPrototypesPerClass);
                System.out.println(Arrays.toString(nbPrototypesPerClass));
                //               classifierseq.buildClassifier(train);
                Evaluation evalcv = new Evaluation(train);
                Random rand = new Random(1);
                evalcv.crossValidateModel(classifierseq, train, 10, rand);
                //               errorNow = classifierseq.predictAccuracyXVal(10);
                errorNow = evalcv.errorRate();
                System.out.println("errorBefore " + errorBefore);
                System.out.println("errorNow " + errorNow);
                if (errorNow < errorBefore) {
                    nbPrototypesPerClass[flag]++;
                    errorBefore = errorNow;
                } else {
                    nbPrototypesPerClass[flag]--;
                    flag++;
                    if (flag >= nbPrototypesPerClass.length)
                        break;
                    nbPrototypesPerClass[flag]++;
                }
            } while (flag < nbPrototypesPerClass.length);
            //            System.out.println("\nbest nbPrototypesPerClass " + Arrays.toString(nbPrototypesPerClass));
            double testError = 0;
            for (int n = 0; n < nbExp; n++) {
                Unbalancecluster classifier = new Unbalancecluster();
                classifier.setNbPrototypesPerClass(nbPrototypesPerClass);
                classifier.buildClassifier(train);
                Evaluation evaltest = new Evaluation(train);
                evaltest.evaluateModel(classifier, test);
                testError += evaltest.errorRate();
            }
            double avgTestError = testError / nbExp;
            System.out.println(avgTestError);
            if (avgTestError < lowerror) {
                bestprototypes = nbPrototypesPerClass;
                lowerror = avgTestError;
            }
        }
        System.out.println("Best prototypes:" + Arrays.toString(bestprototypes) + "\n");
        System.out.println("Best errorRate:" + lowerror + "\n");
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchFSKMeans() {
    try {
        //         File f = new File(rep + "/" + dataName + "_results.csv");
        //         // if somebody is processing it
        //         if (f.exists()) {
        //            return;
        //         }
        //
        //         out = new PrintStream(new FileOutputStream(rep + "/FastKMeansDTW_" + dataName + "_results.csv", true));
        //         out.println("dataset,algorithm,nbPrototypes,testErrorRate,trainErrorRate");
        String algo = "FastKMEANS";
        System.out.println(algo);
        //         PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        //         if (nbPrototypesMax>10)
        nbPrototypesMax = 50;
        int tmp;
        tmp = nbExp;
        double[] avgerror = new double[5];
        double[] avgf1 = new double[5];
        //         double[] trainrctmp = new double[5];
        //         double[] testrctmp = new double[5];
        //         double[] cvrctmp = new double[5];
        //         boolean stopflag=false;
        for (int j = 1; j <= nbPrototypesMax; j++) {
            //            double[] trainrc = new double[5];
            //            double[] testrc = new double[5];
            //            double[] cvrc = new double[5];
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                //               System.out.println("This is the "+n+" time.");
                DTWKNNClassifierKMeansCached classifierKMeans = new DTWKNNClassifierKMeansCached();
                classifierKMeans.setNbPrototypesPerClass(j);
                classifierKMeans.setFillPrototypes(true);

                startTime = System.currentTimeMillis();
                classifierKMeans.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;

                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeans.prototypes, train);

                Evaluation evaltest = new Evaluation(train);
                evaltest.evaluateModel(classifierKMeans, test);
                avgerror[n] = evaltest.errorRate();
                avgf1[n] = evaltest.fMeasure(0);
                //               Evaluation evaltrain = new Evaluation(train);
                //               evaltrain.evaluateModel(classifierKMeans, train);

                /*DTWKNNClassifierKMeansCached KMeans = new DTWKNNClassifierKMeansCached();
                KMeans.setNbPrototypesPerClass(j);
                KMeans.setFillPrototypes(true);
                Evaluation evalcv = new Evaluation(train);
                Random rand = new Random(1);
                evalcv.crossValidateModel(KMeans, train, 10, rand);
                double CVError = evalcv.errorRate();
                System.out.println("CVError:"+CVError+"\n");*/

                //               PrototyperUtil.savePrototypes(classifierKMeans.prototypes, rep + "/" + dataName + "_KMEANS[" + j + "]_XP" + n + ".proto");

                //               out.format("%s,%s,%d,%.4f,%.4f,%.4f\n", dataName, algo, (j * train.numClasses()), testError,CVError,trainError);
                //               out.flush();
                //               trainrc[n]=trainError;
                //               testrc[n]=testError;
                //               cvrc[n]=CVError;
                //               if (n == 4) {
                //                  if (j == 1) {
                //                     trainrctmp = trainrc;
                //                     testrctmp = testrc;
                //                     cvrctmp = cvrc;
                //                  } else {
                //                     if (Arrays.equals(trainrc, trainrctmp) && Arrays.equals(testrc, testrctmp)
                //                           && Arrays.equals(cvrc, cvrctmp)) {
                //                        System.out.println("Stable at " + j);
                //                        stopflag=true;
                //                     } else {
                //                        trainrctmp = trainrc;
                //                        testrctmp = testrc;
                //                        cvrctmp = cvrc;
                //                     }
                //                  }
                //               }
            }
            System.out
                    .println("TestError:" + Utils.mean(avgerror) + "\tF-Measures:" + Utils.mean(avgf1) + "\n");
            //            if(stopflag==true)
            //               break;
        }
        //         outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchAllKMeans() {
    try {
        String algo = "AllKMEANS";
        System.out.println(algo);

        nbPrototypesMax = this.train.numInstances();
        int prototypestart = this.train.numClasses();
        for (int j = prototypestart; j <= nbPrototypesMax; j++) {
            double testError = 0.0;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                KMeans classifier = new KMeans();
                classifier.setNbPrototypes(j);
                classifier.buildClassifier(train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifier, test);
                testError += eval.errorRate();
                System.out.println("TestError:" + eval.errorRate() + "\n");
            }
            System.out.println("TestError of average:" + (testError / nbExp) + "\n");

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchDT() {
    if (train.numClasses() == 2) {

        try {
            String algo = "DecisionTree";
            System.out.println(algo);

            double testError = 0.0;
            ClassifyDT dt = new ClassifyDT();
            dt.buildClassifier(train);
            System.out.println("\nClassify test sets:\n");
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(dt, test);
            testError = eval.errorRate();
            System.out.println("TestError:" + testError + "\n");
            System.out.println(eval.toSummaryString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchJ48() {
    try {
        String algo = "J48";
        System.out.println(algo);

        double testError = 0.0;
        J48 dt = new J48();
        dt.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(dt, test);
        testError = eval.errorRate();
        System.out.println("TestError:" + testError + "\n");
        System.out.println(dt.toSummaryString());
        System.out.println(dt.graph());
        System.out.println(eval.toSummaryString());

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchBigDT() {
    try {
        //         out = new PrintStream(new FileOutputStream(rep + "/DT_" + dataName + "_results.csv", true));
        String algo = "BigDT_Forest";
        System.out.println(algo);
        double testError = 0.0;
        startTime = System.currentTimeMillis();
        ClassifyBigDT dt = new ClassifyBigDT();
        dt.buildClassifier(train);
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration traintime = Duration.ofMillis(duration);
        //         System.out.println(traintime);
        startTime = System.currentTimeMillis();
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(dt, test);
        testError = eval.errorRate();
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration testtime = Duration.ofMillis(duration);
        //         System.out.println(testtime);
        System.out.println("TestError:" + testError + "\n");
        //         System.out.println(eval.toSummaryString());
        //         out.format("%s,%.4f\n", dataName,  testError);
        //         out.flush();
    } catch (Exception e) {
        e.printStackTrace();
    }
}