Example usage for weka.classifiers.Evaluation Evaluation(Instances)

List of usage examples for the weka.classifiers.Evaluation constructor Evaluation(Instances)

Introduction

On this page you can find example usage for the weka.classifiers.Evaluation constructor Evaluation(Instances data).

Prototype

public Evaluation(Instances data) throws Exception 
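
As a quick orientation before the sourced examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of the typical pattern around this constructor: the training data is passed to Evaluation so it can derive the header and class priors, and the trained model is then evaluated on a held-out test set or by cross-validation. The file names train.arff and test.arff and the choice of J48 are illustrative assumptions only.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationSketch {
    public static void main(String[] args) throws Exception {
        // Assumed file names; replace with real datasets.
        Instances train = DataSource.read("train.arff");
        Instances test = DataSource.read("test.arff");
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        J48 classifier = new J48();
        classifier.buildClassifier(train);

        // The constructor takes the training data so that the header and the
        // class priors come from the training set, not from the test set.
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(classifier, test);
        System.out.println("Error rate on test set: " + eval.errorRate());
        System.out.println(eval.toSummaryString());

        // Alternative: 10-fold cross-validation on the training data only.
        Evaluation cv = new Evaluation(train);
        cv.crossValidateModel(new J48(), train, 10, new Random(1));
        System.out.println("Cross-validated error rate: " + cv.errorRate());
    }
}

Using the training set in the constructor matters for the prior-dependent statistics in toSummaryString(), such as the relative absolute error, which compare the model against always predicting the training-set priors.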

Source Link

Usage

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchAllKMeans() {
    try {
        String algo = "AllKMEANS";
        System.out.println(algo);

        // sweep the total number of prototypes from one per class up to one per training instance
        nbPrototypesMax = this.train.numInstances();
        int prototypestart = this.train.numClasses();
        for (int j = prototypestart; j <= nbPrototypesMax; j++) {
            double testError = 0.0;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                KMeans classifier = new KMeans();
                classifier.setNbPrototypes(j);
                classifier.buildClassifier(train);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifier, test);
                testError += eval.errorRate();
                System.out.println("TestError:" + eval.errorRate() + "\n");
            }
            System.out.println("TestError of average:" + (testError / nbExp) + "\n");

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchDT() {
    if (train.numClasses() == 2) {

        try {
            String algo = "DecisionTree";
            System.out.println(algo);

            double testError = 0.0;
            ClassifyDT dt = new ClassifyDT();
            dt.buildClassifier(train);
            System.out.println("\nClassify test sets:\n");
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(dt, test);
            testError = eval.errorRate();
            System.out.println("TestError:" + testError + "\n");
            System.out.println(eval.toSummaryString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchJ48() {
    try {
        String algo = "J48";
        System.out.println(algo);

        double testError = 0.0;
        J48 dt = new J48();
        dt.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(dt, test);
        testError = eval.errorRate();
        System.out.println("TestError:" + testError + "\n");
        System.out.println(dt.toSummaryString());
        System.out.println(dt.graph());
        System.out.println(eval.toSummaryString());

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchBigDT() {
    try {
        //         out = new PrintStream(new FileOutputStream(rep + "/DT_" + dataName + "_results.csv", true));
        String algo = "BigDT_Forest";
        System.out.println(algo);
        double testError = 0.0;
        startTime = System.currentTimeMillis();
        ClassifyBigDT dt = new ClassifyBigDT();
        dt.buildClassifier(train);
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration traintime = Duration.ofMillis(duration);
        //         System.out.println(traintime);
        startTime = System.currentTimeMillis();
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(dt, test);
        testError = eval.errorRate();
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration testtime = Duration.ofMillis(duration);
        //         System.out.println(testtime);
        System.out.println("TestError:" + testError + "\n");
        //         System.out.println(eval.toSummaryString());
        //         out.format("%s,%.4f\n", dataName,  testError);
        //         out.flush();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchStaticEnsemble() {
    try {
        String algo = "StaticEnsemble";
        System.out.println(algo);

        double testError = 0.0;
        double testError_DT = 0.0;
        double testError_FKM_4 = 0.0;
        double testError_FKM_10 = 0.0;
        //         double testError_KMeans = 0.0;
        startTime = System.currentTimeMillis();
        StaticEnsembleClassify staticensembleClassify = new StaticEnsembleClassify();
        staticensembleClassify.buildClassifier(train);
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration traintime = Duration.ofMillis(duration);
        //         System.out.println(traintime);

        Evaluation eval_FKM_4 = new Evaluation(train);
        eval_FKM_4.evaluateModel(staticensembleClassify.getFkm_4(), test);
        testError_FKM_4 = eval_FKM_4.errorRate();
        staticensembleClassify.setWeight_fkm_4(testError_FKM_4);
        System.out.println("TestError of FKM_4:" + testError_FKM_4 + "\n");

        //         Evaluation eval_KMeans = new Evaluation(train);
        //         eval_KMeans.evaluateModel(ensembleClassify.getKMeans(), test);
        //         testError_KMeans = eval_KMeans.errorRate();
        //         ensembleClassify.setWeight_kmeans(testError_KMeans);
        //         System.out.println("TestError of KMeans:" + testError_KMeans + "\n");

        Evaluation eval_FKM_10 = new Evaluation(train);
        eval_FKM_10.evaluateModel(staticensembleClassify.getFkm_10(), test);
        testError_FKM_10 = eval_FKM_10.errorRate();
        staticensembleClassify.setWeight_fkm_10(testError_FKM_10);
        System.out.println("TestError of FKM_10:" + testError_FKM_10 + "\n");

        Evaluation eval_DT = new Evaluation(train);
        eval_DT.evaluateModel(staticensembleClassify.getDt(), test);
        testError_DT = eval_DT.errorRate();
        staticensembleClassify.setWeight_dt(testError_DT);
        System.out.println("TestError of DT:" + testError_DT + "\n");

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(staticensembleClassify, test);
        testError = eval.errorRate();
        System.out.println("TestError of Ensemble:" + testError + "\n");
        System.out.println(eval.toSummaryString());

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchDynamicEnsemble() {
    int method = 0;
    switch (method) {
    case 0:
        try {
            String algo = "BigDTDynamicEnsemble";
            System.out.println(algo);

            double testError = 0.0;
            BDTEClassifier dynamicEnsembleClassify = new BDTEClassifier();
            dynamicEnsembleClassify.buildClassifier(train);
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(dynamicEnsembleClassify, test);
            testError = eval.errorRate();
            System.out.println("TestError:" + testError + "\n");
            System.out.println(eval.toSummaryString());
        } catch (Exception e) {
            e.printStackTrace();
        }
        break;

    case 1:
        try {
            String algo = "FKMDynamicEnsemble";
            System.out.println(algo);

            double testError = 0.0;
            FKMDEClassifier dynamicEnsembleClassify = new FKMDEClassifier();
            dynamicEnsembleClassify.buildClassifier(train);
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(dynamicEnsembleClassify, test);
            testError = eval.errorRate();
            System.out.println("TestError:" + testError + "\n");
            System.out.println(eval.toSummaryString());
        } catch (Exception e) {
            e.printStackTrace();
        }
        break;
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchPU(double ratio) {
    try {
        //         out = new PrintStream(new FileOutputStream(rep + "/DT_" + dataName + "_results.csv", true));
        String algo = "PU";
        System.out.println(algo);
        double testError = 0.0;
        startTime = System.currentTimeMillis();
        //         POSC45 pu = new POSC45();
        DTWD pu = new DTWD();
        pu.setRatio(ratio);
        pu.buildClassifier(train);
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration traintime = Duration.ofMillis(duration);
        //         System.out.println(traintime);
        startTime = System.currentTimeMillis();
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(pu, test);
        //         StringBuffer forPredictionsPrinting = new StringBuffer();
        //         eval.evaluateModel(pu, train, forPredictionsPrinting, null, false);
        //         System.out.println(eval.toClassDetailsString());
        //         System.out.println(eval.toSummaryString("\nResults\n======\n", false));
        //         System.out.println(eval.toMatrixString());
        //         System.out.println(forPredictionsPrinting);
        System.out.println(eval.fMeasure(0));
        testError = eval.errorRate();
        endTime = System.currentTimeMillis();
        duration = endTime - startTime;
        //         Duration testtime = Duration.ofMillis(duration);
        //         System.out.println(testtime);
        System.out.println("TestError:" + testError + "\n");
        //         System.out.println(eval.toSummaryString());
        //         out.format("%s,%.4f\n", dataName,  testError);
        //         out.flush();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchPUKMeans() {
    try {
        //         File f = new File(rep + "/" + dataName + "_results.csv");
        //         // if somebody is processing it
        //         if (f.exists()) {
        //            return;
        //         }
        //
        //         out = new PrintStream(new FileOutputStream(rep + "/KMeansDTW_" + "all" + "_results.csv", true));
        //         out.println("dataset,algorithm,nbPrototypes,testErrorRate,trainErrorRate");
        String algo = "PUKMEANS";
        System.out.println(algo);
        //         PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));

        // bound the number of cluster counts to sweep; large datasets are capped
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        if (nbPrototypesMax > 100)
            nbPrototypesMax = 50;
        int tmp = nbExp;
        // size the per-run result arrays by the number of experiments rather than a
        // hard-coded 5, so indices stay in bounds and the averages below use the
        // right denominator
        double[] avgerror = new double[nbExp];
        double[] avgf1 = new double[nbExp];

        for (int j = 1; j <= nbPrototypesMax; j += 1) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                //               System.out.println("This is the "+n+" time.");
                DTWPUKMeans classifierKMeans = new DTWPUKMeans();
                classifierKMeans.setNbClustersinUNL(j);
                startTime = System.currentTimeMillis();
                classifierKMeans.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                //               Duration traintime = Duration.ofMillis(duration);
                //               System.out.println(traintime);
                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeans, test);
                avgerror[n] = eval.errorRate();
                avgf1[n] = eval.fMeasure(0);

                //               PrototyperUtil.savePrototypes(classifierKMeans.prototypes, rep + "/" + dataName + "_KMEANS[" + j + "]_XP" + n + ".proto");

                //               out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                //               out.flush();
            }
            System.out.println("TestError:" + Utils.mean(avgerror) + "\tF-Measures:" + Utils.mean(avgf1) + "\n");
        }
        //         outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.Prototyper.java

License:Open Source License

/**
 * Estimates how well the prototypes generalise from the learning set, using
 * cross-validation on the training data.
 *
 * @param nbFolds
 *            the number of folds for the cross-validation
 * @return the cross-validated error rate
 */
public double predictAccuracyXVal(int nbFolds) throws Exception {
    Evaluation eval = new Evaluation(trainingData);
    eval.crossValidateModel(this, trainingData, nbFolds, new Random(), new Object[] {});
    return eval.errorRate();
}

From source file:classify.Classifier.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    //read in data
    try {
        DataSource input = new DataSource("no_missing_values.csv");
        Instances data = input.getDataSet();
        //Instances data = readFile("newfixed.txt");
        missingValuesRows(data);

        setAttributeValues(data);
        data.setClassIndex(data.numAttributes() - 1);

        //boosting
        AdaBoostM1 boosting = new AdaBoostM1();
        boosting.setNumIterations(25);
        boosting.setClassifier(new DecisionStump());

        //build the classifier
        boosting.buildClassifier(data);

        //evaluate using 10-fold cross validation
        Evaluation e1 = new Evaluation(data);
        e1.crossValidateModel(boosting, data, 10, new Random(1));

        DecimalFormat nf = new DecimalFormat("0.000");

        System.out.println("Results of Boosting with Decision Stumps:");
        System.out.println(boosting.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");

        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");

        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + "   ");
            }
            System.out.println();
        }
        System.out.println();
        System.out.println();
        System.out.println();

        //logistic regression
        Logistic l = new Logistic();
        l.buildClassifier(data);

        e1 = new Evaluation(data);

        e1.crossValidateModel(l, data, 10, new Random(1));
        System.out.println("Results of Logistic Regression:");
        System.out.println(l.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");

        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");

        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + "   ");
            }
            System.out.println();
        }

    } catch (Exception ex) {
        // the data could not be read or a classifier failed, so end the program
        System.out.println("Exception thrown, program ending: " + ex.getMessage());
    }
}