Example usage for weka.classifiers Evaluation correct

List of usage examples for weka.classifiers Evaluation correct

Introduction

This page lists example usages of correct() from weka.classifiers.Evaluation.

Prototype

public final double correct() 

Document

Gets the number of instances correctly classified (that is, for which a correct prediction was made). Strictly speaking, this is the sum of the weights of those instances, so the value can be fractional for weighted data.
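
As a quick orientation before the longer examples, here is a minimal, self-contained sketch (the class name CorrectDemo and the path data.arff are placeholders) showing how correct() relates to numInstances() and pctCorrect():

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class CorrectDemo {
    public static void main(String[] args) throws Exception {
        // load a dataset; the last attribute is taken as the class
        BufferedReader br = new BufferedReader(new FileReader("data.arff"));
        Instances data = new Instances(br);
        br.close();
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation of a J48 decision tree
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // correct() is a (possibly weighted) count, not a percentage;
        // pctCorrect() is the corresponding percentage
        System.out.println(eval.correct() + " of " + eval.numInstances() + " correct");
        System.out.println(eval.pctCorrect() + " % accuracy");
    }
}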

Usage

From source file:mlpoc.MLPOC.java
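
Runs a seeded, stratified 10-fold cross-validation of J48 over an ARFF file and reports the correct() and incorrect() counts alongside their percentage equivalents.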

public static Evaluation crossValidate(String filename) {
    Evaluation eval = null;
    try {
        BufferedReader br = new BufferedReader(new FileReader(filename));
        // loads data and set class index
        Instances data = new Instances(br);
        br.close();
        /*File csv=new File(filename);
        CSVLoader loader = new CSVLoader();
        loader.setSource(csv);
        Instances data = loader.getDataSet();*/
        data.setClassIndex(data.numAttributes() - 1);

        // classifier
        String[] tmpOptions = Utils.splitOptions("weka.classifiers.trees.J48 -C 0.25");
        String classname = tmpOptions[0];
        tmpOptions[0] = "";
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, tmpOptions);

        // other options
        int seed = 2;
        int folds = 10;

        // randomize data
        Random rand = new Random(seed);
        Instances randData = new Instances(data);
        randData.randomize(rand);
        if (randData.classAttribute().isNominal())
            randData.stratify(folds);

        // perform cross-validation
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // the above code is used by the StratifiedRemoveFolds filter, the
            // code below by the Explorer/Experimenter:
            // Instances train = randData.trainCV(folds, n, rand);

            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(cls);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out
                .println("Classifier: " + cls.getClass().getName() + " " + Utils.joinOptions(cls.getOptions()));
        System.out.println("Dataset: " + data.relationName());
        System.out.println("Folds: " + folds);
        System.out.println("Seed: " + seed);
        System.out.println();
        System.out.println(eval.toSummaryString("Summary for testing", true));
        System.out.println("Correctly Classified Instances: " + eval.correct());
        System.out.println("Percentage of Correctly Classified Instances: " + eval.pctCorrect());
        System.out.println("InCorrectly Classified Instances: " + eval.incorrect());
        System.out.println("Percentage of InCorrectly Classified Instances: " + eval.pctIncorrect());

    } catch (Exception ex) {
        System.err.println(ex.getMessage());
    }
    return eval;
}

From source file:my.randomforestui.RandomForestUI.java
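
Trains a RandomForest on a training set, evaluates it on a separate test set, and derives an accuracy percentage from correct().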

public static double doRandomForest(Instances training, Instances testing) throws Exception {
    double accuracy;

    // initialize the random forest options
    String[] options = new String[1];
    // grow unpruned trees
    options[0] = "-U";
    // new instance of tree
    RandomForest tree = new RandomForest();
    // set the options
    tree.setOptions(options);
    // build classifier using training data
    tree.buildClassifier(training);

    Evaluation eval = new Evaluation(testing);
    eval.evaluateModel(tree, testing);
    // accuracy as a percentage; divide by numInstances() rather than a hard-coded test-set size
    accuracy = (eval.correct() / eval.numInstances()) * 100;

    return accuracy;
}

From source file:NaiveBayesPckge.NaiveBayesMain.java
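
Evaluates a Naive Bayes model (the naive field) both on the full training set and with 10-fold cross-validation, then derives accuracy and error rate from correct() and incorrect().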

public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of Training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // summary of cross-validation
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;

    //        System.out.println("Accuracy: " + df.format(accuracy) + " %");
    //        System.out.println("Error rate: " + df.format(errorRates) + " %"); // Printing Training Mean root squared error
}

From source file:Neural_Network.NuralN.java
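
Returns the raw counts of correct and incorrect predictions on a test set, truncating the (possibly weighted) double results to int.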

public int[] testNet() {

    System.out.println();
    int[] results = new int[2];
    if (!trained) {
        System.out.println("Neural netowrk is not trained....");
    } else {
        try {
            loadTestData();
            Evaluation tempEvaluator = new Evaluation(testSet);
            tempEvaluator.evaluateModel(nN, testSet);

            results[0] = (int) tempEvaluator.correct();
            results[1] = (int) tempEvaluator.incorrect();
            tested = true;
            // test completed

        } catch (IOException e) {
            // test file missing
            System.out.println(e.toString());
        } catch (Exception e) {
            System.err.println(e.toString());
        }
    }
    return results;
}

From source file:trabfs.machineLeaningFrameWork.core.AvaliadordeSolucao.java
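
Scores a candidate feature subset: it keeps only the selected attributes, cross-validates an IBk classifier on the reduced data, and uses the fraction of correctly classified instances as the solution's quality.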

public double avalia(Solucao s) {
    double precision = 0.0;
    try {
        // build the index array of the selected attributes
        int[] toremove = makeIndex(s);

        // remove the attributes that were not selected
        Remove remove = new Remove();
        remove.setAttributeIndicesArray(toremove);
        remove.setInvertSelection(true);
        remove.setInputFormat(p.getInstances());
        Instances subproblema = Filter.useFilter(p.getInstances(), remove);
        subproblema.setClassIndex(subproblema.numAttributes() - 1);
        // classify and collect the result
        Random rand = new Random(1); // create seeded number generator

        IBk clf = new IBk(K);
        //SimpleNaiveBayes clf = new SimpleNaiveBayes();
        //NaiveBayesSimple clf = new NaiveBayesSimple();

        //clf.buildClassifier(subproblema);
        Evaluation eval = new Evaluation(subproblema);
        eval.crossValidateModel(clf, subproblema, nfolds, rand);
        precision = eval.correct() / subproblema.numInstances();

        calls++;

    } catch (Exception ex) {
        Logger.getLogger(AvaliadordeSolucao.class.getName()).log(Level.SEVERE, null, ex);
    }

    s.setQuality(precision);

    if (precision > this.best) {
        this.best = precision;
    }

    evolucao.add(this.best);
    return s.getQuality();
}