Example usage for weka.classifiers Evaluation crossValidateModel

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.crossValidateModel.

Prototype

public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random)
        throws Exception 

Document

Performs a (stratified if class is nominal) cross-validation for a classifier on a set of instances.
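
For quick reference, here is a minimal, self-contained sketch of the call shown in the prototype. The ARFF path and the choice of a J48 classifier are placeholder assumptions for illustration; the examples below show real-world usage.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidationSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset (placeholder path) and set the class attribute.
        Instances data = DataSource.read("/path/to/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // crossValidateModel trains and evaluates its own copies of the
        // classifier on each fold, so an untrained instance can be passed in.
        Evaluation evaluation = new Evaluation(data);
        evaluation.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(evaluation.toSummaryString());
        System.out.println(evaluation.toMatrixString());
    }
}

Because the classifier is copied and retrained for each fold, any earlier call to buildClassifier on the passed-in instance does not affect the cross-validation result.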

Usage

From source file:meddle.TrainModelByDomainOS.java

License: Open Source License

/**
 * Do evaluation on the trained classifier/model, including the summary, false
 * positive/negative rate, AUC, and running time.
 *
 * @param classifier
 *            - the trained classifier
 * @param domainOS
 *            - the domain name
 */
public static MetaEvaluationMeasures doEvaluation(Classifier classifier, String domainOS, Instances tras,
        MetaEvaluationMeasures mem) {
    try {
        Evaluation evaluation = new Evaluation(tras);
        evaluation.crossValidateModel(classifier, tras, 10, new Random(1));
        mem.numInstance = evaluation.numInstances();
        double M = evaluation.numTruePositives(1) + evaluation.numFalseNegatives(1);
        mem.numPositive = (int) M;
        mem.AUC = evaluation.areaUnderROC(1);
        mem.numCorrectlyClassified = (int) evaluation.correct();
        mem.accuracy = 1.0 * mem.numCorrectlyClassified / mem.numInstance;
        mem.falseNegativeRate = evaluation.falseNegativeRate(1);
        mem.falsePositiveRate = evaluation.falsePositiveRate(1);
        mem.fMeasure = evaluation.fMeasure(1);
        double[][] cmMatrix = evaluation.confusionMatrix();
        mem.confusionMatrix = cmMatrix;
        mem.TP = evaluation.numTruePositives(1);
        mem.TN = evaluation.numTrueNegatives(1);
        mem.FP = evaluation.numFalsePositives(1);
        mem.FN = evaluation.numFalseNegatives(1);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return mem;
}

From source file:ml.ann.MainDriver.java

public static void testModel() {
    System.out.println("## Pilih bahan testing");
    System.out.println("## 1. Uji dengan data dari masukan training");
    System.out.println("## 2. Uji dengan data data masukan baru");
    System.out.print("## > ");

    int choice = (new Scanner(System.in)).nextInt();
    if (choice == 1) {
        try {
            Evaluation eval = new Evaluation(train);

            if (cv10) {
                eval.crossValidateModel(model, test, 10, new Random(1));
            } else {
                eval.evaluateModel(model, test);
            }

            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            System.out.println(eval.toMatrixString());

        } catch (Exception E) {
            E.printStackTrace();
        }
    } else if (choice == 2) {
        try {
            loadTestData();
            Evaluation eval = new Evaluation(train);
            if (cv10) {
                eval.crossValidateModel(model, test, 10, new Random(1));
            } else {
                eval.evaluateModel(model, test);
            }

            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            System.out.println(eval.toMatrixString());
        } catch (Exception E) {
            E.printStackTrace();
        }
    }

}

From source file:mlpoc.MLPOC.java

/**
 * Uses the meta-classifier.
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classfier");
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    CfsSubsetEval eval = new CfsSubsetEval();
    GreedyStepwise search = new GreedyStepwise();
    search.setSearchBackwards(true);
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}

From source file:myclassifier.naiveBayes.java

public void CrossValidation() throws Exception {
    if (data != null) {
        Instances train = data;
        // train classifier
        NBClassifier.buildClassifier(train);
        // evaluate classifier and print some statistics
        Evaluation eval = new Evaluation(train);
        eval.crossValidateModel(NBClassifier, train, 10, new Random(1));
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));
        System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString());
    } else {
        System.out.println("Data is null");
    }
}

From source file:myclassifier.wekaCode.java

public static void foldValidation(Instances dataSet, Classifier classifiers) throws Exception {
    Evaluation evaluation = new Evaluation(dataSet);
    evaluation.crossValidateModel(classifiers, dataSet, 10, new Random(1)); // performs stratified 10-fold cross-validation on the given instances
    System.out.println(evaluation.toSummaryString("\n 10-fold cross validation", false));
    System.out.println(evaluation.toMatrixString("\n Confusion Matrix"));

}

From source file:NaiveBayes.NaiveBayes.java

/**
 * @param args the command line arguments
 * @throws java.io.IOException
 */
public static void main(String[] args) throws IOException, Exception {
    System.out.print("1. Buat Model \n");
    System.out.print("2. Load Model\n");
    System.out.print("Masukkan pilihan : ");
    Scanner sc = new Scanner(System.in);
    int pil = sc.nextInt();
    System.out.print("Masukkan nama file : ");
    String filename = sc.next();
    DataSource source = new DataSource(("D:\\Program Files\\Weka-3-8\\data\\" + filename));
    Instances train = source.getDataSet();
    for (int i = 0; i < train.numAttributes(); i++)
        System.out.println(i + ". " + train.attribute(i).name());
    System.out.print("Masukkan indeks kelas : ");
    int classIdx = sc.nextInt();
    train.setClassIndex(classIdx);
    //        MultilayerPerceptron mlp = new MultilayerPerceptron(train, 0.1, 10000, 14);
    //        mlp.buildClassifier(train);
    //        Evaluation eval = new Evaluation (train);
    ////        eval.evaluateModel(mlp, train);
    //        System.out.println(eval.toSummaryString());
    NaiveBayes tb = new NaiveBayes();
    Evaluation eval = new Evaluation(train);
    switch (pil) {
    case 1:
        //                tb.buildClassifier(train);
        //                tb.toSummaryString();
        //                eval.evaluateModel(tb, train);
        eval.crossValidateModel(tb, train, 10, new Random(1));
        System.out.println(eval.toSummaryString(true));
        System.out.println(eval.toMatrixString());
        //saveModel(tb);
        break;
    default:
        tb = loadModel();
        tb.toSummaryString();
        eval.crossValidateModel(tb, train, 10, new Random(1));
        System.out.println(eval.toSummaryString());
        //                System.out.println(eval.toMatrixString());
    }
}

From source file:naivebayes.NBRun.java

public static void main(String[] args) throws Exception {
    System.out.println("Naive Bayes Classifier");
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush_test.arff");
    Scanner scan = new Scanner(System.in);
    Classifier cls;
    Instances train = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush.arff");

    System.out.println("Read or create model? r/c");
    if (scan.next().equals("c")) {
        cls = new NBTubesAI();
        cls.buildClassifier(train);
        TucilWeka.saveModel(train);
    } else {
        cls = (NBTubesAI) TucilWeka.readModel();
    }
    Evaluation eval = new Evaluation(data);
    System.out.println("10 fold cross validate or Full train? c/f");
    if (scan.next().equals("c")) {
        int fold = 10;
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        eval.crossValidateModel(cls, data, fold, new Random(1));
    } else {
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        data.deleteWithMissingClass();
        try {

            eval.evaluateModel(cls, data);
        } catch (java.lang.Exception ex) {
            eval.crossValidateModel(cls, data, 11, new Random(1));
        }
    }
    //        Classifier cls=new NBTubesAI();
    //        cls.buildClassifier(data);

    System.out.println("Hasil evaluasi: ");
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:naivebayes.TucilWeka.java

public static Evaluation crossValidation(Instances data) {
    //10-fold cross validation
    Evaluation eval = null;
    try {

        eval = new Evaluation(data);
        Classifier cls = new NBTubesAI();
        if (cls == null) {
            System.out.println("MODEL CANNOT BE USED");
        } else {
            System.out.println("MODEL IS USED");
        }
        cls.buildClassifier(data);
        //crossValidateModel:
        //param 1 = the classifier (here NBTubesAI)
        //param 2 = the Instances data
        //param 3 = the number of folds
        //param 4 = the randomizer (seed)

        eval.crossValidateModel(cls, data, 10, new Random(1));
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:NaiveBayesPckge.NaiveBayesMain.java

public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of Training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // Summary of Training
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;

    //        System.out.println("Accuracy: " + df.format(accuracy) + " %");
    //        System.out.println("Error rate: " + df.format(errorRates) + " %"); // Printing Training Mean root squared error
}

From source file:newclassifier.NewClassifier.java

public void crossValidation() throws Exception {
    cls.buildClassifier(data); // trains on the full data; crossValidateModel below builds its own per-fold copies
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(cls, data, 10, new Random(1));
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}