Example usage for weka.classifiers Evaluation crossValidateModel

List of usage examples for weka.classifiers Evaluation crossValidateModel

Introduction

On this page you can find example usage of weka.classifiers Evaluation crossValidateModel.

Prototype

public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random)
        throws Exception 

Document

Performs a (stratified if class is nominal) cross-validation for a classifier on a set of instances.
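
A minimal, self-contained sketch of the typical call pattern (the ARFF path and the choice of J48 are illustrative placeholders, not taken from the examples below):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidateSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder ARFF file; any dataset with a nominal class attribute works.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);
        // 10-fold cross-validation with a fixed seed for reproducible fold splits.
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString());
    }
}

Note that crossValidateModel randomizes (and, for a nominal class, stratifies) an internal copy of the data, so the Instances object passed in is not reordered.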

Usage

From source file:elh.eus.absa.WekaWrapper.java

License:Open Source License

public void crossValidate(int foldNum) throws Exception {
    System.out.println("WekaWrapper: " + foldNum + "-fold cross validation over train data.");
    System.err.println("WekaWrapper: " + foldNum + "-fold cross validation over train data.");
    Evaluation eTest = new Evaluation(traindata);
    eTest.crossValidateModel(this.MLclass, traindata, foldNum, new Random(1)); //seed = 1;      
    /* It remains for future work to inspect the random generation.
     * Using the same seed over a specific sequence seems to generate the same
     * randomization; thus, for the same sequence of instances, fold generation
     * is always the same.
     */
    //eTest.crossValidateModel(this.MLclass, traindata, foldNum, new Random((int)(Math.random()*traindata.numInstances())));
    printClassifierResults(eTest);
}
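
The reproducibility point raised in the comment above can be checked with a short, self-contained sketch (the file name and the NaiveBayes classifier are placeholders, not part of WekaWrapper): with the same seed and the same instance order, two cross-validation runs yield identical fold splits and statistics.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayes;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SeedCheck {
    public static void main(String[] args) throws Exception {
        // Placeholder training file with a nominal class attribute.
        Instances train = DataSource.read("train.arff");
        train.setClassIndex(train.numAttributes() - 1);

        Evaluation first = new Evaluation(train);
        first.crossValidateModel(new NaiveBayes(), train, 10, new Random(1));

        Evaluation second = new Evaluation(train);
        second.crossValidateModel(new NaiveBayes(), train, 10, new Random(1));

        // Same seed over the same instance sequence => identical folds and statistics.
        System.out.println(first.pctCorrect() == second.pctCorrect()); // expected: true
    }
}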

From source file:epsi.i5.datamining.Weka.java

public void excutionAlgo() throws FileNotFoundException, IOException, Exception {
    BufferedReader reader = new BufferedReader(new FileReader("src/epsi/i5/data/" + fileOne + ".arff"));
    Instances data = new Instances(reader);
    reader.close();
    //System.out.println(data.attribute(0));
    data.setClass(data.attribute(0));
    NaiveBayes NB = new NaiveBayes();
    NB.buildClassifier(data);
    Evaluation naiveBayes = new Evaluation(data);
    naiveBayes.crossValidateModel(NB, data, 10, new Random(1));
    naiveBayes.evaluateModel(NB, data);
    //System.out.println(test.confusionMatrix() + "1");
    //System.out.println(test.correct() + "2");
    System.out.println("*****************************");
    System.out.println("******** Naive Bayes ********");
    System.out.println(naiveBayes.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(naiveBayes.pctCorrect());
    System.out.println("");
    J48 j = new J48();
    j.buildClassifier(data);
    Evaluation jeval = new Evaluation(data);
    jeval.crossValidateModel(j, data, 10, new Random(1));
    jeval.evaluateModel(j, data);
    System.out.println("*****************************");
    System.out.println("************ J48 ************");
    System.out.println(jeval.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(jeval.pctCorrect());
    System.out.println("");
    DecisionTable DT = new DecisionTable();
    DT.buildClassifier(data);
    Evaluation decisionTable = new Evaluation(data);
    decisionTable.crossValidateModel(DT, data, 10, new Random(1));
    decisionTable.evaluateModel(DT, data);
    System.out.println("*****************************");
    System.out.println("******* DecisionTable *******");
    System.out.println(decisionTable.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(decisionTable.pctCorrect());
    System.out.println("");
    OneR OR = new OneR();
    OR.buildClassifier(data);
    Evaluation oneR = new Evaluation(data);
    oneR.crossValidateModel(OR, data, 10, new Random(1));
    oneR.evaluateModel(OR, data);
    System.out.println("*****************************");
    System.out.println("************ OneR ***********");
    System.out.println(oneR.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(oneR.pctCorrect());

    // Polarity
    data.setClass(data.attribute(1));
    System.out.println("");
    M5Rules MR = new M5Rules();
    MR.buildClassifier(data);
    Evaluation m5rules = new Evaluation(data);
    m5rules.crossValidateModel(MR, data, 10, new Random(1));
    m5rules.evaluateModel(MR, data);
    System.out.println("*****************************");
    System.out.println("********** M5Rules **********");
    System.out.println(m5rules.correlationCoefficient());

    System.out.println("");
    LinearRegression LR = new LinearRegression();
    LR.buildClassifier(data);
    Evaluation linearR = new Evaluation(data);
    linearR.crossValidateModel(LR, data, 10, new Random(1));
    linearR.evaluateModel(LR, data);
    System.out.println("*****************************");
    System.out.println("********** linearR **********");
    System.out.println(linearR.correlationCoefficient());
}

From source file:ffnn.FFNN.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    FFNNTubesAI cls;
    Scanner scan = new Scanner(System.in);
    System.out.print("new / read? (n/r)");
    if (scan.next().equals("n")) {
        cls = new FFNNTubesAI();
    } else {
        cls = (FFNNTubesAI) TucilWeka.readModel();
    }
    int temp;
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\Team.arff");
    // Display the attribute options
    for (int i = 0; i < data.numAttributes(); i++) {
        System.out.println(i + ". " + data.attribute(i));
    }
    System.out.print("Class Index : ");
    temp = scan.nextInt();
    data.setClassIndex(temp);
    data = preprocess(data);
    System.out.println(data);

    System.out.print("full train? (y/n)");
    if (scan.next().equals("y")) {
        try {
            cls.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(FFNNTubesAI.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    int fold = 10;

    //FFNNTubesAI.printMatrix(cls.weight1, cls.input_layer+1, cls.hidden_layer);
    //FFNNTubesAI.printMatrix(cls.weight2, cls.hidden_layer, cls.output_layer);
    //FFNNTubesAI.printMatrix(cls.bias2, 1, cls.output_layer);
    Evaluation eval = new Evaluation(data);
    System.out.print("eval/10-fold? (e/f)");
    if (scan.next().equals("e")) {
        eval.evaluateModel(cls, data);
    } else {
        eval.crossValidateModel(cls, data, fold, new Random(1));
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:ffnn.TucilWeka.java

public static Evaluation crossValidation(Instances data) {
    //10-fold cross validation
    Evaluation eval = null;
    try {

        eval = new Evaluation(data);
        Classifier cls = new FFNNTubesAI();
        if (cls == null) {
            System.out.println("MODEL CANNOT BE USED");
        } else {
            System.out.println("MODEL IS USED");
        }
        cls.buildClassifier(data);
        //crossValidateModel:
        //param 1 = the classifier (here FFNNTubesAI)
        //param 2 = Instances data
        //param 3 = number of folds
        //param 4 = random number generator (seed)

        eval.crossValidateModel(cls, data, 10, new Random(1));
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:function.CrossValidation.java

public static void crossValidation(Instances data, AbstractClassifier cls) throws Exception {
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}

From source file:function.CrossValidation.java

public static double crossValidationrate(Instances data, AbstractClassifier cls) throws Exception {
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, 10, new Random(1));
    return evaluation.pctCorrect();
}

From source file:general.Util.java

/**
 * Show learning statistics obtained by cross-validation with the given number of folds.
 * @param data instances
 * @param folds number of folds
 */
public static void FoldSchema(Instances data, int folds) {
    try {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(Util.getClassifier(), data, folds, new Random(1));
        System.out.println(eval.toSummaryString("\nResults " + folds + " folds cross-validation\n\n", false));
    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:gr.uoc.nlp.opinion.analysis.suggestion.AnalyzeSuggestions.java

/**
 *
 * @param classifier
 */
public void crossValidationTrainSet(Classifier classifier) {

    Evaluation eval;
    try {
        //initialize cross validation
        eval = new Evaluation(this.trainset);
        //validate
        eval.crossValidateModel(classifier, this.trainset, 10, new Random(1));

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());

    } catch (Exception ex) {
        Logger.getLogger(AnalyzeSuggestions.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:hero.unstable.util.classification.wekaClassifier.java

public Evaluation classify(Instances data) throws Exception {
    data.setClassIndex(0);

    // Evaluation object and random generator that drives the fold assignment
    Evaluation eval = new Evaluation(data);
    Random rand = new Random(seed);

    // Perform cross-validation
    eval.crossValidateModel(classifier, data, folds, rand);

    // output evaluation
    String result = eval.toClassDetailsString();
    /*
    System.out.println();
    System.out.println("=== Setup ===");
    System.out.println("Clasiffier: " + classifier.toString());
    System.out.println("Dataset: " + data.relationName());
    System.out.println("Folds: " + folds);
    System.out.println("Seed: " + seed);
    System.out.println();
    System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
    */
    //System.out.println(result);
    return eval;
}

From source file:id3j48.WekaAccess.java

public static Evaluation tenFoldCrossValidation(Instances data, Classifier classifier) throws Exception {
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(classifier, data, 10, new Random(1));
    return eval;
}