Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.

Usage

From source file:mao.datamining.ModelProcess.java

private void testWithExtraDS(Classifier classifier, Instances finalTrainDataSet, Instances finalTestDataSet,
        FileOutputStream testCaseSummaryOut, TestResult result) {
    // Trains `classifier` on the final training set, evaluates it once on the
    // held-out final test set, and records timings, the confusion matrix, AUC,
    // precision and recall (all for class index 1) into `result`.
    // Does nothing when finalTestDataSet is null.
    double confusionMatrix[][] = null;

    long start, end, trainTime = 0, testTime = 0;
    if (finalTestDataSet != null) {
        try {
            // training time
            start = System.currentTimeMillis();
            classifier.buildClassifier(finalTrainDataSet);
            end = System.currentTimeMillis();
            trainTime += end - start;

            // test time
            start = System.currentTimeMillis();
            Evaluation testEvalOnly = new Evaluation(finalTrainDataSet);
            testEvalOnly.evaluateModel(classifier, finalTestDataSet);
            end = System.currentTimeMillis();
            testTime += end - start;

            // FIX: use an explicit charset — the no-arg getBytes() depends on
            // the platform default and can corrupt the summary file.
            java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
            testCaseSummaryOut.write("=====================================================\n".getBytes(utf8));
            testCaseSummaryOut.write(testEvalOnly.toSummaryString("=== Test Summary ===", true).getBytes(utf8));
            testCaseSummaryOut.write("\n".getBytes(utf8));
            testCaseSummaryOut
                    .write(testEvalOnly.toClassDetailsString("=== Test Class Detail ===\n").getBytes(utf8));
            testCaseSummaryOut.write("\n".getBytes(utf8));
            testCaseSummaryOut
                    .write(testEvalOnly.toMatrixString("=== Confusion matrix for Test ===\n").getBytes(utf8));
            testCaseSummaryOut.flush();

            confusionMatrix = testEvalOnly.confusionMatrix();
            result.setConfusionMatrix4Test(confusionMatrix);

            // NOTE(review): class index 1 is assumed to be the positive class
            // for AUC/precision/recall — confirm against the dataset layout.
            result.setAUT(testEvalOnly.areaUnderROC(1));
            result.setPrecision(testEvalOnly.precision(1));
            result.setRecall(testEvalOnly.recall(1));
        } catch (Exception e) {
            ModelProcess.logging(null, e);
        }
        // Timings are stored even if evaluation failed part-way through.
        result.setTrainingTime(trainTime);
        result.setTestTime(testTime);
    } //using test data set , end

}

From source file:mao.datamining.ModelProcess.java

private void testCV(Classifier classifier, Instances finalTrainDataSet, FileOutputStream testCaseSummaryOut,
        TestResult result) {
    // Runs stratified `folds`-fold cross-validation of `classifier` on the
    // training set, accumulating every fold into a single Evaluation whose
    // summary, class details and confusion matrix are written to the summary
    // stream; the confusion matrix is also stored on `result`.
    long start, end, trainTime = 0, testTime = 0;
    Evaluation evalAll = null;
    double confusionMatrix[][] = null;
    // randomize data, and then stratify it into `folds` groups
    Random rand = new Random(1);
    Instances randData = new Instances(finalTrainDataSet);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        // stratification only applies to nominal class attributes
        randData.stratify(folds);
    }

    try {
        evalAll = new Evaluation(randData);
        for (int i = 0; i < folds; i++) {
            Instances train = randData.trainCV(folds, i);
            Instances test = randData.testCV(folds, i);

            // training time for this fold (fresh classifier copy per fold)
            start = System.currentTimeMillis();
            Classifier classifierCopy = Classifier.makeCopy(classifier);
            classifierCopy.buildClassifier(train);
            end = System.currentTimeMillis();
            trainTime += end - start;

            // test time for this fold.
            // FIX: removed a redundant per-fold Evaluation that evaluated the
            // same classifier a second time and whose results were never read.
            start = System.currentTimeMillis();
            evalAll.evaluateModel(classifierCopy, test);
            end = System.currentTimeMillis();
            testTime += end - start;
        }

    } catch (Exception e) {
        ModelProcess.logging(null, e);
    } //end test by cross validation

    if (evalAll == null) {
        // FIX: Evaluation construction failed (already logged above);
        // previously this fell through and logged a NullPointerException.
        return;
    }

    // NOTE(review): trainTime/testTime are accumulated but never stored on
    // `result` here, unlike testWithExtraDS — confirm whether that is intended.

    // output evaluation
    try {
        ModelProcess.logging("");
        // FIX: explicit charset — no-arg getBytes() uses the platform default.
        java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
        testCaseSummaryOut
                .write(evalAll.toSummaryString("=== Cross Validation Summary ===", true).getBytes(utf8));
        testCaseSummaryOut.write("\n".getBytes(utf8));
        testCaseSummaryOut.write(
                evalAll.toClassDetailsString("=== " + folds + "-fold Cross-validation Class Detail ===\n")
                        .getBytes(utf8));
        testCaseSummaryOut.write("\n".getBytes(utf8));
        testCaseSummaryOut
                .write(evalAll.toMatrixString("=== Confusion matrix for all folds ===\n").getBytes(utf8));
        testCaseSummaryOut.flush();

        confusionMatrix = evalAll.confusionMatrix();
        result.setConfusionMatrix10Folds(confusionMatrix);
    } catch (Exception e) {
        ModelProcess.logging(null, e);
    }
}

From source file:miRdup.WekaModule.java

License:Open Source License

/**
 * Evaluates a serialized classifier model on a test ARFF file, pairs each
 * prediction with the corresponding sequence record from the predictions
 * file, and writes three output files: full predictions (.miRdup.txt),
 * tab-separated predictions (.miRdup.tab.txt), and a run summary
 * (.miRdupOutput.txt). When predictMiRNA is true, records whose prediction
 * was not validated are additionally run through the sequence-based
 * miRdup predictor. All failures are caught and printed to stderr.
 */
public static void testModel(File testarff, String predictionsFile, String classifier, boolean predictMiRNA) {
    System.out.println("Testing model on " + predictionsFile + " adapted in " + testarff
            + ". Submitted to model " + classifier);

    try {
        // Read the sequence records; prefer the ".folded" variant when present.
        ArrayList<MirnaObject> alobj = new ArrayList<MirnaObject>();
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(predictionsFile + ".folded"));
        } catch (FileNotFoundException fileNotFoundException) {
            br = new BufferedReader(new FileReader(predictionsFile));
        }
        BufferedReader br2 = new BufferedReader(new FileReader(testarff));
        // Skip the ARFF header; data lines start after "@data".
        String line2 = br2.readLine();
        while (!line2.startsWith("@data")) {
            line2 = br2.readLine();
        }
        // Pair each tab-separated sequence line with its ARFF data line.
        // Assumed columns: id, mature sequence, precursor sequence, structure
        // — TODO confirm against the producer of the predictions file.
        String line = " ";
        int cpt = 0;
        while (br.ready()) {
            line = br.readLine();
            line2 = br2.readLine();
            String[] tab = line.split("\t");
            MirnaObject m = new MirnaObject();
            m.setArff(line2);
            m.setId(cpt++);
            m.setIdName(tab[0]);
            m.setMatureSequence(tab[1]);
            m.setPrecursorSequence(tab[2]);
            m.setStructure(tab[3]);
            alobj.add(m);
        }
        br.close();
        br2.close();

        // load data
        DataSource source = new DataSource(testarff.toString());
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            // default: last attribute is the class
            data.setClassIndex(data.numAttributes() - 1);
        }
        //remove ID row
        data.deleteAttributeAt(0);
        //load model
        Classifier model = (Classifier) weka.core.SerializationHelper.read(classifier);

        // evaluate dataset on the model
        Evaluation eval = new Evaluation(data);

        eval.evaluateModel(model, data);

        FastVector fv = eval.predictions();

        // Writers for the three report files.
        PrintWriter pw = new PrintWriter(new FileWriter(predictionsFile + "." + classifier + ".miRdup.txt"));
        PrintWriter pwt = new PrintWriter(
                new FileWriter(predictionsFile + "." + classifier + ".miRdup.tab.txt"));
        PrintWriter pwout = new PrintWriter(
                new FileWriter(predictionsFile + "." + classifier + ".miRdupOutput.txt"));

        for (int i = 0; i < fv.size(); i++) {
            // Parse actual class, predicted class, and confidence score out of
            // the prediction's toString(). NOTE(review): brittle — depends on
            // Weka's exact prediction string layout.
            //System.out.println(fv.elementAt(i).toString());
            String[] tab = fv.elementAt(i).toString().split(" ");
            int actual = Integer.valueOf(tab[1].substring(0, 1));
            int predicted = Integer.valueOf(tab[2].substring(0, 1));
            double score = 0.0;
            boolean validated = false;
            if (actual == predicted) { //case validated
                int s = tab[4].length();
                try {
                    score = Double.valueOf(tab[4]);
                    //score = Double.valueOf(tab[4].substring(0, s - 1));
                } catch (NumberFormatException numberFormatException) {
                    // unparsable score defaults to 0
                    score = 0.0;
                }

                validated = true;
            } else {// case not validated
                int s = tab[5].length();
                try {
                    score = Double.valueOf(tab[5]);
                    //score = Double.valueOf(tab[5].substring(0, s - 1));
                } catch (NumberFormatException numberFormatException) {
                    score = 0.0;
                }
                validated = false;
            }
            MirnaObject m = alobj.get(i);
            m.setActual(actual);
            m.setPredicted(predicted);
            m.setScore(score);
            m.setValidated(validated);
            m.setNeedPrediction(predictMiRNA);
            String predictionMiRNA = "";
            if (predictMiRNA && validated == false) {
                // Non-validated record: predict the miRNA directly from sequence.
                predictionMiRNA = miRdupPredictor.Predictor.predictionBySequence(m.getPrecursorSequence(),
                        classifier, classifier + ".miRdupPrediction.txt");
                try {
                    m.setPredictedmiRNA(predictionMiRNA.split(",")[0]);
                    m.setPredictedmiRNAstar(predictionMiRNA.split(",")[1]);
                } catch (Exception e) {
                    // Predictor returned no comma-separated pair; store as-is.
                    m.setPredictedmiRNA(predictionMiRNA);
                    m.setPredictedmiRNAstar(predictionMiRNA);
                }
            }

            pw.println(m.toStringFullPredictions());
            pwt.println(m.toStringPredictions());
            // Flush periodically so partial output survives a crash.
            if (i % 100 == 0) {
                pw.flush();
                pwt.flush();
            }
        }

        // Console + file summary assembled from selected lines of Weka's
        // summary text (magic indices depend on its exact layout).
        //System.out.println(eval.toSummaryString("\nSummary results of predictions\n======\n", false));
        String[] out = eval.toSummaryString("\nSummary results of predictions\n======\n", false).split("\n");
        String info = out[0] + "\n" + out[1] + "\n" + out[2] + "\n" + out[4] + "\n" + out[5] + "\n" + out[6]
                + "\n" + out[7] + "\n" + out[11] + "\n";
        System.out.println(info);
        //System.out.println("Predicted position of the miRNA by miRdup:"+predictionMiRNA);
        pwout.println(
                "File " + predictionsFile + " adapted in " + testarff + " submitted to model " + classifier);
        pwout.println(info);

        pw.flush();
        pw.close();
        pwt.flush();
        pwt.close();
        pwout.flush();
        pwout.close();

        System.out.println("Results in " + predictionsFile + "." + classifier + ".miRdup.txt");

        // draw curve
        //rocCurve(eval);
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:miRdup.WekaModule.java

License:Open Source License

public static String testModel(File testarff, String classifier) {
    // Evaluates a serialized model on an ARFF test set and returns the
    // prediction accuracy formatted by the class-level `dec` formatter
    // (with "," normalized to "."). Returns "" on any failure.
    try {

        // load data
        DataSource source = new DataSource(testarff.toString());
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        //load model
        Classifier model = (Classifier) weka.core.SerializationHelper.read(classifier);

        // evaluate dataset on the model
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(model, data);

        // Parse TP/FP/TN/FN out of the class-details text.
        // NOTE(review): brittle — depends on Weka's exact column layout.
        String ev[] = eval.toClassDetailsString().split("\n");

        String p = ev[3].trim();
        String n = ev[4].trim();

        double tp = Double.parseDouble(p.substring(0, 6).trim());
        double fp = 0;
        try {
            fp = Double.parseDouble(p.substring(11, 16).trim());
        } catch (Exception exception) {
            // wider column fallback when the first slice is not a number
            fp = Double.parseDouble(p.substring(7, 16).trim());
        }
        double tn = Double.parseDouble(n.substring(0, 6).trim());
        double fn = 0;
        try {
            fn = Double.parseDouble(n.substring(11, 16).trim());
        } catch (Exception exception) {
            fn = Double.parseDouble(n.substring(7, 16).trim());
        }

        // specificity, sensitivity, Matthews correlation, prediction accuracy
        double sp = ((tn) / (tn + fp));
        double se = ((tp) / (tp + fn));
        double acc = ((tp + tn) / (tp + tn + fp + fn));
        // BUG FIX: the MCC denominator was missing parentheses
        // ("... * (tp + fn) * tn + fp"); the correct form is
        // sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN)).
        double mcc = ((tp * tn) - (fp * fn))
                / Math.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

        String out = dec.format(acc).replace(",", ".");
        System.out.println(out);
        return out;
    } catch (Exception e) {
        e.printStackTrace();
        return "";
    }

}

From source file:ml.ann.MainDriver.java

public static void testModel() {
    // Interactive test menu: option 1 evaluates the current model on the
    // already-loaded test data; option 2 loads fresh test data first, then
    // evaluates. Any other choice does nothing.
    System.out.println("## Pilih bahan testing");
    System.out.println("## 1. Uji dengan data dari masukan training");
    System.out.println("## 2. Uji dengan data data masukan baru");
    System.out.print("## > ");

    int choice = (new Scanner(System.in)).nextInt();
    if (choice == 1) {
        evaluateAndPrint();
    } else if (choice == 2) {
        try {
            loadTestData();
        } catch (Exception E) {
            E.printStackTrace();
            return; // skip evaluation if the test data could not be loaded
        }
        evaluateAndPrint();
    }

}

// Builds an Evaluation over `train`, runs either 10-fold cross-validation
// (when cv10 is set) or a plain evaluation of `model` on `test`, and prints
// the summary, per-class details, and confusion matrix.
// Extracted to remove the exact duplication between the two menu branches.
private static void evaluateAndPrint() {
    try {
        Evaluation eval = new Evaluation(train);

        if (cv10) {
            eval.crossValidateModel(model, test, 10, new Random(1));
        } else {
            eval.evaluateModel(model, test);
        }

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());

    } catch (Exception E) {
        E.printStackTrace();
    }
}

From source file:ml.ann.MainPTR.java

public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    // Interactive driver: reads an ARFF dataset, applies NominalToBinary and
    // Normalize filters, trains a MultiClassPTR classifier with the selected
    // training rule, then evaluates it on the training data itself.
    boolean randomWeight;
    double weightawal = 0.0;
    double learningRate = 0.0001;
    double threshold = 0.00;
    double momentum = 0.00;
    int maxEpoch = 100000;
    int nCrossValidate = 2;

    m_nominalToBinaryFilter = new NominalToBinary();
    m_normalize = new Normalize();

    Scanner in = new Scanner(System.in);
    System.out.println("Lokasi file: ");

    // BUG FIX: the entered path was previously overwritten by a hard-coded
    // "test-arffs/iris.arff" debug leftover; the user's input is now honored.
    String filepath = in.nextLine();
    System.out.println("--- Algoritma ---");
    System.out.println("1. Perceptron Training Rule");
    System.out.println("2. Delta Rule Incremental");
    System.out.println("3. Delta Rule Batch");
    System.out.println("Pilihan Algoritma (1/2/3) : ");
    int choice = in.nextInt();
    String temp = in.nextLine(); // consume the newline left behind by nextInt()

    System.out.println("Apakah Anda ingin memasukkan nilai weight awal? (YES/NO)");
    String isRandom = in.nextLine();
    System.out.println("Apakah Anda ingin memasukkan konfigurasi? (YES/NO)");
    String config = in.nextLine();

    if (config.equalsIgnoreCase("yes")) {
        System.out.print("Masukkan nilai learning rate: ");
        learningRate = in.nextDouble();
        System.out.print("Masukkan nilai threshold: ");
        threshold = in.nextDouble();
        System.out.print("Masukkan nilai momentum: ");
        momentum = in.nextDouble();
        System.out.print("Masukkan jumlah epoch: ");
        // BUG FIX: this answer was assigned to `threshold`, silently
        // discarding the epoch count and corrupting the threshold value.
        maxEpoch = in.nextInt();
        System.out.print("Masukkan jumlah folds untuk crossvalidate: ");
        nCrossValidate = in.nextInt();
        in.nextLine(); // BUG FIX: clear the trailing newline so the weight
                       // prompt below does not read an empty line.
    }

    randomWeight = isRandom.equalsIgnoreCase("yes");

    if (randomWeight) {
        System.out.print("Masukkan nilai weight awal: ");
        weightawal = Double.valueOf(in.nextLine());
    }

    // print the effective configuration
    if (isRandom.equalsIgnoreCase("yes")) {
        System.out.print("isRandom | ");
    } else {
        System.out.print("Weight " + weightawal + " | ");
    }

    System.out.print("L.rate " + learningRate + " | ");
    System.out.print("Max Epoch " + maxEpoch + " | ");
    System.out.print("Threshold " + threshold + " | ");
    System.out.print("Momentum " + momentum + " | ");
    System.out.print("Folds " + nCrossValidate + " | ");
    System.out.println();

    // FIX: close the reader once the Instances are loaded (was leaked).
    Instances train;
    try (FileReader trainreader = new FileReader(filepath)) {
        train = new Instances(trainreader);
    }
    train.setClassIndex(train.numAttributes() - 1);

    // nominal attributes -> binary indicators, then normalize numeric ranges
    m_nominalToBinaryFilter.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_nominalToBinaryFilter));

    m_normalize.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_normalize));

    MultiClassPTR tempMulti = new MultiClassPTR(choice, randomWeight, learningRate, maxEpoch, threshold);
    tempMulti.buildClassifier(train);

    // evaluate on the training data itself
    Evaluation eval = new Evaluation(new Instances(train));
    eval.evaluateModel(tempMulti, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:mlpoc.MLPOC.java

public static Evaluation crossValidate(String filename) {
    // Runs 10-fold stratified cross-validation of J48 (confidence factor
    // 0.25) on the ARFF file `filename` and prints a setup/results report.
    // Returns the aggregated Evaluation, or null if anything failed.
    Evaluation eval = null;
    try {
        // FIX: try-with-resources so the reader is closed on all paths.
        Instances data;
        try (BufferedReader br = new BufferedReader(new FileReader(filename))) {
            data = new Instances(br);
        }
        // last attribute is the class
        data.setClassIndex(data.numAttributes() - 1);

        // classifier: J48 with confidence factor 0.25.
        // FIX: build the options array directly instead of splitting a
        // combined string and passing a leftover "" as the first option.
        String classname = "weka.classifiers.trees.J48";
        String[] options = { "-C", "0.25" };
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, options);

        // other options
        int seed = 2;
        int folds = 10;

        // randomize data
        Random rand = new Random(seed);
        Instances randData = new Instances(data);
        randData.randomize(rand);
        if (randData.classAttribute().isNominal())
            randData.stratify(folds);

        // perform cross-validation
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // the above split is used by the StratifiedRemoveFolds filter;
            // the Explorer/Experimenter would use:
            // Instances train = randData.trainCV(folds, n, rand);

            // build and evaluate classifier on a fresh copy per fold
            Classifier clsCopy = Classifier.makeCopy(cls);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out
                .println("Classifier: " + cls.getClass().getName() + " " + Utils.joinOptions(cls.getOptions()));
        System.out.println("Dataset: " + data.relationName());
        System.out.println("Folds: " + folds);
        System.out.println("Seed: " + seed);
        System.out.println();
        System.out.println(eval.toSummaryString("Summary for testing", true));
        System.out.println("Correctly Classified Instances: " + eval.correct());
        System.out.println("Percentage of Correctly Classified Instances: " + eval.pctCorrect());
        System.out.println("InCorrectly Classified Instances: " + eval.incorrect());
        System.out.println("Percentage of InCorrectly Classified Instances: " + eval.pctIncorrect());

    } catch (Exception ex) {
        // FIX: keep the full stack trace instead of only the message
        // (getMessage() can be null and loses the failure location).
        ex.printStackTrace();
    }
    return eval;
}

From source file:model.clasification.klasifikacijaIstanca.java

public static void main(String[] args) throws Exception {
    // Load the dataset, fit a Naive Bayes model on it, evaluate against the
    // same data, and print the resulting confusion matrix.
    DataSource loader = new DataSource(fileName);
    Instances dataset = loader.getDataSet();
    dataset.setClassIndex(dataset.numAttributes() - 1);

    // Build the Naive Bayes classifier over the full dataset.
    NaiveBayes classifier = new NaiveBayes();
    classifier.buildClassifier(dataset);

    // Evaluate the model on its own training data.
    Evaluation evaluation = new Evaluation(dataset);
    evaluation.evaluateModel(classifier, dataset);

    // Summary text is produced but only the confusion matrix is printed.
    String summaryText = evaluation.toSummaryString();

    System.out.println(evaluation.toMatrixString());

}

From source file:my.randomforestui.RandomForestUI.java

public static double doRandomForest(Instances training, Instances testing) throws Exception {
    // Trains an unpruned RandomForest on `training` and returns its
    // percentage accuracy on `testing`.

    // -U: build unpruned trees
    String[] options = new String[] { "-U" };
    RandomForest tree = new RandomForest();
    tree.setOptions(options);
    // build classifier using training data
    tree.buildClassifier(training);

    Evaluation eval = new Evaluation(testing);
    eval.evaluateModel(tree, testing);

    // BUG FIX: accuracy was computed as eval.correct() / 56 * 100, which is
    // only correct for a 56-instance test set. pctCorrect() returns the same
    // value for that case and generalizes to any test-set size.
    return eval.pctCorrect();
}

From source file:myclassifier.naiveBayes.java

public void PercentageSplit(double percent) throws Exception {
    // Split `data` into a leading training portion of `percent` percent and
    // the remaining trailing test portion, then train the Naive Bayes
    // classifier and print evaluation statistics.
    int total = data.numInstances();
    int trainCount = (int) Math.round(total * percent / 100);
    int testCount = total - trainCount;

    Instances trainSet = new Instances(data, 0, trainCount);
    Instances testSet = new Instances(data, trainCount, testCount);

    // fit the classifier on the training portion
    NBClassifier.buildClassifier(trainSet);

    // evaluate on the held-out portion and report
    Evaluation evaluation = new Evaluation(trainSet);
    evaluation.evaluateModel(NBClassifier, testSet);
    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
    System.out.println(evaluation.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n"));
    System.out.println(evaluation.toMatrixString());
}