Example usage for weka.classifiers Evaluation toClassDetailsString

List of usage examples for weka.classifiers Evaluation toClassDetailsString

Introduction

In this page you can find the example usage for weka.classifiers Evaluation toClassDetailsString.

Prototype

public String toClassDetailsString() throws Exception 

Source Link

Document

Generates a per-class breakdown of the classifier's accuracy (with the default title), incorporating information-retrieval statistics such as true/false positive rate, precision, recall, and F-measure.

Usage

From source file:miRdup.WekaModule.java

License:Open Source License

/**
 * Evaluates a serialized Weka model against a test ARFF file and returns the
 * prediction accuracy formatted with the shared {@code dec} formatter
 * (comma decimal separator replaced by a dot).
 *
 * @param testarff   ARFF file with the test instances
 * @param classifier path of the serialized Weka model to load
 * @return formatted accuracy, or "" if anything fails (error is printed)
 */
public static String testModel(File testarff, String classifier) {
    try {
        // Load the test data; default the class attribute to the last column.
        DataSource source = new DataSource(testarff.toString());
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        // Load the serialized model from disk.
        Classifier model = (Classifier) weka.core.SerializationHelper.read(classifier);

        // Evaluate the whole data set on the model.
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(model, data);

        // Parse TP/FP/TN/FN out of the fixed-width class-details text.
        // NOTE(review): brittle — depends on Weka's exact output layout;
        // Evaluation's numeric accessors would be safer if available.
        String ev[] = eval.toClassDetailsString().split("\n");
        String p = ev[3].trim();
        String n = ev[4].trim();

        double tp = Double.parseDouble(p.substring(0, 6).trim());
        double fp;
        try {
            fp = Double.parseDouble(p.substring(11, 16).trim());
        } catch (Exception exception) {
            // Wider column fallback when the narrow slice is not a number.
            fp = Double.parseDouble(p.substring(7, 16).trim());
        }
        double tn = Double.parseDouble(n.substring(0, 6).trim());
        double fn;
        try {
            fn = Double.parseDouble(n.substring(11, 16).trim());
        } catch (Exception exception) {
            fn = Double.parseDouble(n.substring(7, 16).trim());
        }

        // Specificity, sensitivity, accuracy, Matthews correlation.
        double sp = tn / (tn + fp);
        double se = tp / (tp + fn);
        double acc = (tp + tn) / (tp + tn + fp + fn);
        // BUGFIX: the last factor was written '... * tn + fp' — operator
        // precedence made it '(... * tn) + fp'. MCC's denominator is
        // sqrt((tp+fp)(tn+fn)(tp+fn)(tn+fp)).
        double mcc = ((tp * tn) - (fp * fn))
                / Math.sqrt((tp + fp) * (tn + fn) * (tp + fn) * (tn + fp));

        String out = dec.format(acc).replace(",", ".");
        System.out.println(out);
        return out;
    } catch (Exception e) {
        e.printStackTrace();
        return "";
    }

}

From source file:ml.ann.MainDriver.java

/**
 * Interactive test menu: evaluates the shared {@code model} on the shared
 * {@code test} set, either directly or via 10-fold cross-validation when
 * {@code cv10} is set. Option 2 first loads fresh test data.
 */
public static void testModel() {
    System.out.println("## Pilih bahan testing");
    System.out.println("## 1. Uji dengan data dari masukan training");
    System.out.println("## 2. Uji dengan data data masukan baru");
    System.out.print("## > ");

    int choice = (new Scanner(System.in)).nextInt();
    // REFACTOR: both menu branches previously duplicated the entire
    // evaluation body; the common part now lives in evaluateAndReport().
    if (choice == 1) {
        evaluateAndReport();
    } else if (choice == 2) {
        try {
            loadTestData();
        } catch (Exception E) {
            E.printStackTrace();
            return; // no point evaluating if the data failed to load
        }
        evaluateAndReport();
    }

}

/** Runs the evaluation (CV or plain) and prints summary, details, matrix. */
private static void evaluateAndReport() {
    try {
        Evaluation eval = new Evaluation(train);

        if (cv10) {
            eval.crossValidateModel(model, test, 10, new Random(1));
        } else {
            eval.evaluateModel(model, test);
        }

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());
    } catch (Exception E) {
        E.printStackTrace();
    }
}

From source file:ml.ann.MainPTR.java

/**
 * Interactive driver: reads an ARFF file, asks for the training algorithm
 * (perceptron rule / incremental delta / batch delta) plus optional
 * hyper-parameters, trains a {@code MultiClassPTR} and prints evaluation
 * statistics on the training data.
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    boolean randomWeight;
    double weightawal = 0.0;
    double learningRate = 0.0001;
    double threshold = 0.00;
    double momentum = 0.00;
    int maxEpoch = 100000;
    int nCrossValidate = 2;

    m_nominalToBinaryFilter = new NominalToBinary();
    m_normalize = new Normalize();

    Scanner in = new Scanner(System.in);
    System.out.println("Lokasi file: ");

    // BUGFIX: the path typed by the user was previously discarded and
    // overwritten by a hard-coded "test-arffs/iris.arff" (debug leftover).
    String filepath = in.nextLine();
    System.out.println("--- Algoritma ---");
    System.out.println("1. Perceptron Training Rule");
    System.out.println("2. Delta Rule Incremental");
    System.out.println("3. Delta Rule Batch");
    System.out.println("Pilihan Algoritma (1/2/3) : ");
    int choice = in.nextInt();
    String temp = in.nextLine(); // consume the newline left by nextInt()

    System.out.println("Apakah Anda ingin memasukkan nilai weight awal? (YES/NO)");
    String isRandom = in.nextLine();
    System.out.println("Apakah Anda ingin memasukkan konfigurasi? (YES/NO)");
    String config = in.nextLine();

    if (config.equalsIgnoreCase("yes")) {
        System.out.print("Masukkan nilai learning rate: ");
        learningRate = in.nextDouble();
        System.out.print("Masukkan nilai threshold: ");
        threshold = in.nextDouble();
        System.out.print("Masukkan nilai momentum: ");
        momentum = in.nextDouble();
        System.out.print("Masukkan jumlah epoch: ");
        // BUGFIX: this value was assigned to 'threshold', so the epoch
        // prompt silently clobbered the threshold and maxEpoch kept 100000.
        maxEpoch = in.nextInt();
        System.out.print("Masukkan jumlah folds untuk crossvalidate: ");
        nCrossValidate = in.nextInt();
        in.nextLine(); // BUGFIX: consume trailing newline so the weight
                       // prompt below reads a real line, not ""
    }

    randomWeight = isRandom.equalsIgnoreCase("yes");

    if (randomWeight) {
        System.out.print("Masukkan nilai weight awal: ");
        weightawal = Double.valueOf(in.nextLine());
    }

    // Echo the effective configuration.
    if (isRandom.equalsIgnoreCase("yes")) {
        System.out.print("isRandom | ");
    } else {
        System.out.print("Weight " + weightawal + " | ");
    }

    System.out.print("L.rate " + learningRate + " | ");
    System.out.print("Max Epoch " + maxEpoch + " | ");
    System.out.print("Threshold " + threshold + " | ");
    System.out.print("Momentum " + momentum + " | ");
    System.out.print("Folds " + nCrossValidate + " | ");
    System.out.println();

    FileReader trainreader = new FileReader(filepath);
    Instances train = new Instances(trainreader);
    train.setClassIndex(train.numAttributes() - 1);

    // Nominal attributes -> binary indicators, then normalize all values.
    m_nominalToBinaryFilter.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_nominalToBinaryFilter));

    m_normalize.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_normalize));

    MultiClassPTR tempMulti = new MultiClassPTR(choice, randomWeight, learningRate, maxEpoch, threshold);
    tempMulti.buildClassifier(train);

    // Evaluate on the training data itself (resubstitution estimate).
    Evaluation eval = new Evaluation(new Instances(train));
    eval.evaluateModel(tempMulti, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:myclassifier.wekaCode.java

/**
 * Evaluates an already-trained classifier on a supplied test set and prints
 * the summary plus per-class accuracy details.
 *
 * @param dataSet     data used to initialize the evaluation (priors/header)
 * @param classifiers the trained classifier to evaluate
 * @param testSet     instances to score the classifier against
 */
public static void testingTestSet(Instances dataSet, Classifier classifiers, Instances testSet)
        throws Exception {
    Evaluation evaluation = new Evaluation(dataSet);
    evaluation.evaluateModel(classifiers, testSet);
    String summary = evaluation.toSummaryString("\n Testing Model given Test Set ", false);
    System.out.println(summary);
    System.out.println(evaluation.toClassDetailsString());
}

From source file:naivebayes.NBRun.java

/**
 * Interactive Naive Bayes demo: builds or loads a model, then evaluates it
 * on the mushroom test data either by 10-fold cross-validation or on the
 * full set, and prints summary, confusion matrix, and per-class details.
 */
public static void main(String[] args) throws Exception {
    System.out.println("Naive Bayes Classifier");
    // NOTE(review): hard-coded Windows paths — fine for a demo, not portable.
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush_test.arff");
    Scanner scan = new Scanner(System.in);
    Classifier cls;
    Instances train = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush.arff");

    System.out.println("Read or create model? r/c");
    if (scan.next().equals("c")) {
        cls = new NBTubesAI();
        cls.buildClassifier(train);
        TucilWeka.saveModel(train);
    } else {
        cls = (NBTubesAI) TucilWeka.readModel();
    }
    Evaluation eval = new Evaluation(data);
    System.out.println("10 fold cross validate or Full train? c/f");
    if (scan.next().equals("c")) {
        int fold = 10;
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        eval.crossValidateModel(cls, data, fold, new Random(1));
    } else {
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        data.deleteWithMissingClass();
        try {
            eval.evaluateModel(cls, data);
        } catch (java.lang.Exception ex) {
            // BUGFIX: the failure was silently swallowed before falling back
            // to cross-validation; report it so the fallback run is not
            // mistaken for a clean full-train evaluation.
            ex.printStackTrace();
            eval.crossValidateModel(cls, data, 11, new Random(1));
        }
    }

    System.out.println("Hasil evaluasi: ");
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:newclassifier.NewClassifier.java

/**
 * Trains the classifier on the full data set and reports 10-fold
 * cross-validation results (summary, per-class details, confusion matrix).
 */
public void crossValidation() throws Exception {
    cls.buildClassifier(data);

    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, 10, new Random(1));

    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
    System.out.println(evaluation.toClassDetailsString());
    System.out.println(evaluation.toMatrixString());
}

From source file:newclassifier.NewClassifier.java

/**
 * Trains the classifier on the internal data set and evaluates it on an
 * external test set loaded from the given path.
 *
 * @param path location of the test data readable by Weka's DataSource
 */
public void givenTestSet(String path) throws Exception {
    // Load the external test set; its class attribute is the last column.
    Instances testInstances = DataSource.read(path);
    testInstances.setClassIndex(testInstances.numAttributes() - 1);

    cls.buildClassifier(data);

    Evaluation evaluation = new Evaluation(data);
    evaluation.evaluateModel(cls, testInstances);

    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
    System.out.println(evaluation.toClassDetailsString());
    System.out.println(evaluation.toMatrixString());
}

From source file:nl.uva.expose.classification.WekaClassification.java

/**
 * Configures the wrapped classifier as Naive Bayes, trains it on the given
 * data, and prints 5-fold cross-validation statistics plus the model itself.
 *
 * @param trainData training instances; the class attribute is column 0
 */
private void classifierTrainer(Instances trainData) throws Exception {
    trainData.setClassIndex(0);
    classifier.setClassifier(new NaiveBayes());
    classifier.buildClassifier(trainData);

    Evaluation evaluation = new Evaluation(trainData);
    evaluation.crossValidateModel(classifier, trainData, 5, new Random(1));

    System.out.println(evaluation.toSummaryString());
    System.out.println(evaluation.toClassDetailsString());
    System.out.println("===== Evaluating on filtered (training) dataset done =====");
    System.out.println("\n\nClassifier model:\n\n" + classifier);
}

From source file:org.processmining.analysis.clusteranalysis.DecisionAnalyzer.java

License:Open Source License

/**
 * Creates an evaluation overview of the built classifier.
 * /*from   www .j  av a2 s  . c  o m*/
 * @return the panel to be displayed as result evaluation view for the
 *         current decision point
 */
protected JPanel createEvaluationVisualization(Instances data) {
    // build text field to display evaluation statistics
    JTextPane statistic = new JTextPane();

    try {
        // build evaluation statistics
        Evaluation evaluation = new Evaluation(data);
        evaluation.evaluateModel(myClassifier, data);
        statistic.setText(evaluation.toSummaryString() + "\n\n" + evaluation.toClassDetailsString() + "\n\n"
                + evaluation.toMatrixString());

    } catch (Exception ex) {
        ex.printStackTrace();
        return createMessagePanel("Error while creating the decision tree evaluation view");
    }

    statistic.setFont(new Font("Courier", Font.PLAIN, 14));
    statistic.setEditable(false);
    statistic.setCaretPosition(0);

    JPanel resultViewPanel = new JPanel();
    resultViewPanel.setLayout(new BoxLayout(resultViewPanel, BoxLayout.PAGE_AXIS));
    resultViewPanel.add(new JScrollPane(statistic));

    return resultViewPanel;
}

From source file:PEBL.TwoStep.java

/**
 * Trains a Naive Bayes model on a labelled CSV, cross-validates it, then
 * labels an unlabelled CSV with the more probable of the two classes and
 * saves the result as ARFF.
 */
public static void main(String[] args) throws Exception {

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "Z:\\\\shared from vm\\\\fourthset\\\\mixed.csv");

    Instances data = source.getDataSet();

    // Setting class attribute if the data format does not provide this
    // information (e.g. XRFF saves the class attribute information as well).
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // CSV columns arrive numeric; convert them to nominal for Naive Bayes.
    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build a Naive Bayes classifier with kernel estimation (-K).
    String[] options = new String[1];
    options[0] = "-K";
    NaiveBayes c = new NaiveBayes();
    c.setOptions(options);
    c.buildClassifier(data);

    // 10-fold cross-validation report on the labelled set.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(c, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
    System.out.println("--- model learned on mixed set ---");

    // Load unlabeled data and apply the same preprocessing.
    ConverterUtils.DataSource s = new ConverterUtils.DataSource(
            "Z:\\\\shared from vm\\\\fourthset\\\\unlabelled.csv");
    Instances unlabeled = s.getDataSet();
    unlabeled.setClassIndex(unlabeled.numAttributes() - 1);

    nmf = new NumericToNominal();
    nmf.setInputFormat(unlabeled);
    unlabeled = Filter.useFilter(unlabeled, nmf);

    // Label instances with the more probable class.
    for (int i = 0; i < unlabeled.numInstances(); i++) {
        // PERF: compute the distribution once per instance — it was
        // previously evaluated twice (once per class probability).
        double[] dist = c.distributionForInstance(unlabeled.instance(i));
        double classZero = dist[0];
        double classOne = dist[1];
        System.out.print(
                "classifying: " + unlabeled.instance(i) + " : " + classZero + " - " + classOne + " == class: ");
        if (classZero > classOne) {
            System.out.print("0");
            unlabeled.instance(i).setClassValue("0");
        } else {
            System.out.print("1");
            unlabeled.instance(i).setClassValue("1");
        }
        System.out.println("");
    }

    // Save the now-labelled data as ARFF.
    ArffSaver saver = new ArffSaver();
    saver.setInstances(unlabeled);
    saver.setFile(new File("Z:\\shared from vm\\thirdset\\relabelled.arff"));
    saver.writeBatch();

}