Example usage for weka.classifiers Evaluation toClassDetailsString

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.toClassDetailsString().

Prototype

public String toClassDetailsString() throws Exception 

Document

Generates a breakdown of the accuracy for each class (with default title), incorporating various information-retrieval statistics, such as true/false positive rate, precision/recall/F-Measure.
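
For orientation, here is a minimal sketch of the typical call pattern: load a dataset, cross-validate a classifier, and print the per-class breakdown. The file name and the choice of J48 are purely illustrative assumptions.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassDetailsExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset (hypothetical file name) and set the class attribute to the last attribute
        Instances data = new DataSource("iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validate a classifier and print the per-class statistics
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toClassDetailsString());
    }
}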

Usage

From source file:de.tudarmstadt.ukp.alignment.framework.combined.WekaMachineLearning.java

License:Apache License

/**
 *
 * This method creates a serialized WEKA model file from an .arff file containing the annotated gold standard
 *
 *
 * @param gs_arff the annotated gold standard in an .arff file
 * @param model output file for the model
 * @param output_eval if true, the evaluation of the trained classifier is printed (10-fold cross validation)
 * @throws Exception
 */

public static void createModelFromGoldstandard(String gs_arff, String model, boolean output_eval)
        throws Exception {
    DataSource source = new DataSource(gs_arff);
    Instances data = source.getDataSet();
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    Remove rm = new Remove();
    rm.setAttributeIndices("1"); // remove ID  attribute

    BayesNet bn = new BayesNet(); //Standard classifier; BNs proved most robust, but of course other classifiers are possible
    // meta-classifier
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(bn);
    fc.buildClassifier(data); // build classifier
    SerializationHelper.write(model, fc);
    if (output_eval) {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(fc, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
        System.out.println(eval.toMatrixString());
        System.out.println(eval.toClassDetailsString());
    }

}
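
The model file written above can later be restored with SerializationHelper and applied to unseen data. The following is a minimal sketch under the assumption that the new .arff file has the same attribute layout as the gold standard; the file names are hypothetical.

import weka.classifiers.Classifier;
import weka.core.Instances;
import weka.core.SerializationHelper;
import weka.core.converters.ConverterUtils.DataSource;

public class ApplyGoldstandardModel {
    public static void main(String[] args) throws Exception {
        // Restore the serialized FilteredClassifier (hypothetical model path)
        Classifier fc = (Classifier) SerializationHelper.read("alignment.model");

        // Load new instances with the same attribute layout as the training data
        Instances unseen = new DataSource("unseen.arff").getDataSet();
        unseen.setClassIndex(unseen.numAttributes() - 1);

        // Classify each instance and print the predicted class label
        for (int i = 0; i < unseen.numInstances(); i++) {
            double pred = fc.classifyInstance(unseen.instance(i));
            System.out.println(i + " -> " + unseen.classAttribute().value((int) pred));
        }
    }
}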

From source file:edu.teco.context.recognition.WekaManager.java

License:Apache License

public void testClassification() {
    // set class attribute (last attribute)
    testingData.setClassIndex(testingData.numAttributes() - 1);

    if (FrameworkContext.INFO)
        Log.i("WekaData", "Testing data:\n" + testingData.toString());

    // Test the model
    Evaluation eTest;
    try {
        eTest = new Evaluation(trainingData);
        eTest.evaluateModel(classifier, testingData);

        if (FrameworkContext.INFO)
            Log.i("WekaData", "\nClass detail:\n\n" + eTest.toClassDetailsString());

        // Print the result à la Weka explorer:
        String strSummary = eTest.toSummaryString();
        if (FrameworkContext.INFO)
            Log.i("WekaData", "----- Summary -----\n" + strSummary);

        // print the confusion matrix
        if (FrameworkContext.INFO)
            Log.i("WekaData", "----- Confusion Matrix -----\n" + eTest.toMatrixString());

        // print class details
        if (FrameworkContext.INFO)
            Log.i("WekaData", "----- Class Detail -----\n" + eTest.toClassDetailsString());

        notifyTestCalculated(strSummary);

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:elh.eus.absa.WekaWrapper.java

License:Open Source License

/**
 *  Prints the results stored in an Evaluation object to standard output
 *  (summary, class results and confusion matrix)
 *
 * @param eval the Evaluation object containing the results to print
 * @throws Exception
 */
public void printClassifierResults(Evaluation eval) throws Exception {
    // Print the result à la Weka explorer:
    String strSummary = eval.toSummaryString();
    System.out.println(strSummary);

    // Print per class results
    String resPerClass = eval.toClassDetailsString();
    System.out.println(resPerClass);

    // Get the confusion matrix
    String cMatrix = eval.toMatrixString();
    System.out.println(cMatrix);

    System.out.println();
}

From source file:farm_ads.MyClassifier.java

public String printEvaluation(Evaluation e) throws Exception {
    String s = new String();
    s += e.toSummaryString("\nResults\n======\n", false);
    s += "\n" + e.toMatrixString("Matrix String");
    s += "\n" + e.toClassDetailsString();
    return s;
}

From source file:ffnn.FFNN.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    FFNNTubesAI cls;
    Scanner scan = new Scanner(System.in);
    System.out.print("new / read? (n/r)");
    if (scan.next().equals("n")) {
        cls = new FFNNTubesAI();
    } else {
        cls = (FFNNTubesAI) TucilWeka.readModel();
    }
    int temp;
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\Team.arff");
    // Show the available attributes
    for (int i = 0; i < data.numAttributes(); i++) {
        System.out.println(i + ". " + data.attribute(i));
    }
    System.out.print("Class Index : ");
    temp = scan.nextInt();
    data.setClassIndex(temp);
    data = preprocess(data);
    System.out.println(data);

    System.out.print("full train? (y/n)");
    if (scan.next().equals("y")) {
        try {
            cls.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(FFNNTubesAI.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    int fold = 10;

    //FFNNTubesAI.printMatrix(cls.weight1, cls.input_layer+1, cls.hidden_layer);
    //FFNNTubesAI.printMatrix(cls.weight2, cls.hidden_layer, cls.output_layer);
    //FFNNTubesAI.printMatrix(cls.bias2, 1, cls.output_layer);
    Evaluation eval = new Evaluation(data);
    System.out.print("eval/10-fold? (e/f)");
    if (scan.next().equals("e")) {
        eval.evaluateModel(cls, data);
    } else {
        eval.crossValidateModel(cls, data, fold, new Random(1));
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:gr.uoc.nlp.opinion.analysis.suggestion.AnalyzeSuggestions.java

/**
 *
 * @param classifier the classifier to evaluate with 10-fold cross-validation
 */
public void crossValidationTrainSet(Classifier classifier) {

    Evaluation eval;
    try {
        //initialize cross validation
        eval = new Evaluation(this.trainset);
        //validate
        eval.crossValidateModel(classifier, this.trainset, 10, new Random(1));

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());

    } catch (Exception ex) {
        Logger.getLogger(AnalyzeSuggestions.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:gr.uoc.nlp.opinion.analysis.suggestion.AnalyzeSuggestions.java

/**
 *
 * @param classifier the trained classifier to evaluate
 * @param testset the test instances to evaluate against
 */
public void valuateSet(Classifier classifier, Instances testset) {

    Evaluation eval;
    try {
        eval = new Evaluation(this.trainset);
        eval.evaluateModel(classifier, testset);

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());
    } catch (Exception ex) {
        Logger.getLogger(AnalyzeSuggestions.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:hero.unstable.util.classification.wekaClassifier.java

public Evaluation classify(Instances data) throws Exception {
    data.setClassIndex(0); // class attribute is the first attribute

    // Randomize data
    Evaluation eval = new Evaluation(data);
    Random rand = new Random(seed);

    // Perform cross-validation
    eval.crossValidateModel(classifier, data, folds, rand);

    // output evaluation
    String result = eval.toClassDetailsString();
    /*
    System.out.println();
    System.out.println("=== Setup ===");
    System.out.println("Clasiffier: " + classifier.toString());
    System.out.println("Dataset: " + data.relationName());
    System.out.println("Folds: " + folds);
    System.out.println("Seed: " + seed);
    System.out.println();
    System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
    */
    //System.out.println(result);
    return eval;
}

From source file:machinelearningproject.MachineLearningProject.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here
    DataSource source = new DataSource("D:\\spambase.arff");
    //        DataSource source = new DataSource("D:\\weather-nominal.arff");
    Instances instances = source.getDataSet();
    int numAttr = instances.numAttributes();
    instances.setClassIndex(instances.numAttributes() - 1);

    int runs = 5;
    int seed = 15;
    for (int i = 0; i < runs; i++) {
        //randomize data
        seed = seed + 1; // the seed for randomizing the data
        Random rand = new Random(seed); // create seeded number generator
        Instances randData = new Instances(instances); // create copy of original data
        randData.randomize(rand); // shuffle the copy using the seeded generator

        Evaluation evalDTree = new Evaluation(randData);
        Evaluation evalRF = new Evaluation(randData);
        Evaluation evalSVM = new Evaluation(randData);

        int folds = 10;
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances test = randData.testCV(folds, n);
            //instantiate classifiers
            DecisionTree dtree = new DecisionTree();
            RandomForest rf = new RandomForest(100);
            SMO svm = new SMO();
            RBFKernel rbfKernel = new RBFKernel();
            double gamma = 0.70;
            rbfKernel.setGamma(gamma);
            svm.setKernel(rbfKernel); // attach the configured RBF kernel to the SMO classifier

            dtree.buildClassifier(train);
            rf.buildClassifier(train);
            svm.buildClassifier(train);

            evalDTree.evaluateModel(dtree, test);
            evalRF.evaluateModel(rf, test);
            evalSVM.evaluateModel(svm, test);
        }
        System.out.println("=== Decision Tree Evaluation ===");
        System.out.println(evalDTree.toSummaryString());
        System.out.println(evalDTree.toClassDetailsString());
        System.out.println(evalDTree.toMatrixString());

        System.out.println("=== Random Forest Evaluation ===");
        System.out.println(evalRF.toSummaryString());
        System.out.println(evalRF.toClassDetailsString());
        System.out.println(evalRF.toMatrixString());

        System.out.println("=== SVM Evaluation ===");
        System.out.println(evalSVM.toSummaryString());
        System.out.println(evalSVM.toClassDetailsString());
        System.out.println(evalSVM.toMatrixString());
    }
}

From source file:miRdup.WekaModule.java

License:Open Source License

public static void trainModel(File arff, String keyword) {
    dec.setMaximumFractionDigits(3);
    System.out.println("\nTraining model on file " + arff);
    try {
        // load data
        DataSource source = new DataSource(arff.toString());
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        PrintWriter pwout = new PrintWriter(new FileWriter(keyword + Main.modelExtension + "Output"));
        PrintWriter pwroc = new PrintWriter(new FileWriter(keyword + Main.modelExtension + "roc.arff"));

        //remove ID row
        Remove rm = new Remove();
        rm.setAttributeIndices("1");
        FilteredClassifier fc = new FilteredClassifier();
        fc.setFilter(rm);

        //            // train model svm
        //            weka.classifiers.functions.LibSVM model = new weka.classifiers.functions.LibSVM();
        //            model.setOptions(weka.core.Utils.splitOptions("-S 0 -K 2 -D 3 -G 0.0 -R 0.0 -N 0.5 -M 40.0 -C 1.0 -E 0.0010 -P 0.1 -B"));
        // train model MultilayerPerceptron
        //            weka.classifiers.functions.MultilayerPerceptron model = new weka.classifiers.functions.MultilayerPerceptron();
        //            model.setOptions(weka.core.Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H a"));
        // train model Adaboost on RIPPER
        //            weka.classifiers.meta.AdaBoostM1 model = new weka.classifiers.meta.AdaBoostM1();
        //            model.setOptions(weka.core.Utils.splitOptions("weka.classifiers.meta.AdaBoostM1 -P 100 -S 1 -I 10 -W weka.classifiers.rules.JRip -- -F 10 -N 2.0 -O 5 -S 1"));
        // train model Adaboost on FURIA
        //            weka.classifiers.meta.AdaBoostM1 model = new weka.classifiers.meta.AdaBoostM1();
        //            model.setOptions(weka.core.Utils.splitOptions("weka.classifiers.meta.AdaBoostM1 -P 100 -S 1 -I 10 -W weka.classifiers.rules.FURIA -- -F 10 -N 2.0 -O 5 -S 1 -p 0 -s 0"));
        //train model Adaboot on J48 trees
        //             weka.classifiers.meta.AdaBoostM1 model = new weka.classifiers.meta.AdaBoostM1();
        //             model.setOptions(
        //                     weka.core.Utils.splitOptions(
        //                     "-P 100 -S 1 -I 10 -W weka.classifiers.trees.J48 -- -C 0.25 -M 2"));
        //train model Adaboot on Random Forest trees
        weka.classifiers.meta.AdaBoostM1 model = new weka.classifiers.meta.AdaBoostM1();
        model.setOptions(weka.core.Utils
                .splitOptions("-P 100 -S 1 -I 10 -W weka.classifiers.trees.RandomForest -- -I 50 -K 0 -S 1"));

        if (Main.debug) {
            System.out.print("Model options: " + model.getClass().getName().trim() + " ");
        }
        System.out.print(model.getClass() + " ");
        for (String s : model.getOptions()) {
            System.out.print(s + " ");
        }

        pwout.print("Model options: " + model.getClass().getName().trim() + " ");
        for (String s : model.getOptions()) {
            pwout.print(s + " ");
        }

        //build model
        //            model.buildClassifier(data);
        fc.setClassifier(model);
        fc.buildClassifier(data);

        // cross validation 10 times on the model
        Evaluation eval = new Evaluation(data);
        //eval.crossValidateModel(model, data, 10, new Random(1));
        StringBuffer sb = new StringBuffer();
        eval.crossValidateModel(fc, data, 10, new Random(1), sb, new Range("first,last"), false);

        //System.out.println(sb);
        pwout.println(sb);
        pwout.flush();

        // output
        pwout.println("\n" + eval.toSummaryString());
        System.out.println(eval.toSummaryString());

        pwout.println(eval.toClassDetailsString());
        System.out.println(eval.toClassDetailsString());

        //calculate importants values
        String ev[] = eval.toClassDetailsString().split("\n");

        String ptmp[] = ev[3].trim().split(" ");
        String ntmp[] = ev[4].trim().split(" ");
        String avgtmp[] = ev[5].trim().split(" ");

        ArrayList<String> p = new ArrayList<String>();
        ArrayList<String> n = new ArrayList<String>();
        ArrayList<String> avg = new ArrayList<String>();

        for (String s : ptmp) {
            if (!s.trim().isEmpty()) {
                p.add(s);
            }
        }
        for (String s : ntmp) {
            if (!s.trim().isEmpty()) {
                n.add(s);
            }
        }
        for (String s : avgtmp) {
            if (!s.trim().isEmpty()) {
                avg.add(s);
            }
        }

        double tp = Double.parseDouble(p.get(0));
        double fp = Double.parseDouble(p.get(1));
        double tn = Double.parseDouble(n.get(0));
        double fn = Double.parseDouble(n.get(1));
        double auc = Double.parseDouble(avg.get(7));

        pwout.println("\nTP=" + tp + "\nFP=" + fp + "\nTN=" + tn + "\nFN=" + fn);
        System.out.println("\nTP=" + tp + "\nFP=" + fp + "\nTN=" + tn + "\nFN=" + fn);

        //specificity, sensitivity, Mathew's correlation, Prediction accuracy
        double sp = ((tn) / (tn + fp));
        double se = ((tp) / (tp + fn));
        double acc = ((tp + tn) / (tp + tn + fp + fn));
        double mcc = ((tp * tn) - (fp * fn)) / Math.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

        String output = "\nse=" + dec.format(se).replace(",", ".") + "\nsp=" + dec.format(sp).replace(",", ".")
                + "\nACC=" + dec.format(acc).replace(",", ".") + "\nMCC=" + dec.format(mcc).replace(",", ".")
                + "\nAUC=" + dec.format(auc).replace(",", ".");

        pwout.println(output);
        System.out.println(output);

        pwout.println(eval.toMatrixString());
        System.out.println(eval.toMatrixString());

        pwout.flush();
        pwout.close();

        //Saving model
        System.out.println("Model saved: " + keyword + Main.modelExtension);
        weka.core.SerializationHelper.write(keyword + Main.modelExtension, fc.getClassifier() /*model*/);

        // get curve
        ThresholdCurve tc = new ThresholdCurve();
        int classIndex = 0;
        Instances result = tc.getCurve(eval.predictions(), classIndex);
        pwroc.print(result.toString());
        pwroc.flush();
        pwroc.close();

        // draw curve
        //rocCurve(eval);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
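
Note that the last example recovers per-class counts by splitting the text returned by toClassDetailsString(), which is fragile if the output layout changes. The same statistics are also exposed directly on the Evaluation object; below is a sketch of a hypothetical helper that reads them programmatically, where classIndex selects the class of interest.

import weka.classifiers.Evaluation;

public class ClassDetailStats {
    // Hypothetical helper: read per-class statistics without parsing the formatted string
    public static void printClassStats(Evaluation eval, int classIndex) {
        System.out.println("TP rate   : " + eval.truePositiveRate(classIndex));
        System.out.println("FP rate   : " + eval.falsePositiveRate(classIndex));
        System.out.println("Precision : " + eval.precision(classIndex));
        System.out.println("Recall    : " + eval.recall(classIndex));
        System.out.println("F-measure : " + eval.fMeasure(classIndex));
        System.out.println("ROC area  : " + eval.areaUnderROC(classIndex));

        // Raw counts, usable for the specificity/sensitivity/MCC calculation shown above
        System.out.println("TP = " + eval.numTruePositives(classIndex)
                + ", FP = " + eval.numFalsePositives(classIndex)
                + ", TN = " + eval.numTrueNegatives(classIndex)
                + ", FN = " + eval.numFalseNegatives(classIndex));
    }
}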