Example usage for weka.classifiers Evaluation toSummaryString

List of usage examples for weka.classifiers Evaluation toSummaryString

Introduction

This page presents example usages of `weka.classifiers.Evaluation.toSummaryString`.

Prototype

@Override
public String toSummaryString() 

Source Link

Document

Calls toSummaryString() with no title and no complexity stats.

Usage

From source file:ANN.MultiplePerceptron.java

public static void main(String[] args) throws Exception {
    // Load the iris dataset from the local Weka installation.
    ConverterUtils.DataSource dataSource = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances data = dataSource.getDataSet();

    // Scale all numeric attributes into [0, 1] before training.
    Normalize normalize = new Normalize();
    normalize.setInputFormat(data);
    data = Filter.useFilter(data, normalize);

    // The class label is the last attribute in the ARFF file.
    data.setClassIndex(data.numAttributes() - 1);

    // Train the perceptron (20 hidden units, learning rate 0.3) and
    // evaluate it on the same data it was trained on.
    MultiplePerceptron classifier = new MultiplePerceptron(data, 20, 0.3);
    classifier.buildClassifier(data);

    Evaluation evaluation = new Evaluation(data);
    evaluation.evaluateModel(classifier, data);
    System.out.println(evaluation.toSummaryString());
    System.out.print(evaluation.toMatrixString());
}

From source file:ANN_Single.SinglelayerPerceptron.java

public static void main(String[] args) throws Exception {
    // Load the diabetes dataset from the local Weka installation.
    ConverterUtils.DataSource dataSource = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\diabetes.arff");
    Instances data = dataSource.getDataSet();

    // Normalize numeric attributes into [0, 1].
    Normalize normalize = new Normalize();
    normalize.setInputFormat(data);
    data = Filter.useFilter(data, normalize);

    // Class label is the last attribute.
    data.setClassIndex(data.numAttributes() - 1);
    System.out.println();

    // Train a single-layer perceptron (learning rate 0.1, 5000 epochs)
    // and evaluate it on the training data itself.
    SinglelayerPerceptron perceptron = new SinglelayerPerceptron(data, 0.1, 5000);
    perceptron.buildClassifier(data);

    Evaluation evaluation = new Evaluation(data);
    evaluation.evaluateModel(perceptron, data);
    System.out.println(evaluation.toSummaryString());
    System.out.print(evaluation.toMatrixString());
}

From source file:ANN_single2.MultilayerPerceptron.java

public static void main(String[] args) throws Exception {
    // Load the Team dataset from the local Weka installation.
    ConverterUtils.DataSource dataSource = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances data = dataSource.getDataSet();

    // Normalize numeric attributes into [0, 1].
    Normalize normalize = new Normalize();
    normalize.setInputFormat(data);
    data = Filter.useFilter(data, normalize);

    // Class label is the last attribute.
    data.setClassIndex(data.numAttributes() - 1);

    // 10-fold cross-validation of a multilayer perceptron
    // (13 hidden units, learning rate 0.1, threshold 0.5).
    MultilayerPerceptron mlp = new MultilayerPerceptron(data, 13, 0.1, 0.5);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(mlp, data, 10, new Random(1));

    System.out.println(evaluation.toSummaryString());
    System.out.println(evaluation.toMatrixString());
}

From source file:ANN_single2.SinglelayerPerceptron.java

public static void main(String[] args) throws Exception {
    // Load the Team dataset from the local Weka installation.
    ConverterUtils.DataSource dataSource = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances data = dataSource.getDataSet();

    // Normalize numeric attributes into [0, 1].
    Normalize normalize = new Normalize();
    normalize.setInputFormat(data);
    data = Filter.useFilter(data, normalize);

    // Class label is the last attribute.
    data.setClassIndex(data.numAttributes() - 1);

    // Grid search over epoch count and learning rate; each configuration
    // is trained and then evaluated on the training data itself.
    for (int epochs = 100; epochs < 3000; epochs += 100) {
        for (double rate = 0.01; rate < 1; rate += 0.01) {
            System.out.println(epochs + " " + rate);

            SinglelayerPerceptron perceptron = new SinglelayerPerceptron(epochs, rate, 0.00);
            perceptron.buildClassifier(data);

            Evaluation evaluation = new Evaluation(data);
            evaluation.evaluateModel(perceptron, data);
            System.out.println(evaluation.toSummaryString());
            System.out.println(evaluation.toMatrixString());
        }
    }
}

From source file:asap.NLPSystem.java

/**
 * Evaluates the trained classifier on the evaluation set, storing the
 * predictions and the Pearson correlation coefficient on this instance.
 *
 * @param printEvaluation if true, prints the classifier options and the
 *                        evaluation summary to standard output
 */
private void evaluateModel(boolean printEvaluation) {
    PerformanceCounters.startTimer("evaluateModel");
    System.out.println("Evaluating model...");
    AbstractClassifier abstractClassifier = (AbstractClassifier) classifier;
    try {
        // Evaluate the classifier and collect per-instance predictions.
        Evaluation eval = new Evaluation(evaluationSet);

        evaluationPredictions = eval.evaluateModel(abstractClassifier, evaluationSet);

        if (printEvaluation) {
            System.out.println("\tstats for model:" + abstractClassifier.getClass().getName() + " "
                    + Utils.joinOptions(abstractClassifier.getOptions()));
            System.out.println(eval.toSummaryString());
        }

        evaluationPearsonsCorrelation = eval.correlationCoefficient();
        evaluated = true;
    } catch (Exception ex) {
        // Bug fix: log under this class, not PostProcess (copy-paste artifact).
        Logger.getLogger(NLPSystem.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("\tevaluation done.");
    PerformanceCounters.stopTimer("evaluateModel");
}

From source file:asap.PostProcess.java

/**
 * Evaluates the given classifier on the supplied data and returns the
 * per-instance predictions.
 *
 * @param cl              the (already trained) classifier to evaluate
 * @param data            the instances to evaluate against
 * @param printEvaluation if true, prints classifier options and summary stats
 * @return the prediction array, or null if evaluation failed
 */
private static double[] evaluateModel(AbstractClassifier cl, Instances data, boolean printEvaluation) {
    PerformanceCounters.startTimer("evaluateModel");
    System.out.println("Evaluating model...");

    double[] predictions = null;
    try {
        Evaluation evaluation = new Evaluation(data);
        predictions = evaluation.evaluateModel(cl, data);

        if (printEvaluation) {
            String modelDescription =
                    "\tstats for model:" + cl.getClass().getName() + " " + Utils.joinOptions(cl.getOptions());
            System.out.println(modelDescription);
            System.out.println(evaluation.toSummaryString());
        }
    } catch (Exception ex) {
        Logger.getLogger(PostProcess.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("\tevaluation done.");
    PerformanceCounters.stopTimer("evaluateModel");
    return predictions;
}

From source file:at.aictopic1.sentimentanalysis.machinelearning.impl.TwitterClassifer.java

/**
 * Trains a Naive Bayes classifier on the loaded training data, serializes
 * the trained model to disk, stores it on this instance, and prints an
 * evaluation summary against a 10% held-out slice.
 */
public void trainModel() {
    Instances trainingData = loadTrainingData();

    System.out.println("Class attribute: " + trainingData.classAttribute().toString());

    // Carve out 10% of the data as a test set.
    // NOTE(review): RemovePercentage only yields the removed slice; the model
    // below is still trained on the FULL dataset, so the test rows overlap the
    // training rows -- confirm whether a disjoint split was intended.
    RemovePercentage filter = new RemovePercentage();
    filter.setPercentage(10);

    Instances testData = null;
    try {
        filter.setInputFormat(trainingData);
        testData = Filter.useFilter(trainingData, filter);
    } catch (Exception ex) {
        System.out.println("Error getting testData: " + ex.toString());
    }

    Classifier model = (Classifier) new NaiveBayes();

    // Train first.
    try {
        model.buildClassifier(trainingData);
    } catch (Exception ex) {
        System.out.println("Error training model: " + ex.toString());
    }

    // Bug fix: serialize AFTER training. The original code wrote the model to
    // disk before buildClassifier(), so the saved file held an untrained model.
    try {
        weka.core.SerializationHelper.write(modelDir + algorithm + ".model", model);
    } catch (Exception ex) {
        Logger.getLogger(TwitterClassifer.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Keep a reference to the trained model on this instance.
    this.trainedModel = model;

    // Evaluate on the held-out slice and print summary statistics.
    try {
        Evaluation test = new Evaluation(trainingData);
        test.evaluateModel(model, testData);

        System.out.println(test.toSummaryString());
    } catch (Exception ex) {
        System.out.println("Error evaluating model: " + ex.toString());
    }
}

From source file:binarizer.LayoutAnalysis.java

/**
 * Runs 10-fold cross-validation of a Naive Bayes classifier (with
 * supervised discretization) over the given ARFF file.
 *
 * @param arffFile path to the ARFF dataset
 * @return the cross-validated error rate
 * @throws Exception if the data cannot be loaded or evaluation fails
 */
public double crossValidation(String arffFile) throws Exception {
    Instances data = new DataSource(arffFile).getDataSet();

    // Default the class attribute to the last one when unset.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NaiveBayes classifier = new NaiveBayes();
    classifier.setUseSupervisedDiscretization(true);

    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
    return evaluation.errorRate();
}

From source file:boostingPL.boosting.AdaBoost.java

License:Open Source License

public static void main(String[] args) throws Exception {
    // Load the hard-coded wine-quality training split.
    ArffLoader trainLoader = new ArffLoader();
    trainLoader.setFile(new java.io.File(
            "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatrain1.arff"));
    Instances training = trainLoader.getDataSet();
    training.setClassIndex(training.numAttributes() - 1);

    // Run 100 boosting rounds.
    AdaBoost adaBoost = new AdaBoost(training, 100);
    for (int round = 0; round < 100; round++) {
        adaBoost.run(round);
    }

    // Load the matching test split.
    ArffLoader testLoader = new ArffLoader();
    testLoader.setFile(new java.io.File(
            "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatest1.arff"));
    Instances testing = testLoader.getDataSet();
    testing.setClassIndex(testing.numAttributes() - 1);

    // Score each test instance and print summary, per-class, and
    // confusion-matrix statistics.
    Evaluation eval = new Evaluation(testing);
    for (Instance inst : testing) {
        eval.evaluateModelOnceAndRecordPrediction(adaBoost, inst);
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:boostingPL.boosting.SAMME.java

License:Open Source License

public static void main(String[] args) throws Exception {
    // args[0]: training ARFF; args[1]: test ARFF.
    ArffLoader trainLoader = new ArffLoader();
    trainLoader.setFile(new java.io.File(args[0]));
    Instances training = trainLoader.getDataSet();
    training.setClassIndex(training.numAttributes() - 1);

    // Run the fixed number of SAMME boosting rounds.
    int iterationNum = 100;
    SAMME samme = new SAMME(training, iterationNum);
    for (int round = 0; round < iterationNum; round++) {
        samme.run(round);
    }

    ArffLoader testLoader = new ArffLoader();
    testLoader.setFile(new java.io.File(args[1]));
    Instances testing = testLoader.getDataSet();
    testing.setClassIndex(testing.numAttributes() - 1);

    // Score each test instance and print summary, per-class, and
    // confusion-matrix statistics.
    Evaluation eval = new Evaluation(testing);
    for (Instance inst : testing) {
        eval.evaluateModelOnceAndRecordPrediction(samme, inst);
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}