Example usage for weka.classifiers Evaluation toSummaryString

List of usage examples for weka.classifiers Evaluation toSummaryString

Introduction

On this page you can find an example usage for weka.classifiers Evaluation toSummaryString.

Prototype

public String toSummaryString(String title, boolean printComplexityStatistics) 

Source Link

Document

Outputs the performance statistics in summary form.

Usage

From source file:assign00.ExperimentShell.java

/**
 * Entry point: loads a dataset, splits it 70/30 into training and test
 * partitions, standardizes both using statistics learned from the training
 * partition only, trains a NeuralNetworkClassifier, and prints a summary of
 * its performance on the test partition.
 *
 * @param args the command line arguments (unused; the dataset path comes from {@code file})
 * @throws Exception if loading, filtering, training, or evaluation fails
 */
public static void main(String[] args) throws Exception {
    DataSource source = new DataSource(file);
    Instances dataSet = source.getDataSet();

    // The class attribute is assumed to be the last column.
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(1)); // fixed seed -> reproducible shuffle

    // 70% of the instances go to training, the remainder to testing.
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;

    Instances training = new Instances(dataSet, 0, trainingSize);
    Instances test = new Instances(dataSet, trainingSize, testSize);

    // Learn standardization parameters from the training data only, then
    // apply the same transform to both partitions to avoid test-set leakage.
    Standardize standardize = new Standardize();
    standardize.setInputFormat(training);

    Instances scaledTest = Filter.useFilter(test, standardize);
    Instances scaledTraining = Filter.useFilter(training, standardize);

    NeuralNetworkClassifier classifier = new NeuralNetworkClassifier();
    classifier.buildClassifier(scaledTraining);

    // Evaluate the trained network on the held-out, standardized test set.
    Evaluation evaluation = new Evaluation(scaledTraining);
    evaluation.evaluateModel(classifier, scaledTest);

    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
}

From source file:c4.pkg5crossv.Classifier.java

/**
 * Loads the iris dataset, shuffles it, holds out 40% of the instances,
 * trains an unpruned J48 tree (minimum 10 instances per leaf) on the
 * remaining 60%, and prints the summary of a 10-fold cross-validation run
 * over the held-out portion.
 *
 * @throws FileNotFoundException if the ARFF file is missing
 * @throws IOException           if the ARFF file cannot be read
 * @throws Exception             if option parsing, training, or evaluation fails
 */
public static void trainAndTest() throws FileNotFoundException, IOException, Exception {

    Instances data = DataLoad.loadData("./src/data/irysy.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Shuffle (unseeded Random: a different split on every run), then take
    // the first 60% as training data and the rest as test data.
    data.randomize(new Random());
    double percent = 60.0;
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances trainData = new Instances(data, 0, trainSize);
    Instances testData = new Instances(data, trainSize, testSize);

    // -U: unpruned tree; -M 10: at least 10 instances per leaf.
    String[] options = Utils.splitOptions("-U -M 10");
    J48 tree = new J48();
    tree.setOptions(options);
    tree.buildClassifier(trainData);

    // NOTE(review): this is a 10-fold cross-validation (the original "5-fold"
    // comment was wrong), and it runs over the *test* partition only -
    // crossValidateModel retrains internally, so the tree built above is not
    // what gets evaluated here. Confirm this is intentional.
    Evaluation eval2 = new Evaluation(trainData);
    eval2.crossValidateModel(tree, testData, 10, new Random(1));
    System.out.println(eval2.toSummaryString("Wyniki:", false));
}

From source file:cezeri.evaluater.FactoryEvaluation.java

/**
 * Runs {@code folds}-fold cross-validation of {@code model} over {@code datax},
 * appending every fold's predictions and ground-truth class values to the
 * class-level {@code simulated}/{@code observed} accumulators so the overall
 * performance can be plotted and summarized after the last fold.
 *
 * NOTE(review): {@code simulated} and {@code observed} appear to be class-level
 * fields (not declared in this method); repeated calls keep appending to them -
 * confirm they are reset by the caller.
 *
 * @param model     classifier to evaluate; a fresh copy is trained per fold
 * @param datax     dataset to cross-validate on (copied, original untouched)
 * @param folds     number of cross-validation folds
 * @param show_text if true, print the setup and the aggregated summary
 * @param show_plot if true, plot observed vs. simulated values
 * @param attr      figure attributes used when plotting
 * @return the populated Evaluation, or null if an exception occurred before
 *         it was constructed (exceptions are logged, not rethrown)
 */
public static Evaluation performCrossValidate(Classifier model, Instances datax, int folds, boolean show_text,
        boolean show_plot, TFigureAttribute attr) {
    Random rand = new Random(1); // fixed seed so fold assignment is reproducible
    Instances randData = new Instances(datax);
    randData.randomize(rand);
    // Stratification only applies to nominal (classification) targets.
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }
    Evaluation eval = null;
    try {
        // perform cross-validation
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances validation = randData.testCV(folds, n);
            // Train a fresh copy each fold so the caller's model is untouched.
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);

            // Append this fold's predictions and matching ground-truth class
            // values to the running accumulators.
            simulated = FactoryUtils.concatenate(simulated, eval.evaluateModel(clsCopy, validation));
            observed = FactoryUtils.concatenate(observed,
                    validation.attributeToDoubleArray(validation.classIndex()));
        }

        if (show_plot) {
            // Row 0 = observed, row 1 = simulated; transposed so each row of
            // the plotted matrix is one (observed, simulated) pair.
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            attr.figureCaption = "overall performance";
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation setup and aggregated cross-validation summary
            System.out.println();
            System.out.println("=== Setup for Overall Cross Validation===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + randData.relationName());
            System.out.println("Folds: " + folds);
            System.out.println("Seed: " + 1);
            System.out.println();
            System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.evaluater.FactoryEvaluation.java

/**
 * Runs leave-one-out cross-validation (folds == number of instances) of
 * {@code model} over {@code datax}, appending each fold's predictions and
 * ground-truth class values to the class-level {@code simulated}/{@code observed}
 * accumulators, then optionally plots and prints the aggregated results.
 *
 * NOTE(review): the {@code test} parameter is never used by the live code
 * (only by commented-out experiments) - confirm whether it should be removed
 * or wired in.
 *
 * @param model     classifier to evaluate; a fresh copy is trained per fold
 * @param datax     dataset to cross-validate on (copied, original untouched)
 * @param test      unused (see note above)
 * @param show_text if true, print the setup and the aggregated summary
 * @param show_plot if true, plot observed vs. simulated values
 * @return the populated Evaluation, or null if an exception occurred before
 *         it was constructed (exceptions are logged, not rethrown)
 */
public static Evaluation performCrossValidateTestAlso(Classifier model, Instances datax, Instances test,
        boolean show_text, boolean show_plot) {
    TFigureAttribute attr = new TFigureAttribute();
    Random rand = new Random(1); // fixed seed so the shuffle is reproducible
    Instances randData = new Instances(datax);
    randData.randomize(rand);

    Evaluation eval = null;
    // One fold per instance: leave-one-out cross-validation.
    int folds = randData.numInstances();
    try {
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            // Train a fresh copy each fold so the caller's model is untouched.
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            Instances validation = randData.testCV(folds, n);

            // Append this fold's predictions and matching ground-truth class
            // values to the running class-level accumulators.
            simulated = FactoryUtils.concatenate(simulated, eval.evaluateModel(clsCopy, validation));
            observed = FactoryUtils.concatenate(observed,
                    validation.attributeToDoubleArray(validation.classIndex()));
        }

        if (show_plot) {
            // Row 0 = observed, row 1 = simulated; transposed before plotting.
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            attr.figureCaption = "overall performance";
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation setup and aggregated cross-validation summary
            System.out.println();
            System.out.println("=== Setup for Overall Cross Validation===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + randData.relationName());
            System.out.println("Folds: " + folds);
            System.out.println("Seed: " + 1);
            System.out.println();
            System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.evaluater.FactoryEvaluation.java

/**
 * Evaluates {@code model} on {@code test}, training it on {@code train} first
 * unless {@code isTrained} is already true, and stores the predictions in the
 * class-level {@code simulated} field (and ground truth in {@code observed}
 * when plotting).
 *
 * NOTE(review): {@code data} is randomized below but never used afterward -
 * the classifier is built on the original {@code train}. Confirm whether the
 * shuffle was meant to feed training.
 *
 * @param isTrained if true, {@code model} is used as-is; otherwise a copy is
 *                  trained on {@code train} (the caller's model stays untouched)
 * @param model     classifier to evaluate
 * @param train     training data; also used to initialize the Evaluation
 * @param test      test data to predict
 * @param show_text if true, print the setup and the result summary
 * @param show_plot if true, plot observed vs. simulated values
 * @param attr      figure attributes used when plotting
 * @return the populated Evaluation, or null if an exception occurred before
 *         it was constructed (exceptions are logged, not rethrown)
 */
private static Evaluation doTest(boolean isTrained, Classifier model, Instances train, Instances test,
        boolean show_text, boolean show_plot, TFigureAttribute attr) {
    Instances data = new Instances(train);
    Random rand = new Random(1);
    data.randomize(rand);
    Evaluation eval = null;
    try {
        eval = new Evaluation(train);
        if (isTrained) {
            simulated = eval.evaluateModel(model, test);
        } else {
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            simulated = eval.evaluateModel(clsCopy, test);
        }
        if (show_plot) {
            // Row 0 = observed, row 1 = simulated; transposed before plotting.
            observed = test.attributeToDoubleArray(test.classIndex());
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            String[] items = { "Observed", "Simulated" };
            attr.items = items;
            attr.figureCaption = model.getClass().getCanonicalName();
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation setup and test summary
            System.out.println();
            System.out.println("=== Setup for Test ===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + test.relationName());
            System.out.println();
            System.out.println(eval.toSummaryString("=== Test Results ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.feature.selection.FeatureSelectionInfluence.java

/**
 * Cross-validates {@code model} over {@code randData} using the given number
 * of folds, then prints the setup, the summary statistics, the per-class
 * details, the confusion matrix, and the overall accuracy.
 *
 * @param model    classifier to evaluate; a fresh copy is trained per fold
 * @param randData dataset to cross-validate on (assumed already randomized)
 * @param folds    number of cross-validation folds
 * @return the populated Evaluation, or null if an exception occurred before
 *         it was constructed (exceptions are logged, not rethrown)
 */
public static Evaluation getEvaluation(Instances randData, Classifier model, int folds) {
    Evaluation eval = null;
    try {
        eval = new Evaluation(randData);
        for (int fold = 0; fold < folds; fold++) {
            Instances trainSplit = randData.trainCV(folds, fold);
            Instances testSplit = randData.testCV(folds, fold);
            // Train a fresh copy per fold so the caller's model stays untouched.
            Classifier foldModel = Classifier.makeCopy(model);
            foldModel.buildClassifier(trainSplit);
            eval.evaluateModel(foldModel, testSplit);
        }

        // Print the evaluation setup followed by the aggregated results.
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println(
                "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
        System.out.println("Dataset: " + randData.relationName());
        System.out.println("Folds: " + folds);
        System.out.println();
        System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
        System.out.println(eval.toMatrixString("Confusion Matrix"));

        double accuracy = eval.correct() / eval.numInstances() * 100;
        System.out.println("correct:" + eval.correct() + "  " + accuracy + "%");
    } catch (Exception ex) {

        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:clasificador.Perceptron.java

/**
 * Trains a multilayer perceptron on the training ARFF file, evaluates it on
 * both the training data and a separate test file, and prints the summary,
 * confusion matrix, and each test instance's predicted class value.
 *
 * Fix: the original loop called {@code ev.evaluateModel(perceptron, testInstance)}
 * in both its condition and its body, re-running the full (and statistics-
 * accumulating) evaluation on every iteration. The predictions are now
 * computed exactly once and reused.
 */
public void perceptron_multicapa() {
    try {
        // Training instances for the classifier.
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Instances for testing the model.
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Build the classifier.
        MultilayerPerceptron perceptron = new MultilayerPerceptron();
        perceptron.buildClassifier(instancias);
        Evaluation ev = new Evaluation(instancias);

        // Evaluate on the training data itself (resubstitution estimate).
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO PERCEPTRN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Evaluate on the held-out test data, keeping the predictions so the
        // evaluation runs exactly once.
        double[] predictions = ev.evaluateModel(perceptron, testInstance);
        System.out.println("\n\nTEST DEL MODELO PERCEPTRN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Print the predicted class value for each test instance.
        for (int i = 0; i < predictions.length; i++) {
            System.out.println("Se clasifica como:  " + predictions[i]);
        }

    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:clasificador.Perceptron.java

/**
 * Trains a naive Bayes classifier on the training ARFF file, evaluates it on
 * both the training data and a separate test file, and prints the summary,
 * confusion matrix, and each test instance's predicted class value.
 *
 * Fix: the original loop called {@code ev.evaluateModel(perceptron, testInstance)}
 * in both its condition and its body, re-running the full (and statistics-
 * accumulating) evaluation on every iteration. The predictions are now
 * computed exactly once and reused.
 */
public void naive_Bayes() {
    try {
        // Training instances for the classifier.
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Instances for testing the model.
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Build the classifier (local name kept for interface consistency
        // with the sibling methods).
        NaiveBayes perceptron = new NaiveBayes();
        perceptron.buildClassifier(instancias);
        Evaluation ev = new Evaluation(instancias);

        // Evaluate on the training data itself (resubstitution estimate).
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Evaluate on the held-out test data, keeping the predictions so the
        // evaluation runs exactly once.
        double[] predictions = ev.evaluateModel(perceptron, testInstance);
        System.out.println("\n\nTEST DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Print the predicted class value for each test instance.
        for (int i = 0; i < predictions.length; i++) {
            System.out.println("Se clasifica como:  " + predictions[i]);
        }
    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.Perceptron.java

/**
 * Trains a J48 decision tree on the training ARFF file, evaluates it on the
 * training data, predicts the class of each instance in the prediction file
 * (writing polarity results via {@code archivoResultados}), and finally runs
 * a separate evaluation against the test file.
 *
 * Fix: the original prediction loop called
 * {@code evPredict.evaluateModel(perceptron, predictInstance)} in both its
 * condition and its body, re-running the full (and statistics-accumulating)
 * evaluation twice per iteration. The predictions are now computed exactly
 * once and reused.
 */
public void J48() {
    try {
        // Training instances for the classifier.
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Instances for testing the model.
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Instances to predict.
        ConverterUtils.DataSource converPredict = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro1.arff");
        Instances predictInstance = converPredict.getDataSet();
        predictInstance.setClassIndex(predictInstance.numAttributes() - 1);

        // Build the classifier (local name kept for interface consistency
        // with the sibling methods).
        J48 perceptron = new J48();
        perceptron.buildClassifier(instancias);
        Evaluation ev = new Evaluation(instancias);

        // Evaluate on the training data itself (resubstitution estimate).
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO ?RBOL DE DECISIN J48\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Predict with the model, keeping the predictions so the evaluation
        // runs exactly once.
        Evaluation evPredict = new Evaluation(instancias);
        double[] predictions = evPredict.evaluateModel(perceptron, predictInstance);

        System.out.println("\n\nPREDICCIN DEL MODELO ?RBOL DE DECISIN J48\n\n");
        System.out.println(evPredict.toSummaryString("_____RESULTADO_____", false));
        System.out.println(evPredict.toMatrixString("_____Matriz confusion___"));

        // Map every predicted class value to its polarity label.
        for (int i = 0; i < predictions.length; i++) {
            resultado = predictions[i];
            polaridad += polaridad(resultado) + "\n";
        }
        archivoResultados(polaridad);

        // Test the model against the held-out test data.
        Evaluation evtesting = new Evaluation(instancias);
        evtesting.evaluateModel(perceptron, testInstance);

        System.out.println("\n\nTEST DEL MODELO ?RBOL DE DECISIN J48\n\n");
        System.out.println(evtesting.toSummaryString("_____RESULTADO_____", false));
        System.out.println(evtesting.toMatrixString("_____Matriz confusion___"));
    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.github.r351574nc3.amex.assignment2.App.java

License:Open Source License

/**
 * Tests/evaluates the trained model. This method assumes that {@link #train()} was previously called to assign a {@link LinearRegression} 
 * classifier. If it wasn't, an exception will be thrown.
 *
 * @throws Exception if train wasn't called prior.
 *///  www  . j  a  va 2  s. c  o  m
/**
 * Tests/evaluates the trained model against the test set and logs the result
 * summary plus the percentage of correctly classified instances. Assumes
 * {@link #train()} was previously called to assign the classifier.
 *
 * @throws Exception if train wasn't called prior, or if evaluation fails.
 */
public void test() throws Exception {
    if (getClassifier() == null) {
        throw new RuntimeException("Make sure train was run prior to this method call");
    }

    Evaluation evaluation = new Evaluation(getTrained());
    evaluation.evaluateModel(getClassifier(), getTest());

    String summary = evaluation.toSummaryString("Results\n\n", false);
    info("%s", summary);
    info("Percent of correctly classified instances: %s", evaluation.pctCorrect());
}