Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Document

Evaluates a classifier with the options given in an array of strings.
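
For reference, here is a minimal sketch of calling this static method. It assumes a local ARFF file named data.arff whose last attribute is the class; the file name and the J48 classifier are illustrative choices, not taken from the examples below.

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;

public class EvaluateModelOptionsExample {
    public static void main(String[] args) throws Exception {
        // -t names the training file, -x requests 10-fold cross-validation
        String[] options = { "-t", "data.arff", "-x", "10" };
        // returns the evaluation results as text, much as the Weka command line prints them
        String results = Evaluation.evaluateModel(new J48(), options);
        System.out.println(results);
    }
}

Note that most of the examples below use the instance method evaluateModel(Classifier, Instances) instead, which returns the predictions as a double array.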

Usage

From source file:cezeri.evaluater.FactoryEvaluation.java

public static Evaluation performCrossValidateTestAlso(Classifier model, Instances datax, Instances test,
        boolean show_text, boolean show_plot) {
    TFigureAttribute attr = new TFigureAttribute();
    Random rand = new Random(1);
    Instances randData = new Instances(datax);
    randData.randomize(rand);

    Evaluation eval = null;
    int folds = randData.numInstances(); // one fold per instance, i.e. leave-one-out cross-validation
    try {
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            //                randData.randomize(rand);
            //                Instances train = randData;                
            Instances train = randData.trainCV(folds, n);
            //                Instances train = randData.trainCV(folds, n, rand);
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            Instances validation = randData.testCV(folds, n);
            //                Instances validation = test.testCV(test.numInstances(), n%test.numInstances());
            //                CMatrix.fromInstances(train).showDataGrid();
            //                CMatrix.fromInstances(validation).showDataGrid();

            simulated = FactoryUtils.concatenate(simulated, eval.evaluateModel(clsCopy, validation));
            observed = FactoryUtils.concatenate(observed,
                    validation.attributeToDoubleArray(validation.classIndex()));
        }

        if (show_plot) {
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            attr.figureCaption = "overall performance";
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation
            System.out.println();
            System.out.println("=== Setup for Overall Cross Validation===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + randData.relationName());
            System.out.println("Folds: " + folds);
            System.out.println("Seed: " + 1);
            System.out.println();
            System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.evaluater.FactoryEvaluation.java

private static Evaluation doTest(boolean isTrained, Classifier model, Instances train, Instances test,
        boolean show_text, boolean show_plot, TFigureAttribute attr) {
    Instances data = new Instances(train);
    Random rand = new Random(1);
    data.randomize(rand);
    Evaluation eval = null;
    try {
        //            double[] simulated = null;
        eval = new Evaluation(train);
        if (isTrained) {
            simulated = eval.evaluateModel(model, test);
        } else {
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            simulated = eval.evaluateModel(clsCopy, test);
        }
        if (show_plot) {
            observed = test.attributeToDoubleArray(test.classIndex());
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            String[] items = { "Observed", "Simulated" };
            attr.items = items;
            attr.figureCaption = model.getClass().getCanonicalName();
            f1.transpose().plot(attr);
            //                if (attr.axis[0].isEmpty() && attr.axis[1].isEmpty()) {
            //                    f1.transpose().plot(attr);
            //                } else {
            //                    f1.transpose().plot(model.getClass().getCanonicalName(), attr.items, attr.axis);
            //                }
        }
        if (show_text) {
            System.out.println();
            System.out.println("=== Setup for Test ===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + test.relationName());
            System.out.println();
            System.out.println(eval.toSummaryString("=== Test Results ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.feature.selection.FeatureSelectionInfluence.java

public static Evaluation getEvaluation(Instances randData, Classifier model, int folds) {
    Evaluation eval = null;
    try {
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test); // statistics accumulate in eval across folds
            //                double[] prediction = eval.evaluateModel(clsCopy, test);
            //                double[] original = getAttributeValues(test);
            //                double[][] d = new double[2][prediction.length];
            //                d[0] = prediction;
            //                d[1] = original;
            //                CMatrix f1 = new CMatrix(d);
        }

        // output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println(
                "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
        System.out.println("Dataset: " + randData.relationName());
        System.out.println("Folds: " + folds);
        System.out.println();
        System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
        System.out.println(eval.toMatrixString("Confusion Matrix"));

        double acc = eval.correct() / eval.numInstances() * 100;
        System.out.println("correct:" + eval.correct() + "  " + acc + "%");
    } catch (Exception ex) {

        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:Clases.RedNeuronal.RedNeuronal.java

public void redNeuronal(int puntaje, int tiempo, int error) throws Exception {
    // if score >= 200 then learned
    // if time <= 240 (4 minutes) then learned
    // if errors <= 3 then learned
    String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) };

    ConverterUtils.DataSource con = new ConverterUtils.DataSource(
            "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    //        ConverterUtils.DataSource con = new ConverterUtils.DataSource("E:\\Unl\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");

    Instances instances = con.getDataSet();
    System.out.println(instances);
    instances.setClassIndex(instances.numAttributes() - 1);

    MultilayerPerceptron mp = new MultilayerPerceptron();
    mp.buildClassifier(instances);

    Evaluation evalucion = new Evaluation(instances);
    evalucion.evaluateModel(mp, instances);
    System.out.println(evalucion.toSummaryString());
    System.out.println(evalucion.toMatrixString());

    String datosEntrada = null;
    String datosSalida = "no se puede predecir";
    for (int i = 0; i < instances.numInstances(); i++) {
        double predecido = mp.classifyInstance(instances.instance(i));
        datosEntrada = dato[0] + " " + dato[1] + " " + dato[2];
        if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0])
                && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1])
                && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) {
            datosSalida = instances.classAttribute().value((int) predecido);
        }
    }
    System.out.println("DATOS DE ENTRADA: " + datosEntrada);
    System.out.println("SALIDA PREDECIDA: " + datosSalida);

    switch (datosSalida) {
    case "0":
        resultado = "Excelente ha aprendido";
        imgResultado = "Excelente.jpg";
        imgREDneuronal = "0.png";
        System.out.println("Excelente ha aprendido");
        break;
    case "1":
        resultado = "Disminuir Errores";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "1.png";
        System.out.println("Disminuir Errores");
        break;
    case "2":
        resultado = "Disminuir Tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "2.png";
        System.out.println("Disminuir Tiempo");
        break;
    case "3":
        resultado = "Disminuir Errores y tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "3.png";
        System.out.println("Disminuir Errores y tiempo");
        break;
    case "4":
        resultado = "Subir Puntaje";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "4.png";
        System.out.println("Subir Puntaje");
        break;
    case "5":
        resultado = "Subir Puntaje y disminuir Errores";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "5.png";
        System.out.println("Subir Puntaje y disminuir Errores");
        break;
    case "6":
        resultado = "Subir Puntaje y disminuir Tiempo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "6.png";
        System.out.println("Subir Puntaje y disminuir Tiempo");
        break;
    case "7":
        resultado = "Ponle mas Empeño";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "7.png";
        System.out.println("Ponle mas Empeño");
        break;
    default:
        resultado = "Verifique entradas, no se puede predecir";
        imgResultado = "Error.jpg";
        System.out.println("Verifique entradas, no se puede predecir");
        break;
    }
}

From source file:clasificador.Perceptron.java

public void perceptron_multicapa() {
    try {
        // INSTANCES FOR TRAINING THE CLASSIFIER
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // INSTANCES FOR TESTING THE MODEL
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // BUILD THE CLASSIFIER
        MultilayerPerceptron perceptron = new MultilayerPerceptron();
        perceptron.buildClassifier(instancias);
        // Evaluate the instances
        Evaluation ev = new Evaluation(instancias);
        // EVALUATE THE MODEL ON THE TRAINING DATA
        ev.evaluateModel(perceptron, instancias);
        //System.out.println(instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO PERCEPTRÓN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // EVALUATE THE MODEL ON THE TEST DATA
        ev.evaluateModel(perceptron, testInstance);
        //System.out.println(instancias);
        System.out.println("\n\nTEST DEL MODELO PERCEPTRÓN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // SHOW THE PREDICTED VALUES
        // evaluate once and reuse the returned predictions instead of re-evaluating on every iteration
        double[] predicciones = ev.evaluateModel(perceptron, testInstance);
        for (int i = 0; i < predicciones.length; i++) {
            System.out.println("Se clasifica como:  " + predicciones[i]);
        }

    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:clasificador.Perceptron.java

public void naive_Bayes() {
    try {
        // INSTANCES FOR TRAINING THE CLASSIFIER
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // INSTANCES FOR TESTING THE MODEL
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // BUILD THE CLASSIFIER
        NaiveBayes perceptron = new NaiveBayes();
        perceptron.buildClassifier(instancias);
        // Evaluate the instances
        Evaluation ev = new Evaluation(instancias);
        // EVALUATE THE MODEL ON THE TRAINING DATA
        ev.evaluateModel(perceptron, instancias);
        //System.out.println(instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // EVALUATE THE MODEL ON THE TEST DATA
        ev.evaluateModel(perceptron, testInstance);
        //System.out.println(instancias);
        System.out.println("\n\nTEST DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // SHOW THE PREDICTED VALUES
        // evaluate once and reuse the returned predictions instead of re-evaluating on every iteration
        double[] predicciones = ev.evaluateModel(perceptron, testInstance);
        for (int i = 0; i < predicciones.length; i++) {
            System.out.println("Se clasifica como:  " + predicciones[i]);
        }
    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.Perceptron.java

public void J48() {
    try {
        // INSTANCES FOR TRAINING THE CLASSIFIER
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // INSTANCES FOR TESTING THE MODEL
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);
        // INSTANCES FOR PREDICTION
        ConverterUtils.DataSource converPredict = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro1.arff");
        Instances predictInstance = converPredict.getDataSet();
        predictInstance.setClassIndex(predictInstance.numAttributes() - 1);
        // BUILD THE CLASSIFIER
        J48 perceptron = new J48();
        perceptron.buildClassifier(instancias);
        // Evaluate the instances
        Evaluation ev = new Evaluation(instancias);
        // EVALUATE THE MODEL ON THE TRAINING DATA
        ev.evaluateModel(perceptron, instancias);
        //System.out.println(instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO ÁRBOL DE DECISIÓN J48\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // PREDICT WITH THE MODEL
        Evaluation evPredict = new Evaluation(instancias);
        evPredict.evaluateModel(perceptron, predictInstance);

        //System.out.println(instancias);
        System.out.println("\n\nPREDICCIÓN DEL MODELO ÁRBOL DE DECISIÓN J48\n\n");
        System.out.println(evPredict.toSummaryString("_____RESULTADO_____", false));
        System.out.println(evPredict.toMatrixString("_____Matriz confusion___"));

        // SHOW THE PREDICTED VALUES
        // evaluate once and reuse the returned predictions instead of re-evaluating on every iteration
        double[] predicciones = evPredict.evaluateModel(perceptron, predictInstance);
        for (int i = 0; i < predicciones.length; i++) {
            resultado = predicciones[i];
            polaridad += polaridad(resultado) + "\n";
            //System.out.println("Se clasifica como:  " + resultado + " que es: " + polaridad(resultado));
        }
        archivoResultados(polaridad);

        // TEST THE MODEL WITH THE CLASSIFIER DATA
        Evaluation evtesting = new Evaluation(instancias);
        evtesting.evaluateModel(perceptron, testInstance);

        //System.out.println(instancias);
        System.out.println("\n\nTEST DEL MODELO ÁRBOL DE DECISIÓN J48\n\n");
        System.out.println(evtesting.toSummaryString("_____RESULTADO_____", false));
        System.out.println(evtesting.toMatrixString("_____Matriz confusion___"));
    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

private void runDropsSteped(String algo, Prototyper prototype) {
    try {
        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();

        for (int i = 1; i <= nbPrototypesMax; i++) {
            prototype.setNbPrototypesPerClass(i);
            prototype.setFillPrototypes(false);

            startTime = System.currentTimeMillis();
            prototype.buildClassifier(train);
            endTime = System.currentTimeMillis();
            duration = endTime - startTime;

            int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(prototype.prototypes, train);

            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(prototype, test);

            double testError = eval.errorRate();
            double trainError = Double.NaN;

            out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, (i * train.numClasses()), duration,
                    trainError, testError, Arrays.toString(classDistrib));
            out.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

private void runDrops(String algo, Prototyper prototype) {
    try {
        for (int i = 1; i <= this.train.numInstances(); i++) {
            prototype.setNbPrototypesPerClass(i);
            prototype.setFillPrototypes(false);

            startTime = System.currentTimeMillis();
            prototype.buildClassifier(train);
            endTime = System.currentTimeMillis();
            duration = endTime - startTime;

            int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(prototype.prototypes, train);

            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(prototype, test);

            double testError = eval.errorRate();
            // double trainError = prototype.predictAccuracyXVal(10);
            double trainError = Double.NaN;

            out.format("%s;%s;%d;%d;%.4f;%.4f;%s\n", dataName, algo, i, duration, trainError, testError,
                    Arrays.toString(classDistrib));
            out.flush();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:classif.ExperimentsLauncher.java

License:Open Source License

public void launchKMeans() {
    try {
        //         File f = new File(rep + "/" + dataName + "_results.csv");
        //         // if somebody is processing it
        //         if (f.exists()) {
        //            return;
        //         }
        //
        //         out = new PrintStream(new FileOutputStream(rep + "/KMeansDTW_" + "all" + "_results.csv", true));
        //         out.println("dataset,algorithm,nbPrototypes,testErrorRate,trainErrorRate");
        String algo = "KMEANS";
        System.out.println(algo);
        //         PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append));

        nbPrototypesMax = this.train.numInstances() / this.train.numClasses();
        if (nbPrototypesMax > 10)
            nbPrototypesMax = 10;
        int tmp;
        tmp = nbExp;

        for (int j = 1; j <= nbPrototypesMax; j++) {
            if (j == 1)
                nbExp = 1;
            else
                nbExp = tmp;
            System.out.println("nbPrototypes=" + j);
            for (int n = 0; n < nbExp; n++) {
                System.out.println("This is the " + n + " time.");
                DTWKNNClassifierKMeans classifierKMeans = new DTWKNNClassifierKMeans();
                classifierKMeans.setNbPrototypesPerClass(j);
                classifierKMeans.setFillPrototypes(true);

                startTime = System.currentTimeMillis();
                classifierKMeans.buildClassifier(train);
                endTime = System.currentTimeMillis();
                duration = endTime - startTime;
                //               Duration traintime = Duration.ofMillis(duration);
                //               System.out.println(traintime);

                int[] classDistrib = PrototyperUtil
                        .getPrototypesPerClassDistribution(classifierKMeans.prototypes, train);

                Evaluation eval = new Evaluation(train);
                eval.evaluateModel(classifierKMeans, test);

                double testError = eval.errorRate();
                System.out.println("TestError:" + testError + "\n");

                //               PrototyperUtil.savePrototypes(classifierKMeans.prototypes, rep + "/" + dataName + "_KMEANS[" + j + "]_XP" + n + ".proto");

                //               out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError);
                //               out.flush();
            }

        }
        //         outProto.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}