Example usage for weka.classifiers Evaluation pctCorrect

List of usage examples for weka.classifiers Evaluation pctCorrect

Introduction

On this page you can find example usage for weka.classifiers.Evaluation.pctCorrect().

Prototype

public final double pctCorrect() 

Document

Gets the percentage of instances correctly classified (that is, for which a correct prediction was made).
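
As a quick orientation before the full examples below, here is a minimal, self-contained sketch of how pctCorrect() is typically obtained from a cross-validation run (the dataset path "iris.arff" is a placeholder, not taken from any of the examples):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class PctCorrectDemo {
    public static void main(String[] args) throws Exception {
        // load a dataset; the file path is a placeholder
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1); // last attribute is the class

        // 10-fold cross-validation of a J48 decision tree
        J48 tree = new J48();
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(tree, data, 10, new Random(1));

        // pctCorrect() returns a percentage in [0, 100]
        System.out.println("Correctly classified: " + eval.pctCorrect() + "%");
    }
}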

Usage

From source file:classify.Classifier.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    //read in data
    try {
        DataSource input = new DataSource("no_missing_values.csv");
        Instances data = input.getDataSet();
        //Instances data = readFile("newfixed.txt");
        missingValuesRows(data);

        setAttributeValues(data);
        data.setClassIndex(data.numAttributes() - 1);

        //boosting
        AdaBoostM1 boosting = new AdaBoostM1();
        boosting.setNumIterations(25);
        boosting.setClassifier(new DecisionStump());

        //build the classifier
        boosting.buildClassifier(data);

        //evaluate using 10-fold cross validation
        Evaluation e1 = new Evaluation(data);
        e1.crossValidateModel(boosting, data, 10, new Random(1));

        DecimalFormat nf = new DecimalFormat("0.000");

        System.out.println("Results of Boosting with Decision Stumps:");
        System.out.println(boosting.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");

        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");

        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + "   ");
            }
            System.out.println();
        }
        System.out.println();
        System.out.println();
        System.out.println();

        //logistic regression
        Logistic l = new Logistic();
        l.buildClassifier(data);

        e1 = new Evaluation(data);

        e1.crossValidateModel(l, data, 10, new Random(1));
        System.out.println("Results of Logistic Regression:");
        System.out.println(l.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");

        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");

        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + "   ");
            }
            System.out.println();
        }

    } catch (Exception ex) {
        // data couldn't be read, so report the problem and end the program
        System.out.println("Exception thrown, program ending: " + ex.getMessage());
    }
}

From source file:com.actelion.research.orbit.imageAnalysis.tasks.ObjectTrainWorker.java

License:Open Source License

@Override
protected void doWork() {
    if (dontRun) {
        dontRun = false;
        return;
    }
    trainSet = null;
    if (modelToBuild != null && modelToBuild.getClassifier() != null)
        modelToBuild.getClassifier().setBuild(false);
    List<double[]> trainData = new ArrayList<double[]>();
    int mipLayer = -1; // used for checking if all iFrames (with trainData) have the same mipLayer (otherwise the model cannot be trained)
    for (ImageFrame iFrame : iFrames) {
        int sampleSize = Math.min(3, iFrame.recognitionFrame.bimg.getImage().getSampleModel().getNumBands()); // was always 1 before! (max 3 because alpha should be ignored)
        for (int i = 0; i < iFrame.recognitionFrame.getClassShapes().size(); i++) {
            checkPaused();
            List<Shape> shapes = iFrame.recognitionFrame.getClassShapes().get(i).getShapeList();
            if (shapes != null && shapes.size() > 0) {
                if (mipLayer < 0) {
                    mipLayer = iFrame.getMipLayer();
                    logger.trace("iFrame candidate mipLayer {} from iFrame with width {}", mipLayer,
                            iFrame.recognitionFrame.bimg.getWidth());
                } else {
                    if (mipLayer != iFrame.getMipLayer()) {
                        logger.error(
                                "Cell classifier cannot be trained on different image layers. Please use only training data of the same image layer.");
                        return;
                    }
                }
                if (mipLayer != modelToBuild.getMipLayer()) {
                    // only same layer as segmentation allowed. Otherwise the cell features must be scaled, too (which is not yet the case).
                    logger.error("Cell classifier must be trained on same layer as segmentation");
                    return;
                }
            }
            trainData.addAll(new ObjectFeatureBuilderTiled(null).buildFeatures(shapes, i + 1,
                    iFrame.recognitionFrame, iFrame.recognitionFrame.getClassImage(), sampleSize, 0, 0)); // classes 1.0, 2.0, ...
        }
    }
    logger.trace("train levelNum: {}", mipLayer);
    if (trainData.size() == 0) {
        logger.error("trainset is empty, classifier cannot be trained.");
        trainSet = null;
        return;
    }
    if (isCancelled()) {
        cleanUp();
        return;
    }
    timeEst = 1000 * 10L;
    setProgress(10);

    logger.debug("trainData contains " + trainData.size() + " samples");

    Attribute classAttr = null;
    // the first time, create a new trainSet; all further trainings append new instances
    if (trainSet == null) {
        // build traindata header
        double[] firstRowAll = trainData.get(0);
        double[] firstRow = Arrays.copyOfRange(firstRowAll, 0,
                firstRowAll.length - ObjectFeatureBuilderTiled.SkipTailForClassification);
        ArrayList<Attribute> attrInfo = new ArrayList<Attribute>(firstRow.length);
        for (int a = 0; a < firstRow.length - 1; a++) {
            Attribute attr = new Attribute("a" + a);
            // if (a<firstRow.length-2) attr.setWeight(0.1d); else attr.setWeight(1.0d);
            attrInfo.add(attr);
        }
        List<String> classValues = new ArrayList<String>(
                iFrames.get(0).recognitionFrame.getClassShapes().size());
        for (int i = 0; i < iFrames.get(0).recognitionFrame.getClassShapes().size(); i++) {
            classValues.add((i + 1) + ".0"); // "1.0", "2.0", ...
        }
        classAttr = new Attribute("class", classValues);
        attrInfo.add(classAttr);

        trainSet = new Instances("trainSet pattern classes", attrInfo, trainData.size());
        trainSet.setClassIndex(firstRow.length - 1);
    } else
        classAttr = trainSet.attribute("class");

    // add instances
    for (double[] valsAll : trainData) {
        // skip some irrelevant attributes like centerX/Y
        double[] vals = Arrays.copyOfRange(valsAll, 0,
                valsAll.length - ObjectFeatureBuilderTiled.SkipTailForClassification);
        vals[vals.length - 1] = valsAll[valsAll.length - 1]; // class value

        double classV = classAttr.indexOfValue(Double.toString(vals[vals.length - 1]));
        vals[vals.length - 1] = classV;
        Instance inst = new DenseInstance(1.0d, vals);
        trainSet.add(inst);
    }
    // trainSet = trainSet.resample(rand);
    logger.debug("trainSet contains " + trainSet.numInstances() + " instances");

    if (logger.isTraceEnabled())
        logger.trace(trainSet.toString());

    // building classifier
    if (isCancelled()) {
        cleanUp();
        return;
    }
    checkPaused();
    timeEst = 1000 * 5L;
    setProgress(20);
    logger.info("Start training classifier... ");
    classifier = new ClassifierWrapper(new weka.classifiers.functions.SMO());
    try {
        classifier.buildClassifier(trainSet);
        classifier.setBuild(true);
        modelToBuild.setClassifier(classifier);
        modelToBuild.setStructure(trainSet.stringFreeStructure());
        modelToBuild.setCellClassification(true);
        modelToBuild.setMipLayer(mipLayer);
        setProgress(85);

        // evaluation
        StringBuilder cnamesInfo = new StringBuilder(
                "Evaluation for object classification model with classes: ");
        for (int i = 0; i < modelToBuild.getClassShapes().size(); i++) {
            cnamesInfo.append(modelToBuild.getClassShapes().get(i).getName());
            if (i < modelToBuild.getClassShapes().size() - 1)
                cnamesInfo.append(", ");
        }
        logger.info(cnamesInfo.toString());
        Evaluation evaluation = new Evaluation(trainSet);
        evaluation.evaluateModel(classifier.getClassifier(), trainSet);
        logger.info(evaluation.toSummaryString());
        if (evaluation.pctCorrect() < OrbitUtils.ACCURACY_WARNING) {
            String w = "Warning: The model classifies the training objects only with an accuracy of "
                    + evaluation.pctCorrect()
                    + "%.\nThat means that the marked objects are not diverse enough.\nYou might want to remove some marked objects and mark some more representative ones.\nHowever, you can still use this model if you want (check the object classification).";
            logger.warn(w);
            if (withGUI && !ScaleoutMode.SCALEOUTMODE.get()) {
                JOptionPane.showMessageDialog(null, w, "Warning: Low accuracy", JOptionPane.WARNING_MESSAGE);
            }
        }

    } catch (Exception e) {
        classifier = null;
        logger.error("error training classifier: ", e);
    }
    logger.info("training done.");
    timeEst = 0L;
    setProgress(100);

}

From source file:com.actelion.research.orbit.imageAnalysis.tasks.TrainWorker.java

License:Open Source License

private void trainClassifier() throws OrbitImageServletException {
    logger.debug("start trainClassifier");
    if (modelToBuild != null && modelToBuild.getClassifier() != null)
        modelToBuild.getClassifier().setBuild(false);
    trainSet = null;
    List<double[]> trainData = new ArrayList<double[]>();
    int mipLayer = -1; // used for checking if all iFrames (with trainData) have the same mipLayer (otherwise the model cannot be trained)
    for (ImageFrame iFrame : iFrames) {
        if (logger.isTraceEnabled())
            logger.trace(
                    iFrame.getTitle() + ": #ClassShapes: " + iFrame.recognitionFrame.getClassShapes().size());
        for (int i = 0; i < iFrame.recognitionFrame.getClassShapes().size(); i++) {
            //  checkPaused();
            if (iFrame.recognitionFrame.getClassShapes().get(i).getShapeList().size() > 0) { // set and check mip level only for iFrames with shapes (training data)
                if (mipLayer < 0) {
                    mipLayer = iFrame.getMipLayer();
                    logger.trace("iFrame candidate mipLayer {} from iFrame with width {}", mipLayer,
                            iFrame.recognitionFrame.bimg.getWidth());
                } else {
                    if (mipLayer != iFrame.getMipLayer()) {
                        logger.error(
                                "Model cannot be trained on different image layers. Please use only training data of the same image layer.");
                        return;
                    }
                }
            }
            List<Shape> shapes = iFrame.recognitionFrame.getClassShapes().get(i).getShapeList();
            trainData.addAll(getFeatures(shapes, i + 1, iFrame.recognitionFrame.bimg)); // classes 1.0, 2.0, ...
        }
    }
    logger.trace("train levelNum: {}", mipLayer);

    if (trainData.size() == 0) {
        logger.error("trainset is empty, classifier cannot be trained.");
        trainSet = null;
        return;
    }
    if (isCancelled()) {
        logger.debug("canceled");
        cleanUp();
        return;
    }
    timeEst = 1000 * 10L;
    setProgress(10);

    logger.debug("trainData contains " + trainData.size() + " samples");

    // limit training instances
    if (trainData.size() > MAXINST) {
        Collections.shuffle(trainData, rand);
        trainData = trainData.subList(0, MAXINST);
        logger.debug("trainSet shirked to " + trainData.size() + " instances");
    }

    Attribute classAttr = null;
    // the first time, create a new trainSet; all further trainings append new instances
    if (trainSet == null) {
        // build traindata header
        double[] firstRow = trainData.get(0);
        ArrayList<Attribute> attrInfo = new ArrayList<Attribute>(firstRow.length);
        for (int a = 0; a < firstRow.length - 1; a++) {
            Attribute attr = new Attribute("a" + a);
            // if (a<firstRow.length-2) attr.setWeight(0.1d); else attr.setWeight(1.0d);
            attrInfo.add(attr);
        }
        List<String> classValues = new ArrayList<String>(
                iFrames.get(0).recognitionFrame.getClassShapes().size());
        for (int i = 0; i < iFrames.get(0).recognitionFrame.getClassShapes().size(); i++) {
            classValues.add((i + 1) + ".0"); // "1.0", "2.0", ...
        }
        classAttr = new Attribute("class", classValues);
        attrInfo.add(classAttr);

        trainSet = new Instances("trainSet pattern classes", attrInfo, trainData.size());
        trainSet.setClassIndex(firstRow.length - 1);
    } else
        classAttr = trainSet.attribute("class");

    // add instances
    for (double[] vals : trainData) {
        double classV = classAttr.indexOfValue(Double.toString(vals[vals.length - 1]));
        vals[vals.length - 1] = classV;
        //Instance inst = new Instance(1.0d, vals);
        Instance inst = new DenseInstance(1.0d, vals);
        trainSet.add(inst);
    }
    trainSet = trainSet.resample(rand);
    logger.debug("trainSet contains " + trainSet.numInstances() + " instances");

    // building classifier
    if (isCancelled()) {
        cleanUp();
        return;
    }
    checkPaused();
    timeEst = 1000 * 5L;
    setProgress(20);
    logger.info("Start training classifier... ");
    Classifier c;
    /*
    // experiments with deep learning... do not use in production.
    if (AparUtils.DEEPORBIT) {
    FeatureDescription fd = modelToBuild!=null? modelToBuild.getFeatureDescription(): new FeatureDescription();
    TissueFeatures tissueFeaturre = AparUtils.createTissueFeatures(fd, null);
    int numOutNeurons = modelToBuild.getClassShapes().size();
    int numInNeurons = tissueFeaturre.prepareDoubleArray().length-1;
    logger.debug("numNeuronsIn:"+numInNeurons+"  numNeuronsOut:"+numOutNeurons);
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(numInNeurons,100, numOutNeurons);
    for (int a=0; a<numOutNeurons; a++) {
      neuralNet.getOutputNeurons()[a].setLabel("class"+a);
    }
    neuralNet.connectInputsToOutputs();
            
    MomentumBackpropagation mb = new MomentumBackpropagation();
    mb.setLearningRate(0.2d);
    mb.setMomentum(0.7d);
    //mb.setMaxIterations(20);
    mb.setMaxError(0.12);
    neuralNet.setLearningRule(mb);
    c = new WekaNeurophClassifier(neuralNet);
            
    } else {
    c = new weka.classifiers.functions.SMO();
    }
    */
    c = new weka.classifiers.functions.SMO();

    //weka.classifiers.functions.LibSVM c = new weka.classifiers.functions.LibSVM();
    //Classifier c = new weka.classifiers.trees.J48();
    classifier = new ClassifierWrapper(c);
    //classifier = new weka.classifiers.bayes.BayesNet();
    //classifier = new weka.classifiers.functions.MultilayerPerceptron();
    //((weka.classifiers.functions.SMO)classifier).setKernel(new weka.classifiers.functions.supportVector.RBFKernel());
    try {

        classifier.buildClassifier(trainSet);
        classifier.setBuild(true);
        modelToBuild.setClassifier(classifier);
        modelToBuild.setStructure(trainSet.stringFreeStructure());
        modelToBuild.setCellClassification(false);
        modelToBuild.setMipLayer(mipLayer);
        logger.debug("training done");

        // evaluation
        StringBuilder cnamesInfo = new StringBuilder("Evaluation for model with classes: ");
        for (int i = 0; i < modelToBuild.getClassShapes().size(); i++) {
            cnamesInfo.append(modelToBuild.getClassShapes().get(i).getName());
            if (i < modelToBuild.getClassShapes().size() - 1)
                cnamesInfo.append(", ");
        }
        logger.info(cnamesInfo.toString());
        Evaluation evaluation = new Evaluation(trainSet);
        evaluation.evaluateModel(classifier.getClassifier(), trainSet);
        logger.info(evaluation.toSummaryString());
        if (evaluation.pctCorrect() < OrbitUtils.ACCURACY_WARNING) {
            final String w = "Warning: The model classifies the training shapes only with an accuracy of "
                    + evaluation.pctCorrect()
                    + "%.\nThat means that the drawn class shapes are not diverse enough.\nYou might want to remove some class shapes and mark some more representative regions.\nHowever, you can still use this model if you want (check the classification).";
            logger.warn(w);
            if (withGUI && !ScaleoutMode.SCALEOUTMODE.get()) {
                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        JOptionPane.showMessageDialog(null, w, "Warning: Low accuracy",
                                JOptionPane.WARNING_MESSAGE);
                    }
                });
            }
        }

    } catch (Exception e) {
        classifier = null;
        logger.error("error training classifier", e);
    }
    //   logger.trace(classifier.toString());

}

From source file:com.github.r351574nc3.amex.assignment2.App.java

License:Open Source License

/**
 * Tests/evaluates the trained model. This method assumes that {@link #train()} was previously called to assign a {@link LinearRegression} 
 * classifier. If it wasn't, an exception will be thrown.
 *
 * @throws Exception if train wasn't called first.
 */
public void test() throws Exception {
    if (getClassifier() == null) {
        throw new RuntimeException("Make sure train was run prior to this method call");
    }

    final Evaluation eval = new Evaluation(getTrained());
    eval.evaluateModel(getClassifier(), getTest());
    info("%s", eval.toSummaryString("Results\n\n", false));
    info("Percent of correctly classified instances: %s", eval.pctCorrect());
}
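
Since test() requires that train() has already assigned the classifier, a hypothetical call sequence (the App construction details are placeholders, not part of the original source) might look like:

App app = new App();
app.train(); // fits the LinearRegression classifier and keeps the trained header
app.test();  // evaluates on the test set and logs eval.pctCorrect()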

From source file:Controller.CtlDataMining.java

public String redBayesiana(Instances data) {
    try {
        // create a Naive Bayes classifier
        NaiveBayes nb = new NaiveBayes();

        // train the classifier on the data
        nb.buildClassifier(data);

        // create an object to validate the Naive Bayes model
        Evaluation evalB = new Evaluation(data);

        /* Apply the Bayesian classifier with 10-fold
        cross-validation; Random(1) seeds the shuffling
        used to split the data into the 10 folds */
        evalB.crossValidateModel(nb, data, 10, new Random(1));

        String resBay = "<br><br><b><center>Resultados NaiveBayes</center>" + "<br>========<br>"
                + "Modelo generado indica los siguientes resultados:" + "<br>========<br></b>";
        // collect the results
        resBay = resBay
                + ("<b>1. Numero de instancias clasificadas:</b> " + (int) evalB.numInstances() + "<br>");
        resBay = resBay + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalB.pctCorrect()) + "%<br>");
        resBay = resBay + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b> "
                + (int) evalB.correct() + "<br>");
        resBay = resBay + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalB.pctIncorrect()) + "%<br>");
        resBay = resBay + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalB.incorrect() + "<br>");
        resBay = resBay + ("<b>6. Media del error absoluto:</b> " + formato.format(evalB.meanAbsoluteError())
                + "%<br>");
        resBay = resBay
                + ("<b>7. " + evalB.toMatrixString("Matriz de " + "confusion</b>").replace("\n", "<br>"));

        return resBay;

    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}

From source file:Controller.CtlDataMining.java

public String arbolJ48(Instances data) {
    try {
        // create a J48 classifier
        J48 j48 = new J48();
        // train the J48 classifier on the data
        j48.buildClassifier(data);

        // create an object to validate the J48 model
        Evaluation evalJ48 = new Evaluation(data);

        /* Apply the J48 classifier with 10-fold cross-validation,
        seeding the random shuffling with 1 */
        evalJ48.crossValidateModel(j48, data, 10, new Random(1));
        // collect the results
        String resJ48 = "<br><b><center>Resultados Arbol de decision J48"
                + "</center><br>========<br>Modelo generado indica los "
                + "siguientes resultados:<br>========<br></b>";

        resJ48 = resJ48
                + ("<b>1. Numero de instancias clasificadas:</b> " + (int) evalJ48.numInstances() + "<br>");
        resJ48 = resJ48 + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctCorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b>"
                + (int) evalJ48.correct() + "<br>");
        resJ48 = resJ48 + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctIncorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalJ48.incorrect() + "<br>");
        resJ48 = resJ48 + ("<b>6. Media del error absoluto:</b> " + formato.format(evalJ48.meanAbsoluteError())
                + "<br>");
        resJ48 = resJ48
                + ("<b>7. " + evalJ48.toMatrixString("Matriz de" + " confusion</b>").replace("\n", "<br>"));

        // PLOT THE GENERATED TREE
        // create a temporary JFrame
        final javax.swing.JFrame jf = new javax.swing.JFrame("Arbol de decision: J48");
        /* set a size */
        jf.setSize(500, 400);
        /* use a BorderLayout */
        jf.getContentPane().setLayout(new BorderLayout());
        /* Instantiate the tree visualizer with the J48 tree.
        Parameters: (listener, tree graph, node placer).
        PlaceNode2 places the nodes so they fall uniformly
        below their parent */
        TreeVisualizer tv = new TreeVisualizer(null, j48.graph(), new PlaceNode2());
        /* add the tree, centered */
        jf.getContentPane().add(tv, BorderLayout.CENTER);
        /* add a listener for the window close button */
        jf.addWindowListener(new java.awt.event.WindowAdapter() {
            @Override
            public void windowClosing(java.awt.event.WindowEvent e) {
                jf.dispose();
            }
        });
        /* show it */
        jf.setVisible(true);
        /* fit the tree to the frame width */
        tv.fitToScreen();

        return resJ48;

    } catch (Exception e) {
        return "El error es" + e.getMessage();

    }
}

From source file:de.fub.maps.project.detector.model.inference.ui.EvaluationPanel.java

License:Apache License

public void updatePanel(Evaluation evaluation) {
    DefaultCategoryDataset dataset = getBarChartPanel().getDataset();
    dataset.clear();

    this.evaluation = evaluation;
    double correct = evaluation.pctCorrect();
    double incorrect = evaluation.pctIncorrect();

    getCorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, correct));
    getIncorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, incorrect));

    int numClasses = evaluation.getHeader().numClasses();
    for (int classIndex = 0; classIndex < numClasses; classIndex++) {
        double precision = evaluation.precision(classIndex) * 100;
        double recall = evaluation.recall(classIndex) * 100;
        dataset.addValue(precision,
                NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Precision_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
        dataset.addValue(recall, NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Recall_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
    }

    getExplorerManager()
            .setRootContext(new AbstractNode(Children.create(new EvaluationNodeFactory(evaluation), true)));
    repaint();
}

From source file:epsi.i5.datamining.Weka.java

public void excutionAlgo() throws FileNotFoundException, IOException, Exception {
    BufferedReader reader = new BufferedReader(new FileReader("src/epsi/i5/data/" + fileOne + ".arff"));
    Instances data = new Instances(reader);
    reader.close();
    //System.out.println(data.attribute(0));
    data.setClass(data.attribute(0));
    NaiveBayes NB = new NaiveBayes();
    NB.buildClassifier(data);
    Evaluation naiveBayes = new Evaluation(data);
    naiveBayes.crossValidateModel(NB, data, 10, new Random(1));
    naiveBayes.evaluateModel(NB, data);
    //System.out.println(test.confusionMatrix() + "1");
    //System.out.println(test.correct() + "2");
    System.out.println("*****************************");
    System.out.println("******** Naive Bayes ********");
    System.out.println(naiveBayes.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(naiveBayes.pctCorrect());
    System.out.println("");
    J48 j = new J48();
    j.buildClassifier(data);
    Evaluation jeval = new Evaluation(data);
    jeval.crossValidateModel(j, data, 10, new Random(1));
    jeval.evaluateModel(j, data);
    System.out.println("*****************************");
    System.out.println("************ J48 ************");
    System.out.println(jeval.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(jeval.pctCorrect());
    System.out.println("");
    DecisionTable DT = new DecisionTable();
    DT.buildClassifier(data);
    Evaluation decisionTable = new Evaluation(data);
    decisionTable.crossValidateModel(DT, data, 10, new Random(1));
    decisionTable.evaluateModel(DT, data);
    System.out.println("*****************************");
    System.out.println("******* DecisionTable *******");
    System.out.println(decisionTable.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(decisionTable.pctCorrect());
    System.out.println("");
    OneR OR = new OneR();
    OR.buildClassifier(data);
    Evaluation oneR = new Evaluation(data);
    oneR.crossValidateModel(OR, data, 10, new Random(1));
    oneR.evaluateModel(OR, data);
    System.out.println("*****************************");
    System.out.println("************ OneR ***********");
    System.out.println(oneR.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(oneR.pctCorrect());

    // Polarity: switch the class to the second attribute
    data.setClass(data.attribute(1));
    System.out.println("");
    M5Rules MR = new M5Rules();
    MR.buildClassifier(data);
    Evaluation m5rules = new Evaluation(data);
    m5rules.crossValidateModel(MR, data, 10, new Random(1));
    m5rules.evaluateModel(MR, data);
    System.out.println("*****************************");
    System.out.println("********** M5Rules **********");
    System.out.println(m5rules.correlationCoefficient());

    System.out.println("");
    LinearRegression LR = new LinearRegression();
    LR.buildClassifier(data);
    Evaluation linearR = new Evaluation(data);
    linearR.crossValidateModel(LR, data, 10, new Random(1));
    linearR.evaluateModel(LR, data);
    System.out.println("*****************************");
    System.out.println("********** linearR **********");
    System.out.println(linearR.correlationCoefficient());
}

From source file:es.upm.dit.gsi.barmas.launcher.WekaClassifiersValidator.java

License:Open Source License

/**
 * @param cls
 * @param trainingData
 * @param testData
 * @param leba
 * @return [0] = pctCorrect, [1] = pctIncorrect
 * @throws Exception
 */
public double[] getValidation(Classifier cls, Instances trainingData, Instances testData, int leba)
        throws Exception {

    Instances testDataWithLEBA = new Instances(testData);

    for (int j = 0; j < leba; j++) {
        if (j < testDataWithLEBA.numAttributes() - 1) {
            for (int i = 0; i < testDataWithLEBA.numInstances(); i++) {
                testDataWithLEBA.instance(i).setMissing(j);
            }
        }
    }

    Evaluation eval;
    try {
        eval = new Evaluation(trainingData);
        logger.fine("Evaluating model with leba: " + leba);
        eval.evaluateModel(cls, testDataWithLEBA);

        double[] results = new double[2];
        results[0] = eval.pctCorrect() / 100;
        results[1] = eval.pctIncorrect() / 100;
        return results;
    } catch (Exception e) {
        logger.severe("Problems evaluating model for " + cls.getClass().getSimpleName());
        logger.severe(e.getMessage());
        e.printStackTrace();
        throw e;
    }
}
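
A hypothetical call site for getValidation (the validator, train and test variables are placeholders); note the returned values are fractions in [0, 1] because the method divides pctCorrect() and pctIncorrect() by 100:

// hypothetical usage; validator, train and test are placeholders
double[] rates = validator.getValidation(new weka.classifiers.trees.J48(), train, test, 0);
System.out.println("accuracy: " + rates[0] + ", error: " + rates[1]);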

From source file:function.CrossValidation.java

public static double crossValidationrate(Instances data, AbstractClassifier cls) throws Exception {
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, 10, new Random(1));
    return evaluation.pctCorrect();
}
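
A usage sketch for this helper (assuming data is an Instances object whose class index is already set); unlike the previous example, the value returned here is a percentage in [0, 100]:

// hypothetical usage; assumes the class index of data is set
double rate = CrossValidation.crossValidationrate(data, new weka.classifiers.trees.J48());
System.out.println("10-fold CV accuracy: " + rate + "%");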