Example usage for weka.classifiers Evaluation errorRate

List of usage examples for weka.classifiers Evaluation errorRate

Introduction

On this page you can find example usage for weka.classifiers Evaluation errorRate.

Prototype

public final double errorRate() 

Document

Returns the estimated error rate or the root mean squared error (if the class is numeric).
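
Before the full examples below, here is a minimal, self-contained sketch of a typical errorRate() call. The dataset path ("iris.arff") and the J48 classifier are placeholder assumptions for illustration; they are not taken from the source files listed on this page.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ErrorRateExample {
    public static void main(String[] args) throws Exception {
        // Load a nominal-class dataset; "iris.arff" is a placeholder path
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation of a J48 decision tree
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // For a nominal class, errorRate() is the fraction of misclassified
        // instances; for a numeric class it is the root mean squared error.
        System.out.println("Error rate: " + eval.errorRate());
    }
}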

Usage

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:wekimini.learning.ModelEvaluator.java

public void evaluateAll(final List<Path> paths, final boolean isTraining, final int numFolds,
        PropertyChangeListener listener) {
    final List<Instances> data = new LinkedList<>();
    for (Path p : paths) {
        Instances i = w.getSupervisedLearningManager().getTrainingDataForPath(p, false);
        data.add(i);
    }

    setResults(new String[paths.size()]);
    if (evalWorker != null && evalWorker.getState() != SwingWorker.StateValue.DONE) {
        return;
    }

    evalWorker = new SwingWorker<Integer, Void>() {

        //trainingWorker.
        @Override
        public Integer doInBackground() {
            // train(); //TODO: Add status updates
            int progress = 0;
            //setProgress(progress);
            int numToEvaluate = 0;
            for (Path p : paths) {
                if (p.canBuild()) {
                    numToEvaluate++;
                }
            }

            int numEvaluated = 0;
            int numErr = 0;
            setEvalStatus(new EvaluationStatus(numToEvaluate, numEvaluated, numErr, false));

            for (int i = 0; i < paths.size(); i++) {
                Path p = paths.get(i);
                if (p.canBuild()) {
                    try {
                        System.out.println("Evaluating with " + numFolds);
                        //EVALUATE HERE: TODO 
                        Instances instances = w.getSupervisedLearningManager().getTrainingDataForPath(p, false);
                        Evaluation eval = new Evaluation(instances);
                        Classifier c = ((LearningModelBuilder) p.getModelBuilder()).getClassifier();
                        if (!isTraining) {
                            Random r = new Random();
                            eval.crossValidateModel(c, instances, numFolds, r);
                        } else {
                            Classifier c2 = Classifier.makeCopy(c);
                            c2.buildClassifier(instances);
                            eval.evaluateModel(c2, instances);
                        }
                        String result;
                        if (p.getModelBuilder() instanceof ClassificationModelBuilder) {
                            result = dFormat.format(eval.pctCorrect()) + "%"; // won't work for NN
                        } else {
                            result = dFormat.format(eval.errorRate()) + " (RMS)";
                        }
                        if (!isTraining) {
                            KadenzeLogging.getLogger().crossValidationComputed(w, i, numFolds, result);
                        } else {
                            KadenzeLogging.getLogger().trainingAccuracyComputed(w, i, result);
                        }
                        setResults(i, result);
                        finishedModel(i, result);
                        numEvaluated++;

                        if (isCancelled()) {
                            cancelMe(p);
                            setResults(i, "Cancelled");
                            return 0;
                        }

                    } catch (InterruptedException ex) {
                        cancelMe(p);
                        setResults(i, "Cancelled");
                        return 0; //Not sure this will be called...
                    } catch (Exception ex) {
                        numErr++;
                        Util.showPrettyErrorPane(null, "Error encountered during evaluation "
                                + p.getCurrentModelName() + ": " + ex.getMessage());
                        logger.log(Level.SEVERE, ex.getMessage());
                    }
                    setEvalStatus(new EvaluationStatus(numToEvaluate, numEvaluated, numErr, false));
                } else {
                    logger.log(Level.WARNING, "Could not evaluate path");
                }

            }
            wasCancelled = false;
            hadError = evaluationStatus.numErrorsEncountered > 0;
            return 0;
        }

        @Override
        public void done() {
            if (isCancelled()) {
                EvaluationStatus t = new EvaluationStatus(evaluationStatus.numToEvaluate,
                        evaluationStatus.numEvaluated, evaluationStatus.numErrorsEncountered, true);
                setEvalStatus(t);
            }
            finished();
        }
    };
    evalWorker.addPropertyChangeListener(listener);
    evalWorker.execute();
}