Example usage for weka.classifiers Evaluation toClassDetailsString

List of usage examples for weka.classifiers Evaluation toClassDetailsString

Introduction

On this page you can find example usage for weka.classifiers Evaluation toClassDetailsString.

Prototype

public String toClassDetailsString() throws Exception 

Source Link

Document

Generates a breakdown of the accuracy for each class (with default title), incorporating various information-retrieval statistics, such as true/false positive rate, precision/recall/F-Measure.

Usage

From source file:PointAnalyser.Main.java

/**
 * Trains a C4.5 (J48) decision tree on the class-level {@code data} set and
 * prints a 10-fold cross-validation summary, confusion matrix and per-class
 * details to standard output.
 *
 * @throws Exception if filtering, option parsing, training or evaluation fails
 */
public static void trainC45Classifier() throws Exception {

    // Set the class attribute if the data format does not provide this
    // information (e.g. CSV; the XRFF format stores the class attribute itself).
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // J48 requires a nominal class; convert numeric attributes to nominal.
    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build the C4.5 classifier.
    // FIX: setOptions() expects each option token as its own array element;
    // the original passed "-C 0.25 -M 2 -U" as a single element, which the
    // Weka option parser rejects as one unknown option. Note also that -U
    // (unpruned) and -C (pruning confidence) are mutually exclusive in J48,
    // so only the unpruned settings are kept, matching the original
    // "unpruned tree" intent.
    String[] options = { "-U", "-M", "2" }; // unpruned tree, min 2 instances per leaf
    tree = new J48(); // new instance of tree
    tree.setOptions(options); // set the options
    tree.buildClassifier(data); // build classifier

    // Evaluate via 10-fold cross-validation with a fixed seed for
    // reproducible results.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 10, new Random(1));

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());

}

From source file:PointAnalyser.Main.java

/**
 * Trains a k-nearest-neighbour (IBk, k = 7) classifier on the class-level
 * {@code data} set and prints a 10-fold cross-validation summary, confusion
 * matrix and per-class details to standard output.
 *
 * @throws Exception if filtering, training or evaluation fails
 */
public static void trainNNClassifier() throws Exception {

    // If the input format carries no class information (unlike e.g. XRFF),
    // fall back to using the last attribute as the class.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // IBk needs a nominal class; convert numeric attributes to nominal first.
    NumericToNominal toNominal = new NumericToNominal();
    toNominal.setInputFormat(data);
    data = Filter.useFilter(data, toNominal);

    // Build the k-NN classifier: 7 neighbours, KD-tree search for fast
    // neighbour lookups, and no training-window limit.
    nn = new IBk();
    nn.setKNN(7);
    nn.setNearestNeighbourSearchAlgorithm(new weka.core.neighboursearch.KDTree(data));
    nn.setWindowSize(0);
    nn.buildClassifier(data);

    // Evaluate via 10-fold cross-validation with a fixed seed for
    // reproducible results.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(nn, data, 10, new Random(1));

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());

}

From source file:sentinets.Prediction.java

License:Open Source License

/**
 * Incrementally updates the wrapped SGD classifier with newly loaded,
 * previously unlabeled instances, evaluating before, during (per CV fold)
 * and after the update, and saves the updated model to disk.
 *
 * @param inputFile path passed to {@code setInstances} to load the new data
 * @param metrics   out-parameter; an entry {f-measure(class 0),
 *                  f-measure(class 1), weighted f-measure} is appended after
 *                  the initial evaluation, after each fold, and after the
 *                  final evaluation
 * @return a human-readable report of all evaluations, or an error note when
 *         there are no new instances
 */
public String updateModel(String inputFile, ArrayList<Double[]> metrics) {
    String output = "";
    this.setInstances(inputFile);
    // The stored classifier is a FilteredClassifier wrapping an SGD model;
    // unwrap both so the filter can be applied manually and the SGD model
    // can be updated instance-by-instance.
    FilteredClassifier fcls = (FilteredClassifier) this.cls;
    SGD cls = (SGD) fcls.getClassifier();
    Filter filter = fcls.getFilter();
    Instances insAll;
    try {
        // Apply the classifier's own filter to the unlabeled pool so the
        // instances match the format the SGD model was trained on.
        insAll = Filter.useFilter(this.unlabled, filter);
        if (insAll.size() > 0) {
            Random rand = new Random(10);
            // Fall back to 2 folds when there are fewer than 10 instances.
            int folds = 10 > insAll.size() ? 2 : 10;
            Instances randData = new Instances(insAll);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal()) {
                randData.stratify(folds);
            }
            // NOTE(review): the same Evaluation object is reused for every
            // evaluateModel call below, so later reports accumulate
            // statistics from earlier evaluations — confirm this is intended.
            Evaluation eval = new Evaluation(randData);
            eval.evaluateModel(cls, insAll);
            System.out.println("Initial Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            output += "\n====" + "Initial Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            System.out.println("Cross Validated Evaluation");
            output += "\n====" + "Cross Validated Evaluation" + "====\n";
            for (int n = 0; n < folds; n++) {
                Instances train = randData.trainCV(folds, n);
                Instances test = randData.testCV(folds, n);

                // Incrementally update the SGD model with this fold's
                // training instances, then evaluate on the held-out fold.
                for (int i = 0; i < train.numInstances(); i++) {
                    cls.updateClassifier(train.instance(i));
                }

                eval.evaluateModel(cls, test);
                System.out.println("Cross Validated Evaluation fold: " + n);
                output += "\n====" + "Cross Validated Evaluation fold (" + n + ")====\n";
                System.out.println(eval.toSummaryString());
                System.out.println(eval.toClassDetailsString());
                output += "\n" + eval.toSummaryString();
                output += "\n" + eval.toClassDetailsString();
                metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            }
            // Final pass: update on every instance, then evaluate on the
            // same data (training-set evaluation).
            for (int i = 0; i < insAll.numInstances(); i++) {
                cls.updateClassifier(insAll.instance(i));
            }
            eval.evaluateModel(cls, insAll);
            System.out.println("Final Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            output += "\n====" + "Final Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            // Re-wrap the updated SGD model and persist it.
            fcls.setClassifier(cls);
            String modelFilePath = outputDir + "/" + Utils.getOutDir(Utils.OutDirIndex.MODELS)
                    + "/updatedClassifier.model";
            weka.core.SerializationHelper.write(modelFilePath, fcls);
            output += "\n" + "Updated Model saved at: " + modelFilePath;
        } else {
            output += "No new instances for training the model.";
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return output;
}

From source file:sentinets.TrainModel.java

License:Open Source License

/**
 * Cross-validates the given classifier on this object's instance set,
 * prints the full set of evaluation reports, and saves the model.
 *
 * @param c    the classifier to evaluate and save
 * @param name a label used in the printed header and as the saved model name
 */
public void trainModel(Classifier c, String name) {
    try {
        // 10-fold cross-validation with a fixed seed for reproducibility.
        Evaluation evaluation = new Evaluation(ins);
        evaluation.crossValidateModel(c, ins, 10, new Random(1));
        System.out.println("****Results of " + name + "****");
        System.out.println(evaluation.toSummaryString());
        System.out.println(evaluation.toClassDetailsString());
        System.out.println(evaluation.toCumulativeMarginDistributionString());
        System.out.println(evaluation.toMatrixString());
        System.out.println("*********************");
        TrainModel.saveModel(c, name);
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:statistics.BinaryStatisticsEvaluator.java

@Override
/**
 * Trains the requested classifier on the training set, evaluates it on the
 * test set, prints the summary, confusion-matrix and per-class reports, and
 * returns the confusion matrix.
 *
 * @param Training_Instances data used to build the model
 * @param Testing_Instances  data used to evaluate the model
 * @param classifier         one of "NB", "DT", "SVM" or "KNN"
 * @return the confusion matrix, or {@code null} if the classifier name is
 *         unknown or training/evaluation failed
 */
@Override
public double[][] getConfusionMatrix(Instances Training_Instances, Instances Testing_Instances,
        String classifier) {
    // FIX: the original had four copy-pasted build/try/catch branches and,
    // for an unknown classifier name, fell through to evaluateModel with a
    // null model (NPE). Map the name first and fail fast when unknown.
    Classifier cModel = createClassifier(classifier);
    if (cModel == null) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE,
                "Unknown classifier: {0}", classifier);
        return null;
    }
    try {
        cModel.buildClassifier(Training_Instances);
        // Test the model.
        Evaluation eTest = new Evaluation(Training_Instances);
        eTest.evaluateModel(cModel, Testing_Instances);
        // Print the standard evaluation reports.
        System.out.println(eTest.toSummaryString());
        System.out.println(eTest.toMatrixString());
        System.out.println(eTest.toClassDetailsString());
        // Return the confusion matrix.
        return eTest.confusionMatrix();
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

/**
 * Returns a fresh Weka classifier for the given short name, or {@code null}
 * when the name is not recognized.
 */
private static Classifier createClassifier(String name) {
    if ("NB".equals(name)) {
        return new NaiveBayes();
    } else if ("DT".equals(name)) {
        return new J48();
    } else if ("SVM".equals(name)) {
        return new SMO();
    } else if ("KNN".equals(name)) {
        return new IBk();
    }
    return null;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get the test error of the current classifier on a specific image and its
 * binary labels. Features are generated per slice with the same settings as
 * the current classifier, class values are assigned from the label image,
 * and the classifier's error rate on the assembled test set is returned.
 *
 * @param image input image
 * @param labels binary labels (pixel &gt; 0 is mapped to the white class)
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error rate, or -1 if evaluation failed
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    // Process the stack slice by slice, accumulating all instances into
    // a single test set.
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        // Keep only the features the current classifier was trained on.
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        // One instance per pixel, in row-major order (y outer, x inner);
        // positive label pixels get the white class, the rest the black class.
        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        // Append this slice's instances to the accumulated test set.
        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    // -1 signals that the evaluation failed.
    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get the test error of the current classifier on a specific image and its
 * binary labels, creating features by applying the given filter images
 * rather than the classifier's own feature settings. Class values are
 * assigned from the label image and the classifier's error rate on the
 * assembled test set is returned.
 *
 * @param image input image
 * @param labels binary labels (pixel &gt; 0 is mapped to the white class)
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error rate, or -1 if evaluation failed
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    // Process the stack slice by slice, accumulating all instances into
    // a single test set.
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        // One instance per pixel, in row-major order (y outer, x inner);
        // positive label pixels get the white class, the rest the black class.
        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        // Append this slice's instances to the accumulated test set.
        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    // -1 signals that the evaluation failed.
    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:Tubes.Classification.java

/**
 * Loads the training and test ARFF files, vectorizes the training text with
 * StringToWordVector, then evaluates a Naive Bayes and a LibSVM classifier,
 * printing summary, per-class and confusion-matrix reports for each.
 *
 * @param args unused
 * @throws Exception if file loading, filtering, training or evaluation fails
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {

    StringToWordVector filter = new StringToWordVector();

    File training = new File(classTrain);
    File testing = new File(classTest);

    // FIX: the original never closed either reader (resource leak);
    // try-with-resources guarantees both are closed.
    Instances dataTrain;
    Instances dataTest;
    try (BufferedReader readTrain = new BufferedReader(new FileReader(training));
            BufferedReader readTest = new BufferedReader(new FileReader(testing))) {
        dataTrain = new Instances(readTrain);
        dataTest = new Instances(readTest);
    }

    // Convert the string attributes of the training data to word vectors.
    filter.setInputFormat(dataTrain);
    dataTrain = Filter.useFilter(dataTrain, filter);

    // Use the last attribute as the class in both sets.
    dataTrain.setClassIndex(dataTrain.numAttributes() - 1);
    dataTest.setClassIndex(dataTest.numAttributes() - 1);

    Classification classify = new Classification();
    NaiveBayes bayes = new NaiveBayes();
    LibSVM libSVM = new LibSVM();

    System.out.println("==========================Naive Bayes Evaluation===========================");
    Evaluation eval = classify.runClassifier(bayes, dataTrain, dataTest);
    System.out.println(eval.toSummaryString() + "\n");
    System.out.println(eval.toClassDetailsString() + "\n");
    System.out.println(eval.toMatrixString() + "\n");
    System.out.println("===========================================================================");

    System.out.println("==============================LibSVM================================");
    // Linear-kernel C-SVC with input normalization and shrinking heuristics.
    libSVM.setCacheSize(512); // MB
    libSVM.setNormalize(true);
    libSVM.setShrinking(true);
    libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_LINEAR, LibSVM.TAGS_KERNELTYPE));
    libSVM.setDegree(3);
    libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
    Evaluation eval4 = classify.runClassifier(libSVM, dataTrain, dataTest);
    System.out.println(eval4.toSummaryString() + "\n");
    System.out.println(eval4.toClassDetailsString() + "\n");
    System.out.println(eval4.toMatrixString() + "\n");
    System.out.println("===========================================================================");
}

From source file:tubes2ai.DriverNB.java

/**
 * Loads a dataset, discretizes it, trains an AIJKNaiveBayes classifier on
 * it, and prints a training-set evaluation (summary, per-class details and
 * confusion matrix) to standard output.
 *
 * @param data path or URL of the dataset, passed to Weka's DataSource
 * @throws Exception if loading, filtering, training or evaluation fails
 */
public static void run(String data) throws Exception {

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(data);
    Instances dataTrain = source.getDataSet();
    // The first attribute is the class for this dataset.
    dataTrain.setClassIndex(0);
    // FIX: removed the unused local `ArffSaver saver` and the dead
    // commented-out classification-demo code from the original.

    // Naive Bayes here expects nominal attributes; discretize first.
    Discretize discretize = new Discretize();
    discretize.setInputFormat(dataTrain);
    Instances dataTrainDisc = Filter.useFilter(dataTrain, discretize);

    AIJKNaiveBayes NB = new AIJKNaiveBayes();
    NB.buildClassifier(dataTrainDisc);

    // Evaluate on the training data itself (optimistic estimate).
    Evaluation eval = new Evaluation(dataTrainDisc);
    eval.evaluateModel(NB, dataTrainDisc);

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:tucil2ai.Tucil2AI.java

/**
 * Prints the standard evaluation reports (summary with a "Results" header,
 * per-class details, and confusion matrix) to standard output.
 *
 * @param E the evaluation whose reports are printed
 * @throws Exception if any report generator fails
 */
public static void printEval(Evaluation E) throws Exception {
    String summary = E.toSummaryString("\nResults\n======\n", false);
    String classDetails = E.toClassDetailsString();
    String confusionMatrix = E.toMatrixString();
    System.out.println(summary);
    System.out.println(classDetails);
    System.out.println(confusionMatrix);
}