Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.
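As a minimal sketch (the ARFF file names below are placeholders), the static form takes the option array directly and returns the evaluation results as a String. Most of the examples on this page instead use the instance method evaluateModel(Classifier, Instances) on an Evaluation object built from training data.

import weka.classifiers.Evaluation;
import weka.classifiers.functions.SMO;

public class EvaluateModelSketch {
    public static void main(String[] args) throws Exception {
        // -t names the training ARFF, -T the test ARFF; both paths are placeholders.
        String[] options = { "-t", "train.arff", "-T", "test.arff", "-i" };
        // Builds the classifier, evaluates it, and returns the textual results.
        System.out.println(Evaluation.evaluateModel(new SMO(), options));
    }
}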

Usage

From source file:smo2.SMO.java

License:Open Source License

/**
 * Main method for testing this class.
 */
public static void main(String[] argv) {

    // Classifier scheme;
    SMO scheme;

    try {
        scheme = new SMO();

        // String m_inpStr =
        // "-C 1.0 -E 1.0 -G 0.01 -A 250007 -L 0.0010 -P 1.0E-12 -M -N 0 -V 10 -W 1 -t ../EXPERIMENT/filtered/2new/plasm-new-infogain50-dataset.arff -T ./plasm-overlap50-dataset.arff -p 0";
        String m_inpStr = "-C 1.0 -E 1.0 -G 0.01 -A 250007 -L 0.0010 -P 1.0E-12 -M -N 0 -V 10 -W 1 -i -t ./plasm-train184newinfo21-dataset.arff -T ./plasm-gametinfo21-dataset.arff";
        // String m_inpStr =
        // "-C 1.0 -E 1.0 -G 0.01 -A 250007 -L 0.0010 -P 1.0E-12 -M -N 0 -R -V 10 -W 1 -t ../EXPERIMENT/filtered/2class/plasm-infogainranker-t60.arff -d ./smo.model";
        // String m_inpStr = "-T ./1test.arff -l ./smo.model";
        String[] m_res_inpStr = m_inpStr.split("\\s");
        System.out.println(Evaluation.evaluateModel(scheme, m_res_inpStr));

        m_res_inpStr = m_inpStr.split("\\s");
        String m_trainFileName = Utils.getOption('t', m_res_inpStr);
        String m_testFileName = Utils.getOption('T', m_res_inpStr);

        BufferedReader m_trainReader = null;
        BufferedReader m_testReader = null;

        Instances m_train = null;
        Instances m_test = null;

        if (m_trainFileName.length() != 0) {
            System.out.println("trainfile = " + m_trainFileName);
            m_trainReader = new BufferedReader(new FileReader(m_trainFileName));
            m_train = new Instances(m_trainReader);
            m_train.setClassIndex(m_train.numAttributes() - 1);
        }

        if (m_testFileName.length() != 0) {
            System.out.println("testfile = " + m_testFileName);
            m_testReader = new BufferedReader(new FileReader(m_testFileName));
            m_test = new Instances(m_testReader);
            m_test.setClassIndex(m_test.numAttributes() - 1);
        }

        // if(m_trainFileName.length() != 0)
        // scheme.mygenstat(m_train);

        System.out.println("#################");

        if (m_testFileName.length() != 0)
            scheme.mygenstat(m_test);

        // System.out.println(Evaluation.evaluateModel(scheme, argv));

    } catch (Exception e) {
        e.printStackTrace();
        System.err.println(e.getMessage());
    }
}
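
A note on the option handling above: split("\\s") happens to work for this option string, but weka.core.Utils.splitOptions respects quoting and is the usual way to tokenize Weka option strings. A small sketch with placeholder option values and ARFF paths:

    // Sketch only; the option values and ARFF paths are placeholders.
    String inpStr = "-C 1.0 -L 0.0010 -i -t ./train.arff -T ./test.arff";
    String[] options = weka.core.Utils.splitOptions(inpStr);
    System.out.println(Evaluation.evaluateModel(new SMO(), options));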

From source file:SpamDetector.SpamDetector.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, Exception {
    ArrayList<ArrayList<String>> notSpam = processCSV("notspam.csv");
    ArrayList<ArrayList<String>> spam = processCSV("spam.csv");

    // Try generating attributes & data
    FeatureExtraction fe = new FeatureExtraction();
    fe.generateArff(spam, notSpam);

    // Try CART
    BufferedReader br = new BufferedReader(new FileReader("data.arff"));

    ArffReader arff = new ArffReader(br);
    Instances data = arff.getData();
    data.setClassIndex(data.numAttributes() - 1);

    SimpleCart tree = new SimpleCart();
    tree.buildClassifier(data);
    System.out.println(tree.toString());

    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString("\n\n\n\nResults\n======\n", false));
    // Use a fresh Evaluation object for cross-validation so the training-set
    // statistics above are not mixed into the 10-fold results.
    Evaluation cvEval = new Evaluation(data);
    cvEval.crossValidateModel(tree, data, 10, new Random());
    System.out.println(cvEval.toSummaryString("\n\n\n\n10-Fold\n======\n", false));

}

From source file:statistics.BinaryStatisticsEvaluator.java

@Override
public double[][] getConfusionMatrix(Instances Training_Instances, Instances Testing_Instances,
        String classifier) {

    Classifier cModel = null;
    if ("NB".equals(classifier)) {
        cModel = (Classifier) new NaiveBayes();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("DT".equals(classifier)) {
        cModel = (Classifier) new J48();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("SVM".equals(classifier)) {
        cModel = (Classifier) new SMO();

        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("KNN".equals(classifier)) {
        cModel = (Classifier) new IBk();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    //Test the model
    Evaluation eTest;
    try {
        eTest = new Evaluation(Training_Instances);
        eTest.evaluateModel(cModel, Testing_Instances);
        //Print the result
        String strSummary = eTest.toSummaryString();
        System.out.println(strSummary);
        String strSummary1 = eTest.toMatrixString();
        System.out.println(strSummary1);
        String strSummary2 = eTest.toClassDetailsString();
        System.out.println(strSummary2);

        //Get the confusion matrix
        double[][] cmMatrix = eTest.confusionMatrix();
        return cmMatrix;
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

From source file:Statistics.WekaFunctions.java

public void buildLRcls() {
    try {
        System.out.println("building classifier");
        model.setOptions(weka.core.Utils.splitOptions("-S 0 -D")); // set options
        model.buildClassifier(trainParameters);

        // evaluate classifier and print some statistics 
        Evaluation eval = new Evaluation(trainParameters);
        eval.evaluateModel(model, trainParameters);
        System.out.println(eval.toSummaryString());

    } catch (Exception ex) {
        Logger.getLogger(WekaFunctions.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:textmining.TextMining.java

private static String setOptions(Classifier classifier, Instances instances, String[] options)
        throws Exception {
    classifier.setOptions(options);
    classifier.buildClassifier(instances);
    Evaluation eval = new Evaluation(instances);
    eval.crossValidateModel(classifier, instances, 5, new Random(1));
    // Note: this call adds training-set statistics on top of the
    // cross-validation results already accumulated in the same Evaluation object.
    eval.evaluateModel(classifier, instances);
    String resume = eval.toSummaryString();
    return eval.toMatrixString(resume);
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get training error (from loaded data).
 *
 * @param verbose option to display evaluation information in the log window
 * @return classifier error on the training data set.
 */
public double getTrainingError(boolean verbose) {
    if (null == this.trainHeader)
        return -1;

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(this.loadedTrainingData);
        evaluation.evaluateModel(classifier, this.loadedTrainingData);
        if (verbose)
            IJ.log(evaluation.toSummaryString("\n=== Training set evaluation ===\n", false));
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {

        e.printStackTrace();
    }

    return error;
}

From source file:Tubes.Classification.java

public Evaluation runClassifier(Classifier model, Instances training, Instances testing) {
    try {
        Evaluation eval_train = new Evaluation(training);
        model.buildClassifier(training);

        eval_train.evaluateModel(model, testing);
        return eval_train;

    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}

From source file:tubes.ml.pkg1.TubesML1.java

public void akses() throws Exception {
    Discretize filter;
    int fold = 10;
    int fold3 = 3;
    int trainNum, testNum;
    PrintWriter file = new PrintWriter("model.txt");

    /***dataset 1***/
    file.println("***DATASET 1***");
    fileReader tets = new fileReader("./src/data/iris.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    Instances data = tets.getData();
    filter = new Discretize();
    try {
        filter.setInputFormat(data);
    } catch (Exception ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }

    /*ID3*/
    Instances discreteData;
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;

    for (int i = 0; i < fold; i++) {
        try {

            Instances train = discreteData.trainCV(fold, i);
            Instances test = discreteData.testCV(fold, i);

            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /*J48*/
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;
    J48 jKT = new J48();
    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                jKT.buildClassifier(train); // build on the training fold, not the full dataset
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
            // System.out.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /*dataset 2*/
    file.println("***DATASET 2***");
    tets.setFilepath("./src/data/weather.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    data = new Instances(tets.getData());

    /*ID3*/
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;

    for (int i = 0; i < fold3; i++) {
        try {
            Instances train = discreteData.trainCV(fold3, i); // pass the number of folds, not instance counts
            Instances test = discreteData.testCV(fold3, i);

            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                //file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    System.out.println(testNum);
    file.println("Test Number");
    file.println(testNum);

    /*J48*/
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;

    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                jKT.buildClassifier(train); // build on the training fold, not the full dataset
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
            System.out.println(jKT.toString());
            file.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /*dataset 3*/
    file.println("***DATASET 3***");
    tets.setFilepath("./src/data/weather.nominal.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    data = new Instances(tets.getData());

    /*ID3*/
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;

    for (int i = 0; i < fold3; i++) {
        try {
            Instances train = discreteData.trainCV(fold, i);
            Instances test = discreteData.testCV(fold, i);

            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    System.out.println(testNum);
    file.println("Test Number");
    file.println(testNum);

    /*J48*/
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;

    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                jKT.buildClassifier(train); // build on the training fold, not the full dataset
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
            System.out.println(jKT.toString());
            file.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /*RESULT*/
    System.out.println(jKT.toString());
    file.println("RESULT");
    file.println(jKT.toString());
    file.close();
}