Example usage for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usages of weka.classifiers.Evaluation.evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Document

Evaluates a classifier with the options given in an array of strings.
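
Note that every example below uses the instance method eval.evaluateModel(classifier, instances) rather than this static overload. For reference, here is a minimal sketch of the static form, which parses Weka's standard command-line options, trains and evaluates the classifier, and returns the result statistics as a String. The J48 classifier and the ARFF path are illustrative assumptions, not taken from the examples below.

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;

public class EvaluateModelSketch {
    public static void main(String[] args) throws Exception {
        Classifier classifier = new J48();
        // -t names the training ARFF file; with no -T test file,
        // Weka cross-validates; -x sets the number of folds
        String[] options = new String[] { "-t", "data/iris.arff", "-x", "10" };
        String summary = Evaluation.evaluateModel(classifier, options);
        System.out.println(summary);
    }
}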

Usage

From source file:asap.NLPSystem.java

private void evaluateModel(boolean printEvaluation) {
    PerformanceCounters.startTimer("evaluateModel");
    System.out.println("Evaluating model...");
    AbstractClassifier abstractClassifier = (AbstractClassifier) classifier;
    try {
        // evaluate classifier and print some statistics
        Evaluation eval = new Evaluation(evaluationSet);

        evaluationPredictions = eval.evaluateModel(abstractClassifier, evaluationSet);

        if (printEvaluation) {
            System.out.println("\tstats for model:" + abstractClassifier.getClass().getName() + " "
                    + Utils.joinOptions(abstractClassifier.getOptions()));
            System.out.println(eval.toSummaryString());
        }

        evaluationPearsonsCorrelation = eval.correlationCoefficient();
        evaluated = true;
    } catch (Exception ex) {
        Logger.getLogger(NLPSystem.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("\tevaluation done.");
    PerformanceCounters.stopTimer("evaluateModel");
}

From source file:asap.PostProcess.java

private static double[] evaluateModel(AbstractClassifier cl, Instances data, boolean printEvaluation) {
    PerformanceCounters.startTimer("evaluateModel");
    System.out.println("Evaluating model...");
    double[] predictions = null;

    try {
        // evaluate classifier and print some statistics
        Evaluation eval = new Evaluation(data);

        predictions = eval.evaluateModel(cl, data);

        if (printEvaluation) {
            System.out.println(
                    "\tstats for model:" + cl.getClass().getName() + " " + Utils.joinOptions(cl.getOptions()));
            System.out.println(eval.toSummaryString());
        }
    } catch (Exception ex) {
        Logger.getLogger(PostProcess.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("\tevaluation done.");
    PerformanceCounters.stopTimer("evaluateModel");
    return predictions;
}

From source file:assign00.ExperimentShell.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    DataSource source = new DataSource(args[0]); // dataset path from the command-line arguments
    Instances dataSet = source.getDataSet();

    //Set up data
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(1));

    //determine sizes
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;

    Instances training = new Instances(dataSet, 0, trainingSize);

    Instances test = new Instances(dataSet, trainingSize, testSize);

    Standardize standardizedData = new Standardize();
    standardizedData.setInputFormat(training);

    Instances newTest = Filter.useFilter(test, standardizedData);
    Instances newTraining = Filter.useFilter(training, standardizedData);

    NeuralNetworkClassifier NWC = new NeuralNetworkClassifier();
    NWC.buildClassifier(newTraining);

    Evaluation eval = new Evaluation(newTraining);
    eval.evaluateModel(NWC, newTest);

    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:at.aictopic1.sentimentanalysis.machinelearning.impl.TwitterClassifer.java

public void trainModel() {
    Instances trainingData = loadTrainingData();

    System.out.println("Class attribute: " + trainingData.classAttribute().toString());

    // Partition dataset into training and test sets
    RemovePercentage filter = new RemovePercentage();

    filter.setPercentage(10);

    Instances testData = null;

    // Split into training and test data
    try {
        // RemovePercentage outputs the dataset with the given percentage
        // removed, so the inverted selection yields the 10% test split
        filter.setInvertSelection(true);
        filter.setInputFormat(trainingData);
        testData = Filter.useFilter(trainingData, filter);

        // keep the remaining 90% for training, so the test instances
        // are not also used to build the model
        filter.setInvertSelection(false);
        filter.setInputFormat(trainingData);
        trainingData = Filter.useFilter(trainingData, filter);
    } catch (Exception ex) {
        System.out.println("Error splitting data: " + ex.toString());
    }

    // Train the classifier
    Classifier model = (Classifier) new NaiveBayes();

    try {
        model.buildClassifier(trainingData);
    } catch (Exception ex) {
        System.out.println("Error training model: " + ex.toString());
    }

    try {
        // Serialize the trained model to file (training must happen first,
        // otherwise an untrained model would be written out)
        weka.core.SerializationHelper.write(modelDir + algorithm + ".model", model);
    } catch (Exception ex) {
        Logger.getLogger(TwitterClassifer.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Set the local model
    this.trainedModel = model;

    try {
        // Evaluate model
        Evaluation test = new Evaluation(trainingData);
        test.evaluateModel(model, testData);

        System.out.println(test.toSummaryString());

    } catch (Exception ex) {
        //Logger.getLogger(Trainer.class.getName()).log(Level.SEVERE, null, ex);
        System.out.println("Error evaluating model: " + ex.toString());
    }
}

From source file:au.edu.usyd.it.yangpy.sampling.BPSO.java

License:Open Source License

/**
 * This method evaluates a classifier with
 * the sampled data and the internal test data.
 *
 * @param c   classifier
 * @param train   sampled set
 * @param test   internal test set
 * @return   evaluation results
 */
public double classify(Classifier c, Instances train, Instances test) {
    double AUC = 0;
    double FM = 0;
    double GM = 0;

    try {
        c.buildClassifier(train);

        // evaluate classifier
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(c, test);

        AUC = eval.areaUnderROC(1);
        FM = eval.fMeasure(1);
        // geometric mean of the per-class true positive rates
        GM = Math.sqrt(eval.truePositiveRate(0) * eval.truePositiveRate(1));

    } catch (IOException ioe) {
        ioe.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }

    double mean = (AUC + FM + GM) / 3;

    if (verbose) {
        System.out.print("AUC: " + dec.format(AUC) + " ");
        System.out.print("FM: " + dec.format(FM) + " ");
        System.out.println("GM: " + dec.format(GM));
        System.out.println("      \\       |       /  ");
        System.out.println("        Mean: " + dec.format(mean));
    }

    return mean;
}

From source file:au.edu.usyd.it.yangpy.snp.Ensemble.java

License:Open Source License

public double classify(Classifier c, int cId) throws Exception {

    // train the classifier with training data
    c.buildClassifier(train);

    // get the predicted value and class distribution for each test instance
    for (int i = 0; i < test.numInstances(); i++) {
        predictDistribution[cId][i] = c.distributionForInstance(test.instance(i));
        predictValue[cId][i] = c.classifyInstance(test.instance(i));
    }

    // also record the AUC of this classifier on the test set
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(c, test);
    return eval.areaUnderROC(1) * 100;
}

From source file:boostingPL.boosting.AdaBoost.java

License:Open Source License

private double weightError(int t) throws Exception {
    // weighted error rate of classifier t over all training instances
    Evaluation eval = new Evaluation(insts);
    eval.evaluateModel(classifiers[t], insts);
    return eval.errorRate();
}

From source file:br.unicamp.ic.recod.gpsi.gp.gpsiJGAPRoiFitnessFunction.java

@Override
protected double evaluate(IGPProgram igpp) {

    double mean_accuracy = 0.0;
    Object[] noargs = new Object[0];

    gpsiRoiBandCombiner roiBandCombinator = new gpsiRoiBandCombiner(new gpsiJGAPVoxelCombiner(super.b, igpp));
    // TODO: The ROI descriptors must combine the images first
    //roiBandCombinator.combineEntity(this.dataset.getTrainingEntities());

    gpsiMLDataset mlDataset = new gpsiMLDataset(this.descriptor);
    try {
        mlDataset.loadWholeDataset(this.dataset, true);
    } catch (Exception ex) {
        Logger.getLogger(gpsiJGAPRoiFitnessFunction.class.getName()).log(Level.SEVERE, null, ex);
    }

    int dimensionality = mlDataset.getDimensionality();
    int n_classes = mlDataset.getTrainingEntities().keySet().size();
    int n_entities = mlDataset.getNumberOfTrainingEntities();
    ArrayList<Byte> listOfClasses = new ArrayList<>(mlDataset.getTrainingEntities().keySet());

    Attribute[] attributes = new Attribute[dimensionality];
    FastVector fvClassVal = new FastVector(n_classes);

    int i, j;
    for (i = 0; i < dimensionality; i++)
        attributes[i] = new Attribute("f" + Integer.toString(i));
    for (i = 0; i < n_classes; i++)
        fvClassVal.addElement(Integer.toString(listOfClasses.get(i)));

    Attribute classes = new Attribute("class", fvClassVal);

    FastVector fvWekaAttributes = new FastVector(dimensionality + 1);

    for (i = 0; i < dimensionality; i++)
        fvWekaAttributes.addElement(attributes[i]);
    fvWekaAttributes.addElement(classes);

    Instances instances = new Instances("Rel", fvWekaAttributes, n_entities);
    instances.setClassIndex(dimensionality);

    Instance iExample;
    for (byte label : mlDataset.getTrainingEntities().keySet()) {
        for (double[] featureVector : mlDataset.getTrainingEntities().get(label)) {
            iExample = new Instance(dimensionality + 1);
            for (j = 0; j < dimensionality; j++)
                iExample.setValue(j, featureVector[j]);
            iExample.setValue(dimensionality, label);
            instances.add(iExample);
        }
    }

    int folds = 5;
    Random rand = new Random();
    Instances randData = new Instances(instances);
    randData.randomize(rand);

    Instances trainingSet, testingSet;
    Classifier cModel;
    Evaluation eTest;
    try {
        for (i = 0; i < folds; i++) {
            cModel = (Classifier) new SimpleLogistic();
            trainingSet = randData.trainCV(folds, i);
            testingSet = randData.testCV(folds, i);

            cModel.buildClassifier(trainingSet);

            eTest = new Evaluation(trainingSet);
            eTest.evaluateModel(cModel, testingSet);

            mean_accuracy += eTest.pctCorrect();

        }
    } catch (Exception ex) {
        Logger.getLogger(gpsiJGAPRoiFitnessFunction.class.getName()).log(Level.SEVERE, null, ex);
    }

    // average across folds and rescale pctCorrect from percent to [0, 1]
    mean_accuracy /= (folds * 100);

    return mean_accuracy;

}

From source file:ca.uottawa.balie.WekaLearner.java

License:Open Source License

/**
 * Test the learned model.
 * 
 * @return A summary string of the performance of the classifier
 */
public String TestModel() {
    if (DEBUG)
        DebugInfo.Out("Testing on " + m_TestingSet.numInstances() + " instances");
    Evaluation evaluation = null;
    try {
        evaluation = new Evaluation(m_TrainingSet);
        evaluation.evaluateModel(m_Scheme, m_TestingSet);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        // return early: evaluation is null here and toSummaryString() would throw
        return "Evaluation failed: " + e.getMessage();
    }
    String strSummary = evaluation.toSummaryString();
    strSummary += "\n\nConfusion Matrix: \n\n";
    m_ConfusionMatrix = evaluation.confusionMatrix();
    for (int i = 0; i != m_ConfusionMatrix.length; ++i) {
        for (int j = 0; j != m_ConfusionMatrix[i].length; ++j) {
            strSummary += String.valueOf(m_ConfusionMatrix[i][j]) + "\t";
        }
        strSummary += "\n";
    }
    return strSummary;
}

From source file:cezeri.evaluater.FactoryEvaluation.java

public static Evaluation performCrossValidate(Classifier model, Instances datax, int folds, boolean show_text,
        boolean show_plot, TFigureAttribute attr) {
    Random rand = new Random(1);
    Instances randData = new Instances(datax);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }
    Evaluation eval = null;
    try {
        // perform cross-validation
        eval = new Evaluation(randData);
        // accumulators for predicted and observed class values across all folds
        double[] simulated = new double[0];
        double[] observed = new double[0];
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances validation = randData.testCV(folds, n);
            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);

            // accumulate this fold's predictions and observed class values
            simulated = FactoryUtils.concatenate(simulated, eval.evaluateModel(clsCopy, validation));
            observed = FactoryUtils.concatenate(observed,
                    validation.attributeToDoubleArray(validation.classIndex()));
        }

        if (show_plot) {
            double[][] d = new double[2][simulated.length];
            d[0] = observed;
            d[1] = simulated;
            CMatrix f1 = CMatrix.getInstance(d);
            attr.figureCaption = "overall performance";
            f1.transpose().plot(attr);
        }
        if (show_text) {
            // output evaluation
            System.out.println();
            System.out.println("=== Setup for Overall Cross Validation===");
            System.out.println(
                    "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
            System.out.println("Dataset: " + randData.relationName());
            System.out.println("Folds: " + folds);
            System.out.println("Seed: " + 1);
            System.out.println();
            System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        }
    } catch (Exception ex) {
        Logger.getLogger(FactoryEvaluation.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}