Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

In this page you can find the example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.

Usage

From source file:com.deafgoat.ml.prognosticator.AppClassifier.java

License:Apache License

/**
 * Gets details on classified instances according to supplied attribute
 * /*  www.j a v a  2 s. c o m*/
 * @param attribute
 *            The focal attribute for error analysis
 * @throws Exception
 *             If model can not be evaluated
 */
public void errorAnalysis(String attribute) throws Exception {
    readModel();
    _logger.info("Performing error analysis");
    Evaluation eval = new Evaluation(_testInstances);
    eval.evaluateModel(_cls, _testInstances);
    _predictionList = new HashMap<String, List<Prediction>>();
    String predicted, actual = null;
    double[] distribution = null;
    _predictionList.put(_config._truePositives, new ArrayList<Prediction>());
    _predictionList.put(_config._trueNegatives, new ArrayList<Prediction>());
    _predictionList.put(_config._falsePositives, new ArrayList<Prediction>());
    _predictionList.put(_config._falseNegatives, new ArrayList<Prediction>());
    for (int i = 0; i < _testInstances.numInstances(); i++) {
        distribution = _cls.distributionForInstance(_testInstances.instance(i));
        actual = _testInstances.classAttribute().value((int) _testInstances.instance(i).classValue());
        predicted = _testInstances.classAttribute()
                .value((int) _cls.classifyInstance(_testInstances.instance(i)));
        // 0 is negative, 1 is positive
        if (!predicted.equals(actual)) {
            if (actual.equals(_config._negativeClassValue)) {
                _predictionList.get(_config._falsePositives)
                        .add(new Prediction(i + 1, predicted, distribution, _fullData.instance(i)));
            } else if (actual.equals(_config._positiveClassValue)) {
                _predictionList.get(_config._falseNegatives)
                        .add(new Prediction(i + 1, predicted, distribution, _fullData.instance(i)));
            }
        } else if (predicted.equals(actual)) {
            if (actual.equals(_config._negativeClassValue)) {
                _predictionList.get(_config._trueNegatives)
                        .add(new Prediction(i + 1, predicted, distribution, _fullData.instance(i)));
            } else if (actual.equals(_config._positiveClassValue)) {
                _predictionList.get(_config._truePositives)
                        .add(new Prediction(i + 1, predicted, distribution, _fullData.instance(i)));
            }
        }
    }
    BufferedWriter writer = null;
    String name, prediction = null;
    for (Entry<String, List<Prediction>> entry : _predictionList.entrySet()) {
        name = entry.getKey();
        Collections.sort(_predictionList.get(name), Collections.reverseOrder());
        writer = new BufferedWriter(new FileWriter(name));
        List<Prediction> predictions = _predictionList.get(name);
        for (int count = 0; count < predictions.size(); count++) {
            if (count < _config._maxCount) {
                prediction = predictions.get(count).attributeDistribution(attribute);
                if (Double.parseDouble(prediction.split(_delimeter)[1]) >= _config._minProb) {
                    writer.write(prediction + "\n");
                }
            } else {
                break;
            }
        }
        writer.close();
    }
}

From source file:com.deafgoat.ml.prognosticator.AppClassifier.java

License:Apache License

/**
 * Evaluates model performance on test instances
 * /*from  w  ww  . j a  v a  2s. c  o  m*/
 * @throws Exception
 *             If model can not be evaluated.
 */
public void evaluate() throws Exception {
    readModel();
    _logger.info("Classifying with " + _config._classifier);
    Evaluation eval = new Evaluation(_testInstances);
    eval.evaluateModel(_cls, _testInstances);
    _logger.info("\n" + eval.toSummaryString());
    try {
        _logger.info("\n" + eval.toClassDetailsString());
    } catch (Exception e) {
        _logger.info("Can not create class details" + _config._classifier);
    }
    try {
        _logger.info("\n" + _eval.toMatrixString());
    } catch (Exception e) {
        _logger.info(
                "Can not create confusion matrix for " + _config._classifier + " using " + _config._classValue);
    }
}

From source file:com.edwardraff.WekaMNIST.java

License:Open Source License

/**
 * Builds the given Weka model on the training set, then evaluates it on the
 * test set, printing wall-clock timings (seconds) and the resulting error
 * rate. System.gc() is invoked before each phase to reduce GC noise in the
 * timings.
 */
private static void evalModel(Classifier wekaModel, Instances train, Instances test) throws Exception {
    System.gc();
    final long trainStart = System.currentTimeMillis();
    wekaModel.buildClassifier(train);
    final long trainEnd = System.currentTimeMillis();
    System.out.println("\tTraining took: " + (trainEnd - trainStart) / 1000.0);

    System.gc();
    final Evaluation eval = new Evaluation(train);
    final long evalStart = System.currentTimeMillis();
    eval.evaluateModel(wekaModel, test);
    final long evalEnd = System.currentTimeMillis();
    System.out.println("\tEvaluation took " + (evalEnd - evalStart) / 1000.0 + " seconds with an error rate "
            + eval.errorRate());

    System.gc();
}

From source file:com.github.r351574nc3.amex.assignment2.App.java

License:Open Source License

/**
 * Tests/evaluates the trained model. This method assumes that {@link #train()} was previously called to assign a {@link LinearRegression}
 * classifier. If it wasn't, an exception will be thrown.
 *
 * @throws Exception if train wasn't called prior.
 */
public void test() throws Exception {
    // Guard clause: evaluation is meaningless without a trained classifier.
    if (getClassifier() == null) {
        throw new RuntimeException("Make sure train was run prior to this method call");
    }

    final Evaluation evaluation = new Evaluation(getTrained());
    evaluation.evaluateModel(getClassifier(), getTest());

    // Log the full summary first, then the headline accuracy figure.
    info("%s", evaluation.toSummaryString("Results\n\n", false));
    info("Percent of correctly classified instances: %s", evaluation.pctCorrect());
}

From source file:com.mycompany.id3classifier.ID3Shell.java

/**
 * Runs 10-fold cross-validation of the custom ID3 classifier on the lenses
 * data set and prints the aggregated evaluation summary.
 */
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("lensesData.csv");
    Instances dataSet = source.getDataSet();

    // Discretize numeric attributes — ID3 operates on nominal values.
    Discretize filter = new Discretize();
    filter.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, filter);

    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);

    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); //It's over 9000!!

    int folds = 10;
    // Perform cross-validation: a single Evaluation object accumulates
    // statistics across all folds.
    Evaluation eval = new Evaluation(dataSet);
    for (int n = 0; n < folds; n++) {
        // Removed unused trainingSize/testSize locals that were recomputed
        // every fold and never read (dead code).
        Instances trainingData = dataSet.trainCV(folds, n);
        Instances testData = dataSet.testCV(folds, n);

        ID3Classifier classifier = new ID3Classifier();
        // Id3 classifier = new Id3();
        classifier.buildClassifier(trainingData);

        eval.evaluateModel(classifier, testData);
    }
    System.out.println(eval.toSummaryString("\nResults:\n", false));
}

From source file:com.mycompany.knnclassifier.kNNShell.java

/**
 * Trains the custom kNN classifier (k = 3) on a 70/30 split of the car data
 * set and prints the evaluation summary for the held-out 30%.
 */
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("carData.csv");
    Instances dataSet = source.getDataSet();

    // Standardize numeric attributes to zero mean / unit variance.
    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);

    // Last attribute is the class; shuffle with a fixed seed for reproducibility.
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); //It's over 9000!!

    // 70/30 train/test split.
    int numTrain = (int) Math.round(dataSet.numInstances() * .7);
    int numTest = dataSet.numInstances() - numTrain;
    Instances trainingData = new Instances(dataSet, 0, numTrain);
    Instances testData = new Instances(dataSet, numTrain, numTest);

    kNNClassifier classifier = new kNNClassifier(3);
    classifier.buildClassifier(trainingData);

    //Used to compare to Weka's built in KNN algorithm
    //Classifier classifier = new IBk(1);
    //classifier.buildClassifier(trainingData);

    Evaluation eval = new Evaluation(trainingData);
    eval.evaluateModel(classifier, testData);

    System.out.println(eval.toSummaryString("\nResults:\n", false));
}

From source file:com.mycompany.neuralnetwork.NeuralNetworkShell.java

/**
 * Trains the custom neural network classifier on a 70/30 split of the iris
 * data set and prints the evaluation summary for the held-out 30%.
 */
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("irisData.csv");
    Instances dataSet = source.getDataSet();

    // Standardize attributes, mark the last attribute as the class, and
    // shuffle with a fixed seed so runs are reproducible.
    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); //It's over 9000!!

    // 70/30 train/test split.
    int splitPoint = (int) Math.round(dataSet.numInstances() * .7);
    int holdoutSize = dataSet.numInstances() - splitPoint;
    Instances trainingData = new Instances(dataSet, 0, splitPoint);
    Instances testData = new Instances(dataSet, splitPoint, holdoutSize);

    //MultilayerPerceptron classifier = new MultilayerPerceptron();
    NeuralNetworkClassifier classifier = new NeuralNetworkClassifier(3, 20000, 0.1);
    classifier.buildClassifier(trainingData);

    Evaluation eval = new Evaluation(trainingData);
    eval.evaluateModel(classifier, testData);

    System.out.println(eval.toSummaryString("\nResults:\n", false));
}

From source file:com.reactivetechnologies.analytics.core.eval.AdaBoostM1WithBuiltClassifiers.java

License:Open Source License

/**
 * AdaBoost.M1-style weight-based boosting loop, modified so that the member
 * classifiers are NEVER trained here: each m_Classifiers[i] is assumed to be
 * pre-built and is only evaluated; the per-iteration betas and instance
 * weights are derived from its error rate on the weighted training copy.
 */
@Override
protected void buildClassifierWithWeights(Instances data) throws Exception {

    Instances training;
    double epsilon, reweight;
    Evaluation evaluation;
    int numInstances = data.numInstances();

    // Initialize data
    m_Betas = new double[m_Classifiers.length];
    m_NumIterationsPerformed = 0;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);

    // Do boostrap iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; m_NumIterationsPerformed++) {
        if (m_Debug) {
            System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
        }
        // Select instances to train the classifier on
        if (m_WeightThreshold < 100) {
            // NOTE(review): the quantile subset returned here is discarded and the
            // evaluation below always runs on the full `training` copy — confirm
            // this is intended and not a leftover from the stock AdaBoostM1 code.
            selectWeightQuantile(training, (double) m_WeightThreshold / 100);
        } else {
            // NOTE(review): this copy is constructed and immediately discarded —
            // looks like dead code inherited from the original implementation.
            new Instances(training, 0, numInstances);
        }

        /** Changed here: DO NOT Build the classifier! */
        /*if (m_Classifiers[m_NumIterationsPerformed] instanceof Randomizable)
          ((Randomizable) m_Classifiers[m_NumIterationsPerformed]).setSeed(randomInstance.nextInt());
                
        m_Classifiers[m_NumIterationsPerformed].buildClassifier(trainData);*/
        /** End change */

        // Evaluate the (pre-built) classifier on the weighted training copy.
        evaluation = new Evaluation(data);
        evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], training);
        epsilon = evaluation.errorRate();

        // Stop if error too small or error too big and ignore this model
        if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
            if (m_NumIterationsPerformed == 0) {
                m_NumIterationsPerformed = 1; // If we're the first we have to to use it
            }
            break;
        }
        // Determine the weight to assign to this model
        m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
        reweight = (1 - epsilon) / epsilon;
        if (m_Debug) {
            System.err.println("\terror rate = " + epsilon + "  beta = " + m_Betas[m_NumIterationsPerformed]);
        }

        // Update instance weights
        setWeights(training, reweight);
    }
}

From source file:com.reactivetechnologies.analytics.core.eval.AdaBoostM1WithBuiltClassifiers.java

License:Open Source License

/**
 * Resampling variant of the AdaBoost.M1 boosting loop, modified so that the
 * member classifiers are NEVER trained here: each pre-built m_Classifiers[i]
 * is only evaluated, and betas/instance weights are updated from its error
 * rate. Instance weights are first normalized to sum to 1.
 */
@Override
protected void buildClassifierUsingResampling(Instances data) throws Exception {

    Instances trainData, training;
    double epsilon, reweight, sumProbs;
    Evaluation evaluation;
    int numInstances = data.numInstances();
    int resamplingIterations = 0;

    // Initialize data
    m_Betas = new double[m_Classifiers.length];
    m_NumIterationsPerformed = 0;
    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);
    sumProbs = training.sumOfWeights();
    for (int i = 0; i < training.numInstances(); i++) {
        training.instance(i).setWeight(training.instance(i).weight() / sumProbs);
    }

    // Do boostrap iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; m_NumIterationsPerformed++) {
        if (m_Debug) {
            System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
        }

        // Select instances to train the classifier on
        if (m_WeightThreshold < 100) {
            trainData = selectWeightQuantile(training, (double) m_WeightThreshold / 100);
        } else {
            trainData = new Instances(training);
        }

        // Resample
        resamplingIterations = 0;
        // NOTE(review): with the build step removed below, `trainData` and this
        // `weights` snapshot are never consumed — the evaluation runs on the full
        // `training` copy instead. Confirm this is the intended behavior.
        double[] weights = new double[trainData.numInstances()];
        for (int i = 0; i < weights.length; i++) {
            weights[i] = trainData.instance(i).weight();
        }
        do {

            /** Changed here: DO NOT build classifier*/
            // Build and evaluate classifier
            //m_Classifiers[m_NumIterationsPerformed].buildClassifier(sample);
            /** End change */

            // Evaluate the (pre-built) classifier; retry up to 10 times while the
            // error is exactly zero (originally this re-sampled between attempts).
            evaluation = new Evaluation(data);
            evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], training);
            epsilon = evaluation.errorRate();
            resamplingIterations++;
        } while (Utils.eq(epsilon, 0) && (resamplingIterations < 10));

        // Stop if error too big or 0
        if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
            if (m_NumIterationsPerformed == 0) {
                m_NumIterationsPerformed = 1; // If we're the first we have to to use it
            }
            break;
        }

        // Determine the weight to assign to this model
        m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
        reweight = (1 - epsilon) / epsilon;
        if (m_Debug) {
            System.err.println("\terror rate = " + epsilon + "  beta = " + m_Betas[m_NumIterationsPerformed]);
        }

        // Update instance weights
        setWeights(training, reweight);
    }
}

From source file:com.sliit.rules.RuleContainer.java

/**
 * Evaluates the rule model on the training instances and returns a summary
 * map with keys "rules" (the rule set's string form), "summary" (Weka
 * evaluation summary), and "confusion_matrix". On failure the error is
 * logged and a partial (possibly empty) map is returned.
 */
public Map<String, String> evaluateModel() {

    Map<String, String> evaluationSummary = new HashMap<String, String>();
    try {
        // Last attribute is the class attribute.
        instances.setClassIndex(instances.numAttributes() - 1);
        Evaluation evaluation = new Evaluation(instances);
        evaluation.evaluateModel(ruleMoldel, instances);
        // Removed unused local `rulesList` (getRuleset() result was never read).
        String rules = ruleMoldel.toString();
        evaluationSummary.put("rules", rules);
        evaluationSummary.put("summary", evaluation.toSummaryString());
        evaluationSummary.put("confusion_matrix", evaluation.toMatrixString());
    } catch (Exception e) {

        // Pass the throwable so the stack trace is preserved; previously only
        // the localized message was logged, discarding the cause entirely.
        log.error("Error occurred:" + e.getLocalizedMessage(), e);
    }
    return evaluationSummary;
}