Example usage for weka.classifiers Evaluation Evaluation

Introduction

This page collects example usages of the weka.classifiers.Evaluation constructor Evaluation(Instances data), drawn from open-source projects. The constructor initializes the evaluation from the structure of the supplied dataset (its class attribute and priors), ready for subsequent calls such as evaluateModel or crossValidateModel.

Prototype

public Evaluation(Instances data) throws Exception 
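
A minimal, self-contained sketch of the constructor in use. The ARFF file name, the J48 classifier, and the fold/seed settings are illustrative assumptions, not taken from the examples below.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationDemo {
    public static void main(String[] args) throws Exception {
        // load a dataset and declare the last attribute as the class (illustrative file name)
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // the constructor from the prototype: initializes the evaluation for this dataset's structure and priors
        Evaluation eval = new Evaluation(data);

        // 10-fold cross-validation of a J48 tree, then print the summary statistics
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}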

Usage

From source file:adams.flow.transformer.WekaTestSetEvaluator.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances test;
    Evaluation eval;
    weka.classifiers.Classifier cls;
    CallableSource gs;
    Token output;

    result = null;
    test = null;

    try {
        // get test set
        test = null;
        gs = new CallableSource();
        gs.setCallableName(m_Testset);
        gs.setParent(getParent());
        gs.setUp();
        gs.execute();
        output = gs.output();
        if (output != null)
            test = (Instances) output.getPayload();
        else
            result = "No test set available!";
        gs.wrapUp();

        // evaluate classifier
        if (result == null) {
            if (m_InputToken.getPayload() instanceof weka.classifiers.Classifier)
                cls = (weka.classifiers.Classifier) m_InputToken.getPayload();
            else
                cls = (weka.classifiers.Classifier) ((WekaModelContainer) m_InputToken.getPayload())
                        .getValue(WekaModelContainer.VALUE_MODEL);
            initOutputBuffer();
            m_Output.setHeader(test);
            eval = new Evaluation(test);
            eval.setDiscardPredictions(m_DiscardPredictions);
            eval.evaluateModel(cls, test, m_Output);

            // broadcast result
            if (m_Output instanceof Null) {
                m_OutputToken = new Token(new WekaEvaluationContainer(eval, cls));
            } else {
                if (m_AlwaysUseContainer)
                    m_OutputToken = new Token(
                            new WekaEvaluationContainer(eval, cls, m_Output.getBuffer().toString()));
                else
                    m_OutputToken = new Token(m_Output.getBuffer().toString());
            }
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to evaluate: ", e);
    }

    if (m_OutputToken != null) {
        if (m_OutputToken.getPayload() instanceof WekaEvaluationContainer) {
            if (test != null)
                ((WekaEvaluationContainer) m_OutputToken.getPayload())
                        .setValue(WekaEvaluationContainer.VALUE_TESTDATA, test);
        }
        updateProvenance(m_OutputToken);
    }

    return result;
}

From source file:adams.flow.transformer.WekaTrainTestSetEvaluator.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances train;
    Instances test;
    weka.classifiers.Classifier cls;
    Evaluation eval;
    WekaTrainTestSetContainer cont;

    result = null;
    test = null;

    try {
        // cross-validate classifier
        cls = getClassifierInstance();
        if (cls == null)
            throw new IllegalStateException("Classifier '" + getClassifier() + "' not found!");

        cont = (WekaTrainTestSetContainer) m_InputToken.getPayload();
        train = (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TRAIN);
        test = (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TEST);
        cls.buildClassifier(train);
        initOutputBuffer();
        m_Output.setHeader(train);
        eval = new Evaluation(train);
        eval.setDiscardPredictions(m_DiscardPredictions);
        eval.evaluateModel(cls, test, m_Output);

        // broadcast result
        if (m_Output instanceof Null) {
            m_OutputToken = new Token(new WekaEvaluationContainer(eval, cls));
        } else {
            if (m_AlwaysUseContainer)
                m_OutputToken = new Token(
                        new WekaEvaluationContainer(eval, cls, m_Output.getBuffer().toString()));
            else
                m_OutputToken = new Token(m_Output.getBuffer().toString());
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to evaluate: ", e);
    }

    if (m_OutputToken != null) {
        if (m_OutputToken.getPayload() instanceof WekaEvaluationContainer) {
            if (test != null)
                ((WekaEvaluationContainer) m_OutputToken.getPayload())
                        .setValue(WekaEvaluationContainer.VALUE_TESTDATA, test);
        }
        updateProvenance(m_OutputToken);
    }

    return result;
}

From source file:adams.multiprocess.WekaCrossValidationExecution.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
public String execute() {
    MessageCollection result;
    Evaluation eval;
    AggregateEvaluations evalAgg;
    int folds;
    CrossValidationFoldGenerator generator;
    JobList<WekaCrossValidationJob> list;
    WekaCrossValidationJob job;
    WekaTrainTestSetContainer cont;
    int i;
    int current;
    int[] indices;
    Instances train;
    Instances test;
    Classifier cls;

    result = new MessageCollection();
    indices = null;
    m_Evaluation = null;
    m_Evaluations = null;

    try {
        // evaluate classifier
        if (m_Classifier == null)
            throw new IllegalStateException("Classifier '" + getClassifier() + "' not found!");
        if (isLoggingEnabled())
            getLogger().info(OptionUtils.getCommandLine(m_Classifier));

        m_ActualNumThreads = Performance.determineNumThreads(m_NumThreads);

        generator = (CrossValidationFoldGenerator) OptionUtils.shallowCopy(m_Generator);
        generator.setData(m_Data);
        generator.setNumFolds(m_Folds);
        generator.setSeed(m_Seed);
        generator.setStratify(true);
        generator.setUseViews(m_UseViews);
        generator.initializeIterator();
        folds = generator.getActualNumFolds();
        if ((m_ActualNumThreads == 1) && !m_SeparateFolds) {
            initOutputBuffer();
            if (m_Output != null) {
                m_Output.setHeader(m_Data);
                m_Output.printHeader();
            }
            eval = new Evaluation(m_Data);
            eval.setDiscardPredictions(m_DiscardPredictions);
            current = 0;
            while (generator.hasNext()) {
                if (isStopped())
                    break;
                if (m_StatusMessageHandler != null)
                    m_StatusMessageHandler.showStatus("Fold " + current + "/" + folds + ": '"
                            + m_Data.relationName() + "' using " + OptionUtils.getCommandLine(m_Classifier));
                cont = generator.next();
                train = (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TRAIN);
                test = (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TEST);
                cls = (Classifier) OptionUtils.shallowCopy(m_Classifier);
                cls.buildClassifier(train);
                eval.setPriors(train);
                eval.evaluateModel(cls, test, m_Output);
                current++;
            }
            if (m_Output != null)
                m_Output.printFooter();
            if (!isStopped())
                m_Evaluation = eval;
        } else {
            if (m_DiscardPredictions)
                throw new IllegalStateException(
                        "Cannot discard predictions in parallel mode, as they are used for aggregating the statistics!");
            if (m_JobRunnerSetup == null)
                m_JobRunner = new LocalJobRunner<WekaCrossValidationJob>();
            else
                m_JobRunner = m_JobRunnerSetup.newInstance();
            if (m_JobRunner instanceof ThreadLimiter)
                ((ThreadLimiter) m_JobRunner).setNumThreads(m_NumThreads);
            list = new JobList<>();
            while (generator.hasNext()) {
                cont = generator.next();
                job = new WekaCrossValidationJob((Classifier) OptionUtils.shallowCopy(m_Classifier),
                        (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TRAIN),
                        (Instances) cont.getValue(WekaTrainTestSetContainer.VALUE_TEST),
                        (Integer) cont.getValue(WekaTrainTestSetContainer.VALUE_FOLD_NUMBER),
                        m_DiscardPredictions, m_StatusMessageHandler);
                list.add(job);
            }
            m_JobRunner.add(list);
            m_JobRunner.start();
            m_JobRunner.stop();
            // aggregate data
            if (!isStopped()) {
                evalAgg = new AggregateEvaluations();
                m_Evaluations = new Evaluation[m_JobRunner.getJobs().size()];
                for (i = 0; i < m_JobRunner.getJobs().size(); i++) {
                    job = (WekaCrossValidationJob) m_JobRunner.getJobs().get(i);
                    if (job.getEvaluation() == null) {
                        result.add("Fold #" + (i + 1) + " failed to evaluate"
                                + (job.hasExecutionError() ? job.getExecutionError() : "?"));
                        break;
                    }
                    evalAgg.add(job.getEvaluation());
                    m_Evaluations[i] = job.getEvaluation();
                    job.cleanUp();
                }
                m_Evaluation = evalAgg.aggregated();
                if (m_Evaluation == null) {
                    if (evalAgg.hasLastError())
                        result.add(evalAgg.getLastError());
                    else
                        result.add("Failed to aggregate evaluations!");
                }
            }
            list.cleanUp();
            m_JobRunner.cleanUp();
            m_JobRunner = null;
        }

        if (!m_DiscardPredictions)
            indices = generator.crossValidationIndices();
    } catch (Exception e) {
        result.add(Utils.handleException(this, "Failed to cross-validate classifier: ", e));
    }

    m_OriginalIndices = indices;

    if (result.isEmpty())
        return null;
    else
        return result.toString();
}

From source file:adams.multiprocess.WekaCrossValidationJob.java

License:Open Source License

/**
 * Does the actual execution of the job.
 *
 * @throws Exception if fails to execute job
 */
@Override
protected void process() throws Exception {
    if (m_StatusMessageHandler != null)
        m_StatusMessageHandler.showStatus("Fold " + m_Fold + " - start: '" + m_Train.relationName() + "' using "
                + Shortening.shortenEnd(OptionUtils.getCommandLine(m_Classifier), 100));
    try {
        m_Classifier.buildClassifier(m_Train);
        m_Evaluation = new Evaluation(m_Train);
        m_Evaluation.setDiscardPredictions(m_DiscardPredictions);
        m_Evaluation.evaluateModel(m_Classifier, m_Test);
    } catch (Exception e) {
        if (m_StatusMessageHandler != null)
            m_StatusMessageHandler.showStatus("Fold " + m_Fold + " - error: '" + m_Train.relationName()
                    + "' using " + Shortening.shortenEnd(OptionUtils.getCommandLine(m_Classifier), 100) + "\n"
                    + Utils.throwableToString(e));
        throw (e);
    }
    if (m_StatusMessageHandler != null)
        m_StatusMessageHandler.showStatus("Fold " + m_Fold + " - end: '" + m_Train.relationName() + "' using "
                + Shortening.shortenEnd(OptionUtils.getCommandLine(m_Classifier), 100));
}

From source file:adams.opt.cso.AbstractClassifierBasedSimpleCatSwarmOptimization.java

License:Open Source License

/**
 * Evaluates the classifier on the dataset and returns the metric.
 *
 * @param cls      the classifier to evaluate
 * @param data      the data to use for evaluation
 * @param folds      the number of folds to use
 * @param seed      the seed for the randomization
 * @return         the metric
 * @throws Exception   if the evaluation fails
 */
protected double evaluateClassifier(Classifier cls, Instances data, int folds, int seed) throws Exception {
    Evaluation evaluation;

    evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, folds, new Random(seed));

    return getMeasure().extract(evaluation, true);
}

From source file:adams.opt.optimise.genetic.fitnessfunctions.AttributeSelection.java

License:Open Source License

public double evaluate(OptData opd) {
    init();
    int cnt = 0;
    int[] weights = getWeights(opd);
    Instances newInstances = new Instances(getInstances());
    for (int i = 0; i < getInstances().numInstances(); i++) {
        Instance in = newInstances.instance(i);
        cnt = 0;
        for (int a = 0; a < getInstances().numAttributes(); a++) {
            if (a == getInstances().classIndex())
                continue;
            if (weights[cnt++] == 0) {
                in.setValue(a, 0);
            } else {
                in.setValue(a, in.value(a));
            }
        }
    }
    Classifier newClassifier = null;

    try {
        newClassifier = (Classifier) OptionUtils.shallowCopy(getClassifier());
        // evaluate classifier on data
        Evaluation evaluation = new Evaluation(newInstances);
        evaluation.crossValidateModel(newClassifier, newInstances, getFolds(),
                new Random(getCrossValidationSeed()));

        // obtain measure
        double measure = 0;
        if (getMeasure() == Measure.ACC)
            measure = evaluation.pctCorrect();
        else if (getMeasure() == Measure.CC)
            measure = evaluation.correlationCoefficient();
        else if (getMeasure() == Measure.MAE)
            measure = evaluation.meanAbsoluteError();
        else if (getMeasure() == Measure.RAE)
            measure = evaluation.relativeAbsoluteError();
        else if (getMeasure() == Measure.RMSE)
            measure = evaluation.rootMeanSquaredError();
        else if (getMeasure() == Measure.RRSE)
            measure = evaluation.rootRelativeSquaredError();
        else
            throw new IllegalStateException("Unhandled measure '" + getMeasure() + "'!");
        measure = getMeasure().adjust(measure);

        return (measure);
        // process fitness

    } catch (Exception e) {
        getLogger().log(Level.SEVERE, "Error evaluating", e);
    }

    return 0;
}

From source file:algoritmogeneticocluster.Cromossomo.java

private void classifica() {
    //SMO classifier = new SMO();
    //HyperPipes classifier = new HyperPipes();
    IBk classifier = new IBk(5);
    BufferedReader datafile = readDataFile(inId + ".arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        //this.fitness = eval.pctCorrect();
        //fitness = new BigDecimal(fitness).setScale(2, RoundingMode.HALF_UP).doubleValue(); // rounding to two decimal places
        pctAcerto = eval.pctCorrect();
        pctAcerto = new BigDecimal(pctAcerto).setScale(2, RoundingMode.HALF_UP).doubleValue();
        microAverage = getMicroAverage(eval, data);
        microAverage = new BigDecimal(microAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();
        macroAverage = getMacroAverage(eval, data);
        macroAverage = new BigDecimal(macroAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();

    } catch (Exception ex) {
        System.out.println("Erro ao tentar fazer a classificacao");
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

    switch (metodoFitness) {
    case 1:
        fitness = pctAcerto;
        break;
    case 2:
        fitness = microAverage;
        break;
    case 3:
        fitness = macroAverage;
        break;
    default:
        break;
    }

}

From source file:algoritmogeneticocluster.NewClass.java

public static Evaluation classify(Classifier model, Instances trainingSet, Instances testingSet)
        throws Exception {
    Evaluation evaluation = new Evaluation(trainingSet);

    model.buildClassifier(trainingSet);
    evaluation.evaluateModel(model, testingSet);

    return evaluation;
}

From source file:algoritmogeneticocluster.WekaSimulation.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    //        classifier.buildClassifier(trainset);

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());

    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:ann.ANN.java

public static void crossValidation(Classifier model, Instances data) {
    try {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(model, data, 10, new Random(1));
        System.out.println("================================");
        System.out.println("========Cross Validation========");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        System.out.println(ex.toString());
    }
}