Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.

Usage

From source file:neuralnetwork.NeuralNetwork.java

/**
 * Loads the iris data set, splits it into train/test partitions,
 * standardizes both, then trains and evaluates a NeuralNet classifier.
 *
 * @param args the command line arguments (unused)
 * @throws java.lang.Exception if loading, filtering, training or
 *         evaluation fails
 */
public static void main(String[] args) throws Exception {

    // Load the data set; note the hard-coded, machine-specific CSV path.
    ConverterUtils.DataSource source;
    source = new ConverterUtils.DataSource("C:\\Users\\Harvey\\Documents\\iris.csv");
    Instances data = source.getDataSet();

    // Default to the last attribute as the class if none is set.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Shuffle with a fixed seed so the split is reproducible.
    data.randomize(new Debug.Random(1));

    // Removing 70% leaves 30% of the data as the training partition;
    // inverting the selection yields the other 70% as the test partition.
    RemovePercentage trainFilter = new RemovePercentage();
    trainFilter.setPercentage(70);
    trainFilter.setInputFormat(data);
    Instances train = Filter.useFilter(data, trainFilter);

    trainFilter.setInvertSelection(true);
    trainFilter.setInputFormat(data);
    Instances test = Filter.useFilter(data, trainFilter);

    // Standardize both partitions using statistics from the training set.
    Standardize filter = new Standardize();
    filter.setInputFormat(train);

    // BUG FIX: the original swapped the partitions here, standardizing
    // the test set into newTrain and the training set into newTest.
    Instances newTrain = Filter.useFilter(train, filter);
    Instances newTest = Filter.useFilter(test, filter);

    // Train on the training partition, evaluate on the held-out test set.
    Classifier nNet = new NeuralNet();
    nNet.buildClassifier(newTrain);
    Evaluation eval = new Evaluation(newTest);
    eval.evaluateModel(nNet, newTest);
    System.out.println(eval.toSummaryString("\nResults\n-------------\n", false));
}

From source file:Neural_Network.NuralN.java

/**
 * Evaluates the trained network against the test set.
 *
 * @return a two-element array: index 0 holds the number of correctly
 *         classified instances, index 1 the number of incorrectly
 *         classified instances; both remain 0 if the network has not
 *         been trained or if evaluation fails
 */
public int[] testNet() {

    System.out.println();
    int[] results = new int[2];
    if (!trained) {
        // Typo fix: original message read "netowrk".
        System.out.println("Neural network is not trained....");
    } else {
        try {
            loadTestData();
            Evaluation tempEvaluator = new Evaluation(testSet);
            tempEvaluator.evaluateModel(nN, testSet);

            // correct()/incorrect() return doubles; truncate to counts.
            results[0] = (int) tempEvaluator.correct();
            results[1] = (int) tempEvaluator.incorrect();
            tested = true;
            // Test completed.

        } catch (IOException e) {
            // Test file missing or unreadable; report and fall through
            // so the zeroed results array is returned.
            System.out.println(e.toString());
        } catch (Exception e) {
            System.err.println(e.toString());
        }
    }
    return results;
}

From source file:newclassifier.NewClassifier.java

/**
 * Splits the data field into a train/test partition by percentage,
 * trains the classifier on the first part and evaluates it on the
 * remainder, printing a summary of the results.
 *
 * <p>NOTE(review): this replaces the {@code data} field with the test
 * partition as a side effect — confirm callers expect that.
 *
 * @param percent percentage of instances (0-100) used for training
 * @throws Exception if building or evaluating the classifier fails
 */
public void percentSplit(float percent) throws Exception {
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, testSize);
    cls.buildClassifier(train);

    data = new Instances(test);
    // Fixed: removed a stray empty statement after this declaration.
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(cls, data);
    // Report the results instead of silently discarding the evaluation,
    // consistent with givenTestSet() in this class.
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:newclassifier.NewClassifier.java

/**
 * Builds the classifier on the full training data held in this object
 * and evaluates it against an external test set loaded from disk,
 * printing the summary, per-class details and confusion matrix.
 *
 * @param path location of the test data file (any format Weka can read)
 * @throws Exception if loading, training or evaluation fails
 */
public void givenTestSet(String path) throws Exception {
    // Load the test instances; the class is taken to be the last attribute.
    Instances testData = DataSource.read(path);
    testData.setClassIndex(testData.numAttributes() - 1);

    // Fit the classifier on the complete training data.
    cls.buildClassifier(data);

    // Evaluate on the external test set and print the standard reports.
    Evaluation evaluation = new Evaluation(data);
    evaluation.evaluateModel(cls, testData);
    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
    System.out.println(evaluation.toClassDetailsString());
    System.out.println(evaluation.toMatrixString());
}

From source file:nl.detoren.ijc.neural.Voorspeller.java

License:Open Source License

/**
 * Builds the MLP on the given data and then evaluates it on that same
 * training data. Because train and test sets coincide, this is a
 * resubstitution estimate and the reported accuracy is optimistic.
 *
 * @param data training instances; the class attribute must already be set
 * @return the populated Evaluation object
 * @throws Exception if building or evaluating the classifier fails
 */
private Evaluation evaluateTrainingData(Instances data) throws Exception {
    mlp.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(mlp, data);
    // Log the full evaluation summary at INFO level.
    logger.log(Level.INFO, eval.toSummaryString(true));
    return eval;
}

From source file:org.conqat.engine.commons.machine_learning.BaseWekaClassifier.java

License:Apache License

/**
 * Evaluates the classifier on the full data set produced by the data set
 * creator and returns the evaluation object. Use this method for
 * debugging purposes to get information about precision, recall, etc.
 *
 * <p>Note: despite what earlier documentation claimed, this does NOT
 * perform cross validation — the classifier is evaluated on the same
 * data set it was presumably trained on, so the figures are optimistic.
 *
 * @return the populated evaluation object
 * @throws Exception if the data set cannot be obtained or the
 *         classifier fails during evaluation
 */
public Evaluation debugEvaluateClassifierOnce() throws Exception {
    // Redundant "throws IOException" removed: it is subsumed by Exception.
    Instances data = wekaDataSetCreator.getDataSet();
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(wekaClassifier, data);
    return eval;
}

From source file:org.dkpro.similarity.algorithms.ml.ClassifierSimilarityMeasure.java

License:Open Source License

/**
 * Creates the measure: loads the training and test instances, applies a
 * log filter to both, trains a copy of the chosen classifier on the
 * training data and prints its evaluation against the test data.
 *
 * @param classifier which Weka classifier to use
 * @param trainArff  ARFF file with the training instances
 * @param testArff   ARFF file with the test instances
 * @throws Exception if loading or filtering the instances fails
 */
public ClassifierSimilarityMeasure(WekaClassifier classifier, File trainArff, File testArff) throws Exception {
    CLASSIFIER = getClassifier(classifier);

    // Load training and test instances.
    Instances trainData = getTrainInstances(trainArff);
    test = getTestInstances(testArff);

    // Log-transform both sets with the same filter instance.
    Filter filter = new LogFilter();
    filter.setInputFormat(trainData);
    trainData = Filter.useFilter(trainData, filter);
    filter.setInputFormat(test);
    test = Filter.useFilter(test, filter);

    try {
        // Train a copy so the shared prototype classifier stays untouched.
        filteredClassifier = AbstractClassifier.makeCopy(CLASSIFIER);
        filteredClassifier.buildClassifier(trainData);

        // Evaluate against the held-out test data and report.
        Evaluation evaluation = new Evaluation(trainData);
        evaluation.evaluateModel(filteredClassifier, test);

        System.out.println(evaluation.toSummaryString());
        System.out.println(evaluation.toMatrixString());
    } catch (Exception e) {
        throw new SimilarityException(e);
    }
}

From source file:org.dkpro.similarity.algorithms.ml.LinearRegressionSimilarityMeasure.java

License:Open Source License

/**
 * Creates the measure: loads the training and test instances, optionally
 * applies a log filter to both, trains a copy of the linear-regression
 * classifier on the training data, evaluates it on the test data and
 * prints the resulting model.
 *
 * @param trainArff    ARFF file with the training instances
 * @param testArff     ARFF file with the test instances
 * @param useLogFilter whether to log-transform both data sets first
 * @throws Exception if loading or filtering the instances fails
 */
public LinearRegressionSimilarityMeasure(File trainArff, File testArff, boolean useLogFilter) throws Exception {
    // Load training and test instances.
    Instances trainData = getTrainInstances(trainArff);
    test = getTestInstances(testArff);

    // Optionally log-transform both sets with the same filter instance.
    if (useLogFilter) {
        Filter filter = new LogFilter();
        filter.setInputFormat(trainData);
        trainData = Filter.useFilter(trainData, filter);
        filter.setInputFormat(test);
        test = Filter.useFilter(test, filter);
    }

    try {
        // Train a copy so the shared prototype classifier stays untouched.
        filteredClassifier = AbstractClassifier.makeCopy(CLASSIFIER);
        filteredClassifier.buildClassifier(trainData);

        // Evaluate on the test data; only the fitted model is printed.
        Evaluation evaluation = new Evaluation(trainData);
        evaluation.evaluateModel(filteredClassifier, test);

        System.out.println(filteredClassifier.toString());
    } catch (Exception e) {
        throw new SimilarityException(e);
    }
}

From source file:org.opentox.jaqpot3.qsar.trainer.MlrRegression.java

License:Open Source License

/**
 * Trains a multiple linear regression (MLR) model on the given data set
 * and wraps the fitted Weka LinearRegression, its metadata and its
 * summary statistics in a {@link Model}.
 *
 * <p>Note: the model is evaluated on its own training set, so the
 * reported statistics are a resubstitution estimate.
 *
 * @param data the training instances retrieved from the dataset service
 * @return the trained MLR model, ready to be published
 * @throws JaqpotException if the prediction feature is missing or
 *         non-numeric, if task bookkeeping fails, or if Weka cannot
 *         build or serialize the regression model
 */
@Override
public Model train(Instances data) throws JaqpotException {
    try {

        // Task bookkeeping: record progress and push the update to the DB.
        getTask().getMeta().addComment(
                "Dataset successfully retrieved and converted " + "into a weka.core.Instances object");
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        Instances trainingSet = data;
        getTask().getMeta().addComment("The downloaded dataset is now preprocessed");
        // Same updater boilerplate again after preprocessing.
        firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        /* SET CLASS ATTRIBUTE */
        // The prediction target must exist in the dataset and be numeric
        // (MLR is a regression method).
        Attribute target = trainingSet.attribute(targetUri.toString());
        if (target == null) {
            throw new BadParameterException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        trainingSet.setClass(target);
        /* Very important: place the target feature at the end! (target = last)*/
        int numAttributes = trainingSet.numAttributes();
        int classIndex = trainingSet.classIndex();
        Instances orderedTrainingSet = null;
        // Build the attribute-name list with every non-class attribute
        // first and the class attribute appended last.
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(trainingSet.attribute(j).name());
            }
        }
        properOrder.add(trainingSet.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, trainingSet, -1);
        } catch (JaqpotException ex) {
            logger.error("Improper dataset - training will stop", ex);
            throw ex;
        }
        // Re-point the class attribute after the reordering.
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));

        /* START CONSTRUCTION OF MODEL */
        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));
        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        m.addDependentFeatures(dependentFeature);
        try {
            // Best-effort: fetch remote metadata for the dependent feature;
            // failure is logged but does not abort training.
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            Logger.getLogger(MlrRegression.class.getName()).log(Level.SEVERE, null, ex);
        }

        // Prefer a human-readable title for the dependent feature; fall
        // back to its URI when no title is available.
        Set<LiteralValue> depFeatTitles = null;
        if (dependentFeature.getMeta() != null) {
            depFeatTitles = dependentFeature.getMeta().getTitles();
        }

        String depFeatTitle = dependentFeature.getUri().toString();
        if (depFeatTitles != null) {
            depFeatTitle = depFeatTitles.iterator().next().getValueAsString();
            m.getMeta().addTitle("MLR model for " + depFeatTitle)
                    .addDescription("MLR model for the prediction of " + depFeatTitle + " (uri: "
                            + dependentFeature.getUri() + " ).");
        } else {
            m.getMeta().addTitle("MLR model for the prediction of the feature with URI " + depFeatTitle)
                    .addComment("No name was found for the feature " + depFeatTitle);
        }

        /*
         * COMPILE THE LIST OF INDEPENDENT FEATURES with the exact order in which
         * these appear in the Instances object (training set).
         */
        m.setIndependentFeatures(independentFeatures);

        /* CREATE PREDICTED FEATURE AND POST IT TO REMOTE SERVER */
        String predictionFeatureUri = null;
        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Predicted " + depFeatTitle + " by MLR model", datasetUri, featureService);
        m.addPredictedFeatures(predictedFeature);
        predictionFeatureUri = predictedFeature.getUri().toString();

        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");

        // Third round of task bookkeeping before the actual training.
        firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        /* ACTUAL TRAINING OF THE MODEL USING WEKA */
        LinearRegression linreg = new LinearRegression();
        // -S 1: attribute-selection method; -C: do not eliminate colinear
        // attributes. NOTE(review): verify these match the intended
        // LinearRegression configuration.
        String[] linRegOptions = { "-S", "1", "-C" };

        try {
            linreg.setOptions(linRegOptions);
            linreg.buildClassifier(orderedTrainingSet);

        } catch (final Exception ex) {// illegal options or could not build the classifier!
            String message = "MLR Model could not be trained";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        }

        try {
            // evaluate classifier and print some statistics
            // (evaluation on the training set itself — resubstitution).
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(linreg, orderedTrainingSet);
            String stats = eval.toSummaryString("\nResults\n======\n", false);

            ActualModel am = new ActualModel(linreg);
            am.setStatistics(stats);
            m.setActualModel(am);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {// illegal options or could not build the classifier!
            String message = "MLR Model could not be trained";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        }

        m.getMeta().addPublisher("OpenTox").addComment("This is a Multiple Linear Regression Model");

        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());

        return m;
    } catch (QSARException ex) {
        String message = "QSAR Exception: cannot train MLR model";
        logger.error(message, ex);
        throw new JaqpotException(message, ex);
    }
}

From source file:org.opentox.jaqpot3.qsar.trainer.PLSTrainer.java

License:Open Source License

/**
 * Trains a PLS (partial least squares) filter/classifier pair on the
 * given data set and wraps it, together with its parameters and summary
 * statistics, in a {@link Model}.
 *
 * <p>Note: the classifier is evaluated on its own training set, so the
 * reported statistics are a resubstitution estimate.
 *
 * @param data the training instances; must contain the target attribute
 * @return the trained PLS model with parameters and predicted features
 * @throws JaqpotException if the model cannot be built or serialized
 */
@Override
public Model train(Instances data) throws JaqpotException {
    Model model = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));

    // The target attribute becomes the class attribute.
    data.setClass(data.attribute(targetUri.toString()));

    // Ensure the target URI is present in the independent-feature list
    // (PLS keeps the target among the inputs — see note at the bottom).
    Boolean targetURIIncluded = false;
    for (Feature tempFeature : independentFeatures) {
        if (StringUtils.equals(tempFeature.getUri().toString(), targetUri.toString())) {
            targetURIIncluded = true;
            break;
        }
    }
    if (!targetURIIncluded) {
        independentFeatures.add(new Feature(targetUri));
    }
    model.setIndependentFeatures(independentFeatures);

    /*
     * Train the PLS filter
     */
    PLSFilter pls = new PLSFilter();
    try {
        pls.setInputFormat(data);
        // -C: number of components, -A: algorithm, -P: preprocessing,
        // -U: whether to update the class attribute.
        pls.setOptions(new String[] { "-C", Integer.toString(numComponents), "-A", pls_algorithm, "-P",
                preprocessing, "-U", doUpdateClass });
        // NOTE(review): the filtered result is discarded; presumably this
        // call only serves to initialize the filter's internal state
        // before it is handed to the classifier — confirm.
        PLSFilter.useFilter(data, pls);
    } catch (Exception ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
    }

    PLSModel actualModel = new PLSModel(pls);
    try {

        // Build the PLS classifier around the trained filter.
        PLSClassifier cls = new PLSClassifier();
        cls.setFilter(pls);
        cls.buildClassifier(data);

        // evaluate classifier and print some statistics
        // (evaluation on the training data itself — resubstitution).
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(cls, data);
        String stats = eval.toSummaryString("", false);

        ActualModel am = new ActualModel(actualModel);
        am.setStatistics(stats);

        model.setActualModel(am);
    } catch (NotSerializableException ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    } catch (Exception ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    }

    model.setDataset(datasetUri);
    model.setAlgorithm(Algorithms.plsFilter());
    model.getMeta().addTitle("PLS Model for " + datasetUri);

    // Record all training parameters on the model; parameter URIs are
    // randomized to be unique.
    Set<Parameter> parameters = new HashSet<Parameter>();
    Parameter targetPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "target", new LiteralValue(targetUri.toString(), XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter nComponentsPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "numComponents", new LiteralValue(numComponents, XSDDatatype.XSDpositiveInteger))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter preprocessingPrm = new Parameter(
            Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()), "preprocessing",
            new LiteralValue(preprocessing, XSDDatatype.XSDstring)).setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter algorithmPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "algorithm", new LiteralValue(pls_algorithm, XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter doUpdatePrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "doUpdateClass", new LiteralValue(doUpdateClass, XSDDatatype.XSDboolean))
                    .setScope(Parameter.ParameterScope.OPTIONAL);

    parameters.add(targetPrm);
    parameters.add(nComponentsPrm);
    parameters.add(preprocessingPrm);
    parameters.add(doUpdatePrm);
    parameters.add(algorithmPrm);
    model.setParameters(parameters);

    // Publish one predicted feature per PLS component.
    for (int i = 0; i < numComponents; i++) {
        Feature f = publishFeature(model, "", "PLS-" + i, datasetUri, featureService);
        model.addPredictedFeatures(f);
    }

    //save the instances being predicted to abstract trainer for calculating DoA
    predictedInstances = data;
    //in pls target is not excluded

    return model;
}