Example usage for weka.classifiers Evaluation kappa

List of usage examples for weka.classifiers Evaluation kappa

Introduction

This page collects example usages of the weka.classifiers.Evaluation method kappa().

Prototype

public final double kappa() 

Document

Returns the value of the kappa statistic if the class is nominal.
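
The statistic is Cohen's kappa, which corrects raw accuracy for chance agreement: kappa = (p_o - p_e) / (1 - p_e), where p_o is the observed proportion of correct predictions and p_e is the proportion expected if predictions were independent of the actual labels. A value of 1 indicates perfect agreement and 0 indicates chance-level agreement.

A minimal, self-contained sketch of obtaining the statistic; the iris.arff file name and the J48 classifier are illustrative placeholders, not taken from the examples below:

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class KappaExample {
    public static void main(String[] args) throws Exception {
        // load a dataset with a nominal class attribute (placeholder file name)
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // cross-validate a classifier, then query the kappa statistic
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println("Kappa: " + eval.kappa());
    }
}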

Usage

From source file:FlexDMThread.java

License:Open Source License

public void run() {
    try {
        //Get the data from the source

        FlexDM.getMainData.acquire();
        Instances data = dataset.getSource().getDataSet();
        FlexDM.getMainData.release();

        //Set class attribute if undefined
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        //Process hyperparameters for classifier
        String temp = "";
        for (int i = 0; i < classifier.getNumParams(); i++) {
            temp += classifier.getParameter(i).getName();
            temp += " ";
            if (classifier.getParameter(i).getValue() != null) {
                temp += classifier.getParameter(i).getValue();
                temp += " ";
            }
        }

        String[] options = weka.core.Utils.splitOptions(temp);

        //Print to console- experiment is starting
        if (temp.equals("")) { //no parameters
            temp = "results_no_parameters";
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { //parameters
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        //Create classifier, setting parameters
        weka.classifiers.Classifier x = createObject(classifier.getName());
        x.setOptions(options);
        x.buildClassifier(data);

        //Process the test selection
        String[] tempTest = dataset.getTest().split("\\s");

        //Create evaluation object for training and testing classifiers
        Evaluation eval = new Evaluation(data);
        StringBuffer predictions = new StringBuffer();

        //Train and evaluate classifier
        if (tempTest[0].equals("testset")) { //specified test file
            //Open test file, load data
            FlexDM.getTestData.acquire();
            Instances testSet = dataset.getTestFile().getDataSet();
            FlexDM.getTestData.release();

            //Set class attribute if undefined
            if (testSet.classIndex() == -1) {
                testSet.setClassIndex(testSet.numAttributes() - 1);
            }

            //Evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else if (tempTest[0].equals("xval")) { //Cross validation
            //Cross validate
            eval.crossValidateModel(x, data, Integer.parseInt(tempTest[1]), new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("leavexval")) { //Leave one out cross validation
            //Leave-one-out: use as many folds as there are instances
            eval.crossValidateModel(x, data, data.numInstances(), new Random(1), predictions, new Range(),
                    true);
        } else if (tempTest[0].equals("percent")) { //Percentage split of single data set
            //Set training and test sizes from the split fraction (e.g. 0.66)
            int trainSize = (int) Math.round(data.numInstances() * Double.parseDouble(tempTest[1]));
            int testSize = data.numInstances() - trainSize;

            //Load specified data
            Instances train = new Instances(data, 0, trainSize);
            Instances testSet = new Instances(data, trainSize, testSize);

            //Build classifier
            x.buildClassifier(train);

            //Train and evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else { //Evaluate on training data
            //Test and evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, data, array);
        }

        //create datafile for results
        String filename = dataset.getDir() + "/" + classifier.getDirName() + "/" + temp + ".txt";
        PrintWriter writer = new PrintWriter(filename, "UTF-8");

        //Print classifier, dataset, parameters info to file
        writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                + "\n PARAMETERS: " + temp);

        //Add evaluation string to file
        writer.println(eval.toSummaryString());
        //Process result options
        if (checkResults("stats")) { //Classifier statistics
            writer.println(eval.toClassDetailsString());
        }
        if (checkResults("model")) { //The model
            writer.println(x.toString());
        }
        if (checkResults("matrix")) { //Confusion matrix
            writer.println(eval.toMatrixString());
        }
        if (checkResults("entropy")) { //Entropy statistics
            //Set options req'd to get the entropy stats
            String[] opt = new String[4];
            opt[0] = "-t";
            opt[1] = dataset.getName();
            opt[2] = "-k";
            opt[3] = "-v";

            //Evaluate model
            String entropy = Evaluation.evaluateModel(x, opt);

            //Grab the relevant info from the results, print to file
            entropy = entropy.substring(entropy.indexOf("=== Stratified cross-validation ===") + 35,
                    entropy.indexOf("=== Confusion Matrix ==="));
            writer.println("=== Entropy Statistics ===");
            writer.println(entropy);
        }
        if (checkResults("predictions")) { //The models predictions
            writer.println("=== Predictions ===\n");
            if (!dataset.getTest().contains("xval")) { //print header of predictions table if req'd
                writer.println(" inst#     actual  predicted error distribution ()");
            }
            writer.println(predictions.toString()); //print predictions to file
        }

        writer.close();

        //Summary file is semaphore-controlled so concurrent threads don't interleave writes
        try { //get a permit
              //grab the summary file, write the classifier's details to it
            FlexDM.writeFile.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(summary, true));
            if (temp.equals("results_no_parameters")) { //change output based on parameters
                temp = temp.substring(8);
            }

            //write the dataset, classifier, parameters and the full set of evaluation statistics to the summary file
            p.write(dataset.getName() + ", " + classifier.getName() + ", " + temp + ", " + eval.correct() + ", "
                    + eval.incorrect() + ", " + eval.unclassified() + ", " + eval.pctCorrect() + ", "
                    + eval.pctIncorrect() + ", " + eval.pctUnclassified() + ", " + eval.kappa() + ", "
                    + eval.meanAbsoluteError() + ", " + eval.rootMeanSquaredError() + ", "
                    + eval.relativeAbsoluteError() + ", " + eval.rootRelativeSquaredError() + ", "
                    + eval.SFPriorEntropy() + ", " + eval.SFSchemeEntropy() + ", " + eval.SFEntropyGain() + ", "
                    + eval.SFMeanPriorEntropy() + ", " + eval.SFMeanSchemeEntropy() + ", "
                    + eval.SFMeanEntropyGain() + ", " + eval.KBInformation() + ", " + eval.KBMeanInformation()
                    + ", " + eval.KBRelativeInformation() + ", " + eval.weightedTruePositiveRate() + ", "
                    + eval.weightedFalsePositiveRate() + ", " + eval.weightedTrueNegativeRate() + ", "
                    + eval.weightedFalseNegativeRate() + ", " + eval.weightedPrecision() + ", "
                    + eval.weightedRecall() + ", " + eval.weightedFMeasure() + ", "
                    + eval.weightedAreaUnderROC() + "\n");
            p.close();

            //release semaphore
            FlexDM.writeFile.release();
        } catch (InterruptedException e) { //interrupted while waiting on the semaphore
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        //report that this classifier finished successfully
        if (temp.equals("no_parameters")) { //no parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { //with parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        try { //get a permit
              //grab the log file, write the classifier's details to it
            FlexDM.writeLog.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(log, true));

            Date date = new Date();
            Format formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");

            if (temp.equals("results_no_parameters")) { //change output based on parameters
                temp = temp.substring(8);
            }

            //write details to log file
            p.write(dataset.getName() + ", " + dataset.getTest() + ", \"" + dataset.getResult_string() + "\", "
                    + classifier.getName() + ", " + temp + ", " + formatter.format(date) + "\n");
            p.close();

            //release semaphore
            FlexDM.writeLog.release();
        } catch (InterruptedException e) { //interrupted while waiting on the semaphore
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        s.release();

    } catch (Exception e) {
        //an error occurred
        System.err.println("FATAL ERROR OCCURRED: " + e.toString() + "\nClassifier: " + cNum + " - "
                + classifier.getName() + " on dataset " + dataset.getName());
        s.release();
    }

}

From source file:adams.flow.core.EvaluationHelper.java

License:Open Source License

/**
 * Returns a statistical value from the evaluation object.
 *
 * @param eval   the evaluation object to get the value from
 * @param statistic   the type of value to return
 * @param classIndex   the class label index, for statistics like AUC
 * @return      the determined value, Double.NaN if not found
 * @throws Exception   if evaluation fails
 */
public static double getValue(Evaluation eval, EvaluationStatistic statistic, int classIndex) throws Exception {
    switch (statistic) {
    case NUMBER_CORRECT:
        return eval.correct();
    case NUMBER_INCORRECT:
        return eval.incorrect();
    case NUMBER_UNCLASSIFIED:
        return eval.unclassified();
    case PERCENT_CORRECT:
        return eval.pctCorrect();
    case PERCENT_INCORRECT:
        return eval.pctIncorrect();
    case PERCENT_UNCLASSIFIED:
        return eval.pctUnclassified();
    case KAPPA_STATISTIC:
        return eval.kappa();
    case MEAN_ABSOLUTE_ERROR:
        return eval.meanAbsoluteError();
    case ROOT_MEAN_SQUARED_ERROR:
        return eval.rootMeanSquaredError();
    case RELATIVE_ABSOLUTE_ERROR:
        return eval.relativeAbsoluteError();
    case ROOT_RELATIVE_SQUARED_ERROR:
        return eval.rootRelativeSquaredError();
    case CORRELATION_COEFFICIENT:
        return eval.correlationCoefficient();
    case SF_PRIOR_ENTROPY:
        return eval.SFPriorEntropy();
    case SF_SCHEME_ENTROPY:
        return eval.SFSchemeEntropy();
    case SF_ENTROPY_GAIN:
        return eval.SFEntropyGain();
    case SF_MEAN_PRIOR_ENTROPY:
        return eval.SFMeanPriorEntropy();
    case SF_MEAN_SCHEME_ENTROPY:
        return eval.SFMeanSchemeEntropy();
    case SF_MEAN_ENTROPY_GAIN:
        return eval.SFMeanEntropyGain();
    case KB_INFORMATION:
        return eval.KBInformation();
    case KB_MEAN_INFORMATION:
        return eval.KBMeanInformation();
    case KB_RELATIVE_INFORMATION:
        return eval.KBRelativeInformation();
    case TRUE_POSITIVE_RATE:
        return eval.truePositiveRate(classIndex);
    case NUM_TRUE_POSITIVES:
        return eval.numTruePositives(classIndex);
    case FALSE_POSITIVE_RATE:
        return eval.falsePositiveRate(classIndex);
    case NUM_FALSE_POSITIVES:
        return eval.numFalsePositives(classIndex);
    case TRUE_NEGATIVE_RATE:
        return eval.trueNegativeRate(classIndex);
    case NUM_TRUE_NEGATIVES:
        return eval.numTrueNegatives(classIndex);
    case FALSE_NEGATIVE_RATE:
        return eval.falseNegativeRate(classIndex);
    case NUM_FALSE_NEGATIVES:
        return eval.numFalseNegatives(classIndex);
    case IR_PRECISION:
        return eval.precision(classIndex);
    case IR_RECALL:
        return eval.recall(classIndex);
    case F_MEASURE:
        return eval.fMeasure(classIndex);
    case MATTHEWS_CORRELATION_COEFFICIENT:
        return eval.matthewsCorrelationCoefficient(classIndex);
    case AREA_UNDER_ROC:
        return eval.areaUnderROC(classIndex);
    case AREA_UNDER_PRC:
        return eval.areaUnderPRC(classIndex);
    case WEIGHTED_TRUE_POSITIVE_RATE:
        return eval.weightedTruePositiveRate();
    case WEIGHTED_FALSE_POSITIVE_RATE:
        return eval.weightedFalsePositiveRate();
    case WEIGHTED_TRUE_NEGATIVE_RATE:
        return eval.weightedTrueNegativeRate();
    case WEIGHTED_FALSE_NEGATIVE_RATE:
        return eval.weightedFalseNegativeRate();
    case WEIGHTED_IR_PRECISION:
        return eval.weightedPrecision();
    case WEIGHTED_IR_RECALL:
        return eval.weightedRecall();
    case WEIGHTED_F_MEASURE:
        return eval.weightedFMeasure();
    case WEIGHTED_MATTHEWS_CORRELATION_COEFFICIENT:
        return eval.weightedMatthewsCorrelation();
    case WEIGHTED_AREA_UNDER_ROC:
        return eval.weightedAreaUnderROC();
    case WEIGHTED_AREA_UNDER_PRC:
        return eval.weightedAreaUnderPRC();
    case UNWEIGHTED_MACRO_F_MEASURE:
        return eval.unweightedMacroFmeasure();
    case UNWEIGHTED_MICRO_F_MEASURE:
        return eval.unweightedMicroFmeasure();
    case BIAS:
        return eval.getPluginMetric(Bias.class.getName()).getStatistic(Bias.NAME);
    case RSQUARED:
        return eval.getPluginMetric(RSquared.class.getName()).getStatistic(RSquared.NAME);
    case SDR:
        return eval.getPluginMetric(SDR.class.getName()).getStatistic(SDR.NAME);
    case RPD:
        return eval.getPluginMetric(RPD.class.getName()).getStatistic(RPD.NAME);
    default:
        throw new IllegalArgumentException("Unhandled statistic field: " + statistic);
    }
}
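
A hedged usage sketch for the helper above; the populated Evaluation instance is an assumption:

// eval is assumed to be an Evaluation that has already been run against a model
double kappa = EvaluationHelper.getValue(eval, EvaluationStatistic.KAPPA_STATISTIC, 0);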

From source file:ca.uqac.florentinth.speakerauthentication.Learning.Learning.java

License:Apache License

public void trainClassifier(Classifier classifier, FileReader trainingDataset, FileOutputStream trainingModel,
        Integer crossValidationFoldNumber) throws Exception {
    Instances instances = new Instances(new BufferedReader(trainingDataset));

    switch (classifier) {
    case KNN:
        int K = (int) Math.ceil(Math.sqrt(instances.numInstances()));
        this.classifier = new IBk(K);
        break;
    case NB:
        this.classifier = new NaiveBayes();
        break;
    }

    if (instances.classIndex() == -1) {
        instances.setClassIndex(instances.numAttributes() - 1);
    }

    this.classifier.buildClassifier(instances);

    if (crossValidationFoldNumber > 0) {
        Evaluation evaluation = new Evaluation(instances);
        evaluation.crossValidateModel(this.classifier, instances, crossValidationFoldNumber, new Random(1));
        kappa = evaluation.kappa();
        fMeasure = evaluation.weightedFMeasure();
        confusionMatrix = evaluation.toMatrixString("Confusion matrix: ");
    }

    ObjectOutputStream outputStream = new ObjectOutputStream(trainingModel);
    outputStream.writeObject(this.classifier);
    outputStream.flush();
    outputStream.close();
}
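
A possible invocation of the method above; the no-argument Learning constructor and the file names are assumptions, while Classifier.KNN is the enum case handled in the switch:

Learning learning = new Learning();
try (FileReader trainingData = new FileReader("training.arff");
        FileOutputStream model = new FileOutputStream("knn.model")) {
    // with a positive fold count, kappa is populated from evaluation.kappa()
    learning.trainClassifier(Classifier.KNN, trainingData, model, 10);
}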

From source file:entity.NfoldCrossValidationManager.java

License:Open Source License

/**
 * n-fold cross-validation without noise
 *
 * @param classifier the classifier to evaluate
 * @param dataset the dataset to split into folds
 * @param folds the number of folds
 * @return the pooled evaluation statistics, including kappa
 */
public Stats crossValidate(Classifier classifier, Instances dataset, int folds) {

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }
    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN, kappa, precision,
            recall, fmeasure);
}

From source file:entity.NfoldCrossValidationManager.java

License:Open Source License

/**
 * n-fold cross-validation with noise (independent fp and fn rates)
 *
 * @param classifier the classifier to evaluate
 * @param dataset the dataset to split into folds
 * @param folds the number of folds
 * @param fpPercentage the fraction of false-positive noise to inject into each training fold
 * @param fnPercentage the fraction of false-negative noise to inject into each training fold
 * @return the pooled evaluation statistics, including kappa
 */
public Stats crossValidateWithNoise(Classifier classifier, Instances dataset, int folds,
        BigDecimal fpPercentage, BigDecimal fnPercentage) {

    // noise manager
    NoiseInjectionManager noiseInjectionManager = new NoiseInjectionManager();

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }
    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // copies instances of train set to not modify the original
        Instances noisyTrain = new Instances(train);
        // injects level of noise in the copied train set
        noiseInjectionManager.addNoiseToDataset(noisyTrain, fpPercentage, fnPercentage);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            // trains the model using a noisy train set
            clsCopy.buildClassifier(noisyTrain);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN, kappa, precision,
            recall, fmeasure);
}

From source file:entity.NfoldCrossValidationManager.java

License:Open Source License

/**
 * n-fold cross-validation with noise (combined fp and fn)
 *
 * @param classifier the classifier to evaluate
 * @param dataset the dataset to split into folds
 * @param folds the number of folds
 * @param combinedFpFnPercentage the combined fraction of fp and fn noise to inject into each training fold
 * @return the pooled evaluation statistics, including kappa
 */

public Stats crossValidateWithNoise(Classifier classifier, Instances dataset, int folds,
        BigDecimal combinedFpFnPercentage) {

    // noise manager
    NoiseInjectionManager noiseInjectionManager = new NoiseInjectionManager();

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }
    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // copies instances of train set to not modify the original
        Instances noisyTrain = new Instances(train);
        // injects level of noise in the copied train set
        noiseInjectionManager.addNoiseToDataset(noisyTrain, combinedFpFnPercentage);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            // trains the model using a noisy train set
            clsCopy.buildClassifier(noisyTrain);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN, kappa, precision,
            recall, fmeasure);
}
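
A brief sketch contrasting clean and noisy runs of the methods above; the J48 classifier, the loaded dataset and the 5% noise levels are illustrative assumptions:

// dataset is assumed to be a loaded Instances object with a nominal class
NfoldCrossValidationManager manager = new NfoldCrossValidationManager();
Stats clean = manager.crossValidate(new J48(), dataset, 10);
Stats noisy = manager.crossValidateWithNoise(new J48(), dataset, 10,
        new BigDecimal("0.05"), new BigDecimal("0.05"));
// injected label noise typically drives the kappa stored in Stats toward 0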

From source file:net.sf.jclal.evaluation.measure.SingleLabelEvaluation.java

License:Open Source License

/**
 *
 * @param evaluation The evaluation
 */
public void setEvaluation(Evaluation evaluation) {

    try {
        this.evaluation = evaluation;
        StringBuilder st = new StringBuilder();

        st.append("Iteration: ").append(getIteration()).append("\n");
        st.append("Labeled set size: ").append(getLabeledSetSize()).append("\n");
        st.append("Unlabelled set size: ").append(getUnlabeledSetSize()).append("\n");
        st.append("\t\n");

        st.append("Correctly Classified Instances: ").append(evaluation.pctCorrect()).append("\n");
        st.append("Incorrectly Classified Instances: ").append(evaluation.pctIncorrect()).append("\n");
        st.append("Kappa statistic: ").append(evaluation.kappa()).append("\n");
        st.append("Mean absolute error: ").append(evaluation.meanAbsoluteError()).append("\n");
        st.append("Root mean squared error: ").append(evaluation.rootMeanSquaredError()).append("\n");

        st.append("Relative absolute error: ").append(evaluation.relativeAbsoluteError()).append("\n");
        st.append("Root relative squared error: ").append(evaluation.rootRelativeSquaredError()).append("\n");
        st.append("Coverage of cases: ").append(evaluation.coverageOfTestCasesByPredictedRegions())
                .append("\n");
        st.append("Mean region size: ").append(evaluation.sizeOfPredictedRegions()).append("\n");

        st.append("Weighted Precision: ").append(evaluation.weightedPrecision()).append("\n");
        st.append("Weighted Recall: ").append(evaluation.weightedRecall()).append("\n");
        st.append("Weighted FMeasure: ").append(evaluation.weightedFMeasure()).append("\n");
        st.append("Weighted TruePositiveRate: ").append(evaluation.weightedTruePositiveRate()).append("\n");
        st.append("Weighted FalsePositiveRate: ").append(evaluation.weightedFalsePositiveRate()).append("\n");
        st.append("Weighted MatthewsCorrelation: ").append(evaluation.weightedMatthewsCorrelation())
                .append("\n");
        st.append("Weighted AreaUnderROC: ").append(evaluation.weightedAreaUnderROC()).append("\n");
        st.append("Weighted AreaUnderPRC: ").append(evaluation.weightedAreaUnderPRC()).append("\n");

        st.append("\t\t\n");

        loadMetrics(st.toString());

    } catch (Exception e) {
        Logger.getLogger(SingleLabelEvaluation.class.getName()).log(Level.SEVERE, null, e);
    }
}

From source file:org.openml.webapplication.fantail.dc.landmarking.J48BasedLandmarker.java

License:Open Source License

public Map<String, Double> characterize(Instances data) {

    int numFolds = m_NumFolds;

    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;
    double score7 = 0.5;
    double score8 = 0.5;
    double score9 = 0.5;

    weka.classifiers.trees.J48 cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.00001f);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);

        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score1 = eval.pctIncorrect();
        score2 = eval.weightedAreaUnderROC();

        score7 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.0001f);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score3 = eval.pctIncorrect();
        score4 = eval.weightedAreaUnderROC();

        score8 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.001f);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score5 = eval.pctIncorrect();
        score6 = eval.weightedAreaUnderROC();

        score9 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    qualities.put(ids[6], score7);
    qualities.put(ids[7], score8);
    qualities.put(ids[8], score9);
    return qualities;
}

From source file:org.openml.webapplication.fantail.dc.landmarking.RandomTreeBasedLandmarker2.java

License:Open Source License

public Map<String, Double> characterize(Instances data) {

    int seed = m_Seed;
    Random r = new Random(seed);

    int numFolds = m_NumFolds;

    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;

    weka.classifiers.trees.RandomTree cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);

    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score1 = eval.pctIncorrect();
        score2 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score3 = eval.pctIncorrect();
        score4 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score5 = eval.pctIncorrect();
        score6 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }


    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    return qualities;
}

From source file:org.openml.webapplication.fantail.dc.landmarking.REPTreeBasedLandmarker.java

License:Open Source License

public Map<String, Double> characterize(Instances data) {

    int numFolds = m_NumFolds;

    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;
    double score7 = 0.5;
    double score8 = 0.5;
    double score9 = 0.5;

    weka.classifiers.trees.REPTree cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(1);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score1 = eval.pctIncorrect();
        score2 = eval.weightedAreaUnderROC();

        score7 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(2);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score3 = eval.pctIncorrect();
        score4 = eval.weightedAreaUnderROC();

        score8 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    //
    cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(3);

    try {

        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));

        score5 = eval.pctIncorrect();
        score6 = eval.weightedAreaUnderROC();

        score9 = eval.kappa();

    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    qualities.put(ids[6], score7);
    qualities.put(ids[7], score8);
    qualities.put(ids[8], score9);
    return qualities;
}
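
A hedged sketch of how such a landmarker might be used as a meta-feature extractor; the no-argument constructor and the configured fold count are assumptions:

// the returned map includes kappa-based landmarking features under the class's ids
Map<String, Double> qualities = new REPTreeBasedLandmarker().characterize(data);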