List of usage examples for weka.classifiers.Evaluation recall
public double recall(int classIndex)
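Evaluation.recall(classIndex) returns the recall for the given class: the number of correctly classified instances of that class divided by the total number of instances of that class, i.e. TP / (TP + FN). Before the collected examples, here is a minimal, self-contained sketch of a typical call site; the file name iris.arff, the J48 classifier, and the 10-fold cross-validation are illustrative assumptions, not taken from the examples below.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class RecallExample {
    public static void main(String[] args) throws Exception {
        // Illustrative assumption: any ARFF file with a nominal class works here.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a classifier, then query the per-class recall.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        for (int i = 0; i < data.numClasses(); i++) {
            System.out.println(data.classAttribute().value(i)
                    + " recall: " + eval.recall(i));
        }
    }
}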
From source file:adams.flow.core.EvaluationHelper.java
License:Open Source License
/**
 * Returns a statistical value from the evaluation object.
 *
 * @param eval       the evaluation object to get the value from
 * @param statistic  the type of value to return
 * @param classIndex the class label index, for statistics like AUC
 * @return           the determined value, Double.NaN if not found
 * @throws Exception if evaluation fails
 */
public static double getValue(Evaluation eval, EvaluationStatistic statistic, int classIndex)
        throws Exception {
    switch (statistic) {
    case NUMBER_CORRECT: return eval.correct();
    case NUMBER_INCORRECT: return eval.incorrect();
    case NUMBER_UNCLASSIFIED: return eval.unclassified();
    case PERCENT_CORRECT: return eval.pctCorrect();
    case PERCENT_INCORRECT: return eval.pctIncorrect();
    case PERCENT_UNCLASSIFIED: return eval.pctUnclassified();
    case KAPPA_STATISTIC: return eval.kappa();
    case MEAN_ABSOLUTE_ERROR: return eval.meanAbsoluteError();
    case ROOT_MEAN_SQUARED_ERROR: return eval.rootMeanSquaredError();
    case RELATIVE_ABSOLUTE_ERROR: return eval.relativeAbsoluteError();
    case ROOT_RELATIVE_SQUARED_ERROR: return eval.rootRelativeSquaredError();
    case CORRELATION_COEFFICIENT: return eval.correlationCoefficient();
    case SF_PRIOR_ENTROPY: return eval.SFPriorEntropy();
    case SF_SCHEME_ENTROPY: return eval.SFSchemeEntropy();
    case SF_ENTROPY_GAIN: return eval.SFEntropyGain();
    case SF_MEAN_PRIOR_ENTROPY: return eval.SFMeanPriorEntropy();
    case SF_MEAN_SCHEME_ENTROPY: return eval.SFMeanSchemeEntropy();
    case SF_MEAN_ENTROPY_GAIN: return eval.SFMeanEntropyGain();
    case KB_INFORMATION: return eval.KBInformation();
    case KB_MEAN_INFORMATION: return eval.KBMeanInformation();
    case KB_RELATIVE_INFORMATION: return eval.KBRelativeInformation();
    // per-class statistics
    case TRUE_POSITIVE_RATE: return eval.truePositiveRate(classIndex);
    case NUM_TRUE_POSITIVES: return eval.numTruePositives(classIndex);
    case FALSE_POSITIVE_RATE: return eval.falsePositiveRate(classIndex);
    case NUM_FALSE_POSITIVES: return eval.numFalsePositives(classIndex);
    case TRUE_NEGATIVE_RATE: return eval.trueNegativeRate(classIndex);
    case NUM_TRUE_NEGATIVES: return eval.numTrueNegatives(classIndex);
    case FALSE_NEGATIVE_RATE: return eval.falseNegativeRate(classIndex);
    case NUM_FALSE_NEGATIVES: return eval.numFalseNegatives(classIndex);
    case IR_PRECISION: return eval.precision(classIndex);
    case IR_RECALL: return eval.recall(classIndex);
    case F_MEASURE: return eval.fMeasure(classIndex);
    case MATTHEWS_CORRELATION_COEFFICIENT: return eval.matthewsCorrelationCoefficient(classIndex);
    case AREA_UNDER_ROC: return eval.areaUnderROC(classIndex);
    case AREA_UNDER_PRC: return eval.areaUnderPRC(classIndex);
    // class-weighted statistics
    case WEIGHTED_TRUE_POSITIVE_RATE: return eval.weightedTruePositiveRate();
    case WEIGHTED_FALSE_POSITIVE_RATE: return eval.weightedFalsePositiveRate();
    case WEIGHTED_TRUE_NEGATIVE_RATE: return eval.weightedTrueNegativeRate();
    case WEIGHTED_FALSE_NEGATIVE_RATE: return eval.weightedFalseNegativeRate();
    case WEIGHTED_IR_PRECISION: return eval.weightedPrecision();
    case WEIGHTED_IR_RECALL: return eval.weightedRecall();
    case WEIGHTED_F_MEASURE: return eval.weightedFMeasure();
    case WEIGHTED_MATTHEWS_CORRELATION_COEFFICIENT: return eval.weightedMatthewsCorrelation();
    case WEIGHTED_AREA_UNDER_ROC: return eval.weightedAreaUnderROC();
    case WEIGHTED_AREA_UNDER_PRC: return eval.weightedAreaUnderPRC();
    case UNWEIGHTED_MACRO_F_MEASURE: return eval.unweightedMacroFmeasure();
    case UNWEIGHTED_MICRO_F_MEASURE: return eval.unweightedMicroFmeasure();
    // plugin metrics; note that getPluginMetric returns null if the metric
    // is not registered, which would trigger a NullPointerException here
    case BIAS: return eval.getPluginMetric(Bias.class.getName()).getStatistic(Bias.NAME);
    case RSQUARED: return eval.getPluginMetric(RSquared.class.getName()).getStatistic(RSquared.NAME);
    case SDR: return eval.getPluginMetric(SDR.class.getName()).getStatistic(SDR.NAME);
    case RPD: return eval.getPluginMetric(RPD.class.getName()).getStatistic(RPD.NAME);
    default:
        throw new IllegalArgumentException("Unhandled statistic field: " + statistic);
    }
}
From source file:algoritmogeneticocluster.Cromossomo.java
private double getMacroAverage(Evaluation eval, Instances data) {
    double macroPrecision = 0;
    double macroRecall = 0;
    // Sum the per-class precision and recall over all classes.
    for (int i = 0; i < data.numClasses(); i++) {
        macroPrecision += eval.precision(i);
        macroRecall += eval.recall(i);
    }
    macroPrecision /= data.numClasses();
    macroRecall /= data.numClasses();
    // Macro F-measure: harmonic mean of the macro-averaged precision and recall.
    double macroMeasure = (2 * macroPrecision * macroRecall) / (macroPrecision + macroRecall);
    return macroMeasure;
}
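One caveat with the harmonic mean above: if the macro-averaged precision and recall are both zero, the division yields NaN. A minimal sketch of the same computation with a zero-denominator guard (the method name getMacroAverageSafe is illustrative, not part of the original source):

// Illustrative variant, not part of the original source: guards the
// harmonic mean against a zero denominator.
private double getMacroAverageSafe(Evaluation eval, Instances data) {
    double macroPrecision = 0;
    double macroRecall = 0;
    for (int i = 0; i < data.numClasses(); i++) {
        macroPrecision += eval.precision(i);
        macroRecall += eval.recall(i);
    }
    macroPrecision /= data.numClasses();
    macroRecall /= data.numClasses();
    double denom = macroPrecision + macroRecall;
    return denom == 0 ? 0 : (2 * macroPrecision * macroRecall) / denom;
}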
From source file:Control.Classificador.java
public double recallToDouble(Evaluation avaliacao, Instances base) {
    double resultadoDbl = 0;
    for (int i = 0; i < base.numClasses(); i++) {
        resultadoDbl += avaliacao.recall(i);
    }
    // Macro-averaged recall: mean of the per-class recall values.
    return resultadoDbl / base.numClasses();
}
From source file:de.fub.maps.project.detector.model.inference.ui.EvaluationPanel.java
License:Apache License
public void updatePanel(Evaluation evaluation) {
    DefaultCategoryDataset dataset = getBarChartPanel().getDataset();
    dataset.clear();
    this.evaluation = evaluation;

    double correct = evaluation.pctCorrect();
    double incorrect = evaluation.pctIncorrect();
    getCorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, correct));
    getIncorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, incorrect));

    int numClasses = evaluation.getHeader().numClasses();
    for (int classIndex = 0; classIndex < numClasses; classIndex++) {
        double precision = evaluation.precision(classIndex) * 100;
        double recall = evaluation.recall(classIndex) * 100;
        dataset.addValue(precision,
                NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Precision_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
        dataset.addValue(recall,
                NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Recall_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
    }
    getExplorerManager().setRootContext(
            new AbstractNode(Children.create(new EvaluationNodeFactory(evaluation), true)));
    repaint();
}
From source file:entity.NfoldCrossValidationManager.java
License:Open Source License
/**
 * n fold cross validation without noise
 *
 * @param classifier the classifier to evaluate
 * @param dataset    the instances to cross-validate on
 * @param folds      the number of folds
 * @return           the collected classification statistics
 */
public Stats crossValidate(Classifier classifier, Instances dataset, int folds) {

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }

    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN,
            kappa, precision, recall, fmeasure);
}
From source file:entity.NfoldCrossValidationManager.java
License:Open Source License
/**
 * n fold cross validation with noise (independent fp and fn)
 *
 * @param classifier   the classifier to evaluate
 * @param dataset      the instances to cross-validate on
 * @param folds        the number of folds
 * @param fpPercentage the percentage of false-positive noise to inject
 * @param fnPercentage the percentage of false-negative noise to inject
 * @return             the collected classification statistics
 */
public Stats crossValidateWithNoise(Classifier classifier, Instances dataset, int folds,
        BigDecimal fpPercentage, BigDecimal fnPercentage) {

    // noise manager
    NoiseInjectionManager noiseInjectionManager = new NoiseInjectionManager();

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }

    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // copies instances of train set to not modify the original
        Instances noisyTrain = new Instances(train);
        // injects level of noise in the copied train set
        noiseInjectionManager.addNoiseToDataset(noisyTrain, fpPercentage, fnPercentage);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            // trains the model using a noisy train set
            clsCopy.buildClassifier(noisyTrain);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN,
            kappa, precision, recall, fmeasure);
}
From source file:entity.NfoldCrossValidationManager.java
License:Open Source License
/**
 * n fold cross validation with noise (combined fp and fn)
 *
 * @param classifier             the classifier to evaluate
 * @param dataset                the instances to cross-validate on
 * @param folds                  the number of folds
 * @param combinedFpFnPercentage the combined percentage of fp and fn noise to inject
 * @return                       the collected classification statistics
 */
public Stats crossValidateWithNoise(Classifier classifier, Instances dataset, int folds,
        BigDecimal combinedFpFnPercentage) {

    // noise manager
    NoiseInjectionManager noiseInjectionManager = new NoiseInjectionManager();

    // randomizes order of instances
    Instances randDataset = new Instances(dataset);
    randDataset.randomize(RandomizationManager.randomGenerator);

    // cross-validation
    Evaluation eval = null;
    try {
        eval = new Evaluation(randDataset);
    } catch (Exception e) {
        e.printStackTrace();
    }

    for (int n = 0; n < folds; n++) {
        Instances test = randDataset.testCV(folds, n);
        Instances train = randDataset.trainCV(folds, n, RandomizationManager.randomGenerator);

        // copies instances of train set to not modify the original
        Instances noisyTrain = new Instances(train);
        // injects level of noise in the copied train set
        noiseInjectionManager.addNoiseToDataset(noisyTrain, combinedFpFnPercentage);

        // build and evaluate classifier
        Classifier clsCopy;
        try {
            clsCopy = Classifier.makeCopy(classifier);
            // trains the model using a noisy train set
            clsCopy.buildClassifier(noisyTrain);
            eval.evaluateModel(clsCopy, test);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // output evaluation for the nfold cross validation
    Double precision = eval.precision(Settings.classificationChoice);
    Double recall = eval.recall(Settings.classificationChoice);
    Double fmeasure = eval.fMeasure(Settings.classificationChoice);
    Double classificationTP = eval.numTruePositives(Settings.classificationChoice);
    Double classificationTN = eval.numTrueNegatives(Settings.classificationChoice);
    Double classificationFP = eval.numFalsePositives(Settings.classificationChoice);
    Double classificationFN = eval.numFalseNegatives(Settings.classificationChoice);
    Double kappa = eval.kappa();

    return new Stats(classificationTP, classificationTN, classificationFP, classificationFN,
            kappa, precision, recall, fmeasure);
}
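For context, a minimal driver for the three cross-validation variants above might look like the following sketch. The file name dataset.arff, the J48 classifier, the fold count, and the 10% noise levels are illustrative assumptions; Stats, Settings, and RandomizationManager are assumed to come from the same entity package as the methods themselves.

import java.math.BigDecimal;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidationDriver {
    public static void main(String[] args) throws Exception {
        // Illustrative assumption: any two-class ARFF dataset.
        Instances data = DataSource.read("dataset.arff");
        data.setClassIndex(data.numAttributes() - 1);

        NfoldCrossValidationManager manager = new NfoldCrossValidationManager();
        // Clean 10-fold run, then runs with 10% injected noise.
        Stats clean = manager.crossValidate(new J48(), data, 10);
        Stats noisy = manager.crossValidateWithNoise(new J48(), data, 10,
                new BigDecimal("0.1"), new BigDecimal("0.1"));
        Stats combined = manager.crossValidateWithNoise(new J48(), data, 10,
                new BigDecimal("0.1"));
        System.out.println("clean: " + clean + ", noisy: " + noisy
                + ", combined: " + combined);
    }
}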
From source file:hero.unstable.util.classification.wekaClassifier.java
/**
 * Result as:
 * [pctCorrect, precision(class 0), precision(class 1), weightedPrecision,
 *  recall(class 0), recall(class 1), weightedRecall,
 *  fMeasure(class 0), fMeasure(class 1), weightedFMeasure]
 *
 * @param result the evaluation to extract the metrics from
 * @return 10 metrics
 */
public double[] getMetrics(Evaluation result) {
    double[] metrics = new double[10];
    metrics[0] = result.pctCorrect() / 100;
    metrics[1] = result.precision(0);
    metrics[2] = result.precision(1);
    metrics[3] = result.weightedPrecision();
    metrics[4] = result.recall(0);
    metrics[5] = result.recall(1);
    metrics[6] = result.weightedRecall();
    metrics[7] = result.fMeasure(0);
    metrics[8] = result.fMeasure(1);
    metrics[9] = result.weightedFMeasure();
    return metrics;
}
From source file:it.unisa.gitdm.evaluation.WekaEvaluator.java
private static void evaluateModel(String baseFolderPath, String projectName, Classifier pClassifier,
        Instances pInstances, String pModelName, String pClassifierName) throws Exception {
    // other options
    int folds = 10;

    // randomize data
    Random rand = new Random(42);
    Instances randData = new Instances(pInstances);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }

    // perform cross-validation
    Evaluation eval = new Evaluation(randData);
    int positiveValueIndexOfClassFeature = 0;
    for (int n = 0; n < folds; n++) {
        Instances train = randData.trainCV(folds, n);
        Instances test = randData.testCV(folds, n);
        // the above code is used by the StratifiedRemoveFolds filter, the
        // code below by the Explorer/Experimenter:
        // Instances train = randData.trainCV(folds, n, rand);

        // locate the "isBuggy" class attribute and the index of its "TRUE" value
        int classFeatureIndex = 0;
        for (int i = 0; i < train.numAttributes(); i++) {
            if (train.attribute(i).name().equals("isBuggy")) {
                classFeatureIndex = i;
                break;
            }
        }
        Attribute classFeature = train.attribute(classFeatureIndex);
        for (int i = 0; i < classFeature.numValues(); i++) {
            if (classFeature.value(i).equals("TRUE")) {
                positiveValueIndexOfClassFeature = i;
            }
        }
        train.setClassIndex(classFeatureIndex);
        test.setClassIndex(classFeatureIndex);

        // build and evaluate classifier
        pClassifier.buildClassifier(train);
        eval.evaluateModel(pClassifier, test);
    }

    // accuracy = (TP + TN) / (TP + FP + FN + TN)
    double accuracy = (eval.numTruePositives(positiveValueIndexOfClassFeature)
            + eval.numTrueNegatives(positiveValueIndexOfClassFeature))
            / (eval.numTruePositives(positiveValueIndexOfClassFeature)
                    + eval.numFalsePositives(positiveValueIndexOfClassFeature)
                    + eval.numFalseNegatives(positiveValueIndexOfClassFeature)
                    + eval.numTrueNegatives(positiveValueIndexOfClassFeature));

    // F-measure = 2 * precision * recall / (precision + recall)
    double fmeasure = 2 * ((eval.precision(positiveValueIndexOfClassFeature)
            * eval.recall(positiveValueIndexOfClassFeature))
            / (eval.precision(positiveValueIndexOfClassFeature)
                    + eval.recall(positiveValueIndexOfClassFeature)));

    File wekaOutput = new File(baseFolderPath + projectName + "/predictors.csv");
    PrintWriter pw1 = new PrintWriter(wekaOutput);
    pw1.write(accuracy + ";" + eval.precision(positiveValueIndexOfClassFeature) + ";"
            + eval.recall(positiveValueIndexOfClassFeature) + ";" + fmeasure + ";"
            + eval.areaUnderROC(positiveValueIndexOfClassFeature));
    pw1.close(); // flush and release the file handle so the output is actually written

    System.out.println(projectName + ";" + pClassifierName + ";" + pModelName + ";"
            + eval.numTruePositives(positiveValueIndexOfClassFeature) + ";"
            + eval.numFalsePositives(positiveValueIndexOfClassFeature) + ";"
            + eval.numFalseNegatives(positiveValueIndexOfClassFeature) + ";"
            + eval.numTrueNegatives(positiveValueIndexOfClassFeature) + ";"
            + accuracy + ";" + eval.precision(positiveValueIndexOfClassFeature) + ";"
            + eval.recall(positiveValueIndexOfClassFeature) + ";" + fmeasure + ";"
            + eval.areaUnderROC(positiveValueIndexOfClassFeature) + "\n");
}
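Worth noting: for a two-class problem like this one, the hand-computed quantities above have built-in counterparts on Evaluation. A short sketch of the equivalent calls, assuming the same eval and positive class index as in the method:

// Built-in equivalents of the manual computations above (sketch).
// pctCorrect()/100 matches (TP+TN)/(TP+FP+FN+TN) when every instance is classified,
// and fMeasure(i) is exactly 2 * precision * recall / (precision + recall) for class i.
double accuracyBuiltIn = eval.pctCorrect() / 100.0;
double fmeasureBuiltIn = eval.fMeasure(positiveValueIndexOfClassFeature);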
From source file:main.mFFNN.java
public static void main(String[] args) throws Exception {
    mFFNN m = new mFFNN();
    BufferedReader breader = new BufferedReader(new FileReader("src\\main\\iris.arff"));
    Instances fileTrain = new Instances(breader);
    fileTrain.setClassIndex(fileTrain.numAttributes() - 1);
    System.out.println(fileTrain);
    breader.close();

    System.out.println("mFFNN!!!\n\n");
    FeedForwardNeuralNetwork FFNN = new FeedForwardNeuralNetwork();
    Evaluation eval = new Evaluation(fileTrain);
    FFNN.buildClassifier(fileTrain);
    eval.evaluateModel(FFNN, fileTrain);

    // OUTPUT
    Scanner scan = new Scanner(System.in);
    System.out.println(eval.toSummaryString("=== Stratified cross-validation ===\n"
            + "=== Summary ===", true));
    System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
    System.out.println(eval.toMatrixString("=== Confusion matrix ==="));
    System.out.println(eval.fMeasure(1) + " " + eval.recall(1));

    System.out.println("\nDo you want to save this model(1/0)? ");
    FFNN.distributionForInstance(fileTrain.get(0));
    /*
    int c = scan.nextInt();
    if (c == 1) {
        System.out.print("Please enter your file name (*.model) : ");
        String infile = scan.next();
        m.saveModel(FFNN, infile);
    } else {
        System.out.print("Model not saved.");
    }
    */
}