List of usage examples for weka.classifiers.Evaluation.pctIncorrect()
public final double pctIncorrect()
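pctIncorrect() returns the percentage (0 to 100) of instances that the classifier got wrong. As a quick orientation before the examples below, here is a minimal, self-contained sketch of the typical call pattern; the data file name "data.arff" and the choice of J48 as the classifier are illustrative placeholders, not taken from any example on this page:

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class PctIncorrectSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and use the last attribute as the class (placeholder file name).
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a J48 tree with 10 folds and report the error rate.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println("Incorrectly classified: " + eval.pctIncorrect() + "%");
    }
}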
From source file:FlexDMThread.java
License:Open Source License
public void run() {
    try {
        // Get the data from the source
        FlexDM.getMainData.acquire();
        Instances data = dataset.getSource().getDataSet();
        FlexDM.getMainData.release();

        // Set class attribute if undefined
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        // Process hyperparameters for classifier
        String temp = "";
        for (int i = 0; i < classifier.getNumParams(); i++) {
            temp += classifier.getParameter(i).getName();
            temp += " ";
            if (classifier.getParameter(i).getValue() != null) {
                temp += classifier.getParameter(i).getValue();
                temp += " ";
            }
        }
        String[] options = weka.core.Utils.splitOptions(temp);

        // Print to console - experiment is starting
        if (temp.equals("")) { // no parameters
            temp = "results_no_parameters";
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with no parameters");
            }
        } else { // parameters
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with parameters " + temp);
            }
        }

        // Create classifier, setting parameters
        weka.classifiers.Classifier x = createObject(classifier.getName());
        x.setOptions(options);
        x.buildClassifier(data);

        // Process the test selection
        String[] tempTest = dataset.getTest().split("\\s");

        // Create evaluation object for training and testing classifiers
        Evaluation eval = new Evaluation(data);
        StringBuffer predictions = new StringBuffer();

        // Train and evaluate classifier
        if (tempTest[0].equals("testset")) { // specified test file
            // Build classifier
            x.buildClassifier(data);

            // Open test file, load data
            //DataSource testFile = new DataSource(dataset.getTest().substring(7).trim());
            //Instances testSet = testFile.getDataSet();
            FlexDM.getTestData.acquire();
            Instances testSet = dataset.getTestFile().getDataSet();
            FlexDM.getTestData.release();

            // Set class attribute if undefined
            if (testSet.classIndex() == -1) {
                testSet.setClassIndex(testSet.numAttributes() - 1);
            }

            // Evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else if (tempTest[0].equals("xval")) { // cross-validation
            // Build classifier
            x.buildClassifier(data);

            // Cross-validate
            eval.crossValidateModel(x, data, Integer.parseInt(tempTest[1]), new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("leavexval")) { // leave-one-out cross-validation
            // Build classifier
            x.buildClassifier(data);

            // Cross-validate
            eval.crossValidateModel(x, data, data.numInstances() - 1, new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("percent")) { // percentage split of single data set
            // Set training and test sizes from percentage
            int trainSize = (int) Math.round(data.numInstances() * Double.parseDouble(tempTest[1]));
            int testSize = data.numInstances() - trainSize;

            // Load specified data
            Instances train = new Instances(data, 0, trainSize);
            Instances testSet = new Instances(data, trainSize, testSize);

            // Build classifier
            x.buildClassifier(train);

            // Train and evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else { // evaluate on training data
            // Test and evaluate model
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, data, array);
        }

        // Create data file for results
        String filename = dataset.getDir() + "/" + classifier.getDirName() + "/" + temp + ".txt";
        PrintWriter writer = new PrintWriter(filename, "UTF-8");

        // Print classifier, dataset, parameters info to file
        try {
            writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                    + "\n PARAMETERS: " + temp);
        } catch (Exception e) {
            writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                    + "\n PARAMETERS: " + temp);
        }

        // Add evaluation string to file
        writer.println(eval.toSummaryString());

        // Process result options
        if (checkResults("stats")) { // classifier statistics
            writer.println(eval.toClassDetailsString());
        }
        if (checkResults("model")) { // the model
            writer.println(x.toString());
        }
        if (checkResults("matrix")) { // confusion matrix
            writer.println(eval.toMatrixString());
        }
        if (checkResults("entropy")) { // entropy statistics
            // Set options required to get the entropy stats
            String[] opt = new String[4];
            opt[0] = "-t";
            opt[1] = dataset.getName();
            opt[2] = "-k";
            opt[3] = "-v";

            // Evaluate model
            String entropy = Evaluation.evaluateModel(x, opt);

            // Grab the relevant info from the results, print to file
            entropy = entropy.substring(entropy.indexOf("=== Stratified cross-validation ===") + 35,
                    entropy.indexOf("=== Confusion Matrix ==="));
            writer.println("=== Entropy Statistics ===");
            writer.println(entropy);
        }
        if (checkResults("predictions")) { // the model's predictions
            writer.println("=== Predictions ===\n");
            if (!dataset.getTest().contains("xval")) { // print header of predictions table if required
                writer.println(" inst# actual predicted error distribution ()");
            }
            writer.println(predictions.toString()); // print predictions to file
        }
        writer.close();

        // Summary file is semaphore-controlled to ensure quality
        try {
            // Get a permit, then grab the summary file and write the classifier's details to it
            FlexDM.writeFile.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(summary, true));
            if (temp.equals("results_no_parameters")) { // change output based on parameters
                temp = temp.substring(8);
            }

            // Write percent correct, classifier name, dataset name to summary file
            p.write(dataset.getName() + ", " + classifier.getName() + ", " + temp + ", " + eval.correct()
                    + ", " + eval.incorrect() + ", " + eval.unclassified() + ", " + eval.pctCorrect() + ", "
                    + eval.pctIncorrect() + ", " + eval.pctUnclassified() + ", " + eval.kappa() + ", "
                    + eval.meanAbsoluteError() + ", " + eval.rootMeanSquaredError() + ", "
                    + eval.relativeAbsoluteError() + ", " + eval.rootRelativeSquaredError() + ", "
                    + eval.SFPriorEntropy() + ", " + eval.SFSchemeEntropy() + ", " + eval.SFEntropyGain()
                    + ", " + eval.SFMeanPriorEntropy() + ", " + eval.SFMeanSchemeEntropy() + ", "
                    + eval.SFMeanEntropyGain() + ", " + eval.KBInformation() + ", "
                    + eval.KBMeanInformation() + ", " + eval.KBRelativeInformation() + ", "
                    + eval.weightedTruePositiveRate() + ", " + eval.weightedFalsePositiveRate() + ", "
                    + eval.weightedTrueNegativeRate() + ", " + eval.weightedFalseNegativeRate() + ", "
                    + eval.weightedPrecision() + ", " + eval.weightedRecall() + ", "
                    + eval.weightedFMeasure() + ", " + eval.weightedAreaUnderROC() + "\n");
            p.close();

            // Release semaphore
            FlexDM.writeFile.release();
        } catch (InterruptedException e) { // bad things happened
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        // Report that we have successfully finished processing this classifier
        if (temp.equals("no_parameters")) { // no parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with no parameters");
            }
        } else { // with parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with parameters " + temp);
            }
        }

        try {
            // Get a permit, then grab the log file and write the classifier's details to it
            FlexDM.writeLog.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(log, true));
            Date date = new Date();
            Format formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
            if (temp.equals("results_no_parameters")) { // change output based on parameters
                temp = temp.substring(8);
            }

            // Write details to log file
            p.write(dataset.getName() + ", " + dataset.getTest() + ", \"" + dataset.getResult_string()
                    + "\", " + classifier.getName() + ", " + temp + ", " + formatter.format(date) + "\n");
            p.close();

            // Release semaphore
            FlexDM.writeLog.release();
        } catch (InterruptedException e) { // bad things happened
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        s.release();
    } catch (Exception e) { // an error occurred
        System.err.println("FATAL ERROR OCCURRED: " + e.toString() + "\nClassifier: " + cNum + " - "
                + classifier.getName() + " on dataset " + dataset.getName());
        s.release();
    }
}
From source file:adams.flow.core.EvaluationHelper.java
License:Open Source License
/**
 * Returns a statistical value from the evaluation object.
 *
 * @param eval       the evaluation object to get the value from
 * @param statistic  the type of value to return
 * @param classIndex the class label index, for statistics like AUC
 * @return the determined value, Double.NaN if not found
 * @throws Exception if evaluation fails
 */
public static double getValue(Evaluation eval, EvaluationStatistic statistic, int classIndex) throws Exception {
    switch (statistic) {
    case NUMBER_CORRECT: return eval.correct();
    case NUMBER_INCORRECT: return eval.incorrect();
    case NUMBER_UNCLASSIFIED: return eval.unclassified();
    case PERCENT_CORRECT: return eval.pctCorrect();
    case PERCENT_INCORRECT: return eval.pctIncorrect();
    case PERCENT_UNCLASSIFIED: return eval.pctUnclassified();
    case KAPPA_STATISTIC: return eval.kappa();
    case MEAN_ABSOLUTE_ERROR: return eval.meanAbsoluteError();
    case ROOT_MEAN_SQUARED_ERROR: return eval.rootMeanSquaredError();
    case RELATIVE_ABSOLUTE_ERROR: return eval.relativeAbsoluteError();
    case ROOT_RELATIVE_SQUARED_ERROR: return eval.rootRelativeSquaredError();
    case CORRELATION_COEFFICIENT: return eval.correlationCoefficient();
    case SF_PRIOR_ENTROPY: return eval.SFPriorEntropy();
    case SF_SCHEME_ENTROPY: return eval.SFSchemeEntropy();
    case SF_ENTROPY_GAIN: return eval.SFEntropyGain();
    case SF_MEAN_PRIOR_ENTROPY: return eval.SFMeanPriorEntropy();
    case SF_MEAN_SCHEME_ENTROPY: return eval.SFMeanSchemeEntropy();
    case SF_MEAN_ENTROPY_GAIN: return eval.SFMeanEntropyGain();
    case KB_INFORMATION: return eval.KBInformation();
    case KB_MEAN_INFORMATION: return eval.KBMeanInformation();
    case KB_RELATIVE_INFORMATION: return eval.KBRelativeInformation();
    case TRUE_POSITIVE_RATE: return eval.truePositiveRate(classIndex);
    case NUM_TRUE_POSITIVES: return eval.numTruePositives(classIndex);
    case FALSE_POSITIVE_RATE: return eval.falsePositiveRate(classIndex);
    case NUM_FALSE_POSITIVES: return eval.numFalsePositives(classIndex);
    case TRUE_NEGATIVE_RATE: return eval.trueNegativeRate(classIndex);
    case NUM_TRUE_NEGATIVES: return eval.numTrueNegatives(classIndex);
    case FALSE_NEGATIVE_RATE: return eval.falseNegativeRate(classIndex);
    case NUM_FALSE_NEGATIVES: return eval.numFalseNegatives(classIndex);
    case IR_PRECISION: return eval.precision(classIndex);
    case IR_RECALL: return eval.recall(classIndex);
    case F_MEASURE: return eval.fMeasure(classIndex);
    case MATTHEWS_CORRELATION_COEFFICIENT: return eval.matthewsCorrelationCoefficient(classIndex);
    case AREA_UNDER_ROC: return eval.areaUnderROC(classIndex);
    case AREA_UNDER_PRC: return eval.areaUnderPRC(classIndex);
    case WEIGHTED_TRUE_POSITIVE_RATE: return eval.weightedTruePositiveRate();
    case WEIGHTED_FALSE_POSITIVE_RATE: return eval.weightedFalsePositiveRate();
    case WEIGHTED_TRUE_NEGATIVE_RATE: return eval.weightedTrueNegativeRate();
    case WEIGHTED_FALSE_NEGATIVE_RATE: return eval.weightedFalseNegativeRate();
    case WEIGHTED_IR_PRECISION: return eval.weightedPrecision();
    case WEIGHTED_IR_RECALL: return eval.weightedRecall();
    case WEIGHTED_F_MEASURE: return eval.weightedFMeasure();
    case WEIGHTED_MATTHEWS_CORRELATION_COEFFICIENT: return eval.weightedMatthewsCorrelation();
    case WEIGHTED_AREA_UNDER_ROC: return eval.weightedAreaUnderROC();
    case WEIGHTED_AREA_UNDER_PRC: return eval.weightedAreaUnderPRC();
    case UNWEIGHTED_MACRO_F_MEASURE: return eval.unweightedMacroFmeasure();
    case UNWEIGHTED_MICRO_F_MEASURE: return eval.unweightedMicroFmeasure();
    case BIAS: return eval.getPluginMetric(Bias.class.getName()).getStatistic(Bias.NAME);
    case RSQUARED: return eval.getPluginMetric(RSquared.class.getName()).getStatistic(RSquared.NAME);
    case SDR: return eval.getPluginMetric(SDR.class.getName()).getStatistic(SDR.NAME);
    case RPD: return eval.getPluginMetric(RPD.class.getName()).getStatistic(RPD.NAME);
    default: throw new IllegalArgumentException("Unhandled statistic field: " + statistic);
    }
}
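For orientation, a hypothetical call to the helper above might look as follows; the Evaluation object eval is assumed to have been built elsewhere, and EvaluationStatistic.PERCENT_INCORRECT is the enum constant handled by the switch statement:

// Assuming `eval` is a weka.classifiers.Evaluation populated elsewhere:
double errorPct = EvaluationHelper.getValue(eval, EvaluationStatistic.PERCENT_INCORRECT, 0);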
From source file:algoritmogeneticocluster.WekaSimulation.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    // classifier.buildClassifier(trainset);

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());
    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:classify.Classifier.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // Read in data
    try {
        DataSource input = new DataSource("no_missing_values.csv");
        Instances data = input.getDataSet();
        //Instances data = readFile("newfixed.txt");
        missingValuesRows(data);
        setAttributeValues(data);
        data.setClassIndex(data.numAttributes() - 1);

        // Boosting
        AdaBoostM1 boosting = new AdaBoostM1();
        boosting.setNumIterations(25);
        boosting.setClassifier(new DecisionStump());

        // Build the classifier
        boosting.buildClassifier(data);

        // Evaluate using 10-fold cross-validation
        Evaluation e1 = new Evaluation(data);
        e1.crossValidateModel(boosting, data, 10, new Random(1));
        DecimalFormat nf = new DecimalFormat("0.000");
        System.out.println("Results of Boosting with Decision Stumps:");
        System.out.println(boosting.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");
        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");
        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + " ");
            }
            System.out.println();
        }
        System.out.println();
        System.out.println();
        System.out.println();

        // Logistic regression
        Logistic l = new Logistic();
        l.buildClassifier(data);
        e1 = new Evaluation(data);
        e1.crossValidateModel(l, data, 10, new Random(1));
        System.out.println("Results of Logistic Regression:");
        System.out.println(l.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");
        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");
        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + " ");
            }
            System.out.println();
        }
    } catch (Exception ex) {
        // Data couldn't be read, so end program
        System.out.println("Exception thrown, program ending.");
    }
}
From source file:Controller.CtlDataMining.java
public String redBayesiana(Instances data) {
    try {
        // Create a Naive Bayes classifier
        NaiveBayes nb = new NaiveBayes();

        // Build the classifier
        nb.buildClassifier(data);

        // Create an object to validate the Naive Bayes model
        Evaluation evalB = new Evaluation(data);

        /* Apply the Bayesian classifier using 10-fold cross-validation,
           with 1 as the random seed for shuffling the data before it is
           split into the 10 folds */
        evalB.crossValidateModel(nb, data, 10, new Random(1));

        String resBay = "<br><br><b><center>Resultados NaiveBayes</center>" + "<br>========<br>"
                + "Modelo generado indica los siguientes resultados:" + "<br>========<br></b>";

        // Collect the results
        resBay = resBay + ("<b>1. Numero de instancias clasificadas:</b> "
                + (int) evalB.numInstances() + "<br>");
        resBay = resBay + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalB.pctCorrect()) + "%<br>");
        resBay = resBay + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b> "
                + (int) evalB.correct() + "<br>");
        resBay = resBay + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalB.pctIncorrect()) + "%<br>");
        resBay = resBay + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalB.incorrect() + "<br>");
        resBay = resBay + ("<b>6. Media del error absoluto:</b> "
                + formato.format(evalB.meanAbsoluteError()) + "%<br>");
        resBay = resBay + ("<b>7. "
                + evalB.toMatrixString("Matriz de " + "confusion</b>").replace("\n", "<br>"));

        return resBay;
    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}
From source file:Controller.CtlDataMining.java
public String arbolJ48(Instances data) {
    try {
        // Create a J48 classifier
        J48 j48 = new J48();

        // Build the J48 classifier on the data
        j48.buildClassifier(data);

        // Create an object to validate the model
        Evaluation evalJ48 = new Evaluation(data);

        /* Apply the J48 classifier using 10-fold cross-validation,
           with 1 as the random seed */
        evalJ48.crossValidateModel(j48, data, 10, new Random(1));

        // Collect the results
        String resJ48 = "<br><b><center>Resultados Arbol de decision J48"
                + "</center><br>========<br>Modelo generado indica los "
                + "siguientes resultados:<br>========<br></b>";
        resJ48 = resJ48 + ("<b>1. Numero de instancias clasificadas:</b> "
                + (int) evalJ48.numInstances() + "<br>");
        resJ48 = resJ48 + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctCorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b>"
                + (int) evalJ48.correct() + "<br>");
        resJ48 = resJ48 + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctIncorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalJ48.incorrect() + "<br>");
        resJ48 = resJ48 + ("<b>6. Media del error absoluto:</b> "
                + formato.format(evalJ48.meanAbsoluteError()) + "<br>");
        resJ48 = resJ48 + ("<b>7. "
                + evalJ48.toMatrixString("Matriz de" + " confusion</b>").replace("\n", "<br>"));

        // PLOT THE GENERATED TREE
        // Create a temporary JFrame
        final javax.swing.JFrame jf = new javax.swing.JFrame("Arbol de decision: J48");
        // Set a size
        jf.setSize(500, 400);
        // Use a BorderLayout
        jf.getContentPane().setLayout(new BorderLayout());
        /* Instantiate the tree visualizer with the J48 graph.
           Parameters: (listener, tree type, node placer).
           PlaceNode2 places the nodes so that they fall uniformly
           below their parent. */
        TreeVisualizer tv = new TreeVisualizer(null, j48.graph(), new PlaceNode2());
        // Add the tree, centered
        jf.getContentPane().add(tv, BorderLayout.CENTER);
        // Add a listener for the window-close button
        jf.addWindowListener(new java.awt.event.WindowAdapter() {
            @Override
            public void windowClosing(java.awt.event.WindowEvent e) {
                jf.dispose();
            }
        });
        // Show it
        jf.setVisible(true);
        // Fit the tree to the frame width
        tv.fitToScreen();

        return resJ48;
    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}
From source file:de.fub.maps.project.detector.model.inference.ui.EvaluationPanel.java
License:Apache License
public void updatePanel(Evaluation evaluation) {
    DefaultCategoryDataset dataset = getBarChartPanel().getDataset();
    dataset.clear();
    this.evaluation = evaluation;
    double correct = evaluation.pctCorrect();
    double incorrect = evaluation.pctIncorrect();
    getCorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, correct));
    getIncorrectClassifiedInstances().setText(MessageFormat.format(NUMBER_PATTERN, incorrect));
    int numClasses = evaluation.getHeader().numClasses();
    for (int classIndex = 0; classIndex < numClasses; classIndex++) {
        double precision = evaluation.precision(classIndex) * 100;
        double recall = evaluation.recall(classIndex) * 100;
        dataset.addValue(precision,
                NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Precision_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
        dataset.addValue(recall,
                NbBundle.getMessage(EvaluationPanel.class, "EvaluationPanel.CLT_Recall_Text"),
                evaluation.getHeader().classAttribute().value(classIndex));
    }
    getExplorerManager()
            .setRootContext(new AbstractNode(Children.create(new EvaluationNodeFactory(evaluation), true)));
    repaint();
}
From source file:es.upm.dit.gsi.barmas.launcher.WekaClassifiersValidator.java
License:Open Source License
/**
 * Evaluates a classifier on test data in which the first {@code leba}
 * attributes have been set to missing.
 *
 * @param cls          the classifier to evaluate
 * @param trainingData the data the evaluation is primed with
 * @param testData     the data to evaluate the classifier on
 * @param leba         the number of leading attributes to set to missing
 * @return [0] = pctCorrect, [1] = pctIncorrect (both scaled to fractions in [0, 1])
 * @throws Exception if the evaluation fails
 */
public double[] getValidation(Classifier cls, Instances trainingData, Instances testData, int leba)
        throws Exception {

    Instances testDataWithLEBA = new Instances(testData);

    // Blank out the first `leba` attributes in every test instance
    for (int j = 0; j < leba; j++) {
        if (j < testDataWithLEBA.numAttributes() - 1) {
            for (int i = 0; i < testDataWithLEBA.numInstances(); i++) {
                testDataWithLEBA.instance(i).setMissing(j);
            }
        }
    }

    Evaluation eval;
    try {
        eval = new Evaluation(trainingData);
        logger.fine("Evaluating model with leba: " + leba);
        eval.evaluateModel(cls, testDataWithLEBA);

        double[] results = new double[2];
        results[0] = eval.pctCorrect() / 100;
        results[1] = eval.pctIncorrect() / 100;
        return results;
    } catch (Exception e) {
        logger.severe("Problems evaluating model for " + cls.getClass().getSimpleName());
        logger.severe(e.getMessage());
        e.printStackTrace();
        throw e;
    }
}
From source file:mlpoc.MLPOC.java
public static Evaluation crossValidate(String filename) {
    Evaluation eval = null;
    try {
        // Load data and set class index
        BufferedReader br = new BufferedReader(new FileReader(filename));
        Instances data = new Instances(br);
        br.close();
        /*File csv = new File(filename);
        CSVLoader loader = new CSVLoader();
        loader.setSource(csv);
        Instances data = loader.getDataSet();*/
        data.setClassIndex(data.numAttributes() - 1);

        // Classifier
        String[] tmpOptions;
        String classname = "weka.classifiers.trees.J48 -C 0.25";
        tmpOptions = classname.split(" ");
        classname = "weka.classifiers.trees.J48";
        tmpOptions[0] = "";
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, tmpOptions);

        // Other options
        int seed = 2;
        int folds = 10;

        // Randomize data
        Random rand = new Random(seed);
        Instances randData = new Instances(data);
        randData.randomize(rand);
        if (randData.classAttribute().isNominal())
            randData.stratify(folds);

        // Perform cross-validation
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // The above code is used by the StratifiedRemoveFolds filter, the
            // code below by the Explorer/Experimenter:
            // Instances train = randData.trainCV(folds, n, rand);

            // Build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(cls);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // Output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println("Classifier: " + cls.getClass().getName() + " "
                + Utils.joinOptions(cls.getOptions()));
        System.out.println("Dataset: " + data.relationName());
        System.out.println("Folds: " + folds);
        System.out.println("Seed: " + seed);
        System.out.println();
        System.out.println(eval.toSummaryString("Summary for testing", true));
        System.out.println("Correctly Classified Instances: " + eval.correct());
        System.out.println("Percentage of Correctly Classified Instances: " + eval.pctCorrect());
        System.out.println("Incorrectly Classified Instances: " + eval.incorrect());
        System.out.println("Percentage of Incorrectly Classified Instances: " + eval.pctIncorrect());
    } catch (Exception ex) {
        System.err.println(ex.getMessage());
    }
    return eval;
}
From source file:net.sf.jclal.evaluation.measure.SingleLabelEvaluation.java
License:Open Source License
/**
 * @param evaluation The evaluation
 */
public void setEvaluation(Evaluation evaluation) {
    try {
        this.evaluation = evaluation;
        StringBuilder st = new StringBuilder();
        st.append("Iteration: ").append(getIteration()).append("\n");
        st.append("Labeled set size: ").append(getLabeledSetSize()).append("\n");
        st.append("Unlabelled set size: ").append(getUnlabeledSetSize()).append("\n");
        st.append("\t\n");
        st.append("Correctly Classified Instances: ").append(evaluation.pctCorrect()).append("\n");
        st.append("Incorrectly Classified Instances: ").append(evaluation.pctIncorrect()).append("\n");
        st.append("Kappa statistic: ").append(evaluation.kappa()).append("\n");
        st.append("Mean absolute error: ").append(evaluation.meanAbsoluteError()).append("\n");
        st.append("Root mean squared error: ").append(evaluation.rootMeanSquaredError()).append("\n");
        st.append("Relative absolute error: ").append(evaluation.relativeAbsoluteError()).append("\n");
        st.append("Root relative squared error: ").append(evaluation.rootRelativeSquaredError()).append("\n");
        st.append("Coverage of cases: ").append(evaluation.coverageOfTestCasesByPredictedRegions()).append("\n");
        st.append("Mean region size: ").append(evaluation.sizeOfPredictedRegions()).append("\n");
        st.append("Weighted Precision: ").append(evaluation.weightedPrecision()).append("\n");
        st.append("Weighted Recall: ").append(evaluation.weightedRecall()).append("\n");
        st.append("Weighted FMeasure: ").append(evaluation.weightedFMeasure()).append("\n");
        st.append("Weighted TruePositiveRate: ").append(evaluation.weightedTruePositiveRate()).append("\n");
        st.append("Weighted FalsePositiveRate: ").append(evaluation.weightedFalsePositiveRate()).append("\n");
        st.append("Weighted MatthewsCorrelation: ").append(evaluation.weightedMatthewsCorrelation()).append("\n");
        st.append("Weighted AreaUnderROC: ").append(evaluation.weightedAreaUnderROC()).append("\n");
        st.append("Weighted AreaUnderPRC: ").append(evaluation.weightedAreaUnderPRC()).append("\n");
        st.append("\t\t\n");
        loadMetrics(st.toString());
    } catch (Exception e) {
        Logger.getLogger(SingleLabelEvaluation.class.getName()).log(Level.SEVERE, null, e);
    }
}