List of usage examples for weka.classifiers.Evaluation.toMatrixString()
public String toMatrixString() throws Exception
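toMatrixString() renders the confusion matrix accumulated by an Evaluation as a formatted string; it throws an Exception if no predictions have been collected or the class attribute is numeric. Before the project examples below, here is a minimal, self-contained sketch of the typical call pattern; the dataset path and the choice of J48 are illustrative assumptions, not taken from any of the listed sources:

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ToMatrixStringDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset with a nominal class attribute (placeholder path)
        Instances data = new DataSource("data/iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Collect predictions with 10-fold cross-validation
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // Print the formatted confusion matrix; the overload
        // toMatrixString(String) accepts a custom title
        System.out.println(eval.toMatrixString());
    }
}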
From source file:FlexDMThread.java
License:Open Source License
public void run() {
    try {
        // Get the data from the source
        FlexDM.getMainData.acquire();
        Instances data = dataset.getSource().getDataSet();
        FlexDM.getMainData.release();

        // Set class attribute if undefined
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        // Assemble the classifier's hyperparameters into an option string
        String temp = "";
        for (int i = 0; i < classifier.getNumParams(); i++) {
            temp += classifier.getParameter(i).getName();
            temp += " ";
            if (classifier.getParameter(i).getValue() != null) {
                temp += classifier.getParameter(i).getValue();
                temp += " ";
            }
        }
        String[] options = weka.core.Utils.splitOptions(temp);

        // Print to console - experiment is starting
        if (temp.equals("")) { // no parameters
            temp = "results_no_parameters";
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with no parameters");
            }
        } else { // parameters
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with parameters " + temp);
            }
        }

        // Create the classifier, set its parameters and build it
        weka.classifiers.Classifier x = createObject(classifier.getName());
        x.setOptions(options);
        x.buildClassifier(data);

        // Process the test selection
        String[] tempTest = dataset.getTest().split("\\s");

        // Create evaluation object for training and testing classifiers
        Evaluation eval = new Evaluation(data);
        StringBuffer predictions = new StringBuffer();

        // Train and evaluate the classifier
        if (tempTest[0].equals("testset")) { // specified test file
            // Open the test file and load its data
            FlexDM.getTestData.acquire();
            Instances testSet = dataset.getTestFile().getDataSet();
            FlexDM.getTestData.release();

            // Set class attribute if undefined
            if (testSet.classIndex() == -1) {
                testSet.setClassIndex(testSet.numAttributes() - 1);
            }

            // Evaluate model; the trailing arguments collect the predictions
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else if (tempTest[0].equals("xval")) { // cross validation
            eval.crossValidateModel(x, data, Integer.parseInt(tempTest[1]), new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("leavexval")) { // leave-one-out cross validation
            // One fold per instance (the original used numInstances() - 1, which is off by one)
            eval.crossValidateModel(x, data, data.numInstances(), new Random(1), predictions, new Range(),
                    true);
        } else if (tempTest[0].equals("percent")) { // percentage split of a single data set
            // Set training and test sizes from the percentage
            int trainSize = (int) Math.round(data.numInstances() * Double.parseDouble(tempTest[1]));
            int testSize = data.numInstances() - trainSize;

            // Split the data
            Instances train = new Instances(data, 0, trainSize);
            Instances testSet = new Instances(data, trainSize, testSize);

            // Rebuild the classifier on the training split only
            x.buildClassifier(train);

            // Evaluate on the held-out split
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else { // evaluate on the training data
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, data, array);
        }

        // Create the results file
        String filename = dataset.getDir() + "/" + classifier.getDirName() + "/" + temp + ".txt";
        PrintWriter writer = new PrintWriter(filename, "UTF-8");

        // Print classifier, dataset and parameter info to the file
        writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                + "\n PARAMETERS: " + temp);

        // Add the evaluation summary to the file
        writer.println(eval.toSummaryString());

        // Process the result options
        if (checkResults("stats")) { // classifier statistics
            writer.println(eval.toClassDetailsString());
        }
        if (checkResults("model")) { // the model
            writer.println(x.toString());
        }
        if (checkResults("matrix")) { // confusion matrix
            writer.println(eval.toMatrixString());
        }
        if (checkResults("entropy")) { // entropy statistics
            // Options required to obtain the entropy stats
            String[] opt = { "-t", dataset.getName(), "-k", "-v" };

            // Evaluate the model
            String entropy = Evaluation.evaluateModel(x, opt);

            // Grab the relevant section of the results and print it to the file
            entropy = entropy.substring(entropy.indexOf("=== Stratified cross-validation ===") + 35,
                    entropy.indexOf("=== Confusion Matrix ==="));
            writer.println("=== Entropy Statistics ===");
            writer.println(entropy);
        }
        if (checkResults("predictions")) { // the model's predictions
            writer.println("=== Predictions ===\n");
            if (!dataset.getTest().contains("xval")) { // print header of predictions table if required
                writer.println(" inst#     actual  predicted error distribution ()");
            }
            writer.println(predictions.toString());
        }
        writer.close();

        // The summary file is semaphore-controlled to serialize writes
        try {
            FlexDM.writeFile.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(summary, true));
            if (temp.equals("results_no_parameters")) { // change output based on parameters
                temp = temp.substring(8);
            }

            // Write the evaluation statistics, classifier name and dataset name to the summary file
            p.write(dataset.getName() + ", " + classifier.getName() + ", " + temp + ", " + eval.correct()
                    + ", " + eval.incorrect() + ", " + eval.unclassified() + ", " + eval.pctCorrect() + ", "
                    + eval.pctIncorrect() + ", " + eval.pctUnclassified() + ", " + eval.kappa() + ", "
                    + eval.meanAbsoluteError() + ", " + eval.rootMeanSquaredError() + ", "
                    + eval.relativeAbsoluteError() + ", " + eval.rootRelativeSquaredError() + ", "
                    + eval.SFPriorEntropy() + ", " + eval.SFSchemeEntropy() + ", " + eval.SFEntropyGain()
                    + ", " + eval.SFMeanPriorEntropy() + ", " + eval.SFMeanSchemeEntropy() + ", "
                    + eval.SFMeanEntropyGain() + ", " + eval.KBInformation() + ", "
                    + eval.KBMeanInformation() + ", " + eval.KBRelativeInformation() + ", "
                    + eval.weightedTruePositiveRate() + ", " + eval.weightedFalsePositiveRate() + ", "
                    + eval.weightedTrueNegativeRate() + ", " + eval.weightedFalseNegativeRate() + ", "
                    + eval.weightedPrecision() + ", " + eval.weightedRecall() + ", "
                    + eval.weightedFMeasure() + ", " + eval.weightedAreaUnderROC() + "\n");
            p.close();
            FlexDM.writeFile.release();
        } catch (InterruptedException e) {
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        // Report that this classifier has finished processing
        if (temp.equals("no_parameters")) { // no parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with no parameters");
            }
        } else { // with parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName()
                        + " on dataset " + dataset.getName() + " with parameters " + temp);
            }
        }

        // The log file is semaphore-controlled as well
        try {
            FlexDM.writeLog.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(log, true));
            Date date = new Date();
            // "yyyy" (calendar year), not "YYYY" (week year) as in the original
            Format formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
            if (temp.equals("results_no_parameters")) { // change output based on parameters
                temp = temp.substring(8);
            }

            // Write the details to the log file
            p.write(dataset.getName() + ", " + dataset.getTest() + ", \"" + dataset.getResult_string()
                    + "\", " + classifier.getName() + ", " + temp + ", " + formatter.format(date) + "\n");
            p.close();
            FlexDM.writeLog.release();
        } catch (InterruptedException e) {
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }
        s.release();
    } catch (Exception e) { // an error occurred
        System.err.println("FATAL ERROR OCCURRED: " + e.toString() + "\nClassifier: " + cNum + " - "
                + classifier.getName() + " on dataset " + dataset.getName());
        s.release();
    }
}
From source file:adams.flow.transformer.WekaEvaluationSummary.java
License:Open Source License
/**
 * Executes the flow item.
 *
 * @return	null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Evaluation eval;
    StringBuilder buffer;
    boolean prolog;
    String[] comment;

    result = null;

    if (m_InputToken.getPayload() instanceof WekaEvaluationContainer)
        eval = (Evaluation) ((WekaEvaluationContainer) m_InputToken.getPayload())
                .getValue(WekaEvaluationContainer.VALUE_EVALUATION);
    else
        eval = (Evaluation) m_InputToken.getPayload();

    buffer = new StringBuilder();
    prolog = false;

    // comments
    if (m_Comment.getValue().length() > 0) {
        comment = m_Comment.getValue().split("\n");
        if (comment.length == 1) {
            buffer.append("Comment: " + m_Comment + "\n");
        } else {
            buffer.append("Comment:\n");
            for (String line : comment)
                buffer.append(line + "\n");
        }
        prolog = true;
    }

    // relation name
    if (m_OutputRelationName) {
        buffer.append("Relation: " + eval.getHeader().relationName() + "\n");
        prolog = true;
    }

    // separator
    if (prolog)
        buffer.append("\n");

    // summary
    if (m_TitleSummary.isEmpty())
        buffer.append(eval.toSummaryString(m_ComplexityStatistics));
    else
        buffer.append(eval.toSummaryString(Utils.unbackQuoteChars(m_TitleSummary), m_ComplexityStatistics));

    // confusion matrix
    if (m_ConfusionMatrix) {
        try {
            buffer.append("\n\n");
            if (m_TitleMatrix.isEmpty())
                buffer.append(eval.toMatrixString());
            else
                buffer.append(eval.toMatrixString(Utils.unbackQuoteChars(m_TitleMatrix)));
        } catch (Exception e) {
            result = handleException("Failed to generate confusion matrix: ", e);
        }
    }

    // class details
    if (m_ClassDetails) {
        try {
            buffer.append("\n\n");
            if (m_TitleClassDetails.isEmpty())
                buffer.append(eval.toClassDetailsString());
            else
                buffer.append(eval.toClassDetailsString(Utils.unbackQuoteChars(m_TitleClassDetails)));
        } catch (Exception e) {
            result = handleException("Failed to generate class details: ", e);
        }
    }

    m_OutputToken = new Token(buffer.toString());

    return result;
}
From source file:ANN.MultilayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();

    // Normalize the attributes before training
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    System.out.println();

    // Build the custom network (constructor arguments: training data, 0.1, 5000, 14)
    MultilayerPerceptron slp = new MultilayerPerceptron(train, 0.1, 5000, 14);
    slp.buildClassifier(train);

    // Evaluate on the training data and print summary plus confusion matrix
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN.MultiplePerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    MultiplePerceptron mlp = new MultiplePerceptron(train, 20, 0.3);
    mlp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN_Single.SinglelayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\diabetes.arff");
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    System.out.println();

    SinglelayerPerceptron slp = new SinglelayerPerceptron(train, 0.1, 5000);
    slp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    // eval.crossValidateModel(slp, train, 10, new Random(1));
    eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN_single2.MultilayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    MultilayerPerceptron slp = new MultilayerPerceptron(train, 13, 0.1, 0.5);
    // slp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    eval.crossValidateModel(slp, train, 10, new Random(1));
    // eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
}
From source file:ANN_single2.SinglelayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    // Sweep the first two constructor parameters and report the results of each run
    for (int i = 100; i < 3000; i += 100) {
        for (double j = 0.01; j < 1; j += 0.01) {
            System.out.println(i + " " + j);
            SinglelayerPerceptron slp = new SinglelayerPerceptron(i, j, 0.00);
            slp.buildClassifier(train);

            Evaluation eval = new Evaluation(train);
            // eval.crossValidateModel(slp, train, 10, new Random(1));
            eval.evaluateModel(slp, train);
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toMatrixString());
        }
    }
}
From source file:boostingPL.boosting.AdaBoost.java
License:Open Source License
public static void main(String[] args) throws Exception {
    java.io.File inputFile = new java.io.File(
            "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatrain1.arff");
    ArffLoader atf = new ArffLoader();
    atf.setFile(inputFile);
    Instances training = atf.getDataSet();
    training.setClassIndex(training.numAttributes() - 1);

    AdaBoost adaBoost = new AdaBoost(training, 100);
    for (int t = 0; t < 100; t++) {
        adaBoost.run(t);
    }

    java.io.File inputFilet = new java.io.File(
            "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatest1.arff");
    ArffLoader atft = new ArffLoader();
    atft.setFile(inputFilet);
    Instances testing = atft.getDataSet();
    testing.setClassIndex(testing.numAttributes() - 1);

    Evaluation eval = new Evaluation(testing);
    for (Instance inst : testing) {
        eval.evaluateModelOnceAndRecordPrediction(adaBoost, inst);
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    /*
    int right = 0;
    for (int i = 0; i < testing.numInstances(); i++) {
        Instance inst = testing.instance(i);
        if (adaBoost.classifyInstance(inst) == inst.classValue()) {
            right++;
        }
    }
    System.out.println(right);
    System.out.println((double) right / training.numInstances());
    */
}
From source file:boostingPL.boosting.SAMME.java
License:Open Source License
public static void main(String[] args) throws Exception {
    java.io.File inputFile = new java.io.File(args[0]);
    ArffLoader atf = new ArffLoader();
    atf.setFile(inputFile);
    Instances training = atf.getDataSet();
    training.setClassIndex(training.numAttributes() - 1);

    int iterationNum = 100;
    SAMME samme = new SAMME(training, iterationNum);
    for (int t = 0; t < iterationNum; t++) {
        samme.run(t);
    }

    java.io.File inputFilet = new java.io.File(args[1]);
    ArffLoader atft = new ArffLoader();
    atft.setFile(inputFilet);
    Instances testing = atft.getDataSet();
    testing.setClassIndex(testing.numAttributes() - 1);

    Evaluation eval = new Evaluation(testing);
    for (Instance inst : testing) {
        eval.evaluateModelOnceAndRecordPrediction(samme, inst);
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}
From source file:Clases.RedNeuronal.RedNeuronal.java
public void redNeuronal(int puntaje, int tiempo, int error) throws Exception {
    // if the score is >= 200, the skill was learned
    // if the time is <= 240 (4 minutes), the skill was learned
    // if there are <= 3 errors, the skill was learned
    String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) };

    ConverterUtils.DataSource con = new ConverterUtils.DataSource(
            "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    Instances instances = con.getDataSet();
    System.out.println(instances);
    instances.setClassIndex(instances.numAttributes() - 1);

    // Train a multilayer perceptron on the full dataset and evaluate it on the same data
    MultilayerPerceptron mp = new MultilayerPerceptron();
    mp.buildClassifier(instances);

    Evaluation evalucion = new Evaluation(instances);
    evalucion.evaluateModel(mp, instances);
    System.out.println(evalucion.toSummaryString());
    System.out.println(evalucion.toMatrixString());

    // Find the instance matching the input values and read off its prediction
    String datosEntrada = null;
    String datosSalida = "no se puede predecir";
    for (int i = 0; i < instances.numInstances(); i++) {
        double predecido = mp.classifyInstance(instances.instance(i));
        datosEntrada = dato[0] + " " + dato[1] + " " + dato[2];
        if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0])
                && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1])
                && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) {
            datosSalida = instances.classAttribute().value((int) predecido);
        }
    }
    System.out.println("DATOS DE ENTRADA: " + datosEntrada);
    System.out.println("SALIDA PREDECIDA: " + datosSalida);

    // Map the predicted class to a feedback message and the matching images
    switch (datosSalida) {
    case "0":
        resultado = "Excelente ha aprendido";
        imgResultado = "Excelente.jpg";
        imgREDneuronal = "0.png";
        System.out.println(resultado);
        break;
    case "1":
        resultado = "Disminuir Errores";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "1.png";
        System.out.println(resultado);
        break;
    case "2":
        resultado = "Disminuir Tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "2.png";
        System.out.println(resultado);
        break;
    case "3":
        resultado = "Disminuir Errores y tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "3.png";
        System.out.println(resultado);
        break;
    case "4":
        resultado = "Subir Puntaje";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "4.png";
        System.out.println(resultado);
        break;
    case "5":
        resultado = "Subir Puntaje y disminuir Errores";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "5.png";
        System.out.println(resultado);
        break;
    case "6":
        resultado = "Subir Puntaje y disminuir Tiempo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "6.png";
        System.out.println(resultado);
        break;
    case "7":
        resultado = "Ponle mas Empeo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "7.png";
        System.out.println(resultado);
        break;
    default:
        resultado = "Verifique entradas, no se puede predecir";
        imgResultado = "Error.jpg";
        System.out.println(resultado);
        break;
    }
}