List of usage examples for weka.classifiers.Evaluation.correct()
public final double correct()
Returns the number of correctly classified instances (strictly, the sum of the weights of those instances, which equals the plain count when all instance weights are 1).
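Before the examples from real projects below, here is a minimal, self-contained sketch of the call (the data/iris.arff path and the J48 classifier are illustrative stand-ins; assumes Weka 3 on the classpath):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CorrectExample {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data/iris.arff"); // illustrative path
        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // correct() returns the weight-summed count of correctly classified
        // instances; pctCorrect() reports the same quantity as a percentage.
        System.out.println(eval.correct() + " of " + eval.numInstances()
                + " correct (" + eval.pctCorrect() + "%)");
    }
}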
From source file:FlexDMThread.java
License:Open Source License
public void run() {
    try {
        // Get the data from the source (semaphore controlled)
        FlexDM.getMainData.acquire();
        Instances data = dataset.getSource().getDataSet();
        FlexDM.getMainData.release();

        // Set the class attribute if undefined
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        // Assemble the hyperparameter string for the classifier
        String temp = "";
        for (int i = 0; i < classifier.getNumParams(); i++) {
            temp += classifier.getParameter(i).getName();
            temp += " ";
            if (classifier.getParameter(i).getValue() != null) {
                temp += classifier.getParameter(i).getValue();
                temp += " ";
            }
        }
        String[] options = weka.core.Utils.splitOptions(temp);

        // Announce on the console that the experiment is starting
        if (temp.equals("")) { // no parameters
            temp = "results_no_parameters";
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { // parameters
            try {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("STARTING CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        // Create the classifier, set its options and train it
        weka.classifiers.Classifier x = createObject(classifier.getName());
        x.setOptions(options);
        x.buildClassifier(data);

        // Process the test selection
        String[] tempTest = dataset.getTest().split("\\s");

        // Create the evaluation object for training and testing classifiers
        Evaluation eval = new Evaluation(data);
        StringBuffer predictions = new StringBuffer();

        // Evaluate the classifier according to the requested test mode
        if (tempTest[0].equals("testset")) { // specified test file
            // Open the test file and load its data (semaphore controlled)
            FlexDM.getTestData.acquire();
            Instances testSet = dataset.getTestFile().getDataSet();
            FlexDM.getTestData.release();

            // Set the class attribute if undefined
            if (testSet.classIndex() == -1) {
                testSet.setClassIndex(testSet.numAttributes() - 1);
            }

            // Evaluate the model (x was already trained on the full data above)
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else if (tempTest[0].equals("xval")) { // cross-validation
            eval.crossValidateModel(x, data, Integer.parseInt(tempTest[1]), new Random(1), predictions,
                    new Range(), true);
        } else if (tempTest[0].equals("leavexval")) { // leave-one-out cross-validation
            // Leave-one-out means one fold per instance
            eval.crossValidateModel(x, data, data.numInstances(), new Random(1), predictions, new Range(), true);
        } else if (tempTest[0].equals("percent")) { // percentage split of a single data set
            // Derive the training and test sizes from the percentage
            int trainSize = (int) Math.round(data.numInstances() * Double.parseDouble(tempTest[1]));
            int testSize = data.numInstances() - trainSize;

            // Split the data
            Instances train = new Instances(data, 0, trainSize);
            Instances testSet = new Instances(data, trainSize, testSize);

            // Train on the first portion, evaluate on the remainder
            x.buildClassifier(train);
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, testSet, array);
        } else { // evaluate on the training data
            Object[] array = { predictions, new Range(), Boolean.TRUE };
            eval.evaluateModel(x, data, array);
        }

        // Create the data file for the results
        String filename = dataset.getDir() + "/" + classifier.getDirName() + "/" + temp + ".txt";
        PrintWriter writer = new PrintWriter(filename, "UTF-8");

        // Print classifier, dataset and parameter info to the file
        writer.println("CLASSIFIER: " + classifier.getName() + "\n DATASET: " + dataset.getName()
                + "\n PARAMETERS: " + temp);

        // Add the evaluation summary to the file
        writer.println(eval.toSummaryString());

        // Process the result options
        if (checkResults("stats")) { // classifier statistics
            writer.println(eval.toClassDetailsString());
        }
        if (checkResults("model")) { // the model itself
            writer.println(x.toString());
        }
        if (checkResults("matrix")) { // confusion matrix
            writer.println(eval.toMatrixString());
        }
        if (checkResults("entropy")) { // entropy statistics
            // Set the options required to obtain the entropy stats
            String[] opt = new String[4];
            opt[0] = "-t";
            opt[1] = dataset.getName();
            opt[2] = "-k";
            opt[3] = "-v";

            // Evaluate the model, then extract the relevant section of the output
            String entropy = Evaluation.evaluateModel(x, opt);
            entropy = entropy.substring(
                    entropy.indexOf("=== Stratified cross-validation ===") + 35,
                    entropy.indexOf("=== Confusion Matrix ==="));
            writer.println("=== Entropy Statistics ===");
            writer.println(entropy);
        }
        if (checkResults("predictions")) { // the model's predictions
            writer.println("=== Predictions ===\n");
            if (!dataset.getTest().contains("xval")) { // print the table header if required
                writer.println(" inst# actual predicted error distribution ()");
            }
            writer.println(predictions.toString());
        }
        writer.close();

        // The summary file is semaphore controlled to ensure quality
        try {
            // Get a permit, then append this classifier's details to the summary file
            FlexDM.writeFile.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(summary, true));
            if (temp.equals("results_no_parameters")) { // adjust output based on parameters
                temp = temp.substring(8);
            }

            // Write the evaluation statistics, classifier name and dataset name to the summary file
            p.write(dataset.getName() + ", " + classifier.getName() + ", " + temp + ", " + eval.correct() + ", "
                    + eval.incorrect() + ", " + eval.unclassified() + ", " + eval.pctCorrect() + ", "
                    + eval.pctIncorrect() + ", " + eval.pctUnclassified() + ", " + eval.kappa() + ", "
                    + eval.meanAbsoluteError() + ", " + eval.rootMeanSquaredError() + ", "
                    + eval.relativeAbsoluteError() + ", " + eval.rootRelativeSquaredError() + ", "
                    + eval.SFPriorEntropy() + ", " + eval.SFSchemeEntropy() + ", " + eval.SFEntropyGain() + ", "
                    + eval.SFMeanPriorEntropy() + ", " + eval.SFMeanSchemeEntropy() + ", "
                    + eval.SFMeanEntropyGain() + ", " + eval.KBInformation() + ", " + eval.KBMeanInformation()
                    + ", " + eval.KBRelativeInformation() + ", " + eval.weightedTruePositiveRate() + ", "
                    + eval.weightedFalsePositiveRate() + ", " + eval.weightedTrueNegativeRate() + ", "
                    + eval.weightedFalseNegativeRate() + ", " + eval.weightedPrecision() + ", "
                    + eval.weightedRecall() + ", " + eval.weightedFMeasure() + ", " + eval.weightedAreaUnderROC()
                    + "\n");
            p.close();

            // Release the semaphore
            FlexDM.writeFile.release();
        } catch (InterruptedException e) {
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }

        // Report that this classifier finished successfully
        if (temp.equals("no_parameters")) { // no parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with no parameters");
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with no parameters");
            }
        } else { // with parameters
            try {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName().substring(dataset.getName().lastIndexOf("\\") + 1)
                        + " with parameters " + temp);
            } catch (Exception e) {
                System.out.println("FINISHED CLASSIFIER " + cNum + " - " + classifier.getName() + " on dataset "
                        + dataset.getName() + " with parameters " + temp);
            }
        }

        // The log file is also semaphore controlled
        try {
            // Get a permit, then append this classifier's details to the log file
            FlexDM.writeLog.acquire();
            PrintWriter p = new PrintWriter(new FileWriter(log, true));
            Date date = new Date();
            // yyyy (calendar year) rather than YYYY (week year)
            Format formatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
            if (temp.equals("results_no_parameters")) { // adjust output based on parameters
                temp = temp.substring(8);
            }

            // Write the details to the log file
            p.write(dataset.getName() + ", " + dataset.getTest() + ", \"" + dataset.getResult_string() + "\", "
                    + classifier.getName() + ", " + temp + ", " + formatter.format(date) + "\n");
            p.close();

            // Release the semaphore
            FlexDM.writeLog.release();
        } catch (InterruptedException e) {
            System.err.println("FATAL ERROR OCCURRED: Classifier: " + cNum + " - " + classifier.getName()
                    + " on dataset " + dataset.getName());
        }
        s.release();
    } catch (Exception e) { // an error occurred
        System.err.println("FATAL ERROR OCCURRED: " + e.toString() + "\nClassifier: " + cNum + " - "
                + classifier.getName() + " on dataset " + dataset.getName());
        s.release();
    }
}
From source file:adams.flow.core.EvaluationHelper.java
License:Open Source License
/**
 * Returns a statistical value from the evaluation object.
 *
 * @param eval       the evaluation object to get the value from
 * @param statistic  the type of value to return
 * @param classIndex the class label index, for statistics like AUC
 * @return the determined value, Double.NaN if not found
 * @throws Exception if the evaluation fails
 */
public static double getValue(Evaluation eval, EvaluationStatistic statistic, int classIndex) throws Exception {
    switch (statistic) {
    case NUMBER_CORRECT: return eval.correct();
    case NUMBER_INCORRECT: return eval.incorrect();
    case NUMBER_UNCLASSIFIED: return eval.unclassified();
    case PERCENT_CORRECT: return eval.pctCorrect();
    case PERCENT_INCORRECT: return eval.pctIncorrect();
    case PERCENT_UNCLASSIFIED: return eval.pctUnclassified();
    case KAPPA_STATISTIC: return eval.kappa();
    case MEAN_ABSOLUTE_ERROR: return eval.meanAbsoluteError();
    case ROOT_MEAN_SQUARED_ERROR: return eval.rootMeanSquaredError();
    case RELATIVE_ABSOLUTE_ERROR: return eval.relativeAbsoluteError();
    case ROOT_RELATIVE_SQUARED_ERROR: return eval.rootRelativeSquaredError();
    case CORRELATION_COEFFICIENT: return eval.correlationCoefficient();
    case SF_PRIOR_ENTROPY: return eval.SFPriorEntropy();
    case SF_SCHEME_ENTROPY: return eval.SFSchemeEntropy();
    case SF_ENTROPY_GAIN: return eval.SFEntropyGain();
    case SF_MEAN_PRIOR_ENTROPY: return eval.SFMeanPriorEntropy();
    case SF_MEAN_SCHEME_ENTROPY: return eval.SFMeanSchemeEntropy();
    case SF_MEAN_ENTROPY_GAIN: return eval.SFMeanEntropyGain();
    case KB_INFORMATION: return eval.KBInformation();
    case KB_MEAN_INFORMATION: return eval.KBMeanInformation();
    case KB_RELATIVE_INFORMATION: return eval.KBRelativeInformation();
    case TRUE_POSITIVE_RATE: return eval.truePositiveRate(classIndex);
    case NUM_TRUE_POSITIVES: return eval.numTruePositives(classIndex);
    case FALSE_POSITIVE_RATE: return eval.falsePositiveRate(classIndex);
    case NUM_FALSE_POSITIVES: return eval.numFalsePositives(classIndex);
    case TRUE_NEGATIVE_RATE: return eval.trueNegativeRate(classIndex);
    case NUM_TRUE_NEGATIVES: return eval.numTrueNegatives(classIndex);
    case FALSE_NEGATIVE_RATE: return eval.falseNegativeRate(classIndex);
    case NUM_FALSE_NEGATIVES: return eval.numFalseNegatives(classIndex);
    case IR_PRECISION: return eval.precision(classIndex);
    case IR_RECALL: return eval.recall(classIndex);
    case F_MEASURE: return eval.fMeasure(classIndex);
    case MATTHEWS_CORRELATION_COEFFICIENT: return eval.matthewsCorrelationCoefficient(classIndex);
    case AREA_UNDER_ROC: return eval.areaUnderROC(classIndex);
    case AREA_UNDER_PRC: return eval.areaUnderPRC(classIndex);
    case WEIGHTED_TRUE_POSITIVE_RATE: return eval.weightedTruePositiveRate();
    case WEIGHTED_FALSE_POSITIVE_RATE: return eval.weightedFalsePositiveRate();
    case WEIGHTED_TRUE_NEGATIVE_RATE: return eval.weightedTrueNegativeRate();
    case WEIGHTED_FALSE_NEGATIVE_RATE: return eval.weightedFalseNegativeRate();
    case WEIGHTED_IR_PRECISION: return eval.weightedPrecision();
    case WEIGHTED_IR_RECALL: return eval.weightedRecall();
    case WEIGHTED_F_MEASURE: return eval.weightedFMeasure();
    case WEIGHTED_MATTHEWS_CORRELATION_COEFFICIENT: return eval.weightedMatthewsCorrelation();
    case WEIGHTED_AREA_UNDER_ROC: return eval.weightedAreaUnderROC();
    case WEIGHTED_AREA_UNDER_PRC: return eval.weightedAreaUnderPRC();
    case UNWEIGHTED_MACRO_F_MEASURE: return eval.unweightedMacroFmeasure();
    case UNWEIGHTED_MICRO_F_MEASURE: return eval.unweightedMicroFmeasure();
    case BIAS: return eval.getPluginMetric(Bias.class.getName()).getStatistic(Bias.NAME);
    case RSQUARED: return eval.getPluginMetric(RSquared.class.getName()).getStatistic(RSquared.NAME);
    case SDR: return eval.getPluginMetric(SDR.class.getName()).getStatistic(SDR.NAME);
    case RPD: return eval.getPluginMetric(RPD.class.getName()).getStatistic(RPD.NAME);
    default:
        throw new IllegalArgumentException("Unhandled statistic field: " + statistic);
    }
}
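For reference, a call sketch (the eval variable is hypothetical; EvaluationStatistic is the ADAMS enum whose constants appear in the switch above):

// Raw number of correct predictions; the class index is irrelevant
// for statistics that are not computed per class.
double numCorrect = EvaluationHelper.getValue(eval, EvaluationStatistic.NUMBER_CORRECT, 0);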
From source file:algoritmogeneticocluster.WekaSimulation.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes(); // alternative classifier, not used below

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);

        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);

        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());
    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:cezeri.feature.selection.FeatureSelectionInfluence.java
public static Evaluation getEvaluation(Instances randData, Classifier model, int folds) {
    Evaluation eval = null;
    try {
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);

            // Build and evaluate the classifier on this fold
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // Output the evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println(
                "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
        System.out.println("Dataset: " + randData.relationName());
        System.out.println("Folds: " + folds);
        System.out.println();
        System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
        System.out.println(eval.toMatrixString("Confusion Matrix"));

        double acc = eval.correct() / eval.numInstances() * 100;
        System.out.println("correct:" + eval.correct() + " " + acc + "%");
    } catch (Exception ex) {
        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:cezeri.feature.selection.FeatureSelectionInfluence.java
public static Influence[] getMostDiscriminativeFeature(String filePath, Classifier model) {
    Influence[] ret = null;
    try {
        Instances data = DataSource.read(filePath);
        ret = new Influence[data.numAttributes() - 1];
        data.setClassIndex(data.numAttributes() - 1);

        // Other options
        int seed = 1;
        int folds = 10;

        // Randomize the data
        Instances randData = new Instances(data);
        Random rand = new Random(seed);
        randData.randomize(rand);

        // Baseline accuracy with all features present
        Evaluation evalBase = getEvaluation(randData, model, folds);
        double accBase = evalBase.correct() / evalBase.numInstances() * 100;

        // Delete each feature in turn, re-evaluate, and record the drop in
        // accuracy as that feature's influence
        double nf = randData.numAttributes();
        for (int j = 0; j < nf - 1; j++) {
            ret[j] = new Influence();
            String str = randData.attribute(j).name();
            Attribute att = randData.attribute(j);
            randData.deleteAttributeAt(j);

            Evaluation evalTemp = getEvaluation(randData, model, folds);
            double accTemp = evalTemp.correct() / evalTemp.numInstances() * 100;
            double tempInfluence = accBase - accTemp;

            ret[j].attributeName = str;
            ret[j].infVal = tempInfluence;
            randData.insertAttributeAt(att, j); // restore the feature
        }
        sortInfluenceArray(ret);
    } catch (Exception ex) {
        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return ret;
}
From source file:classify.Classifier.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // Read in the data
    try {
        DataSource input = new DataSource("no_missing_values.csv");
        Instances data = input.getDataSet();
        missingValuesRows(data);
        setAttributeValues(data);
        data.setClassIndex(data.numAttributes() - 1);

        // Boosting with decision stumps
        AdaBoostM1 boosting = new AdaBoostM1();
        boosting.setNumIterations(25);
        boosting.setClassifier(new DecisionStump());

        // Build the classifier
        boosting.buildClassifier(data);

        // Evaluate using 10-fold cross-validation
        Evaluation e1 = new Evaluation(data);
        e1.crossValidateModel(boosting, data, 10, new Random(1));

        DecimalFormat nf = new DecimalFormat("0.000");
        System.out.println("Results of Boosting with Decision Stumps:");
        System.out.println(boosting.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");
        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");
        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + " ");
            }
            System.out.println();
        }
        System.out.println();
        System.out.println();
        System.out.println();

        // Logistic regression
        Logistic l = new Logistic();
        l.buildClassifier(data);
        e1 = new Evaluation(data);
        e1.crossValidateModel(l, data, 10, new Random(1));
        System.out.println("Results of Logistic Regression:");
        System.out.println(l.toString());
        System.out.println("Results of Cross Validation:");
        System.out.println("Number of correctly classified instances: " + e1.correct() + " ("
                + nf.format(e1.pctCorrect()) + "%)");
        System.out.println("Number of incorrectly classified instances: " + e1.incorrect() + " ("
                + nf.format(e1.pctIncorrect()) + "%)");
        System.out.println("TP Rate: " + nf.format(e1.weightedTruePositiveRate() * 100) + "%");
        System.out.println("FP Rate: " + nf.format(e1.weightedFalsePositiveRate() * 100) + "%");
        System.out.println("Precision: " + nf.format(e1.weightedPrecision() * 100) + "%");
        System.out.println("Recall: " + nf.format(e1.weightedRecall() * 100) + "%");
        System.out.println();
        System.out.println("Confusion Matrix:");
        for (int i = 0; i < e1.confusionMatrix().length; i++) {
            for (int j = 0; j < e1.confusionMatrix()[0].length; j++) {
                System.out.print(e1.confusionMatrix()[i][j] + " ");
            }
            System.out.println();
        }
    } catch (Exception ex) {
        // Data couldn't be read, so end the program
        System.out.println("Exception thrown, program ending.");
    }
}
From source file:Controller.CtlDataMining.java
public String redBayesiana(Instances data) {
    try {
        // Create a Naive Bayes classifier
        NaiveBayes nb = new NaiveBayes();

        // Build the classifier
        nb.buildClassifier(data);

        // Create an object for validating the model
        Evaluation evalB = new Evaluation(data);

        /* Apply the Bayesian classifier with 10-fold cross-validation,
           seeding the random number generator with 1 to shuffle the data
           before splitting it into the 10 folds */
        evalB.crossValidateModel(nb, data, 10, new Random(1));

        // Collect the results (the report strings are in Spanish)
        String resBay = "<br><br><b><center>Resultados NaiveBayes</center>" + "<br>========<br>"
                + "Modelo generado indica los siguientes resultados:" + "<br>========<br></b>";
        resBay = resBay + ("<b>1. Numero de instancias clasificadas:</b> "
                + (int) evalB.numInstances() + "<br>");
        resBay = resBay + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalB.pctCorrect()) + "%<br>");
        resBay = resBay + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b> "
                + (int) evalB.correct() + "<br>");
        resBay = resBay + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalB.pctIncorrect()) + "%<br>");
        resBay = resBay + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalB.incorrect() + "<br>");
        resBay = resBay + ("<b>6. Media del error absoluto:</b> "
                + formato.format(evalB.meanAbsoluteError()) + "%<br>");
        resBay = resBay + ("<b>7. " + evalB.toMatrixString("Matriz de " + "confusion</b>").replace("\n", "<br>"));

        return resBay;
    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}
From source file:Controller.CtlDataMining.java
public String arbolJ48(Instances data) {
    try {
        // Create a J48 classifier
        J48 j48 = new J48();

        // Build the J48 classifier with the data
        j48.buildClassifier(data);

        // Create an object for validating the model
        Evaluation evalJ48 = new Evaluation(data);

        /* Apply the J48 classifier with 10-fold cross-validation,
           seeding the random number generator with 1 */
        evalJ48.crossValidateModel(j48, data, 10, new Random(1));

        // Collect the results (the report strings are in Spanish)
        String resJ48 = "<br><b><center>Resultados Arbol de decision J48" + "</center><br>========<br>Modelo generado indica los "
                + "siguientes resultados:<br>========<br></b>";
        resJ48 = resJ48 + ("<b>1. Numero de instancias clasificadas:</b> "
                + (int) evalJ48.numInstances() + "<br>");
        resJ48 = resJ48 + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctCorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b>"
                + (int) evalJ48.correct() + "<br>");
        resJ48 = resJ48 + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctIncorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalJ48.incorrect() + "<br>");
        resJ48 = resJ48 + ("<b>6. Media del error absoluto:</b> "
                + formato.format(evalJ48.meanAbsoluteError()) + "<br>");
        resJ48 = resJ48 + ("<b>7. " + evalJ48.toMatrixString("Matriz de" + " confusion</b>").replace("\n", "<br>"));

        // Visualize the generated tree
        // Create a temporary JFrame
        final javax.swing.JFrame jf = new javax.swing.JFrame("Arbol de decision: J48");

        // Give it a size
        jf.setSize(500, 400);

        // Define a layout
        jf.getContentPane().setLayout(new BorderLayout());

        /* Instantiate the tree visualizer for the J48 graph.
           Parameters: (listener, tree graph, node placer).
           PlaceNode2 places the nodes so they fall uniformly below their parent. */
        TreeVisualizer tv = new TreeVisualizer(null, j48.graph(), new PlaceNode2());

        // Add the tree, centered
        jf.getContentPane().add(tv, BorderLayout.CENTER);

        // Add a listener for the window's close button
        jf.addWindowListener(new java.awt.event.WindowAdapter() {
            @Override
            public void windowClosing(java.awt.event.WindowEvent e) {
                jf.dispose();
            }
        });

        // Show the window
        jf.setVisible(true);

        // Fit the tree to the width of the frame
        tv.fitToScreen();

        return resJ48;
    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}
From source file:ffnn.FFNNTubesAI.java
@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); // e.g. 3
    temp_instances = filterNominalNumeric(i);

    // For binary problems, add a dummy attribute and one-hot encode the class
    if (output_layer == 2) {
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    input_layer = temp_instances.numAttributes() - output_layer; // e.g. 4

    // Ask the user for the hidden layer size
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;

    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // +1 for the bias

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;
    double last_update = System.nanoTime();

    // Training iterations, capped at 50000
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }

        // Every 2 seconds, evaluate on the training data and checkpoint the
        // model whenever the error rate improves
        long time = System.nanoTime();
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);
            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }
            if (accry > 0)
                last_error_rate = eval.errorRate();

            // 30 seconds without improvement: restart with fresh random weights
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }
            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // Forward pass
            temp_instance = temp_instances.instance(j);
            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }
            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // Backward pass
            // Error at the output layer
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }

            // Backpropagation through layer 2
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }
            for (int x = 0; x < output_layer; ++x) {
                // The bias input is treated as 1
                double correction = learning_rate * error_output.getEntry(0, x);
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // Error at the hidden layer
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // Backpropagation through layer 1
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}
From source file:meddle.TrainModelByDomainOS.java
License:Open Source License
/**
 * Does evaluation on a trained classifier/model via 10-fold cross-validation,
 * recording the summary, false positive/negative rates and AUC.
 *
 * @param classifier the trained classifier
 * @param domainOS   the domain name plus OS
 * @param tras       the training instances
 * @param mem        the measures object to populate
 * @return the populated MetaEvaluationMeasures
 */
public static MetaEvaluationMeasures doEvaluation(Classifier classifier, String domainOS, Instances tras,
        MetaEvaluationMeasures mem) {
    try {
        Evaluation evaluation = new Evaluation(tras);
        evaluation.crossValidateModel(classifier, tras, 10, new Random(1));
        mem.numInstance = evaluation.numInstances();

        // Number of positive instances = TP + FN for class index 1
        double M = evaluation.numTruePositives(1) + evaluation.numFalseNegatives(1);
        mem.numPositive = (int) M;
        mem.AUC = evaluation.areaUnderROC(1);
        mem.numCorrectlyClassified = (int) evaluation.correct();
        mem.accuracy = 1.0 * mem.numCorrectlyClassified / mem.numInstance;
        mem.falseNegativeRate = evaluation.falseNegativeRate(1);
        mem.falsePositiveRate = evaluation.falsePositiveRate(1);
        mem.fMeasure = evaluation.fMeasure(1);

        double[][] cmMatrix = evaluation.confusionMatrix();
        mem.confusionMatrix = cmMatrix;
        mem.TP = evaluation.numTruePositives(1);
        mem.TN = evaluation.numTrueNegatives(1);
        mem.FP = evaluation.numFalsePositives(1);
        mem.FN = evaluation.numFalseNegatives(1);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return mem;
}