List of usage examples for weka.classifiers Evaluation toClassDetailsString
public String toClassDetailsString(String title) throws Exception
From source file:ann.ANN.java
public static void crossValidation(Classifier model, Instances data) { try {//from w ww . j a v a2s . c o m Evaluation eval = new Evaluation(data); eval.crossValidateModel(model, data, 10, new Random(1)); System.out.println("================================"); System.out.println("========Cross Validation========"); System.out.println("================================"); System.out.println(eval.toSummaryString("\n=== Summary ===\n", false)); System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n")); } catch (Exception ex) { System.out.println(ex.toString()); } }
From source file:ann.ANN.java
public void percentageSplit(Classifier model, double percent, Instances data) {
    // Hold-out evaluation: the first `percent`% of `data` becomes the training
    // set, the remainder the test set; results are printed to stdout.
    try {
        int trainSize = (int) Math.round(data.numInstances() * percent / 100);
        int testSize = data.numInstances() - trainSize;
        // FIX: use the (source, first, count) copy constructor instead of the
        // capacity constructor plus manual add() loops (also removes a stray ';').
        Instances train = new Instances(data, 0, trainSize);
        Instances test = new Instances(data, trainSize, testSize);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(model, test);
        System.out.println("================================");
        System.out.println("========Percentage Split=======");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        // FIX: previously printed a misleading "file failed to load" message
        // ("File tidak berhasil di-load") for ANY evaluation error; report the
        // actual exception instead, consistent with crossValidation().
        System.out.println(ex.toString());
    }
}
From source file:cezeri.feature.selection.FeatureSelectionInfluence.java
public static Evaluation getEvaluation(Instances randData, Classifier model, int folds) { Evaluation eval = null; try {// w w w. j a v a2s . c om eval = new Evaluation(randData); for (int n = 0; n < folds; n++) { Instances train = randData.trainCV(folds, n); Instances test = randData.testCV(folds, n); // build and evaluate classifier Classifier clsCopy = Classifier.makeCopy(model); clsCopy.buildClassifier(train); eval.evaluateModel(clsCopy, test); // double[] prediction = eval.evaluateModel(clsCopy, test); // double[] original = getAttributeValues(test); // double[][] d = new double[2][prediction.length]; // d[0] = prediction; // d[1] = original; // CMatrix f1 = new CMatrix(d); } // output evaluation System.out.println(); System.out.println("=== Setup ==="); System.out.println( "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions())); System.out.println("Dataset: " + randData.relationName()); System.out.println("Folds: " + folds); System.out.println(); System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false)); System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===")); System.out.println(eval.toMatrixString("Confusion Matrix")); double acc = eval.correct() / eval.numInstances() * 100; System.out.println("correct:" + eval.correct() + " " + acc + "%"); } catch (Exception ex) { Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex); } return eval; }
From source file:Control.Classificador.java
public ArrayList<Resultado> classificar(Plano plano, Arquivo arq) {
    // Runs each classifier selected in `plano` over the dataset in `arq`,
    // appending one Resultado (confusion matrix, per-class detail, error rate,
    // accuracy, recall, precision) per algorithm to this.resultados, which is
    // also returned.
    try {
        FileReader leitor = new FileReader(arq.arquivo);
        Instances conjunto = new Instances(leitor);
        leitor.close(); // FIX: the reader was previously never closed
        conjunto.setClassIndex(conjunto.numAttributes() - 1);
        conjunto = conjunto.resample(new Random());
        Instances baseTreino = null, baseTeste = null;
        Random rand = new Random(1);
        if (plano.eHoldOut) {
            baseTeste = conjunto.testCV(3, 0);
            baseTreino = conjunto.trainCV(3, 0);
        } else {
            baseTeste = baseTreino = conjunto;
        }
        // NOTE(review): as in the original code, every evaluation below runs on
        // baseTeste; baseTreino is computed but never used — confirm intended.
        if (plano.IBK) {
            try {
                avaliarModelo(new IB1(), "NN", "Algortmo Vizinho Mais Prximo - Matriz de Confuso", "kNN",
                        baseTeste, rand, plano.eHoldOut);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo IB1 no suporta atributos numricos!", "MTCS - ERRO");
            }
        }
        if (plano.J48) {
            try {
                avaliarModelo(new J48(), "J48", "Algortmo J48 - Matriz de Confuso", "J48",
                        baseTeste, rand, plano.eHoldOut);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo J48 no suporta atributos nominais!", "MTCS - ERRO");
            }
        }
        if (plano.KNN) {
            try {
                avaliarModelo(new IBk(3), "KNN", "Algortmo KNN - Matriz de Confuso", "kNN",
                        baseTeste, rand, plano.eHoldOut);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo KNN no suporta atributos numricos!", "MTCS - ERRO");
            }
        }
        if (plano.Naive) {
            // FIX: detail title was a copy-pasted "kNN"
            avaliarModelo(new NaiveBayes(), "Naive", "Algortmo NaiveBayes - Matriz de Confuso", "Naive",
                    baseTeste, rand, plano.eHoldOut);
        }
        if (plano.Tree) {
            try {
                // FIX: detail title was a copy-pasted "kNN"
                avaliarModelo(new Id3(), "ID3", "Algortmo ID3 - Matriz de Confuso", "ID3",
                        baseTeste, rand, plano.eHoldOut);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo Arvore de Deciso no suporta atributos numricos!", "MTCS - ERRO");
            }
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NullPointerException ex) {
        Mensagem.erro("Selecione um arquivo para comear!", "MTCS - ERRO");
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    }
    return this.resultados;
}

/**
 * Cross-validates one classifier on `base` and appends its Resultado to
 * this.resultados. Extracted from four near-identical branches of classificar.
 *
 * FIX: each classifier now gets its OWN Evaluation; the original shared a
 * single Evaluation across all algorithms, so every result after the first
 * silently accumulated the previous classifiers' predictions.
 */
private void avaliarModelo(Classifier modelo, String nome, String tituloMatriz, String tituloDetalhes,
        Instances base, Random rand, boolean eHoldOut) throws Exception {
    modelo.buildClassifier(base);
    Evaluation avaliacao = new Evaluation(base);
    // 4 folds for hold-out mode, leave-one-out otherwise (as in the original).
    avaliacao.crossValidateModel(modelo, base, eHoldOut ? 4 : base.numInstances(), rand);
    Resultado resultado = new Resultado(nome, avaliacao.toMatrixString(tituloMatriz),
            avaliacao.toClassDetailsString(tituloDetalhes));
    resultado.setTaxaErro(avaliacao.errorRate());
    resultado.setTaxaAcerto(1 - avaliacao.errorRate());
    resultado.setRevocacao(recallToDouble(avaliacao, base));
    resultado.setPrecisao(precisionToDouble(avaliacao, base));
    this.resultados.add(resultado);
}
From source file:main.mFFNN.java
public static void main(String[] args) throws Exception { mFFNN m = new mFFNN(); BufferedReader breader = null; breader = new BufferedReader(new FileReader("src\\main\\iris.arff")); Instances fileTrain = new Instances(breader); fileTrain.setClassIndex(fileTrain.numAttributes() - 1); System.out.println(fileTrain); breader.close();//from w ww . j a v a 2 s. c o m System.out.println("mFFNN!!!\n\n"); FeedForwardNeuralNetwork FFNN = new FeedForwardNeuralNetwork(); Evaluation eval = new Evaluation(fileTrain); FFNN.buildClassifier(fileTrain); eval.evaluateModel(FFNN, fileTrain); //OUTPUT Scanner scan = new Scanner(System.in); System.out.println(eval.toSummaryString("=== Stratified cross-validation ===\n" + "=== Summary ===", true)); System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===")); System.out.println(eval.toMatrixString("===Confusion matrix===")); System.out.println(eval.fMeasure(1) + " " + eval.recall(1)); System.out.println("\nDo you want to save this model(1/0)? "); FFNN.distributionForInstance(fileTrain.get(0)); /* int c = scan.nextInt(); if (c == 1 ){ System.out.print("Please enter your file name (*.model) : "); String infile = scan.next(); m.saveModel(FFNN,infile); } else { System.out.print("Model not saved."); } */ }
From source file:mao.datamining.ModelProcess.java
// Trains `classifier` on the final training set and evaluates it once on the
// held-out final test set, timing both phases; writes a human-readable report
// to `testCaseSummaryOut` and stores the confusion matrix, AUC, precision,
// recall, and timings into `result`. No-op if finalTestDataSet is null.
private void testWithExtraDS(Classifier classifier, Instances finalTrainDataSet, Instances finalTestDataSet,
        FileOutputStream testCaseSummaryOut, TestResult result) {
    //Use final training dataset and final test dataset
    double confusionMatrix[][] = null;
    long start, end, trainTime = 0, testTime = 0;
    if (finalTestDataSet != null) {
        try {
            //counting training time
            start = System.currentTimeMillis();
            classifier.buildClassifier(finalTrainDataSet);
            end = System.currentTimeMillis();
            trainTime += end - start;
            //counting test time
            start = System.currentTimeMillis();
            // Evaluation is primed with the TRAINING set (for class priors) but
            // scored against the held-out test set.
            Evaluation testEvalOnly = new Evaluation(finalTrainDataSet);
            testEvalOnly.evaluateModel(classifier, finalTestDataSet);
            end = System.currentTimeMillis();
            testTime += end - start;
            testCaseSummaryOut.write("=====================================================\n".getBytes());
            testCaseSummaryOut.write((testEvalOnly.toSummaryString("=== Test Summary ===", true)).getBytes());
            testCaseSummaryOut.write("\n".getBytes());
            testCaseSummaryOut
                    .write((testEvalOnly.toClassDetailsString("=== Test Class Detail ===\n")).getBytes());
            testCaseSummaryOut.write("\n".getBytes());
            testCaseSummaryOut
                    .write((testEvalOnly.toMatrixString("=== Confusion matrix for Test ===\n")).getBytes());
            testCaseSummaryOut.flush();
            confusionMatrix = testEvalOnly.confusionMatrix();
            result.setConfusionMatrix4Test(confusionMatrix);
            // Single-class metrics below are taken for class index 1.
            result.setAUT(testEvalOnly.areaUnderROC(1));
            result.setPrecision(testEvalOnly.precision(1));
            result.setRecall(testEvalOnly.recall(1));
        } catch (Exception e) {
            ModelProcess.logging(null, e);
        }
        // Timings are recorded even if the evaluation itself failed above.
        result.setTrainingTime(trainTime);
        result.setTestTime(testTime);
    } //using test data set , end
}
From source file:mao.datamining.ModelProcess.java
// Runs a stratified `folds`-fold cross-validation of `classifier` over the
// training set, timing training and testing separately; writes a report to
// `testCaseSummaryOut` and stores the aggregate confusion matrix in `result`.
private void testCV(Classifier classifier, Instances finalTrainDataSet, FileOutputStream testCaseSummaryOut,
        TestResult result) {
    long start, end, trainTime = 0, testTime = 0;
    Evaluation evalAll = null;
    double confusionMatrix[][] = null;
    // randomize data, and then stratify it into 10 groups
    Random rand = new Random(1);
    // Work on a copy so the caller's dataset order is preserved.
    Instances randData = new Instances(finalTrainDataSet);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        //always run with 10 cross validation
        randData.stratify(folds);
    }
    try {
        evalAll = new Evaluation(randData);
        for (int i = 0; i < folds; i++) {
            // Per-fold Evaluation is created but only evalAll's accumulated
            // results are reported below.
            Evaluation eval = new Evaluation(randData);
            Instances train = randData.trainCV(folds, i);
            Instances test = randData.testCV(folds, i);
            //counting traininig time
            start = System.currentTimeMillis();
            // A fresh copy per fold keeps the caller's classifier untouched.
            Classifier j48ClassifierCopy = Classifier.makeCopy(classifier);
            j48ClassifierCopy.buildClassifier(train);
            end = System.currentTimeMillis();
            trainTime += end - start;
            //counting test time
            start = System.currentTimeMillis();
            eval.evaluateModel(j48ClassifierCopy, test);
            evalAll.evaluateModel(j48ClassifierCopy, test);
            end = System.currentTimeMillis();
            testTime += end - start;
        }
    } catch (Exception e) {
        ModelProcess.logging(null, e);
    } //end test by cross validation
    // output evaluation
    try {
        ModelProcess.logging("");
        //write into summary file
        testCaseSummaryOut
                .write((evalAll.toSummaryString("=== Cross Validation Summary ===", true)).getBytes());
        testCaseSummaryOut.write("\n".getBytes());
        testCaseSummaryOut.write(
                (evalAll.toClassDetailsString("=== " + folds + "-fold Cross-validation Class Detail ===\n"))
                        .getBytes());
        testCaseSummaryOut.write("\n".getBytes());
        testCaseSummaryOut
                .write((evalAll.toMatrixString("=== Confusion matrix for all folds ===\n")).getBytes());
        testCaseSummaryOut.flush();
        confusionMatrix = evalAll.confusionMatrix();
        result.setConfusionMatrix10Folds(confusionMatrix);
    } catch (Exception e) {
        // NOTE(review): if the first try-block failed before evalAll was
        // assigned, this catch also absorbs the resulting NPE.
        ModelProcess.logging(null, e);
    }
}
From source file:myclassifier.naiveBayes.java
public void TestData(Instances dataTest) throws Exception { if (data != null) { Instances train = data;/*from ww w. j a va2 s .c o m*/ // train classifier NBClassifier.buildClassifier(train); // evaluate classifier and print some statistics Evaluation eval = new Evaluation(dataTest); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString()); } else { System.out.println("Data is null"); } }
From source file:myclassifier.naiveBayes.java
public void CrossValidation() throws Exception { if (data != null) { Instances train = data;/*from www .j a va 2 s . com*/ // train classifier NBClassifier.buildClassifier(train); // evaluate classifier and print some statistics Evaluation eval = new Evaluation(train); eval.crossValidateModel(NBClassifier, train, 10, new Random(1)); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString()); } else { System.out.println("Data is null"); } }
From source file:myclassifier.naiveBayes.java
public void PercentageSplit(double percent) throws Exception { // Percent split int trainSize = (int) Math.round(data.numInstances() * percent / 100); int testSize = data.numInstances() - trainSize; Instances train = new Instances(data, 0, trainSize); Instances test = new Instances(data, trainSize, testSize); // train classifier NBClassifier.buildClassifier(train); // evaluate classifier and print some statistics Evaluation eval = new Evaluation(train); eval.evaluateModel(NBClassifier, test); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString()); }