List of usage examples for the weka.classifiers.Evaluation constructor
public Evaluation(Instances data) throws Exception
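A minimal, self-contained sketch of the pattern the examples below share: load a dataset, construct an Evaluation from an Instances object (which supplies the header and class priors), then evaluate a classifier against a test set or by cross-validation. The file path and the choice of J48 are assumptions for illustration only, not taken from the examples.

import java.util.Random;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical ARFF file; replace with your own data
        Instances data = new DataSource("data/iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        Classifier cls = new J48();

        // the Instances passed to the constructor supply the header and class priors
        Evaluation eval = new Evaluation(data);

        // crossValidateModel trains its own copies of the classifier internally,
        // so the classifier does not need to be built beforehand
        eval.crossValidateModel(cls, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}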
From source file:naivebayes.TucilWeka.java
public static Evaluation fullTraining(Instances data) {
    // evaluate on a copy of the full training data (no cross-validation is performed)
    Evaluation eval = null;
    Instances train = new Instances(data);
    Instances test = new Instances(data);
    try {
        Classifier classify = new NBTubesAI();
        // build the classifier from the training data
        classify.buildClassifier(train);
        eval = new Evaluation(train);
        eval.evaluateModel(classify, test);
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:NaiveBayesPckge.NaiveBayesMain.java
public static void printEvaluationSplit(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    System.out.println("Split Test Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // summary of the evaluation
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}
From source file:NaiveBayesPckge.NaiveBayesMain.java
public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // summary of the full-training evaluation
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // summary of the cross-validation
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;
    // System.out.println("Accuracy: " + df.format(accuracy) + " %");
    // System.out.println("Error rate: " + df.format(errorRates) + " %");
}
From source file:net.sf.jclal.classifier.MOAClassifier.java
License:Open Source License
/**
 * {@inheritDoc}
 */
@Override
public AbstractEvaluation testModel(IDataset instances) {
    try {
        // test the current classifier with the test set
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));
        evaluator.evaluateModel(classifier, instances.getDataset());

        SingleLabelEvaluation sleval = new SingleLabelEvaluation();
        sleval.setEvaluation(evaluator);

        return sleval;
    } catch (IllegalArgumentException e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    } catch (InvalidDataFormatException e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    } catch (Exception e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}
From source file:net.sf.jclal.classifier.WekaClassifier.java
License:Open Source License
/**
 * Evaluates the classifier using the test dataset and stores the evaluation.
 *
 * @param instances The instances to test
 * @return The evaluation
 */
@Override
public AbstractEvaluation testModel(IDataset instances) {
    try {
        // test the current classifier with the test set
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));
        evaluator.evaluateModel(classifier, instances.getDataset());

        SingleLabelEvaluation sleval = new SingleLabelEvaluation();
        sleval.setEvaluation(evaluator);

        return sleval;
    } catch (Exception e) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}
From source file:net.sf.jclal.classifier.WekaComitteClassifier.java
License:Open Source License
/**
 * Evaluates the classifier using the test dataset.
 *
 * @param instances The test instances.
 * @return The evaluation of the model.
 */
@Override
public AbstractEvaluation testModel(IDataset instances) {
    try {
        // test phase with the actual model
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));

        Instances testData = instances.getDataset();
        for (Instance in : testData) {
            double temp[] = distributionForInstance(in);
            evaluator.evaluationForSingleInstance(temp, in, true);
        }

        SingleLabelEvaluation sleval = new SingleLabelEvaluation();
        sleval.setEvaluation(evaluator);

        return sleval;
    } catch (Exception e) {
        Logger.getLogger(WekaComitteClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}
From source file:neuralnetwork.NeuralNetwork.java
/**
 * @param args the command line arguments
 * @throws java.lang.Exception
 */
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source;
    source = new ConverterUtils.DataSource("C:\\Users\\Harvey\\Documents\\iris.csv");
    Instances data = source.getDataSet();
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }
    data.randomize(new Debug.Random(1));

    // RemovePercentage removes the given percentage, so "train" keeps the remaining
    // 30% of the data and the inverted filter puts the removed 70% into "test"
    RemovePercentage trainFilter = new RemovePercentage();
    trainFilter.setPercentage(70);
    trainFilter.setInputFormat(data);
    Instances train = Filter.useFilter(data, trainFilter);

    trainFilter.setInvertSelection(true);
    trainFilter.setInputFormat(data);
    Instances test = Filter.useFilter(data, trainFilter);

    // standardize both splits using statistics learned from the training split
    Standardize filter = new Standardize();
    filter.setInputFormat(train);
    Instances newTrain = Filter.useFilter(train, filter);
    Instances newTest = Filter.useFilter(test, filter);

    Classifier nNet = new NeuralNet();
    nNet.buildClassifier(newTrain);

    Evaluation eval = new Evaluation(newTest);
    eval.evaluateModel(nNet, newTest);
    System.out.println(eval.toSummaryString("\nResults\n-------------\n", false));
}
From source file:Neural_Network.NuralN.java
public int[] testNet() {
    System.out.println();
    int[] results = new int[2];
    if (!trained) {
        System.out.println("Neural network is not trained....");
    } else {
        try {
            loadTestData();
            Evaluation tempEvaluator = new Evaluation(testSet);
            tempEvaluator.evaluateModel(nN, testSet);
            results[0] = (int) tempEvaluator.correct();
            results[1] = (int) tempEvaluator.incorrect();
            tested = true; // test completed
        } catch (IOException e) {
            // test file missing
            System.out.println(e.toString());
        } catch (Exception e) {
            System.err.println(e.toString());
        }
    }
    return results;
}
From source file:newclassifier.NewClassifier.java
public void crossValidation() throws Exception {
    cls.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(cls, data, 10, new Random(1));
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}
From source file:newclassifier.NewClassifier.java
public void percentSplit(float percent) throws Exception {
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, testSize);

    cls.buildClassifier(train);

    data = new Instances(test);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(cls, data);
}