List of usage examples for weka.classifiers.Evaluation.evaluateModel
public static String evaluateModel(Classifier classifier, String[] options) throws Exception
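A minimal sketch of calling this static overload directly: it drives training and evaluation from Weka-style command-line options (-t for the training file, -T for an optional test file) and returns the results as a String. The file names and the choice of NaiveBayes here are illustrative assumptions, not taken from the examples below; if -T is omitted, Weka falls back to cross-validation on the training data.

import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayes;

public class EvaluateModelStaticExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical ARFF paths; -t = training set, -T = separate test set
        String[] options = new String[] { "-t", "train.arff", "-T", "test.arff" };
        // The static overload parses the options, trains the classifier,
        // evaluates it, and returns a formatted results string
        String results = Evaluation.evaluateModel(new NaiveBayes(), options);
        System.out.println(results);
    }
}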
From source file:myclassifier.Util.java
public static void percentageSplit(Instances data, Classifier cls) throws Exception {
    int trainSize = (int) Math.round(data.numInstances() * 0.8);
    int testSize = data.numInstances() - trainSize;
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, testSize);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(cls, test);
}
From source file:myclassifier.wekaCode.java
public static void testingTestSet(Instances dataSet, Classifier classifiers, Instances testSet) throws Exception {
    Evaluation evaluation = new Evaluation(dataSet);
    // Evaluates the classifier on a given set of instances
    evaluation.evaluateModel(classifiers, testSet);
    System.out.println(evaluation.toSummaryString("\n Testing Model given Test Set ", false));
    System.out.println(evaluation.toClassDetailsString());
}
From source file:myid3andc45classifier.Main.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    WekaAccessor accessor = new WekaAccessor();
    Instances trainset;
    trainset = accessor.readARFF(
            "D:\\Semester VII\\ML\\myID3andC45classifier\\myID3andC45classifier\\resources\\iris.arff");
    Classifier j48 = new J48();
    Classifier model = accessor.train(trainset, j48);
    //accessor.saveModel(model, "C:\\Users\\Julio Savigny\\Desktop\\myID3andC45classifier\\myID3andC45classifier\\some.model");
    //Classifier loadedModel = accessor.loadModel("C:\\Users\\Julio Savigny\\Desktop\\myID3andC45classifier\\myID3andC45classifier\\some.model");
    System.out.println(model);
    //System.out.println(loadedModel);

    // Try ID3 (Apoy)
    Classifier customID3 = new MyC45();
    Classifier myId3Model = accessor.train(trainset, customID3);
    Instances resampledTrainset = accessor.resample(trainset);
    System.out.println("WOY");
    System.out.println(myId3Model);
    System.out.println(accessor.tenFoldCrossValidation(trainset, customID3).toSummaryString());

    Evaluation eval = new Evaluation(trainset);
    eval.evaluateModel(myId3Model, trainset);
    //System.out.println(eval.toSummaryString());
    //System.out.println(trainset);
    //System.out.println(resampledTrainset);

    // Try C4.5 (Bayu)
    // Classifier customC45 = new myC45();
    // Classifier myC45Model = accessor.train(trainset, customC45);
    // System.out.println(myC45Model);
}
From source file:naivebayes.NBRun.java
public static void main(String[] args) throws Exception { System.out.println("Naive Bayes Classifier"); Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush_test.arff"); Scanner scan = new Scanner(System.in); Classifier cls;/*from w ww. j ava2 s . c o m*/ Instances train = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush.arff"); System.out.println("Read or create model? r/c"); if (scan.next().equals("c")) { cls = new NBTubesAI(); cls.buildClassifier(train); TucilWeka.saveModel(train); } else { cls = (NBTubesAI) TucilWeka.readModel(); } Evaluation eval = new Evaluation(data); System.out.println("10 fold cross validate or Full train? c/f"); if (scan.next().equals("c")) { int fold = 10; for (int i = 0; i < data.numAttributes(); i++) { System.out.println(i + ". " + data.attribute(i)); } eval.crossValidateModel(cls, data, fold, new Random(1)); } else { for (int i = 0; i < data.numAttributes(); i++) { System.out.println(i + ". " + data.attribute(i)); } data.deleteWithMissingClass(); try { eval.evaluateModel(cls, data); } catch (java.lang.Exception ex) { eval.crossValidateModel(cls, data, 11, new Random(1)); } } // Classifier cls=new NBTubesAI(); // cls.buildClassifier(data); System.out.println("Hasil evaluasi: "); System.out.println(eval.toSummaryString()); System.out.println(eval.toMatrixString()); System.out.println(eval.toClassDetailsString()); }
From source file:naivebayes.TucilWeka.java
public static Evaluation fullTraining(Instances data) {
    // Evaluates on the full training set: train and test are copies of the same data
    Evaluation eval = null;
    Instances train = new Instances(data);
    Instances test = new Instances(data);
    try {
        // Build the classifier from the training data
        Classifier classify = new NBTubesAI();
        classify.buildClassifier(train);
        eval = new Evaluation(train);
        eval.evaluateModel(classify, test);
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:NaiveBayesPckge.NaiveBayesMain.java
public static void printEvaluationSplit(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    System.out.println("Split Test Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}
From source file:NaiveBayesPckge.NaiveBayesMain.java
public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // Summary of cross-validation
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;
    // System.out.println("Accuracy: " + df.format(accuracy) + " %");
    // System.out.println("Error rate: " + df.format(errorRates) + " %");
}
From source file:net.sf.bddbddb.order.MyId3.java
License:LGPL
/**
 * Main method.
 *
 * @param args the options for the classifier
 */
public static void main(String[] args) {
    try {
        System.out.println(Evaluation.evaluateModel(new Id3(), args));
    } catch (Exception e) {
        System.err.println(e.getMessage());
    }
}
From source file:net.sf.jclal.classifier.MOAClassifier.java
License:Open Source License
/**
 * {@inheritDoc}
 */
@Override
public AbstractEvaluation testModel(IDataset instances) {
    try {
        // Test the current classifier with the test set
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));
        evaluator.evaluateModel(classifier, instances.getDataset());
        SingleLabelEvaluation sleval = new SingleLabelEvaluation();
        sleval.setEvaluation(evaluator);
        return sleval;
    } catch (IllegalArgumentException e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    } catch (InvalidDataFormatException e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    } catch (Exception e) {
        Logger.getLogger(MOAClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}
From source file:net.sf.jclal.classifier.WekaClassifier.java
License:Open Source License
/**
 * Evaluates the classifier using the test dataset and stores the evaluation.
 *
 * @param instances The instances to test
 * @return The evaluation
 */
@Override
public AbstractEvaluation testModel(IDataset instances) {
    try {
        // Test the current classifier with the test set
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));
        evaluator.evaluateModel(classifier, instances.getDataset());
        SingleLabelEvaluation sleval = new SingleLabelEvaluation();
        sleval.setEvaluation(evaluator);
        return sleval;
    } catch (Exception e) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}