List of usage examples for weka.classifiers Evaluation crossValidateModel
public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random) throws Exception
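Before the examples from real projects, here is a minimal, self-contained sketch of the call. The data file name iris.arff is illustrative only; the class is assumed to be the last attribute.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidateExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset and mark the last attribute as the class (file name is illustrative)
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation; a fresh copy of the classifier is built on each training fold
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}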
From source file:newsclassifier.NewsClassifier.java
public void CrossValidation(Classifier cls, int n) throws Exception {
    data.setClassIndex(0);
    Evaluation eval = new Evaluation(data);
    cls.buildClassifier(data);
    eval.crossValidateModel(cls, data, n, new Random(1));
    System.out.println(eval.toSummaryString("Results", false));
    //System.out.println(eval.toClassDetailsString());
    //System.out.println(eval.toMatrixString());
}
From source file:nl.uva.expose.classification.WekaClassification.java
private void classifierTrainer(Instances trainData) throws Exception {
    trainData.setClassIndex(0);
    // classifier.setFilter(filter);
    classifier.setClassifier(new NaiveBayes());
    classifier.buildClassifier(trainData);
    Evaluation eval = new Evaluation(trainData);
    eval.crossValidateModel(classifier, trainData, 5, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println("===== Evaluating on filtered (training) dataset done =====");
    System.out.println("\n\nClassifier model:\n\n" + classifier);
}
From source file:old.CFS.java
/**
 * Uses the meta-classifier.
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classifier");
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    ChiSquaredAttributeEval eval = new ChiSquaredAttributeEval();
    Ranker search = new Ranker();
    search.setThreshold(-1.7976931348623157E308); // -Double.MAX_VALUE, i.e. effectively no threshold
    search.setNumToSelect(1000);
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}
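Because the attribute selection is wrapped inside AttributeSelectedClassifier, crossValidateModel repeats the ChiSquared ranking on each training fold, so the selection does not leak information from the test folds. A small readability tweak (an assumption on my part, not part of the original source) is to spell the threshold as a constant rather than the raw literal:

    Ranker search = new Ranker();
    search.setThreshold(-Double.MAX_VALUE); // same value as -1.7976931348623157E308: keep all ranked attributes
    search.setNumToSelect(1000);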
From source file:org.conqat.engine.commons.machine_learning.BaseWekaClassifier.java
License:Apache License
/**
 * Evaluates a classifier using 5-fold cross validation and returns the
 * evaluation object. Use this method for debugging purposes to get
 * information about precision, recall, etc.
 */
public Evaluation debugEvaluateClassifier() throws Exception, IOException {
    Instances data = wekaDataSetCreator.getDataSet();
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(wekaClassifier, data, 5, new Random(1));
    return eval;
}
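One way a caller might consume the returned Evaluation object (a sketch; only debugEvaluateClassifier is taken from the example above, and the class value index 0 refers to the first class label):

    Evaluation eval = debugEvaluateClassifier();
    System.out.println(eval.toSummaryString());
    System.out.println("Precision (first class): " + eval.precision(0));
    System.out.println("Recall (first class):    " + eval.recall(0));
    System.out.println("F-measure (first class): " + eval.fMeasure(0));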
From source file:org.openml.webapplication.fantail.dc.landmarking.J48BasedLandmarker.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int numFolds = m_NumFolds;
    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;
    double score7 = 0.5;
    double score8 = 0.5;
    double score9 = 0.5;

    weka.classifiers.trees.J48 cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.00001f);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score1 = eval.pctIncorrect();
        score2 = eval.weightedAreaUnderROC();
        score7 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.0001f);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score3 = eval.pctIncorrect();
        score4 = eval.weightedAreaUnderROC();
        score8 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.J48();
    cls.setConfidenceFactor(0.001f);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score5 = eval.pctIncorrect();
        score6 = eval.weightedAreaUnderROC();
        score9 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    qualities.put(ids[6], score7);
    qualities.put(ids[7], score8);
    qualities.put(ids[8], score9);
    return qualities;
}
From source file:org.openml.webapplication.fantail.dc.landmarking.RandomTreeBasedLandmarker.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int numFolds = m_NumFolds;
    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;

    weka.classifiers.trees.RandomTree cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(m_Seed);
    cls.setMaxDepth(1);
    try {
        // ds.buildClassifier(data);
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score1 = eval.weightedAreaUnderROC();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(m_Seed);
    cls.setMaxDepth(2);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score2 = eval.weightedAreaUnderROC();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(m_Seed);
    cls.setMaxDepth(3);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score3 = eval.weightedAreaUnderROC();
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    return qualities;
}
From source file:org.openml.webapplication.fantail.dc.landmarking.RandomTreeBasedLandmarker2.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int seed = m_Seed;
    Random r = new Random(seed);
    int numFolds = m_NumFolds;
    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;

    weka.classifiers.trees.RandomTree cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(1);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score1 = eval.pctIncorrect();
        score2 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(2);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score3 = eval.pctIncorrect();
        score4 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(3);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score5 = eval.pctIncorrect();
        score6 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Note: the results of the two evaluations below are never read.
    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(4);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(5);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    return qualities;
}
From source file:org.openml.webapplication.fantail.dc.landmarking.REPTreeBasedLandmarker.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int numFolds = m_NumFolds;
    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;
    double score7 = 0.5;
    double score8 = 0.5;
    double score9 = 0.5;

    weka.classifiers.trees.REPTree cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(1);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score1 = eval.pctIncorrect();
        score2 = eval.weightedAreaUnderROC();
        score7 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(2);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score3 = eval.pctIncorrect();
        score4 = eval.weightedAreaUnderROC();
        score8 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    cls = new weka.classifiers.trees.REPTree();
    cls.setMaxDepth(3);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score5 = eval.pctIncorrect();
        score6 = eval.weightedAreaUnderROC();
        score9 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    qualities.put(ids[6], score7);
    qualities.put(ids[7], score8);
    qualities.put(ids[8], score9);
    return qualities;
}
From source file:org.openml.webapplication.fantail.dc.landmarking.SimpleLandmarkers.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int numFolds = m_NumFolds;
    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;

    weka.classifiers.trees.DecisionStump ds = new weka.classifiers.trees.DecisionStump();
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(ds, data, numFolds, new java.util.Random(1));
        score1 = eval.pctIncorrect();
        score2 = eval.weightedAreaUnderROC();
        score3 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        weka.classifiers.bayes.NaiveBayes nb = new weka.classifiers.bayes.NaiveBayes();
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(nb, data, numFolds, new java.util.Random(1));
        score5 = eval.pctIncorrect();
        score6 = eval.weightedAreaUnderROC();
        score4 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score5);
    qualities.put(ids[3], score6);
    qualities.put(ids[4], score3);
    qualities.put(ids[5], score4);
    return qualities;
}
From source file:org.uclab.mm.kcl.ddkat.modellearner.ModelLearner.java
License:Apache License
/**
 * Method to compute the classification accuracy.
 *
 * @param algo the algorithm name
 * @param data the data instances
 * @param datanature the dataset nature (i.e. original or processed data)
 * @throws Exception the exception
 */
protected String[] modelAccuracy(String algo, Instances data, String datanature) throws Exception {

    String modelResultSet[] = new String[4];
    String modelStr = "";
    Classifier classifier = null;

    // set the class attribute if the data format does not provide this information
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    String decisionAttribute = data.attribute(data.numAttributes() - 1).toString();
    String res[] = decisionAttribute.split("\\s+");
    decisionAttribute = res[1];

    if (algo.equals("BFTree")) {
        // Use BFTree classifier
        BFTree BFTreeclassifier = new BFTree();
        BFTreeclassifier.buildClassifier(data);
        modelStr = BFTreeclassifier.toString();
        classifier = BFTreeclassifier;
    } else if (algo.equals("FT")) {
        // Use FT classifier
        FT FTclassifier = new FT();
        FTclassifier.buildClassifier(data);
        modelStr = FTclassifier.toString();
        classifier = FTclassifier;
    } else if (algo.equals("J48")) {
        // Use J48 classifier
        J48 J48classifier = new J48();
        J48classifier.buildClassifier(data);
        modelStr = J48classifier.toString();
        classifier = J48classifier;
        System.out.println("Model String: " + modelStr);
    } else if (algo.equals("J48graft")) {
        // Use J48graft classifier
        J48graft J48graftclassifier = new J48graft();
        J48graftclassifier.buildClassifier(data);
        modelStr = J48graftclassifier.toString();
        classifier = J48graftclassifier;
    } else if (algo.equals("RandomTree")) {
        // Use RandomTree classifier
        RandomTree RandomTreeclassifier = new RandomTree();
        RandomTreeclassifier.buildClassifier(data);
        modelStr = RandomTreeclassifier.toString();
        classifier = RandomTreeclassifier;
    } else if (algo.equals("REPTree")) {
        // Use REPTree classifier
        REPTree REPTreeclassifier = new REPTree();
        REPTreeclassifier.buildClassifier(data);
        modelStr = REPTreeclassifier.toString();
        classifier = REPTreeclassifier;
    } else if (algo.equals("SimpleCart")) {
        // Use SimpleCart classifier
        SimpleCart SimpleCartclassifier = new SimpleCart();
        SimpleCartclassifier.buildClassifier(data);
        modelStr = SimpleCartclassifier.toString();
        classifier = SimpleCartclassifier;
    }

    modelResultSet[0] = algo;
    modelResultSet[1] = decisionAttribute;
    modelResultSet[2] = modelStr;

    // Collect every group of predictions for the model in a FastVector
    FastVector predictions = new FastVector();
    Evaluation evaluation = new Evaluation(data);
    int folds = 10; // 10-fold cross-validation
    evaluation.crossValidateModel(classifier, data, folds, new Random(1));
    System.out.println("\n\n" + datanature + " Evaluation " + evaluation.toMatrixString());
    predictions.appendElements(evaluation.predictions());

    // Calculate the overall accuracy of the current classifier over all splits
    double correct = 0;
    for (int i = 0; i < predictions.size(); i++) {
        NominalPrediction np = (NominalPrediction) predictions.elementAt(i);
        if (np.predicted() == np.actual()) {
            correct++;
        }
    }
    double accuracy = 100 * correct / predictions.size();
    String accString = String.format("%.2f%%", accuracy);
    modelResultSet[3] = accString;
    System.out.println(datanature + " Accuracy " + accString);

    // Serialize the trained model to disk
    String modelFileName = algo + "-DDKA.model";
    ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream("D:\\DDKAResources\\" + modelFileName));
    oos.writeObject(classifier);
    oos.flush();
    oos.close();

    return modelResultSet;
}
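The manual loop over NominalPrediction objects recomputes something Evaluation already exposes; for unit-weight instances a shorter equivalent (a sketch, not part of the original source) is:

    double accuracy = evaluation.pctCorrect(); // percentage of correctly classified instances
    String accString = String.format("%.2f%%", accuracy);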