List of usage examples for weka.classifiers.Evaluation.toMatrixString()
public String toMatrixString() throws Exception
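toMatrixString() returns the confusion matrix of an Evaluation as a formatted string (an overload toMatrixString(String title) takes a custom title); it throws an Exception if the class attribute is not nominal. Before the collected examples, a minimal self-contained sketch of the typical call pattern — the dataset path iris.arff, the class name ToMatrixStringExample and the choice of J48 are placeholders, not taken from the examples below:

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ToMatrixStringExample {
    public static void main(String[] args) throws Exception {
        Instances data = new DataSource("iris.arff").getDataSet();
        // toMatrixString() requires a nominal class attribute
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toMatrixString());                           // default title
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===")); // custom title
    }
}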
From source file: org.uclab.mm.kcl.ddkat.modellearner.ModelLearner.java
License: Apache License
/**
 * Computes the classification accuracy of the named tree classifier on the given data.
 *
 * @param algo       the algorithm name
 * @param data       the data instances
 * @param datanature the dataset nature (i.e. original or processed data)
 * @return the algorithm name, decision attribute, model string and accuracy
 * @throws Exception if the classifier cannot be built, evaluated or saved
 */
protected String[] modelAccuracy(String algo, Instances data, String datanature) throws Exception {

    String[] modelResultSet = new String[4];
    String modelStr = "";
    Classifier classifier = null;

    // Set the class attribute if the data format does not provide this information
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    // Extract the attribute name from the "@attribute name type" declaration
    String decisionAttribute = data.attribute(data.numAttributes() - 1).toString();
    String[] res = decisionAttribute.split("\\s+");
    decisionAttribute = res[1];

    if (algo.equals("BFTree")) {
        // Use the BFTree classifier
        BFTree BFTreeclassifier = new BFTree();
        BFTreeclassifier.buildClassifier(data);
        modelStr = BFTreeclassifier.toString();
        classifier = BFTreeclassifier;
    } else if (algo.equals("FT")) {
        // Use the FT classifier
        FT FTclassifier = new FT();
        FTclassifier.buildClassifier(data);
        modelStr = FTclassifier.toString();
        classifier = FTclassifier;
    } else if (algo.equals("J48")) {
        // Use the J48 classifier
        J48 J48classifier = new J48();
        J48classifier.buildClassifier(data);
        modelStr = J48classifier.toString();
        classifier = J48classifier;
        System.out.println("Model String: " + modelStr);
    } else if (algo.equals("J48graft")) {
        // Use the J48graft classifier
        J48graft J48graftclassifier = new J48graft();
        J48graftclassifier.buildClassifier(data);
        modelStr = J48graftclassifier.toString();
        classifier = J48graftclassifier;
    } else if (algo.equals("RandomTree")) {
        // Use the RandomTree classifier
        RandomTree RandomTreeclassifier = new RandomTree();
        RandomTreeclassifier.buildClassifier(data);
        modelStr = RandomTreeclassifier.toString();
        classifier = RandomTreeclassifier;
    } else if (algo.equals("REPTree")) {
        // Use the REPTree classifier
        REPTree REPTreeclassifier = new REPTree();
        REPTreeclassifier.buildClassifier(data);
        modelStr = REPTreeclassifier.toString();
        classifier = REPTreeclassifier;
    } else if (algo.equals("SimpleCart")) {
        // Use the SimpleCart classifier
        SimpleCart SimpleCartclassifier = new SimpleCart();
        SimpleCartclassifier.buildClassifier(data);
        modelStr = SimpleCartclassifier.toString();
        classifier = SimpleCartclassifier;
    }

    modelResultSet[0] = algo;
    modelResultSet[1] = decisionAttribute;
    modelResultSet[2] = modelStr;

    // Collect every group of predictions from the cross-validation in a FastVector
    FastVector predictions = new FastVector();
    Evaluation evaluation = new Evaluation(data);
    int folds = 10; // 10-fold cross-validation
    evaluation.crossValidateModel(classifier, data, folds, new Random(1));
    // Print the confusion matrix
    System.out.println("\n\n" + datanature + " Evaluation " + evaluation.toMatrixString());
    predictions.appendElements(evaluation.predictions());

    // Calculate the overall accuracy of the current classifier on all splits
    double correct = 0;
    for (int i = 0; i < predictions.size(); i++) {
        NominalPrediction np = (NominalPrediction) predictions.elementAt(i);
        if (np.predicted() == np.actual()) {
            correct++;
        }
    }
    double accuracy = 100 * correct / predictions.size();
    String accString = String.format("%.2f%%", accuracy);
    modelResultSet[3] = accString;
    System.out.println(datanature + " Accuracy " + accString);

    // Serialize the trained model to disk
    String modelFileName = algo + "-DDKA.model";
    ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream("D:\\DDKAResources\\" + modelFileName));
    oos.writeObject(classifier);
    oos.flush();
    oos.close();

    return modelResultSet;
}
From source file: PEBL.TwoStep.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "Z:\\shared from vm\\fourthset\\mixed.csv");
    Instances data = source.getDataSet();
    // Set the class attribute if the data format does not provide this information.
    // For example, the XRFF format saves the class attribute information as well.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build a Naive Bayes classifier with kernel density estimation (-K)
    String[] options = new String[1];
    options[0] = "-K";
    NaiveBayes c = new NaiveBayes(); // new classifier instance
    c.setOptions(options);           // set the options
    c.buildClassifier(data);         // build the classifier

    // Evaluate with 10-fold cross-validation
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(c, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
    System.out.println("--- model learned on mixed set ---");

    // Load unlabeled data
    ConverterUtils.DataSource s = new ConverterUtils.DataSource(
            "Z:\\shared from vm\\fourthset\\unlabelled.csv");
    Instances unlabeled = s.getDataSet();
    // Set the class attribute
    unlabeled.setClassIndex(unlabeled.numAttributes() - 1);
    nmf = new NumericToNominal();
    nmf.setInputFormat(unlabeled);
    unlabeled = Filter.useFilter(unlabeled, nmf);

    // Label the instances
    for (int i = 0; i < unlabeled.numInstances(); i++) {
        double classZero = c.distributionForInstance(unlabeled.instance(i))[0];
        double classOne = c.distributionForInstance(unlabeled.instance(i))[1];
        System.out.print("classifying: " + unlabeled.instance(i) + " : " + classZero + " - " + classOne
                + " == class: ");
        if (classZero > classOne) {
            System.out.print("0");
            unlabeled.instance(i).setClassValue("0");
        } else {
            System.out.print("1");
            unlabeled.instance(i).setClassValue("1");
        }
        System.out.println("");
    }

    // Save the labeled data
    ArffSaver saver = new ArffSaver();
    saver.setInstances(unlabeled);
    saver.setFile(new File("Z:\\shared from vm\\thirdset\\relabelled.arff"));
    // saver.setDestination(...) is *not* necessary in 3.5.4 and later
    saver.writeBatch();
}
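One small design note on the labeling loop above: it classifies each instance twice by calling distributionForInstance once per class probability. A sketch of the same loop with a single call per instance (variables c and unlabeled as in the example):

for (int i = 0; i < unlabeled.numInstances(); i++) {
    double[] dist = c.distributionForInstance(unlabeled.instance(i)); // one call per instance
    String label = dist[0] > dist[1] ? "0" : "1";
    System.out.println("classifying: " + unlabeled.instance(i) + " : " + dist[0] + " - " + dist[1]
            + " == class: " + label);
    unlabeled.instance(i).setClassValue(label);
}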
From source file: PointAnalyser.Main.java
public static void trainC45Classifier() throws Exception {
    // Set the class attribute if the data format does not provide this information.
    // For example, the XRFF format saves the class attribute information as well.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build a C4.5 (J48) classifier: unpruned (-U), min 2 instances per leaf (-M 2).
    // setOptions expects each token as a separate array element, so the option
    // string is split with Utils.splitOptions; the confidence factor (-C) is
    // omitted because J48 rejects it together with -U.
    String[] options = weka.core.Utils.splitOptions("-U -M 2");
    tree = new J48();           // new instance of the tree
    tree.setOptions(options);   // set the options
    tree.buildClassifier(data); // build the classifier

    /*
    RemoveMisclassified rm = new RemoveMisclassified();
    rm.setInputFormat(data);
    rm.setClassifier(tree);
    rm.setNumFolds(10);
    rm.setThreshold(0.1);
    rm.setMaxIterations(0);
    data = Filter.useFilter(data, rm);

    tree = new J48();           // new instance of the tree
    tree.setOptions(options);   // set the options
    tree.buildClassifier(data); // build the classifier
    */

    // Evaluate with 10-fold cross-validation
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}
From source file: PointAnalyser.Main.java
public static void trainNNClassifier() throws Exception {
    // Set the class attribute if the data format does not provide this information.
    // For example, the XRFF format saves the class attribute information as well.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build a k-nearest-neighbour classifier (IBk, k = 7) with a KD-tree search
    nn = new IBk();
    // nn.setCrossValidate(true);
    nn.setKNN(7);
    nn.setNearestNeighbourSearchAlgorithm(new weka.core.neighboursearch.KDTree(data));
    nn.setWindowSize(0); // no limit on the number of training instances
    nn.buildClassifier(data);

    // Evaluate with 10-fold cross-validation
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(nn, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}
From source file: sentinets.TrainModel.java
License: Open Source License
public void trainModel(Classifier c, String name) {
    Evaluation e;
    try {
        e = new Evaluation(ins);
        e.crossValidateModel(c, ins, 10, new Random(1));
        System.out.println("****Results of " + name + "****");
        System.out.println(e.toSummaryString());
        System.out.println(e.toClassDetailsString());
        System.out.println(e.toCumulativeMarginDistributionString());
        System.out.println(e.toMatrixString());
        System.out.println("*********************");
        TrainModel.saveModel(c, name);
    } catch (Exception e1) {
        e1.printStackTrace();
    }
}
From source file: statistics.BinaryStatisticsEvaluator.java
@Override
public double[][] getConfusionMatrix(Instances Training_Instances, Instances Testing_Instances,
        String classifier) {
    // Select the classifier by its short name
    Classifier cModel = null;
    if ("NB".equals(classifier)) {
        cModel = new NaiveBayes();
    } else if ("DT".equals(classifier)) {
        cModel = new J48();
    } else if ("SVM".equals(classifier)) {
        cModel = new SMO();
    } else if ("KNN".equals(classifier)) {
        cModel = new IBk();
    }
    try {
        cModel.buildClassifier(Training_Instances);
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Test the model
    Evaluation eTest;
    try {
        eTest = new Evaluation(Training_Instances);
        eTest.evaluateModel(cModel, Testing_Instances);

        // Print the results
        System.out.println(eTest.toSummaryString());
        System.out.println(eTest.toMatrixString());
        System.out.println(eTest.toClassDetailsString());

        // Return the confusion matrix
        return eTest.confusionMatrix();
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
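For reference, toMatrixString() formats the same counts that confusionMatrix() returns as a raw double[][] (rows are actual classes, columns are predicted classes). A minimal sketch of printing the raw matrix returned by the method above:

double[][] cm = eTest.confusionMatrix(); // rows: actual class, columns: predicted class
for (double[] row : cm) {
    for (double cell : row) {
        System.out.printf("%8.0f", cell);
    }
    System.out.println();
}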
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Get the test error of the current classifier on a specific image and its binary labels.
 *
 * @param image input image
 * @param labels binary labels
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create the feature stack for the test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return error;
}
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Get the test error of the current classifier on a specific image and its binary labels.
 *
 * @param image input image
 * @param labels binary labels
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create the feature stack for the test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return error;
}
From source file: Tubes.Classification.java
public static void main(String[] args) throws Exception {
    StringToWordVector filter = new StringToWordVector();
    File training = new File(classTrain);
    File testing = new File(classTest);
    BufferedReader readTrain = new BufferedReader(new FileReader(training));
    BufferedReader readTest = new BufferedReader(new FileReader(testing));
    Instances dataTrain = new Instances(readTrain);
    Instances dataTest = new Instances(readTest);

    filter.setInputFormat(dataTrain);
    dataTrain = Filter.useFilter(dataTrain, filter);
    // Apply the same trained filter to the test set so both sets stay compatible
    dataTest = Filter.useFilter(dataTest, filter);
    dataTrain.setClassIndex(dataTrain.numAttributes() - 1);
    dataTest.setClassIndex(dataTest.numAttributes() - 1);

    Classification classify = new Classification();
    NaiveBayes bayes = new NaiveBayes();
    // RandomForest rf = new RandomForest();
    // BayesNet bayesNet = new BayesNet();
    LibSVM libSVM = new LibSVM();

    System.out.println("==========================Naive Bayes Evaluation===========================");
    Evaluation eval = classify.runClassifier(bayes, dataTrain, dataTest);
    System.out.println(eval.toSummaryString() + "\n");
    System.out.println(eval.toClassDetailsString() + "\n");
    System.out.println(eval.toMatrixString() + "\n");
    System.out.println("===========================================================================");

    // System.out.println("==============================Random Forest================================");
    // Evaluation eval2 = classify.runClassifier(rf, dataTrain, dataTest);
    // System.out.println(eval2.toSummaryString() + "\n");
    // System.out.println(eval2.toClassDetailsString() + "\n");
    // System.out.println(eval2.toMatrixString() + "\n");
    // System.out.println("===========================================================================");

    // System.out.println("==============================Bayesian Network================================");
    // Evaluation eval3 = classify.runClassifier(bayesNet, dataTrain, dataTest);
    // System.out.println(eval3.toSummaryString() + "\n");
    // System.out.println(eval3.toClassDetailsString() + "\n");
    // System.out.println(eval3.toMatrixString() + "\n");
    // System.out.println("===========================================================================");

    System.out.println("==============================LibSVM================================");
    libSVM.setCacheSize(512); // MB
    libSVM.setNormalize(true);
    libSVM.setShrinking(true);
    libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_LINEAR, LibSVM.TAGS_KERNELTYPE));
    libSVM.setDegree(3);
    libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
    Evaluation eval4 = classify.runClassifier(libSVM, dataTrain, dataTest);
    System.out.println(eval4.toSummaryString() + "\n");
    System.out.println(eval4.toClassDetailsString() + "\n");
    System.out.println(eval4.toMatrixString() + "\n");
    System.out.println("===========================================================================");
}
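The Classification.runClassifier helper used above is project code that is not shown in this listing. A plausible minimal implementation, assuming it simply trains on the training split and evaluates on the held-out test split (this body is an assumption, not the project's actual code):

public Evaluation runClassifier(Classifier c, Instances dataTrain, Instances dataTest) throws Exception {
    c.buildClassifier(dataTrain);                // train on the training data
    Evaluation eval = new Evaluation(dataTrain); // class priors taken from the training data
    eval.evaluateModel(c, dataTest);             // evaluate on the held-out test data
    return eval;
}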
From source file: tubes2ai.DriverNB.java
public static void run(String data) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(data);
    Instances dataTrain = source.getDataSet();
    // if (dataTrain.classIndex() == -1)
    dataTrain.setClassIndex(0);

    // Discretize the attributes before training Naive Bayes
    Discretize discretize = new Discretize();
    discretize.setInputFormat(dataTrain);
    Instances dataTrainDisc = Filter.useFilter(dataTrain, discretize);

    // AIJKNaiveBayes is this project's own Naive Bayes implementation
    // NaiveBayes NB = new NaiveBayes();
    AIJKNaiveBayes NB = new AIJKNaiveBayes();
    NB.buildClassifier(dataTrainDisc);

    // Evaluate on the training data and print the results
    Evaluation eval = new Evaluation(dataTrainDisc);
    eval.evaluateModel(NB, dataTrainDisc);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    /*
    Instance inst = new DenseInstance(5);
    inst.setDataset(dataTrain);
    inst.setValue(0, "sunny");
    inst.setValue(1, "hot");
    inst.setValue(2, "high");
    inst.setValue(3, "FALSE");
    inst.setValue(4, "yes");
    double a = NB.classifyInstance(inst);
    String hasil = "";
    if (a == 0.0) {
        hasil = "YES";
    } else {
        hasil = "NO";
    }
    // double[] b = NB.distributionForInstance(inst);
    System.out.println("Classification result: " + hasil);
    // System.out.println(b);
    */
}