List of usage examples for weka.classifiers.Evaluation.toSummaryString
public String toSummaryString(String title, boolean printComplexityStatistics)
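Before the examples, a minimal self-contained sketch of the typical call pattern, assuming a placeholder ARFF file named "data.arff" and using J48 as an arbitrary classifier; the first argument is the report title, the second toggles the complexity statistics in the output.

import java.io.FileReader;
import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class SummaryStringDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset; "data.arff" is a placeholder path
        Instances data = new Instances(new FileReader("data.arff"));
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a classifier (J48 here) and collect statistics
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // Title first, then the flag for printing complexity statistics
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
    }
}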
From source file:eyetracker.MLPProcessor.java
public MLPProcessor() {
    try {
        FileReader fr = new FileReader("trainingData.arff");
        Instances training = new Instances(fr);
        training.setClassIndex(training.numAttributes() - 1);

        mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 5"));
        mlp.buildClassifier(training);

        FileReader tr = new FileReader("trainingData.arff");
        Instances testdata = new Instances(tr);
        inst = testdata;
        testdata.setClassIndex(testdata.numAttributes() - 1);

        Evaluation eval = new Evaluation(training);
        eval.evaluateModel(mlp, testdata);
        System.out.println(eval.toSummaryString("\nResults\n*******\n", false));

        tr.close();
        fr.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:farm_ads.MyClassifier.java
public String printEvaluation(Evaluation e) throws Exception {
    String s = "";
    s += e.toSummaryString("\nResults\n======\n", false);
    s += "\n" + e.toMatrixString("Matrix String");
    s += "\n" + e.toClassDetailsString();
    return s;
}
From source file:general.Util.java
/**
 * Show learning statistics using k-fold cross-validation.
 * @param data instances
 * @param folds number of folds
 */
public static void FoldSchema(Instances data, int folds) {
    try {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(Util.getClassifier(), data, folds, new Random(1));
        System.out.println(eval.toSummaryString("\nResults " + folds + " folds cross-validation\n\n", false));
    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:general.Util.java
/**
 * Show learning statistics using the full-training method (train and evaluate on the same data).
 * @param data training data
 */
public static void FullSchema(Instances data) {
    try {
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString("\nResults Full-Training\n\n", false));
    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:general.Util.java
/**
 * Show learning statistics using a separate test set.
 * @param testPath path to the test file
 * @param typeTestFile test file type ("arff" or "csv")
 */
public static void TestSchema(String testPath, String typeTestFile) {
    Instances testsets = null;
    // Load test instances based on file type and path
    if (typeTestFile.equals("arff")) {
        FileReader file = null;
        try {
            file = new FileReader(testPath);
            try (BufferedReader reader = new BufferedReader(file)) {
                testsets = new Instances(reader);
            }
            // setting class attribute
            testsets.setClassIndex(testsets.numAttributes() - 1);
        } catch (IOException ex) {
            Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            try {
                if (file != null) {
                    file.close();
                }
            } catch (IOException ex) {
                Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    } else if (typeTestFile.equals("csv")) {
        try {
            CSVLoader csv = new CSVLoader();
            csv.setFile(new File(testPath));
            testsets = csv.getDataSet();
            // setting class attribute
            testsets.setClassIndex(testsets.numAttributes() - 1);
        } catch (IOException ex) {
            Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    // Evaluate the model on the test instances and print the results
    try {
        Evaluation eval = new Evaluation(Util.getData());
        eval.evaluateModel(Util.getClassifier(), testsets);
        System.out.println(eval.toSummaryString("\nResults\n\n", false));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:general.Util.java
/**
 * Show learning statistics using a percentage split.
 * @param data training data
 * @param trainPercent percentage of the data used for training
 * @param Classifier name of the classifier model to use
 */
public static void PercentageSplit(Instances data, double trainPercent, String Classifier) {
    try {
        int trainSize = (int) Math.round(data.numInstances() * trainPercent / 100);
        int testSize = data.numInstances() - trainSize;
        data.randomize(new Random(1));
        Instances train = new Instances(data, 0, trainSize);
        Instances test = new Instances(data, trainSize, testSize);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        switch (Classifier.toLowerCase()) {
        case "naivebayes":
            classifier = new NaiveBayes();
            break;
        case "j48-prune":
            classifier = new MyJ48(true, 0.25f);
            break;
        case "j48-unprune":
            classifier = new MyJ48(false, 0f);
            break;
        case "id3":
            classifier = new MyID3();
            break;
        default:
            break;
        }
        classifier.buildClassifier(train);

        for (int i = 0; i < test.numInstances(); i++) {
            try {
                double pred = classifier.classifyInstance(test.instance(i));
                System.out.print("ID: " + test.instance(i));
                System.out.print(", actual: " + test.classAttribute().value((int) test.instance(i).classValue()));
                System.out.println(", predicted: " + test.classAttribute().value((int) pred));
            } catch (Exception ex) {
                Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        // Evaluate the model on the test instances and print the results
        try {
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.toSummaryString("\nResults\n\n", false));
        } catch (Exception e) {
            e.printStackTrace();
        }
    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:hurtowniedanych.FXMLController.java
public void trainAndTestKNN() throws FileNotFoundException, IOException, Exception {
    InstanceQuery instanceQuery = new InstanceQuery();
    instanceQuery.setUsername("postgres");
    instanceQuery.setPassword("szupek");
    // Point to the properties file with the PostgreSQL settings
    instanceQuery.setCustomPropsFile(new File("./src/data/DatabaseUtils.props"));

    String query = "select ks.wydawnictwo, ks.gatunek, kl.miasto\n"
            + "from zakupy z, ksiazki ks, klienci kl\n"
            + "where ks.id_ksiazka=z.id_ksiazka and kl.id_klient=z.id_klient";
    instanceQuery.setQuery(query);
    Instances data = instanceQuery.retrieveInstances();
    data.setClassIndex(data.numAttributes() - 1);
    data.randomize(new Random());

    double percent = 70.0;
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances trainData = new Instances(data, 0, trainSize);
    Instances testData = new Instances(data, trainSize, testSize);

    int lSasiadow = Integer.parseInt(textFieldKnn.getText());
    System.out.println(lSasiadow);

    IBk ibk = new IBk(lSasiadow);
    // Distance functions: Euclidean and Manhattan
    EuclideanDistance euclidean = new EuclideanDistance();
    ManhattanDistance manhatan = new ManhattanDistance();
    LinearNNSearch linearNN = new LinearNNSearch();
    if (comboboxOdleglosc.getSelectionModel().getSelectedItem().equals("Manhatan")) {
        linearNN.setDistanceFunction(manhatan);
    } else {
        linearNN.setDistanceFunction(euclidean);
    }
    // Set the nearest-neighbour search strategy
    ibk.setNearestNeighbourSearchAlgorithm(linearNN);

    // Build the classifier and evaluate it on the test split
    ibk.buildClassifier(trainData);
    Evaluation eval = new Evaluation(trainData);
    eval.evaluateModel(ibk, testData);

    spr.setVisible(true);
    labelKnn.setVisible(true);
    labelOdleglosc.setVisible(true);
    labelKnn.setText(textFieldKnn.getText());
    labelOdleglosc.setText(comboboxOdleglosc.getSelectionModel().getSelectedItem().toString());
    spr.setText(eval.toSummaryString("Wynik:", true));
}
From source file:id3.MyID3.java
/**
 * Main method
 * @param args arguments
 */
public static void main(String[] args) {
    Instances instances;
    try {
        BufferedReader reader = new BufferedReader(new FileReader("D:\\Weka-3-6\\data\\weather.nominal.arff"));
        try {
            instances = new Instances(reader);
            instances.setClassIndex(instances.numAttributes() - 1);
            MyID3 id3 = new MyID3();
            try {
                id3.buildClassifier(instances);
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Test class distribution
            double[] classDistribution = id3.classDistribution(instances);
            for (int i = 0; i < classDistribution.length; i++) {
                System.out.println(classDistribution[i]);
            }
            // Test entropy and information gain for each attribute
            System.out.println(id3.computeEntropy(instances));
            Enumeration attributes = instances.enumerateAttributes();
            while (attributes.hasMoreElements()) {
                System.out.println(id3.computeIG(instances, (Attribute) attributes.nextElement()));
            }
            // Test build classifier
            try {
                id3.buildClassifier(instances);
            } catch (Exception e) {
                e.printStackTrace();
            }
            System.out.println(id3.toString());
            // Evaluate model from build classifier (full training)
            Evaluation eval = null;
            try {
                eval = new Evaluation(instances);
            } catch (Exception e) {
                e.printStackTrace();
            }
            try {
                System.out.println(instances);
                eval.evaluateModel(id3, instances);
            } catch (Exception e) {
                e.printStackTrace();
            }
            System.out.println(eval.toSummaryString("\nResults Full-Training\n\n", false));
            // Print the confusion matrix from the evaluation
            System.out.println("Confusion Matrix : ");
            double[][] cmMatrix = eval.confusionMatrix();
            for (int row_i = 0; row_i < cmMatrix.length; row_i++) {
                for (int col_i = 0; col_i < cmMatrix.length; col_i++) {
                    System.out.print(cmMatrix[row_i][col_i]);
                    System.out.print("|");
                }
                System.out.println();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
From source file:id3classifier.Main.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(file);
    Instances dataSet = source.getDataSet();

    // discretize the dataset
    Discretize filter = new Discretize();
    filter.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, filter);

    // standardize the dataset
    Standardize standardizedData = new Standardize();
    standardizedData.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardizedData);

    // randomize the dataset
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Debug.Random());

    // get the sizes of the training and testing sets and split
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;
    Instances training = new Instances(dataSet, 0, trainingSize);
    Instances test = new Instances(dataSet, trainingSize, testSize);

    // set up the ID3 classifier on the training data
    ID3Classifiers classifier = new ID3Classifiers();
    classifier.buildClassifier(training);

    // set up the evaluation and test using the classifier and test set
    Evaluation eval = new Evaluation(dataSet);
    eval.evaluateModel(classifier, test);

    // output the summary and exit; important to exit here to stop JavaFX
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    System.exit(0);
}
From source file:id3j48.WekaAccess.java
public static void main(String[] args) {
    initializePath();
    try {
        cin = new Scanner(System.in);
        Instances data = null, tempdata;
        Classifier NBclassifier, ID3classifier, j48classifier;
        Evaluation NBeval, ID3eval, j48eval;

        System.out.println("Enter filename below");
        String filename = cin.nextLine();
        System.out.println("Loading " + filename + "...");
        String extension = "";
        String name = "";
        int i = filename.lastIndexOf('.');
        if (i > 0) {
            extension = filename.substring(i + 1);
            name = filename.substring(0, i);
        }
        if (extension.equalsIgnoreCase("arff")) {
            try {
                data = readArff(filename);
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else if (extension.equalsIgnoreCase("csv")) {
            try {
                data = readCsv(filename);
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            System.out.println("Invalid extension");
            System.exit(0);
        }
        System.out.println(data.toString());

        System.out.println("Resample data? (y for yes) ");
        String resample = cin.nextLine();
        if (resample.equalsIgnoreCase("y")) {
            try {
                tempdata = resampleData(data);
                System.out.println("-- Resampled data --");
                System.out.println(tempdata.toString());
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        tempdata = removeAttribute(data, data.numAttributes());
        System.out.println("-- Remove Attribute --");
        System.out.println(tempdata.toString());

        NBclassifier = buildClassifier(data, new NaiveBayes());
        System.out.println("-- Naive Bayes Classifier --");
        System.out.println(NBclassifier.toString());
        ID3classifier = buildClassifier(data, new Id3());
        System.out.println("-- ID3 Classifier --");
        System.out.println(ID3classifier.toString());
        j48classifier = buildClassifier(data, new J48());
        System.out.println("-- J48 Classifier --");
        System.out.println(j48classifier.toString());

        Instances test = null;
        if (extension.equalsIgnoreCase("arff"))
            test = readArff("test." + filename);
        else if (extension.equalsIgnoreCase("csv"))
            test = readCsv("test." + filename);

        NBeval = testModel(NBclassifier, data, test);
        System.out.println(NBeval.toSummaryString("-- Training set evaluation results with Naive Bayes --\n", false));
        ID3eval = testModel(ID3classifier, data, test);
        System.out.println(ID3eval.toSummaryString("-- Training set evaluation results with ID3 --\n", false));
        j48eval = testModel(j48classifier, data, test);
        System.out.println(j48eval.toSummaryString("-- Training set evaluation results with J48 --\n", false));

        NBeval = tenFoldCrossValidation(data, NBclassifier);
        System.out.println(NBeval.toSummaryString("-- 10-fold cross validation results with Naive Bayes --\n", false));
        ID3eval = tenFoldCrossValidation(data, ID3classifier);
        System.out.println(ID3eval.toSummaryString("-- 10-fold cross validation results with ID3 --\n", false));
        j48eval = tenFoldCrossValidation(data, j48classifier);
        System.out.println(j48eval.toSummaryString("-- 10-fold cross validation results with J48 --\n", false));

        NBeval = percentageSplit(data, NBclassifier, 66);
        System.out.println(NBeval.toSummaryString("-- 66% split validation results with Naive Bayes --\n", false));
        ID3eval = percentageSplit(data, ID3classifier, 66);
        System.out.println(ID3eval.toSummaryString("-- 66% split validation results with ID3 --\n", false));
        j48eval = percentageSplit(data, j48classifier, 66);
        System.out.println(j48eval.toSummaryString("-- 66% split validation results with J48 --\n", false));

        System.out.println("-- Save Naive Bayes Model --");
        saveModel("nb." + name + ".model", NBclassifier);
        System.out.println("-- Save ID3 Model --");
        saveModel("id3." + name + ".model", ID3classifier);
        System.out.println("-- Save J48 Model --");
        saveModel("j48." + name + ".model", j48classifier);

        System.out.println("-- Load Naive Bayes Model --");
        System.out.println(loadModel("nb." + name + ".model").toString());
        System.out.println("-- Load ID3 Model --");
        System.out.println(loadModel("id3." + name + ".model").toString());
        System.out.println("-- Load J48 Model --");
        System.out.println(loadModel("j48." + name + ".model").toString());

        System.out.println("-- Classify Naive Bayes Model --");
        classify("classify." + filename, NBclassifier);
        System.out.println("-- Classify ID3 Model --");
        classify("classify." + filename, ID3classifier);
        System.out.println("-- Classify J48 Model --");
        classify("classify." + filename, j48classifier);
    } catch (Exception ex) {
        Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
    }
}