List of usage examples for weka.classifiers.Evaluation.toSummaryString()
@Override
public String toSummaryString()
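All of the examples below follow the same basic pattern: construct an Evaluation from a set of training instances, run a classifier against it (either on a held-out test set via evaluateModel or via crossValidateModel), and print the textual report returned by toSummaryString(). Here is a minimal sketch of that pattern, assuming a recent Weka (3.6 or later) on the classpath; the file name "data.arff" and the choice of J48 are placeholders, not taken from any example below.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SummaryStringDemo {
    public static void main(String[] args) throws Exception {
        // "data.arff" is a placeholder path; the class attribute is assumed last
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation, then print the summary report
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}

With a separate test set, the cross-validation call is replaced by training the classifier first and then calling eval.evaluateModel(classifier, testInstances), as several of the servlet examples below do.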
From source file: com.daniel.convert.IncrementalClassifier.java
License: Open Source License
/**
 * Expects an ARFF file as the first argument (the class attribute is assumed
 * to be the last attribute).
 *
 * @param args the command-line arguments
 * @throws Exception if something goes wrong
 */
public static BayesNet treinar(String[] args) throws Exception {
    // load data incrementally
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(args[0]));
    Instances structure = loader.getStructure();
    structure.setClassIndex(structure.numAttributes() - 1);

    // train a BayesNet on all loaded instances
    BayesNet bayesNet = new BayesNet();
    Instance current;
    while ((current = loader.getNextInstance(structure)) != null) {
        structure.add(current);
    }
    bayesNet.buildClassifier(structure);
    // output generated model
    // System.out.println(bayesNet);

    // fresh BayesNet for testing
    BayesNet bayesNetTest = new BayesNet();

    // test the model with 15-fold cross-validation
    Evaluation eTest = new Evaluation(structure);
    // eTest.evaluateModel(bayesNet, structure);
    eTest.crossValidateModel(bayesNetTest, structure, 15, new Random(1));

    // print the result, as in the Weka Explorer
    String strSummary = eTest.toSummaryString();
    System.out.println(strSummary);

    return bayesNet;
}
From source file: com.deafgoat.ml.prognosticator.AppClassifier.java
License: Apache License
/**
 * Evaluates model performance on the test instances.
 *
 * @throws Exception if the model cannot be evaluated
 */
public void evaluate() throws Exception {
    readModel();
    _logger.info("Classifying with " + _config._classifier);
    Evaluation eval = new Evaluation(_testInstances);
    eval.evaluateModel(_cls, _testInstances);
    _logger.info("\n" + eval.toSummaryString());
    try {
        _logger.info("\n" + eval.toClassDetailsString());
    } catch (Exception e) {
        _logger.info("Cannot create class details for " + _config._classifier);
    }
    try {
        _logger.info("\n" + eval.toMatrixString());
    } catch (Exception e) {
        _logger.info("Cannot create confusion matrix for " + _config._classifier
                + " using " + _config._classValue);
    }
}
From source file: com.deafgoat.ml.prognosticator.AppClassifier.java
License: Apache License
/**
 * Writes the results to MongoDB.
 *
 * @param eval the evaluation object holding the data
 * @throws Exception if writing fails
 */
public void writeToMongoDB(Evaluation eval) throws Exception {
    MongoResult mongoResult = new MongoResult(_config._host, _config._port,
            _config._db, _config._modelCollection);
    mongoResult.writeExperiment(_config._relation, "summary", eval.toSummaryString());
    try {
        mongoResult.writeExperiment(_config._relation, "class detail", eval.toClassDetailsString());
    } catch (Exception e) {
        _logger.error("Cannot create class details for " + _config._classifier);
    }
    try {
        mongoResult.writeExperiment(_config._relation, "confusion matrix", eval.toMatrixString());
    } catch (Exception e) {
        _logger.error("Cannot create confusion matrix for " + _config._classifier);
    }
    mongoResult.close();
}
From source file: com.ivanrf.smsspam.SpamClassifier.java
License: Apache License
public static void evaluate(int wordsToKeep, String tokenizerOp, boolean useAttributeSelection,
        String classifierOp, boolean boosting, JTextArea log) {
    try {
        long start = System.currentTimeMillis();

        String modelName = getModelName(wordsToKeep, tokenizerOp, useAttributeSelection,
                classifierOp, boosting);
        showEstimatedTime(false, modelName, log);

        Instances trainData = loadDataset("SMSSpamCollection.arff", log);
        trainData.setClassIndex(0);

        FilteredClassifier classifier = initFilterClassifier(wordsToKeep, tokenizerOp,
                useAttributeSelection, classifierOp, boosting);

        publishEstado("=== Performing cross-validation ===", log);
        Evaluation eval = new Evaluation(trainData);
        // eval.evaluateModel(classifier, trainData);
        eval.crossValidateModel(classifier, trainData, 10, new Random(1));

        publishEstado(eval.toSummaryString(), log);
        publishEstado(eval.toClassDetailsString(), log);
        publishEstado(eval.toMatrixString(), log);
        publishEstado("=== Evaluation finished ===", log);
        publishEstado("Elapsed time: "
                + Utils.getDateHsMinSegString(System.currentTimeMillis() - start), log);
    } catch (Exception e) {
        e.printStackTrace();
        publishEstado("Error found when evaluating", log);
    }
}
From source file: com.sliit.rules.RuleContainer.java
public Map<String, String> evaluateModel() {
    Map<String, String> evaluationSummary = new HashMap<String, String>();
    try {
        instances.setClassIndex(instances.numAttributes() - 1);
        Evaluation evaluation = new Evaluation(instances);
        evaluation.evaluateModel(ruleMoldel, instances);
        ArrayList<Rule> rulesList = ruleMoldel.getRuleset();
        String rules = ruleMoldel.toString();
        evaluationSummary.put("rules", rules);
        evaluationSummary.put("summary", evaluation.toSummaryString());
        evaluationSummary.put("confusion_matrix", evaluation.toMatrixString());
    } catch (Exception e) {
        log.error("Error occurred: " + e.getLocalizedMessage());
    }
    return evaluationSummary;
}
From source file: controller.BothClassificationsServlet.java
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);
    String action = request.getParameter("action");
    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");
        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathBothClassifications = path + "/" + aux + "-bothClassifications.txt";
        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));
        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }
        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput,
                name, columns, types, positions, counter, range);
        try {
            // train a J48 decision tree on the training split
            J48 j48 = new J48();
            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);
            j48.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            int correctsDecisionTree = 0;
            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);
                if (correctValue == classification) {
                    correctsDecisionTree++;
                }
            }
            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            PrintWriter writer = new PrintWriter(
                    new BufferedWriter(new FileWriter(pathBothClassifications, false)));
            writer.println("Árvore de Decisão\n\n");
            writer.println(j48.toString());
            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            // repeat with Naive Bayes on the same splits
            NaiveBayes naiveBayes = new NaiveBayes();
            naiveBayes.buildClassifier(instancesTraining);
            eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);
            int correctsNaiveBayes = 0;
            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);
                if (correctValue == classification) {
                    correctsNaiveBayes++;
                }
            }
            writer.println("Naive Bayes\n\n");
            writer.println(naiveBayes.toString());
            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());
            writer.close();

            response.sendRedirect("BothClassifications?action=view&correctsDecisionTree=" + correctsDecisionTree
                    + "&correctsNaiveBayes=" + correctsNaiveBayes + "&totalTest=" + instancesTest.size()
                    + "&totalTrainig=" + instancesTraining.size() + "&range=" + range
                    + "&fileName=" + aux + "-bothClassifications.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }
        break;
    }
    default:
        response.sendError(404);
    }
}
From source file: controller.DecisionTreeServlet.java
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);
    String action = request.getParameter("action");
    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");
        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathDecisionTree = path + "/" + aux + "-decisionTree.txt";
        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));
        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }
        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput,
                name, columns, types, positions, counter, range);
        try {
            J48 j48 = new J48();
            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);
            j48.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            // manual confusion-matrix counts (class value 1 treated as positive)
            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;
            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);
                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }
            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(pathDecisionTree, false)));
            writer.println(j48.toString());
            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());
            writer.close();

            response.sendRedirect("DecisionTree?action=view&corrects=" + corrects
                    + "&totalTest=" + instancesTest.size() + "&totalTrainig=" + instancesTraining.size()
                    + "&truePositive=" + truePositive + "&trueNegative=" + trueNegative
                    + "&falsePositive=" + falsePositive + "&falseNegative=" + falseNegative
                    + "&fileName=" + aux + "-decisionTree.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }
        break;
    }
    default:
        response.sendError(404);
    }
}
From source file: controller.NaiveBayesServlet.java
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);
    String action = request.getParameter("action");
    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");
        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathNaivebayes = path + "/" + aux + "-naiveBayes.txt";
        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));
        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }
        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput,
                name, columns, types, positions, counter, range);
        try {
            NaiveBayes naiveBayes = new NaiveBayes();
            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);
            naiveBayes.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            // manual confusion-matrix counts (class value 1 treated as positive)
            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;
            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);
                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }
            PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(pathNaivebayes, false)));
            writer.println(naiveBayes.toString());
            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());
            writer.close();

            response.sendRedirect("NaiveBayes?action=view&corrects=" + corrects
                    + "&totalTest=" + instancesTest.size() + "&totalTrainig=" + instancesTraining.size()
                    + "&range=" + range + "&truePositive=" + truePositive + "&trueNegative=" + trueNegative
                    + "&falsePositive=" + falsePositive + "&falseNegative=" + falseNegative
                    + "&fileName=" + aux + "-naiveBayes.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=naiveBayes");
        }
        break;
    }
    default:
        response.sendError(404);
    }
}
From source file: csav2.Weka_additive.java
public void createTrainingFeatureFile1(String input) throws Exception {
    String file = "Classifier\\featurefile_additive_trial1.arff";
    ArffLoader loader = new ArffLoader();

    // ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    // numeric
    attr[0] = new Attribute("Autosentiment");
    // class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[1] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);

    // Add instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    if (new File(file).isFile()) {
        loader.setFile(new File(file));
        dataset = loader.getDataSet();
    }

    System.out.println("-----------------------------------------");
    System.out.println(input);
    System.out.println("-----------------------------------------");

    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(2);
        for (int j = 0; j < 2; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 1)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        dataset.add(example);
    }

    // Save dataset
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    // Read dataset
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    // Build classifier
    dataset.setClassIndex(1);
    Classifier classifier = new J48();
    classifier.buildClassifier(dataset);

    // Save classifier
    String file1 = "Classifier\\classifier_add_autosentiment.model";
    OutputStream os = new FileOutputStream(file1);
    ObjectOutputStream objectOutputStream = new ObjectOutputStream(os);
    objectOutputStream.writeObject(classifier);
    objectOutputStream.close();

    // Read classifier back (comment out if not needed)
    InputStream is = new FileInputStream(file1);
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();
    objectInputStream.close();

    // Resample if needed
    // dataset = dataset.resample(new Random(42));

    // split to 70:30 learn and test set
    double percent = 70.0;
    int trainSize = (int) Math.round(dataset.numInstances() * percent / 100);
    int testSize = dataset.numInstances() - trainSize;
    Instances train = new Instances(dataset, 0, trainSize);
    Instances test = new Instances(dataset, trainSize, testSize);
    train.setClassIndex(1);
    test.setClassIndex(1);

    // Evaluate with 10-fold cross-validation on the full dataset
    Evaluation eval = new Evaluation(dataset);
    eval.crossValidateModel(classifier, dataset, 10, new Random(1));
    System.out.println("EVALUATION:\n" + eval.toSummaryString());
    System.out.println("WEIGHTED MEASURE:\n" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:\n" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:\n" + eval.weightedRecall());
}
From source file: csav2.Weka_additive.java
public void createTrainingFeatureFile2(String input) throws Exception {
    String file = "Classifier\\featurefile_additive_trial2.arff";
    ArffLoader loader = new ArffLoader();

    // ATTRIBUTES
    Attribute attr[] = new Attribute[50];
    // numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    // class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[3] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);

    // Add instances
    Instances dataset = new Instances("my_dataset", attrs, 0);
    if (new File(file).isFile()) {
        loader.setFile(new File(file));
        dataset = loader.getDataSet();
    }

    System.out.println("-----------------------------------------");
    System.out.println(input);
    System.out.println("-----------------------------------------");

    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(4);
        for (int j = 0; j < 4; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st)); // Autosentiment score
            else if (j == 3)
                example.setValue(attr[j], st); // class label
            else
                example.setValue(attr[j], Integer.parseInt(st)); // match counts
        }
        dataset.add(example);
    }

    // Save dataset
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    // Read dataset
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    // Build classifier
    dataset.setClassIndex(3);
    Classifier classifier = new J48();
    classifier.buildClassifier(dataset);

    // Save classifier
    String file1 = "Classifier\\classifier_add_asAndpolarwords.model";
    OutputStream os = new FileOutputStream(file1);
    ObjectOutputStream objectOutputStream = new ObjectOutputStream(os);
    objectOutputStream.writeObject(classifier);
    objectOutputStream.close();

    // Read classifier back (comment out if not needed)
    InputStream is = new FileInputStream(file1);
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();
    objectInputStream.close();

    // Resample if needed
    // dataset = dataset.resample(new Random(42));

    // split to 70:30 learn and test set
    double percent = 70.0;
    int trainSize = (int) Math.round(dataset.numInstances() * percent / 100);
    int testSize = dataset.numInstances() - trainSize;
    Instances train = new Instances(dataset, 0, trainSize);
    Instances test = new Instances(dataset, trainSize, testSize);
    train.setClassIndex(3);
    test.setClassIndex(3);

    // Evaluate with 10-fold cross-validation on the full dataset
    Evaluation eval = new Evaluation(dataset);
    eval.crossValidateModel(classifier, dataset, 10, new Random(1));
    System.out.println("EVALUATION:\n" + eval.toSummaryString());
    System.out.println("WEIGHTED MEASURE:\n" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:\n" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:\n" + eval.weightedRecall());
}