List of usage examples for weka.classifiers.Evaluation evaluateModel
public static String evaluateModel(Classifier classifier, String[] options) throws Exception
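This static overload runs Weka's full command-line evaluation harness and returns the report as a String. A minimal sketch of calling it directly, assuming a J48 tree and an ARFF file at train.arff (both stand-ins for your own classifier and data):

    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;

    public class EvaluateModelSketch {
        public static void main(String[] args) throws Exception {
            // -t names the training file; with no -T test file, the harness
            // falls back to cross-validation on the training data (10-fold by default)
            String[] options = { "-t", "train.arff" };
            String report = Evaluation.evaluateModel(new J48(), options);
            System.out.println(report);
        }
    }

Most of the examples below instead use the instance method evaluateModel(Classifier, Instances), which evaluates an already-built classifier on a set of Instances and returns the predictions as a double array.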
From source file:kfst.classifier.WekaClassifier.java
License:Open Source License
/**
 * This method builds and evaluates the naive Bayes (NB) classifier.
 * The NaiveBayes implementation of the Weka software is used as the NB classifier.
 *
 * @param pathTrainData the path of the train set
 * @param pathTestData  the path of the test set
 *
 * @return the classification accuracy
 */
public static double naiveBayes(String pathTrainData, String pathTestData) {
    double resultValue = 0;
    try {
        BufferedReader readerTrain = new BufferedReader(new FileReader(pathTrainData));
        Instances dataTrain = new Instances(readerTrain);
        readerTrain.close();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        BufferedReader readerTest = new BufferedReader(new FileReader(pathTestData));
        Instances dataTest = new Instances(readerTest);
        readerTest.close();
        dataTest.setClassIndex(dataTest.numAttributes() - 1);

        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(dataTrain);
        // note: priors are taken from the test set here; the more common
        // Weka idiom is new Evaluation(dataTrain)
        Evaluation eval = new Evaluation(dataTest);
        eval.evaluateModel(nb, dataTest);
        resultValue = 100 - (eval.errorRate() * 100);
    } catch (Exception ex) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return resultValue;
}
From source file:kfst.classifier.WekaClassifier.java
License:Open Source License
/**
 * This method builds and evaluates the decision tree (DT) classifier.
 * The J48 implementation of the Weka software is used as the DT classifier.
 *
 * @param pathTrainData       the path of the train set
 * @param pathTestData        the path of the test set
 * @param confidenceValue     the confidence factor used for pruning
 * @param minNumSampleInLeaf  the minimum number of instances per leaf
 *
 * @return the classification accuracy
 */
public static double dTree(String pathTrainData, String pathTestData, double confidenceValue,
        int minNumSampleInLeaf) {
    double resultValue = 0;
    try {
        BufferedReader readerTrain = new BufferedReader(new FileReader(pathTrainData));
        Instances dataTrain = new Instances(readerTrain);
        readerTrain.close();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        BufferedReader readerTest = new BufferedReader(new FileReader(pathTestData));
        Instances dataTest = new Instances(readerTest);
        readerTest.close();
        dataTest.setClassIndex(dataTest.numAttributes() - 1);

        J48 decisionTree = new J48();
        decisionTree.setConfidenceFactor((float) confidenceValue);
        decisionTree.setMinNumObj(minNumSampleInLeaf);
        decisionTree.buildClassifier(dataTrain);
        Evaluation eval = new Evaluation(dataTest);
        eval.evaluateModel(decisionTree, dataTest);
        resultValue = 100 - (eval.errorRate() * 100);
    } catch (Exception ex) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return resultValue;
}
From source file:knnclassifier.Main.java
public static void main(String[] args) throws Exception {
    // 'file' is defined elsewhere in the class (path to the data file)
    DataSource source = new DataSource(file);
    Instances dataSet = source.getDataSet();

    // set up data: class attribute last, then shuffle
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random());

    // 70/30 train/test split
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;
    Instances training = new Instances(dataSet, 0, trainingSize);
    Instances test = new Instances(dataSet, trainingSize, testSize);

    // standardize both sets using statistics computed on the training set
    Standardize standardizedData = new Standardize();
    standardizedData.setInputFormat(training);
    Instances newTest = Filter.useFilter(test, standardizedData);
    Instances newTraining = Filter.useFilter(training, standardizedData);

    KNNClassifier knn = new KNNClassifier();
    knn.buildClassifier(newTraining);

    Evaluation eval = new Evaluation(newTraining);
    eval.evaluateModel(knn, newTest);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}
From source file:lascer.WekaClassifier.java
License:Open Source License
/**
 * Evaluates the classifier.
 *
 * @param args an array with the options for the evaluation.
 */
public static void main(String[] args) {
    try {
        System.out.println(Evaluation.evaluateModel(new WekaClassifier(), args));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
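Because args is handed straight to Weka's evaluation harness, the usual command-line options apply: -t <file> names the training set (required), -T <file> names a separate test set, and without -T the harness falls back to cross-validation on the training data (-x sets the number of folds, 10 by default).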
From source file:lector.Analizador.java
public static void clasificador() {
    BufferedReader reader1;
    BufferedReader reader2;
    try {
        reader1 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos.arff"));
        reader2 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos_inc.arff"));
        Instances train = new Instances(reader1);
        train.setClassIndex(train.numAttributes() - 1);
        System.out.println(train.classIndex() + " " + train.numAttributes());
        Instances test = new Instances(reader2);
        test.setClassIndex(train.numAttributes() - 1);
        System.out.println(test.classIndex() + " " + test.numAttributes());

        NaiveBayes model = new NaiveBayes();
        model.buildClassifier(train);

        // classify each test instance and store the predicted label
        Instances labeled = new Instances(test);
        for (int i = 0; i < test.numInstances(); i++) {
            double clsLabel = model.classifyInstance(test.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
        }

        // https://youtu.be/JY_x5zKTfyo?list=PLJbE6j2EG1pZnBhOg3_Rb63WLCprtyJag
        Evaluation eval_train = new Evaluation(test);
        eval_train.evaluateModel(model, test);
        reader1.close();
        reader2.close();
        //System.out.println(eval_train.toSummaryString("\nResults\n======\n", false));

        String[] options = new String[4];
        options[0] = "-t"; // flag for the training file
        options[1] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos.arff";
        options[2] = "-T"; // flag for the test file
        options[3] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos_inc.arff";
        System.out.println(Evaluation.evaluateModel(model, options));

        // print classification results to file
        try (BufferedWriter writer = new BufferedWriter(
                new FileWriter("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                        + "proyecto/compartida/DataSetAnalisisSentimientos_labeled.arff"))) {
            writer.write(labeled.toString());
        }
    } catch (Exception e) {
        e.printStackTrace(); // do not swallow exceptions silently
    }
}
From source file:lu.lippmann.cdb.datasetview.tabs.RegressionTreeTabView.java
License:Open Source License
/**
 * {@inheritDoc}
 */
@SuppressWarnings("unchecked")
@Override
public void update0(final Instances dataSet) throws Exception {
    this.panel.removeAll();

    //final Object[] attrNames=WekaDataStatsUtil.getNumericAttributesNames(dataSet).toArray();
    final Object[] attrNames = WekaDataStatsUtil.getAttributeNames(dataSet).toArray();
    final JComboBox xCombo = new JComboBox(attrNames);
    xCombo.setBorder(new TitledBorder("Attribute to evaluate"));

    final JXPanel comboPanel = new JXPanel();
    comboPanel.setLayout(new GridLayout(1, 2));
    comboPanel.add(xCombo);
    final JXButton jxb = new JXButton("Compute");
    comboPanel.add(jxb);
    this.panel.add(comboPanel, BorderLayout.NORTH);

    jxb.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            try {
                if (gv != null)
                    panel.remove((Component) gv);

                dataSet.setClassIndex(xCombo.getSelectedIndex());

                final REPTree rt = new REPTree();
                rt.setNoPruning(true);
                //rt.setMaxDepth(3);
                rt.buildClassifier(dataSet);
                /*final M5P rt=new M5P();
                rt.buildClassifier(dataSet);*/

                final Evaluation eval = new Evaluation(dataSet);
                double[] d = eval.evaluateModel(rt, dataSet);
                System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));
                System.out.println(eval.errorRate());
                System.out.println(eval.sizeOfPredictedRegions());
                System.out.println(eval.toSummaryString("", true));

                final GraphWithOperations gwo = GraphUtil
                        .buildGraphWithOperationsFromWekaRegressionString(rt.graph());
                final DecisionTree dt = new DecisionTree(gwo, eval.errorRate());

                gv = DecisionTreeToGraphViewHelper.buildGraphView(dt, eventPublisher, commandDispatcher);
                gv.addMetaInfo("Size=" + dt.getSize(), "");
                gv.addMetaInfo("Depth=" + dt.getDepth(), "");
                gv.addMetaInfo("MAE=" + FormatterUtil.DECIMAL_FORMAT.format(eval.meanAbsoluteError()), "");
                gv.addMetaInfo("RMSE=" + FormatterUtil.DECIMAL_FORMAT.format(eval.rootMeanSquaredError()), "");

                final JCheckBox toggleDecisionTreeDetails = new JCheckBox("Toggle details");
                toggleDecisionTreeDetails.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        if (!tweakedGraph) {
                            final Object[] mapRep = WekaDataStatsUtil
                                    .buildNodeAndEdgeRepartitionMap(dt.getGraphWithOperations(), dataSet);
                            gv.updateVertexShapeTransformer((Map<CNode, Map<Object, Integer>>) mapRep[0]);
                            gv.updateEdgeShapeRenderer((Map<CEdge, Float>) mapRep[1]);
                        } else {
                            gv.resetVertexAndEdgeShape();
                        }
                        tweakedGraph = !tweakedGraph;
                    }
                });
                gv.addMetaInfoComponent(toggleDecisionTreeDetails);

                /*final JButton openInEditorButton = new JButton("Open in editor");
                openInEditorButton.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        GraphUtil.importDecisionTreeInEditor(dtFactory, dataSet, applicationContext,
                                eventPublisher, commandDispatcher);
                    }
                });
                this.gv.addMetaInfoComponent(openInEditorButton);*/

                final JButton showTextButton = new JButton("In text");
                showTextButton.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        JOptionPane.showMessageDialog(null, graphDsl.getDslString(dt.getGraphWithOperations()));
                    }
                });
                gv.addMetaInfoComponent(showTextButton);

                panel.add(gv.asComponent(), BorderLayout.CENTER);
            } catch (Exception e1) {
                e1.printStackTrace();
                panel.add(new JXLabel("Error during computation: " + e1.getMessage()), BorderLayout.CENTER);
            }
        }
    });
}
From source file:lu.lippmann.cdb.dt.ModelTreeFactory.java
License:Open Source License
/**
 * Main method.
 * @param args command line arguments
 */
public static void main(final String[] args) {
    try {
        //final String f="./samples/csv/uci/winequality-red-simplified.csv";
        final String f = "./samples/csv/uci/winequality-white.csv";
        //final String f="./samples/arff/UCI/crimepredict.arff";
        final Instances dataSet = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f));
        System.out.println(dataSet.classAttribute().isNumeric());

        final M5P rt = new M5P();
        //rt.setUnpruned(true);
        rt.setMinNumInstances(1000);
        rt.buildClassifier(dataSet);
        System.out.println(rt);
        System.out.println(rt.graph());

        final GraphWithOperations gwo = GraphUtil.buildGraphWithOperationsFromWekaRegressionString(rt.graph());
        System.out.println(gwo);
        System.out.println(new ASCIIGraphDsl().getDslString(gwo));

        final Evaluation eval = new Evaluation(dataSet);
        /*Field privateStringField = Evaluation.class.getDeclaredField("m_CoverageStatisticsAvailable");
        privateStringField.setAccessible(true);
        //privateStringField.get
        boolean fieldValue = privateStringField.getBoolean(eval);
        System.out.println("fieldValue = " + fieldValue);*/
        double[] d = eval.evaluateModel(rt, dataSet);
        System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));
        System.out.println(eval.errorRate());
        System.out.println(eval.sizeOfPredictedRegions());
        System.out.println(eval.toSummaryString("", true));

        System.out.println(new DecisionTree(gwo, eval.errorRate()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:lu.lippmann.cdb.dt.RegressionTreeFactory.java
License:Open Source License
/**
 * Main method.
 * @param args command line arguments
 */
public static void main(final String[] args) {
    try {
        final String f = "./samples/csv/uci/winequality-red.csv";
        //final String f="./samples/arff/UCI/crimepredict.arff";
        final Instances dataSet = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f));
        System.out.println(dataSet.classAttribute().isNumeric());

        final REPTree rt = new REPTree();
        rt.setMaxDepth(3);
        rt.buildClassifier(dataSet);
        System.out.println(rt);
        //System.out.println(rt.graph());

        final GraphWithOperations gwo = GraphUtil.buildGraphWithOperationsFromWekaRegressionString(rt.graph());
        System.out.println(gwo);
        System.out.println(new ASCIIGraphDsl().getDslString(gwo));

        final Evaluation eval = new Evaluation(dataSet);
        /*Field privateStringField = Evaluation.class.getDeclaredField("m_CoverageStatisticsAvailable");
        privateStringField.setAccessible(true);
        //privateStringField.get
        boolean fieldValue = privateStringField.getBoolean(eval);
        System.out.println("fieldValue = " + fieldValue);*/
        double[] d = eval.evaluateModel(rt, dataSet);
        System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));
        System.out.println(eval.errorRate());
        System.out.println(eval.sizeOfPredictedRegions());
        System.out.println(eval.toSummaryString("", true));

        /*final String f2="./samples/csv/salary.csv";
        final Instances dataSet2=WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f2));
        final J48 j48=new J48();
        j48.buildClassifier(dataSet2);
        System.out.println(j48.graph());
        final GraphWithOperations gwo2=GraphUtil.buildGraphWithOperationsFromWekaString(j48.graph(),false);
        System.out.println(gwo2);*/

        System.out.println(new DecisionTree(gwo, eval.errorRate()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:machinelearningproject.MachineLearningProject.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    DataSource source = new DataSource("D:\\spambase.arff");
    // DataSource source = new DataSource("D:\\weather-nominal.arff");
    Instances instances = source.getDataSet();
    int numAttr = instances.numAttributes();
    instances.setClassIndex(instances.numAttributes() - 1);

    int runs = 5;
    int seed = 15;
    for (int i = 0; i < runs; i++) {
        // randomize the data with a fresh seed for every run
        seed = seed + 1;
        Random rand = new Random(seed); // create seeded number generator
        Instances randData = new Instances(instances); // create copy of original data
        Collections.shuffle(randData);

        Evaluation evalDTree = new Evaluation(randData);
        Evaluation evalRF = new Evaluation(randData);
        Evaluation evalSVM = new Evaluation(randData);

        int folds = 10;
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances test = randData.testCV(folds, n);

            // instantiate classifiers
            DecisionTree dtree = new DecisionTree();
            RandomForest rf = new RandomForest(100);
            SMO svm = new SMO();
            RBFKernel rbfKernel = new RBFKernel();
            double gamma = 0.70;
            rbfKernel.setGamma(gamma);
            svm.setKernel(rbfKernel); // attach the configured RBF kernel to SMO

            dtree.buildClassifier(train);
            rf.buildClassifier(train);
            svm.buildClassifier(train);

            evalDTree.evaluateModel(dtree, test);
            evalRF.evaluateModel(rf, test);
            evalSVM.evaluateModel(svm, test);
        }

        System.out.println("=== Decision Tree Evaluation ===");
        System.out.println(evalDTree.toSummaryString());
        System.out.println(evalDTree.toClassDetailsString());
        System.out.println(evalDTree.toMatrixString());

        System.out.println("=== Random Forest Evaluation ===");
        System.out.println(evalRF.toSummaryString());
        System.out.println(evalRF.toClassDetailsString());
        System.out.println(evalRF.toMatrixString());

        System.out.println("=== SVM Evaluation ===");
        System.out.println(evalSVM.toSummaryString());
        System.out.println(evalSVM.toClassDetailsString());
        System.out.println(evalSVM.toMatrixString());
    }
}
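The fold loop above reproduces by hand what Weka's built-in cross-validation does internally. A more compact alternative, sketched here assuming the project's RandomForest implements Weka's Classifier interface (it must, since evaluateModel above accepts it):

    Evaluation evalRF = new Evaluation(randData);
    // builds a fresh copy of the classifier for each of the 10 folds,
    // randomizing (and, for nominal classes, stratifying) the data internally
    evalRF.crossValidateModel(new RandomForest(100), randData, 10, new Random(seed));
    System.out.println(evalRF.toSummaryString());

In the manual version, note that Collections.shuffle gives an unstratified ordering before trainCV/testCV; randData.randomize(rand) followed by randData.stratify(folds) is the usual Weka idiom for stratified folds.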
From source file:main.mFFNN.java
public static void main(String[] args) throws Exception {
    mFFNN m = new mFFNN();
    BufferedReader breader = new BufferedReader(new FileReader("src\\main\\iris.arff"));
    Instances fileTrain = new Instances(breader);
    fileTrain.setClassIndex(fileTrain.numAttributes() - 1);
    System.out.println(fileTrain);
    breader.close();

    System.out.println("mFFNN!!!\n\n");
    FeedForwardNeuralNetwork FFNN = new FeedForwardNeuralNetwork();
    Evaluation eval = new Evaluation(fileTrain);
    FFNN.buildClassifier(fileTrain);
    // note: despite the printed header below, this evaluates the model
    // on its own training data, not via cross-validation
    eval.evaluateModel(FFNN, fileTrain);

    // OUTPUT
    Scanner scan = new Scanner(System.in);
    System.out.println(eval.toSummaryString("=== Stratified cross-validation ===\n" + "=== Summary ===", true));
    System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
    System.out.println(eval.toMatrixString("=== Confusion matrix ==="));
    System.out.println(eval.fMeasure(1) + " " + eval.recall(1));
    System.out.println("\nDo you want to save this model(1/0)? ");
    FFNN.distributionForInstance(fileTrain.get(0));
    /*
    int c = scan.nextInt();
    if (c == 1) {
        System.out.print("Please enter your file name (*.model) : ");
        String infile = scan.next();
        m.saveModel(FFNN, infile);
    } else {
        System.out.print("Model not saved.");
    }
    */
}