Example usage for weka.classifiers Evaluation toSummaryString

List of usage examples for weka.classifiers Evaluation toSummaryString

Introduction

On this page you can find example usages of the weka.classifiers.Evaluation method toSummaryString.

Prototype

public String toSummaryString(String title, boolean printComplexityStatistics) 

Source Link

Document

Outputs the performance statistics in summary form.

Usage

From source file:knnclassifier.Main.java

public static void main(String[] args) throws Exception {
    // Load the dataset from the configured source file.
    final DataSource dataSource = new DataSource(file);
    final Instances dataSet = dataSource.getDataSet();

    // The class label is the last attribute; shuffle before splitting.
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random());

    // 70/30 train/test split.
    final int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    final int testSize = dataSet.numInstances() - trainingSize;
    final Instances training = new Instances(dataSet, 0, trainingSize);
    final Instances test = new Instances(dataSet, trainingSize, testSize);

    // Fit the standardization filter on the training split only,
    // then apply the same transformation to both splits.
    final Standardize standardize = new Standardize();
    standardize.setInputFormat(training);
    final Instances newTest = Filter.useFilter(test, standardize);
    final Instances newTraining = Filter.useFilter(training, standardize);

    // Train k-NN on the standardized training data.
    final KNNClassifier knn = new KNNClassifier();
    knn.buildClassifier(newTraining);

    // Evaluate on the held-out standardized test data and print the summary.
    final Evaluation evaluation = new Evaluation(newTraining);
    evaluation.evaluateModel(knn, newTest);
    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
}

From source file:lu.lippmann.cdb.datasetview.tabs.RegressionTreeTabView.java

License:Open Source License

/**
 * {@inheritDoc}
 *
 * Rebuilds the panel with an attribute selector and a "Compute" button. On
 * click, the selected attribute becomes the class, an unpruned REPTree is
 * fitted to the whole dataset, evaluated on that same data (resubstitution),
 * and the resulting decision tree is rendered as a graph with summary metrics.
 */
@SuppressWarnings("unchecked")
@Override
public void update0(final Instances dataSet) throws Exception {
    this.panel.removeAll();

    // Offer every attribute (not only numeric ones) as a candidate target.
    final Object[] attrNames = WekaDataStatsUtil.getAttributeNames(dataSet).toArray();
    final JComboBox xCombo = new JComboBox(attrNames);
    xCombo.setBorder(new TitledBorder("Attribute to evaluate"));

    final JXPanel comboPanel = new JXPanel();
    comboPanel.setLayout(new GridLayout(1, 2));
    comboPanel.add(xCombo);
    final JXButton jxb = new JXButton("Compute");
    comboPanel.add(jxb);
    this.panel.add(comboPanel, BorderLayout.NORTH);

    jxb.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            try {
                // Drop the previously displayed graph view, if any.
                if (gv != null)
                    panel.remove((Component) gv);

                // The selected attribute becomes the regression target.
                dataSet.setClassIndex(xCombo.getSelectedIndex());

                // Build an unpruned regression tree on the full dataset.
                final REPTree rt = new REPTree();
                rt.setNoPruning(true);
                rt.buildClassifier(dataSet);

                // Resubstitution evaluation (train == test); optimistic by design.
                final Evaluation eval = new Evaluation(dataSet);
                double[] d = eval.evaluateModel(rt, dataSet);
                System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));
                System.out.println(eval.errorRate());
                System.out.println(eval.sizeOfPredictedRegions());
                System.out.println(eval.toSummaryString("", true));

                // Convert the Weka tree into the internal graph / decision-tree model.
                final GraphWithOperations gwo = GraphUtil
                        .buildGraphWithOperationsFromWekaRegressionString(rt.graph());
                final DecisionTree dt = new DecisionTree(gwo, eval.errorRate());

                gv = DecisionTreeToGraphViewHelper.buildGraphView(dt, eventPublisher, commandDispatcher);
                gv.addMetaInfo("Size=" + dt.getSize(), "");
                gv.addMetaInfo("Depth=" + dt.getDepth(), "");

                gv.addMetaInfo("MAE=" + FormatterUtil.DECIMAL_FORMAT.format(eval.meanAbsoluteError()) + "", "");
                gv.addMetaInfo("RMSE=" + FormatterUtil.DECIMAL_FORMAT.format(eval.rootMeanSquaredError()) + "",
                        "");

                final JCheckBox toggleDecisionTreeDetails = new JCheckBox("Toggle details");
                toggleDecisionTreeDetails.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        // Toggle between plain node/edge shapes and shapes reflecting
                        // the instance repartition over the tree.
                        if (!tweakedGraph) {
                            final Object[] mapRep = WekaDataStatsUtil
                                    .buildNodeAndEdgeRepartitionMap(dt.getGraphWithOperations(), dataSet);
                            gv.updateVertexShapeTransformer((Map<CNode, Map<Object, Integer>>) mapRep[0]);
                            gv.updateEdgeShapeRenderer((Map<CEdge, Float>) mapRep[1]);
                        } else {
                            gv.resetVertexAndEdgeShape();
                        }
                        tweakedGraph = !tweakedGraph;
                    }
                });
                gv.addMetaInfoComponent(toggleDecisionTreeDetails);

                final JButton showTextButton = new JButton("In text");
                showTextButton.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        // Show the tree as a DSL string in a modal dialog.
                        JOptionPane.showMessageDialog(null, graphDsl.getDslString(dt.getGraphWithOperations()));
                    }
                });
                gv.addMetaInfoComponent(showTextButton);

                panel.add(gv.asComponent(), BorderLayout.CENTER);
            } catch (Exception e1) {
                // Surface the failure inside the panel rather than failing silently.
                e1.printStackTrace();
                panel.add(new JXLabel("Error during computation: " + e1.getMessage()), BorderLayout.CENTER);
            }

        }
    });
}

From source file:lu.lippmann.cdb.dt.ModelTreeFactory.java

License:Open Source License

/**
 * Main method./* www  .  ja v a2  s. com*/
 * @param args command line arguments
 */
public static void main(final String[] args) {
    try {
        //final String f="./samples/csv/uci/winequality-red-simplified.csv";
        final String f = "./samples/csv/uci/winequality-white.csv";
        //final String f="./samples/arff/UCI/crimepredict.arff";
        final Instances dataSet = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f));
        System.out.println(dataSet.classAttribute().isNumeric());

        final M5P rt = new M5P();
        //rt.setUnpruned(true);
        rt.setMinNumInstances(1000);
        rt.buildClassifier(dataSet);

        System.out.println(rt);

        System.out.println(rt.graph());

        final GraphWithOperations gwo = GraphUtil.buildGraphWithOperationsFromWekaRegressionString(rt.graph());
        System.out.println(gwo);
        System.out.println(new ASCIIGraphDsl().getDslString(gwo));

        final Evaluation eval = new Evaluation(dataSet);

        /*Field privateStringField = Evaluation.class.getDeclaredField("m_CoverageStatisticsAvailable");
        privateStringField.setAccessible(true);
        //privateStringField.get
        boolean fieldValue = privateStringField.getBoolean(eval);
        System.out.println("fieldValue = " + fieldValue);*/

        double[] d = eval.evaluateModel(rt, dataSet);
        System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));

        System.out.println(eval.errorRate());
        System.out.println(eval.sizeOfPredictedRegions());

        System.out.println(eval.toSummaryString("", true));

        System.out.println(new DecisionTree(gwo, eval.errorRate()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:lu.lippmann.cdb.dt.RegressionTreeFactory.java

License:Open Source License

/**
 * Demo entry point: builds a depth-limited REPTree regression tree on the
 * red wine-quality dataset and prints the tree, its graph/DSL forms, and
 * the evaluation statistics.
 *
 * @param args command line arguments (unused)
 */
public static void main(final String[] args) {
    try {
        final String f = "./samples/csv/uci/winequality-red.csv";
        final Instances dataSet = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f));
        System.out.println(dataSet.classAttribute().isNumeric());

        // Shallow tree: a depth limit keeps the resulting graph readable.
        final REPTree rt = new REPTree();
        rt.setMaxDepth(3);
        rt.buildClassifier(dataSet);
        System.out.println(rt);

        // Translate the Weka tree into the internal graph representation.
        final GraphWithOperations gwo = GraphUtil.buildGraphWithOperationsFromWekaRegressionString(rt.graph());
        System.out.println(gwo);
        System.out.println(new ASCIIGraphDsl().getDslString(gwo));

        // Resubstitution evaluation (train == test).
        final Evaluation eval = new Evaluation(dataSet);
        final double[] predictions = eval.evaluateModel(rt, dataSet);
        System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(predictions));
        System.out.println(eval.errorRate());
        System.out.println(eval.sizeOfPredictedRegions());
        System.out.println(eval.toSummaryString("", true));

        System.out.println(new DecisionTree(gwo, eval.errorRate()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:main.mFFNN.java

public static void main(String[] args) throws Exception {
    mFFNN m = new mFFNN();
    // Read the ARFF training file; the class label is the last attribute.
    BufferedReader arffReader = new BufferedReader(new FileReader("src\\main\\iris.arff"));
    Instances fileTrain = new Instances(arffReader);
    fileTrain.setClassIndex(fileTrain.numAttributes() - 1);
    System.out.println(fileTrain);
    arffReader.close();

    System.out.println("mFFNN!!!\n\n");
    FeedForwardNeuralNetwork FFNN = new FeedForwardNeuralNetwork();

    // Train on the full dataset and evaluate on the same data (resubstitution).
    Evaluation eval = new Evaluation(fileTrain);
    FFNN.buildClassifier(fileTrain);
    eval.evaluateModel(FFNN, fileTrain);

    // Print the evaluation reports.
    Scanner scan = new Scanner(System.in);
    System.out.println(eval.toSummaryString("=== Stratified cross-validation ===\n" + "=== Summary ===", true));
    System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
    System.out.println(eval.toMatrixString("===Confusion matrix==="));
    System.out.println(eval.fMeasure(1) + " " + eval.recall(1));
    System.out.println("\nDo you want to save this model(1/0)? ");
    FFNN.distributionForInstance(fileTrain.get(0));
    // NOTE(review): the interactive model-saving flow is currently disabled;
    // 'scan' is retained for when it is re-enabled.
}

From source file:mao.datamining.ModelProcess.java

/**
 * Trains {@code classifier} on the final training set, evaluates it once on the
 * held-out final test set, appends summary/class-detail/confusion-matrix reports
 * to {@code testCaseSummaryOut}, and records timings plus the confusion matrix,
 * AUC, precision and recall into {@code result}. Does nothing when
 * {@code finalTestDataSet} is null.
 */
private void testWithExtraDS(Classifier classifier, Instances finalTrainDataSet, Instances finalTestDataSet,
        FileOutputStream testCaseSummaryOut, TestResult result) {
    //Use final training dataset and final test dataset
    double confusionMatrix[][] = null;

    long start, end, trainTime = 0, testTime = 0;
    if (finalTestDataSet != null) {
        try {
            //counting training time
            start = System.currentTimeMillis();
            classifier.buildClassifier(finalTrainDataSet);
            end = System.currentTimeMillis();
            trainTime += end - start;

            //counting test time
            start = System.currentTimeMillis();
            Evaluation testEvalOnly = new Evaluation(finalTrainDataSet);
            testEvalOnly.evaluateModel(classifier, finalTestDataSet);
            end = System.currentTimeMillis();
            testTime += end - start;

            // Append the evaluation reports to the per-test-case summary stream.
            testCaseSummaryOut.write("=====================================================\n".getBytes());
            testCaseSummaryOut.write((testEvalOnly.toSummaryString("=== Test Summary ===", true)).getBytes());
            testCaseSummaryOut.write("\n".getBytes());
            testCaseSummaryOut
                    .write((testEvalOnly.toClassDetailsString("=== Test Class Detail ===\n")).getBytes());
            testCaseSummaryOut.write("\n".getBytes());
            testCaseSummaryOut
                    .write((testEvalOnly.toMatrixString("=== Confusion matrix for Test ===\n")).getBytes());
            testCaseSummaryOut.flush();

            confusionMatrix = testEvalOnly.confusionMatrix();
            result.setConfusionMatrix4Test(confusionMatrix);

            // Per-class metrics for class index 1
            // NOTE(review): presumably index 1 is the positive class — confirm.
            result.setAUT(testEvalOnly.areaUnderROC(1));
            result.setPrecision(testEvalOnly.precision(1));
            result.setRecall(testEvalOnly.recall(1));
        } catch (Exception e) {
            ModelProcess.logging(null, e);
        }
        // Timings are recorded even if evaluation failed part-way.
        result.setTrainingTime(trainTime);
        result.setTestTime(testTime);
    } // end: test data set provided

}

From source file:mao.datamining.ModelProcess.java

/**
 * Evaluates {@code classifier} with stratified {@code folds}-fold cross-validation
 * on {@code finalTrainDataSet}, appends summary/class-detail/confusion-matrix
 * reports to {@code testCaseSummaryOut}, and stores the pooled confusion matrix
 * into {@code result}.
 */
private void testCV(Classifier classifier, Instances finalTrainDataSet, FileOutputStream testCaseSummaryOut,
        TestResult result) {
    long start, end, trainTime = 0, testTime = 0;
    Evaluation evalAll = null;
    double confusionMatrix[][] = null;
    // Randomize the data, then stratify it so each fold keeps the class distribution.
    Random rand = new Random(1);
    Instances randData = new Instances(finalTrainDataSet);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        // Stratification only applies to nominal class attributes.
        randData.stratify(folds);
    }

    try {
        evalAll = new Evaluation(randData);
        for (int i = 0; i < folds; i++) {
            Instances train = randData.trainCV(folds, i);
            Instances test = randData.testCV(folds, i);
            // Train a fresh copy of the classifier on this fold (timed).
            start = System.currentTimeMillis();
            Classifier classifierCopy = Classifier.makeCopy(classifier);
            classifierCopy.buildClassifier(train);
            end = System.currentTimeMillis();
            trainTime += end - start;

            // Evaluate the fold's test split, pooling into evalAll (timed).
            // BUG FIX: the original also evaluated into a discarded per-fold
            // Evaluation, doubling the measured test time for no benefit.
            start = System.currentTimeMillis();
            evalAll.evaluateModel(classifierCopy, test);
            end = System.currentTimeMillis();
            testTime += end - start;
        }

    } catch (Exception e) {
        ModelProcess.logging(null, e);
    } //end test by cross validation

    if (evalAll == null) {
        // BUG FIX: cross-validation failed before anything was pooled; previously
        // this fell through and threw a NullPointerException below.
        return;
    }

    // output evaluation
    try {
        ModelProcess.logging("");
        //write into summary file
        testCaseSummaryOut
                .write((evalAll.toSummaryString("=== Cross Validation Summary ===", true)).getBytes());
        testCaseSummaryOut.write("\n".getBytes());
        testCaseSummaryOut.write(
                (evalAll.toClassDetailsString("=== " + folds + "-fold Cross-validation Class Detail ===\n"))
                        .getBytes());
        testCaseSummaryOut.write("\n".getBytes());
        testCaseSummaryOut
                .write((evalAll.toMatrixString("=== Confusion matrix for all folds ===\n")).getBytes());
        testCaseSummaryOut.flush();

        confusionMatrix = evalAll.confusionMatrix();
        result.setConfusionMatrix10Folds(confusionMatrix);
    } catch (Exception e) {
        ModelProcess.logging(null, e);
    }
}

From source file:miRdup.WekaModule.java

License:Open Source License

/**
 * Evaluates a serialized Weka classifier on an ARFF test set and writes
 * per-sequence miRdup prediction reports (full, tabular, and summary files).
 *
 * @param testarff        ARFF file whose data rows mirror the predictions file
 * @param predictionsFile tab-separated sequence file (a ".folded" variant is
 *                        preferred when present)
 * @param classifier      path to the serialized Weka model; also used as a
 *                        suffix for the output file names
 * @param predictMiRNA    when true, run the position predictor on sequences
 *                        the model rejected
 */
public static void testModel(File testarff, String predictionsFile, String classifier, boolean predictMiRNA) {
    System.out.println("Testing model on " + predictionsFile + " adapted in " + testarff
            + ". Submitted to model " + classifier);

    try {
        // Pair each prediction line with its ARFF data row in a MirnaObject.
        ArrayList<MirnaObject> alobj = new ArrayList<MirnaObject>();
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(predictionsFile + ".folded"));
        } catch (FileNotFoundException fileNotFoundException) {
            // No folded variant; fall back to the raw predictions file.
            br = new BufferedReader(new FileReader(predictionsFile));
        }
        BufferedReader br2 = new BufferedReader(new FileReader(testarff));
        // Skip the ARFF header; data rows start after the "@data" marker.
        String line2 = br2.readLine();
        while (!line2.startsWith("@data")) {
            line2 = br2.readLine();
        }
        String line = " ";
        int cpt = 0;
        while (br.ready()) {
            line = br.readLine();
            line2 = br2.readLine();
            // Prediction line layout: id, mature seq, precursor seq, structure.
            String[] tab = line.split("\t");
            MirnaObject m = new MirnaObject();
            m.setArff(line2);
            m.setId(cpt++);
            m.setIdName(tab[0]);
            m.setMatureSequence(tab[1]);
            m.setPrecursorSequence(tab[2]);
            m.setStructure(tab[3]);
            alobj.add(m);
        }
        br.close();
        br2.close();

        // load data
        DataSource source = new DataSource(testarff.toString());
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
        //remove ID row
        data.deleteAttributeAt(0);
        //load model
        Classifier model = (Classifier) weka.core.SerializationHelper.read(classifier);

        // evaluate dataset on the model
        Evaluation eval = new Evaluation(data);

        eval.evaluateModel(model, data);

        FastVector fv = eval.predictions();

        // Writers: full per-sequence report, tabular report, and run summary.
        PrintWriter pw = new PrintWriter(new FileWriter(predictionsFile + "." + classifier + ".miRdup.txt"));
        PrintWriter pwt = new PrintWriter(
                new FileWriter(predictionsFile + "." + classifier + ".miRdup.tab.txt"));
        PrintWriter pwout = new PrintWriter(
                new FileWriter(predictionsFile + "." + classifier + ".miRdupOutput.txt"));

        for (int i = 0; i < fv.size(); i++) {
            // Parse Weka's prediction toString(): tab[1]=actual, tab[2]=predicted,
            // score column shifts between tab[4] and tab[5].
            // NOTE(review): this format is Weka-version dependent — confirm.
            String[] tab = fv.elementAt(i).toString().split(" ");
            int actual = Integer.valueOf(tab[1].substring(0, 1));
            int predicted = Integer.valueOf(tab[2].substring(0, 1));
            double score = 0.0;
            boolean validated = false;
            if (actual == predicted) { //case validated
                int s = tab[4].length(); // NOTE(review): unused leftover from the substring variant below
                try {
                    score = Double.valueOf(tab[4]);
                    //score = Double.valueOf(tab[4].substring(0, s - 1));
                } catch (NumberFormatException numberFormatException) {
                    // Unparseable score column; treat as zero.
                    score = 0.0;
                }

                validated = true;
            } else {// case not validated
                int s = tab[5].length(); // NOTE(review): unused, see above
                try {
                    score = Double.valueOf(tab[5]);
                    //score = Double.valueOf(tab[5].substring(0, s - 1));
                } catch (NumberFormatException numberFormatException) {
                    score = 0.0;
                }
                validated = false;
            }
            // Attach the outcome to the corresponding sequence object.
            MirnaObject m = alobj.get(i);
            m.setActual(actual);
            m.setPredicted(predicted);
            m.setScore(score);
            m.setValidated(validated);
            m.setNeedPrediction(predictMiRNA);
            String predictionMiRNA = "";
            // Optionally predict a miRNA position for sequences the model rejected.
            if (predictMiRNA && validated == false) {
                predictionMiRNA = miRdupPredictor.Predictor.predictionBySequence(m.getPrecursorSequence(),
                        classifier, classifier + ".miRdupPrediction.txt");
                try {
                    m.setPredictedmiRNA(predictionMiRNA.split(",")[0]);
                    m.setPredictedmiRNAstar(predictionMiRNA.split(",")[1]);
                } catch (Exception e) {
                    // Result did not contain a "miRNA,miRNA*" pair; store it as-is.
                    m.setPredictedmiRNA(predictionMiRNA);
                    m.setPredictedmiRNAstar(predictionMiRNA);
                }
            }

            pw.println(m.toStringFullPredictions());
            pwt.println(m.toStringPredictions());
            // Flush periodically so long runs produce readable partial output.
            if (i % 100 == 0) {
                pw.flush();
                pwt.flush();
            }
        }

        // Reduce the Weka summary to the lines of interest for the run log.
        String[] out = eval.toSummaryString("\nSummary results of predictions\n======\n", false).split("\n");
        String info = out[0] + "\n" + out[1] + "\n" + out[2] + "\n" + out[4] + "\n" + out[5] + "\n" + out[6]
                + "\n" + out[7] + "\n" + out[11] + "\n";
        System.out.println(info);
        pwout.println(
                "File " + predictionsFile + " adapted in " + testarff + " submitted to model " + classifier);
        pwout.println(info);

        pw.flush();
        pw.close();
        pwt.flush();
        pwt.close();
        pwout.flush();
        pwout.close();

        System.out.println("Results in " + predictionsFile + "." + classifier + ".miRdup.txt");

        // draw curve
        //rocCurve(eval);
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:mlpoc.MLPOC.java

/**
 * Runs stratified 10-fold cross-validation of a J48 tree (C = 0.25) on the
 * ARFF dataset in {@code filename} (class = last attribute), prints a setup
 * banner plus the evaluation summary, and returns the pooled evaluation.
 *
 * @param filename path to the ARFF data file
 * @return the pooled {@link Evaluation}, or {@code null} if loading or
 *         evaluation failed
 */
public static Evaluation crossValidate(String filename) {
    Evaluation eval = null;
    try {
        BufferedReader br = new BufferedReader(new FileReader(filename));
        // Load the data and set the class index to the last attribute.
        Instances data = new Instances(br);
        br.close();
        data.setClassIndex(data.numAttributes() - 1);

        // Instantiate the classifier via Weka's reflective factory; the first
        // options element is blanked because it held the class name before the
        // split and must not be passed as an option.
        String[] tmpOptions;
        String classname = "weka.classifiers.trees.J48 -C 0.25";
        tmpOptions = classname.split(" ");
        classname = "weka.classifiers.trees.J48";
        tmpOptions[0] = "";
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, tmpOptions);

        // Cross-validation parameters.
        int seed = 2;
        int folds = 10;

        // Randomize; stratify so each fold keeps the class distribution.
        Random rand = new Random(seed);
        Instances randData = new Instances(data);
        randData.randomize(rand);
        if (randData.classAttribute().isNominal())
            randData.stratify(folds);

        // Perform cross-validation, pooling all folds into one Evaluation.
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // the above code is used by the StratifiedRemoveFolds filter, the
            // code below by the Explorer/Experimenter:
            // Instances train = randData.trainCV(folds, n, rand);

            // Build and evaluate a fresh classifier copy on this fold.
            Classifier clsCopy = Classifier.makeCopy(cls);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // Output evaluation.
        System.out.println();
        System.out.println("=== Setup ===");
        System.out
                .println("Classifier: " + cls.getClass().getName() + " " + Utils.joinOptions(cls.getOptions()));
        System.out.println("Dataset: " + data.relationName());
        System.out.println("Folds: " + folds);
        System.out.println("Seed: " + seed);
        System.out.println();
        System.out.println(eval.toSummaryString("Summary for testing", true));
        System.out.println("Correctly Classified Instances: " + eval.correct());
        System.out.println("Percentage of Correctly Classified Instances: " + eval.pctCorrect());
        System.out.println("InCorrectly Classified Instances: " + eval.incorrect());
        System.out.println("Percentage of InCorrectly Classified Instances: " + eval.pctIncorrect());

    } catch (Exception ex) {
        // BUG FIX: the original printed only ex.getMessage(), discarding the
        // stack trace (and printing "null" for message-less exceptions).
        ex.printStackTrace();
    }
    return eval;
}

From source file:myclassifier.naiveBayes.java

/**
 * Trains the Naive Bayes classifier on the stored training data and prints
 * evaluation statistics for the supplied test set. Prints a diagnostic
 * message when no training data has been loaded.
 *
 * @param dataTest the labelled test instances to evaluate against
 * @throws Exception if training or evaluation fails
 */
public void TestData(Instances dataTest) throws Exception {
    if (data != null) {
        Instances train = data;
        // Train the classifier on the held training data.
        NBClassifier.buildClassifier(train);
        // Evaluate the trained classifier on the test set and print statistics.
        Evaluation eval = new Evaluation(dataTest);
        // BUG FIX: the original never ran the model over the test data, so all
        // printed statistics came from an empty Evaluation.
        eval.evaluateModel(NBClassifier, dataTest);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));
        System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString());
    } else {
        System.out.println("Data is null");
    }
}