Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.

Usage

From source file:tubes1.Main.java

/**
 * Trains the given classifier on the training set and evaluates it
 * against the testing set.
 *
 * @param model       classifier to build and evaluate
 * @param trainingSet instances used to train the model
 * @param testingSet  instances used to measure performance
 * @return the populated {@link Evaluation} object
 * @throws Exception if training or evaluation fails
 */
public static Evaluation classify(Classifier model, Instances trainingSet, Instances testingSet)
        throws Exception {
    Evaluation result = new Evaluation(trainingSet);
    model.buildClassifier(trainingSet);
    result.evaluateModel(model, testingSet);
    return result;
}

From source file:tubes2ai.DriverNB.java

/**
 * Loads a data set, discretizes its attributes, trains the custom
 * AIJKNaiveBayes classifier on it, and prints the evaluation results
 * (summary, per-class details, confusion matrix) to stdout.
 *
 * NOTE(review): the model is evaluated on its own training data, so the
 * reported accuracy is optimistic.
 *
 * @param data location of the data set, readable by Weka's DataSource
 * @throws Exception if loading, filtering, training or evaluation fails
 */
public static void run(String data) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(data);
    Instances dataTrain = source.getDataSet();
    // The class attribute is taken to be the first column.
    dataTrain.setClassIndex(0);

    // Discretize attributes before training the Naive Bayes model.
    Discretize discretize = new Discretize();
    discretize.setInputFormat(dataTrain);
    Instances dataTrainDisc = Filter.useFilter(dataTrain, discretize);

    AIJKNaiveBayes NB = new AIJKNaiveBayes();
    NB.buildClassifier(dataTrainDisc);

    // Evaluate on the (discretized) training data itself.
    Evaluation eval = new Evaluation(dataTrainDisc);
    eval.evaluateModel(NB, dataTrainDisc);

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:tucil.dua.ai.TucilDuaAi.java

/**
 * Builds a J48 decision tree on the full data set and prints the model
 * together with its evaluation on that same (training) data.
 *
 * @throws Exception if training or evaluation fails
 */
public static void fullTrainingSet() throws Exception {
    Classifier tree = new J48();
    tree.buildClassifier(datas);

    Evaluation evaluation = new Evaluation(datas);
    evaluation.evaluateModel(tree, datas);

    System.out.println("=====Run Information======");
    System.out.println("======Classifier Model======");
    System.out.println(tree.toString());
    System.out.println(evaluation.toSummaryString("====Stats======\n", false));
    System.out.println(evaluation.toClassDetailsString("====Detailed Result=====\n"));
    System.out.println(evaluation.toMatrixString("======Confusion Matrix======\n"));
}

From source file:tucil2ai.Tucil2AI.java

/**
 * Evaluates a classifier on the given data, either with a single pass
 * over the data or with 10-fold cross-validation.
 *
 * @param cls   classifier to evaluate (must already be built when
 *              {@code cross} is false)
 * @param data  instances to evaluate against
 * @param cross when true, run 10-fold cross-validation; otherwise
 *              evaluate directly on {@code data}
 * @return the populated {@link Evaluation} object
 * @throws Exception if evaluation fails
 */
protected static Evaluation evalJ48(Classifier cls, Instances data, boolean cross) throws Exception {
    Evaluation eval = new Evaluation(data);
    if (!cross) {
        eval.evaluateModel(cls, data);
    } else {
        // Fixed seed keeps the fold split reproducible across runs.
        eval.crossValidateModel(cls, data, 10, new Random(0x100));
    }
    return eval;
}

From source file:uv.datamining.tp2.WekaModeler.java

/**
 * Builds a J48 tree from an ARFF file, prints its evaluation on the
 * training data, and serializes the model next to the input file with
 * a ".model" extension.
 *
 * @param file ARFF file to load; its last attribute is used as the class
 * @param cm   confidence factor for J48 pruning
 * @throws Exception if loading, training, evaluation or writing fails
 */
public static void generarArbol(File file, float cm) throws Exception {
    ArffLoader loader = new ArffLoader();
    loader.setFile(file);
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1); // last column is the class attribute
    J48 tree = new J48();
    tree.setConfidenceFactor(cm);
    tree.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString());

    // Derive the model path from the input file name, tolerating names
    // without an extension (lastIndexOf returns -1 in that case and the
    // original substring call would have thrown).
    String path = file.getAbsolutePath();
    int dot = path.lastIndexOf('.');
    String base = (dot >= 0) ? path.substring(0, dot) : path;
    weka.core.SerializationHelper.write(base + ".model", tree);
}

From source file:view.centerPanels.ClassificationPnlCenterPredict.java

/**
 * Reads one instance from the text fields, classifies it with the
 * trained Naive Bayes model, and shows the predicted class in the
 * text area. The predicted class is recovered from the confusion
 * matrix of an evaluation run over the (single-instance) data set.
 */
private void btnStartActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnStartActionPerformed

    // Validate every non-empty field against its attribute type before
    // building the instance. Empty fields are skipped (for clustering
    // they mean "ignore"; that is not the case for classification).
    for (int i = 0; i < textFields.size(); i++) {
        String text = textFields.get(i).getText().trim();
        if (text.equals("")) {
            continue;
        }

        if (instances.attribute(i).isNominal()) {
            boolean correct = false;
            for (int j = 0; j < instances.attribute(i).numValues(); j++) {
                if (text.equals(instances.attribute(i).value(j))) {
                    correct = true;
                }
            }
            if (!correct) {
                JOptionPane.showMessageDialog(this,
                        "Incorrect format for attribute " + instances.attribute(i).name());
                // Was "break": the method used to continue and classify
                // the invalid input anyway. Abort instead.
                return;
            }
        }

        if (instances.attribute(i).isNumeric()) {
            try {
                Double.parseDouble(text);
            } catch (Exception e) {
                JOptionPane.showMessageDialog(this,
                        "Incorrect format for attribute " + instances.attribute(i).name());
                // Was "break": same problem as above.
                return;
            }
        }
    }

    int numAttributes = instances.numAttributes();

    Instance instance = new Instance(numAttributes);

    // Fill the new instance: values that parse as doubles are stored
    // numerically, everything else as a nominal/string value.
    for (int i = 0; i < textFields.size(); i++) {
        String text = textFields.get(i).getText().trim();

        try {
            double value = Double.parseDouble(text);
            instance.setValue(i, value);
        } catch (Exception e) {
            instance.setValue(i, text);
        }
    }

    // Assign a placeholder class label (first value of the class
    // attribute) so evaluation can run; the actual prediction is read
    // back from the confusion matrix below.
    Attribute classAttribute = instances.attribute(numAttributes - 1);
    instance.setValue(classAttribute, classAttribute.value(0));

    instances.add(instance);

    double[][] matrix = null;

    try {
        Evaluation eval = new Evaluation(instances);
        eval.evaluateModel(naiveBayes, instances);

        textArea.setText(eval.toMatrixString());
        matrix = eval.confusionMatrix();
    } catch (Exception ex) {
        System.out.println("Greska: " + ex);
    }

    // Guard against the guaranteed NullPointerException the original
    // code had when evaluation failed and matrix stayed null.
    if (matrix == null) {
        instances.delete();
        return;
    }

    // Column i of the confusion matrix counts predictions for class i;
    // with the single added instance exactly one column sums to 1.
    double[] columnSums = new double[matrix.length];
    for (int i = 0; i < columnSums.length; i++) {
        double sum = 0;
        for (int j = 0; j < columnSums.length; j++) {
            sum = sum + matrix[j][i];
        }
        columnSums[i] = sum;
    }

    String className = null;
    for (int i = 0; i < columnSums.length; i++) {
        if (columnSums[i] == 1) {
            className = instances.attribute(numAttributes - 1).value(i);
        }
    }

    textArea.setText("This instance belongs to class: \n\n== " + className + " ==");

    instances.delete();
}

From source file:view.centerPanels.ClassificationPnlCenterTrainNew.java

/**
 * Builds a Naive Bayes model on the shared data set and displays its
 * confusion matrix (evaluated on the training data) in the text area.
 */
public ClassificationPnlCenterTrainNew() {
    initComponents();

    Instances data = Data.getInstance().getInstances();

    try {
        data.setClassIndex(data.numAttributes() - 1);

        nb = new NaiveBayes();
        nb.buildClassifier(data);

        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(nb, data);

        jTextArea1.setText(eval.toMatrixString());
    } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, ex);
    }

    setSize(MainGUI.getInstance().getPnlCenter().getWidth(), MainGUI.getInstance().getPnlCenter().getHeight());
    setVisible(true);
}

From source file:wekimini.learning.ModelEvaluator.java

/**
 * Evaluates every buildable path asynchronously on a SwingWorker.
 * For each path, either k-fold cross-validation (isTraining == false)
 * or a train-then-evaluate-on-training-data run (isTraining == true)
 * is performed. Per-path results are published via setResults /
 * finishedModel, and overall progress via setEvalStatus; the worker's
 * property changes are forwarded to {@code listener}.
 *
 * @param paths      paths whose models should be evaluated
 * @param isTraining true = evaluate on training data, false = cross-validate
 * @param numFolds   number of folds used when cross-validating
 * @param listener   receives the SwingWorker's property change events
 */
public void evaluateAll(final List<Path> paths, final boolean isTraining, final int numFolds,
        PropertyChangeListener listener) {
    final List<Instances> data = new LinkedList<>();
    for (Path p : paths) {
        Instances i = w.getSupervisedLearningManager().getTrainingDataForPath(p, false);
        data.add(i);
    }

    setResults(new String[paths.size()]);
    // Refuse to start while a previous evaluation worker is still running.
    if (evalWorker != null && evalWorker.getState() != SwingWorker.StateValue.DONE) {
        return;
    }

    evalWorker = new SwingWorker<Integer, Void>() {

        @Override
        public Integer doInBackground() {
            // train(); //TODO: Add status updates
            int progress = 0;
            //setProgress(progress);
            // First pass: count how many paths can actually be evaluated,
            // so progress reporting has a fixed denominator.
            int numToEvaluate = 0;
            for (Path p : paths) {
                if (p.canBuild()) {
                    numToEvaluate++;
                }
            }

            int numEvaluated = 0;
            int numErr = 0;
            setEvalStatus(new EvaluationStatus(numToEvaluate, numEvaluated, numErr, false));

            for (int i = 0; i < paths.size(); i++) {
                Path p = paths.get(i);
                if (p.canBuild()) {
                    try {
                        System.out.println("Evaluating with " + numFolds);
                        //EVALUATE HERE: TODO 
                        Instances instances = w.getSupervisedLearningManager().getTrainingDataForPath(p, false);
                        Evaluation eval = new Evaluation(instances);
                        Classifier c = ((LearningModelBuilder) p.getModelBuilder()).getClassifier();
                        if (!isTraining) {
                            // Unseeded Random: fold splits differ between runs.
                            Random r = new Random();
                            eval.crossValidateModel(c, instances, numFolds, r);
                        } else {
                            // Copy the classifier so training for evaluation does
                            // not disturb the model held by the path.
                            Classifier c2 = Classifier.makeCopy(c);
                            c2.buildClassifier(instances);
                            eval.evaluateModel(c2, instances);
                        }
                        // Classification reports percent correct; other model
                        // types report error rate.
                        String result;
                        if (p.getModelBuilder() instanceof ClassificationModelBuilder) {
                            result = dFormat.format(eval.pctCorrect()) + "%"; //WON"T WORK FOR NN
                        } else {
                            result = dFormat.format(eval.errorRate()) + " (RMS)";
                        }
                        if (!isTraining) {
                            KadenzeLogging.getLogger().crossValidationComputed(w, i, numFolds, result);
                        } else {
                            KadenzeLogging.getLogger().trainingAccuracyComputed(w, i, result);
                        }
                        setResults(i, result);
                        finishedModel(i, result);
                        numEvaluated++;

                        // Bail out between paths if the user cancelled.
                        if (isCancelled()) {
                            cancelMe(p);
                            setResults(i, "Cancelled");
                            return 0;
                        }

                    } catch (InterruptedException ex) {
                        cancelMe(p);
                        setResults(i, "Cancelled");
                        return 0; //Not sure this will be called...
                    } catch (Exception ex) {
                        // NOTE(review): only ex.getMessage() is logged; the stack
                        // trace is lost. Consider logger.log(SEVERE, msg, ex).
                        numErr++;
                        Util.showPrettyErrorPane(null, "Error encountered during evaluation "
                                + p.getCurrentModelName() + ": " + ex.getMessage());
                        logger.log(Level.SEVERE, ex.getMessage());
                    }
                    setEvalStatus(new EvaluationStatus(numToEvaluate, numEvaluated, numErr, false));
                } else {
                    logger.log(Level.WARNING, "Could not evaluate path");
                }

            }
            wasCancelled = false;
            hadError = evaluationStatus.numErrorsEncountered > 0;
            return 0;
        }

        @Override
        public void done() {
            // Runs on the EDT; propagate a final "cancelled" status if needed.
            if (isCancelled()) {
                EvaluationStatus t = new EvaluationStatus(evaluationStatus.numToEvaluate,
                        evaluationStatus.numEvaluated, evaluationStatus.numErrorsEncountered, true);
                setEvalStatus(t);
            }
            finished();
        }
    };
    evalWorker.addPropertyChangeListener(listener);
    evalWorker.execute();
}

From source file:wlsvm.WLSVM.java

License:Open Source License

/**
 * //  ww  w  . j ava 2  s.c  o m
 * @param argv
 * @throws Exception
 */

/**
 * Command-line entry point: runs a 5-fold cross-validation of the
 * wrapped LibSVM classifier on the ARFF file named by the first
 * argument and prints Weka's standard evaluation report.
 *
 * @param argv argv[0] is the path to the ARFF training file
 * @throws Exception if evaluation fails
 */
public static void main(String[] argv) throws Exception {
    if (argv.length < 1) {
        System.out.println("Usage: Test <arff file>");
        System.exit(1);
    }
    String dataFile = argv[0];

    WLSVM lib = new WLSVM();

    // Plain string literals; "new String(literal)" is redundant.
    String[] ops = {
            "-t", dataFile, // training file
            "-x", "5",      // 5-fold cross-validation
            "-i",           // output information-retrieval statistics
            // WLSVM / LibSVM options -- see the LibSVM documentation
            "-S", "0",
            "-K", "2",
            "-G", "1",
            "-C", "7",
            "-M", "100",
    };

    System.out.println(Evaluation.evaluateModel(lib, ops));
}