Example usage for weka.classifiers Evaluation Evaluation

List of usage examples for weka.classifiers Evaluation Evaluation

Introduction

On this page you can find example usage for weka.classifiers Evaluation Evaluation.

Prototype

public Evaluation(Instances data) throws Exception 
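
The constructor takes the Instances that define the dataset structure (including the class attribute) used to set up the evaluation. Before the project-specific examples below, here is a minimal, self-contained sketch of the typical pattern; the ARFF file name and the J48 classifier are placeholders chosen for illustration, not taken from the examples.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationSketch {
    public static void main(String[] args) throws Exception {
        // load a dataset (the path is a placeholder)
        Instances data = new DataSource("weather.nominal.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // the Evaluation object is built from the data that defines the class attribute
        Evaluation eval = new Evaluation(data);

        // either evaluate an already trained classifier on a dataset...
        J48 tree = new J48();
        tree.buildClassifier(data);
        eval.evaluateModel(tree, data);

        // ...or let Evaluation drive a 10-fold cross-validation itself
        // eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString());
    }
}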

Usage

From source file:tubes2ai.DriverNB.java

public static void run(String data) throws Exception {
    //System.out.println("tes driver");

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(data);
    Instances dataTrain = source.getDataSet();
    //if (dataTrain.classIndex() == -1)
    dataTrain.setClassIndex(0); // the class attribute is the first column
    ArffSaver saver = new ArffSaver();

    //        dataTrain.setClassIndex();
    Discretize discretize = new Discretize();
    discretize.setInputFormat(dataTrain);
    Instances dataTrainDisc = Filter.useFilter(dataTrain, discretize);

    //NaiveBayes NB = new NaiveBayes();
    AIJKNaiveBayes NB = new AIJKNaiveBayes();
    NB.buildClassifier(dataTrainDisc);

    Evaluation eval = new Evaluation(dataTrainDisc);
    eval.evaluateModel(NB, dataTrainDisc);

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
    /*Instance inst = new DenseInstance(5);

    inst.setDataset(dataTrain);
    inst.setValue(0, "sunny");
    inst.setValue(1, "hot");
    inst.setValue(2, "high");
    inst.setValue(3, "FALSE");
    inst.setValue(4, "yes");
    double a = NB.classifyInstance(inst);
    String hasil = "";
    if (a == 0.0) {
        hasil = "YES";
    } else {
        hasil = "NO";
    }
    //double[] b = NB.distributionForInstance(inst);
    System.out.println("Classification result: " + hasil);
    //System.out.println(b);*/
}

From source file:tubesduaai.TesFFNN.java

public static Evaluation cross_validation(FFNN x) throws Exception {
    eval = new Evaluation(data);
    eval.crossValidateModel(x, data, 10, new Random(1));
    return eval;
}

From source file:tucil.dua.ai.TucilDuaAi.java

public static void fullTrainingSet() throws Exception {
    Classifier j48 = new J48();
    j48.buildClassifier(datas);

    Evaluation eval = new Evaluation(datas);
    eval.evaluateModel(j48, datas);
    System.out.println("=====Run Information======");
    System.out.println("======Classifier Model======");
    System.out.println(j48.toString());
    System.out.println(eval.toSummaryString("====Stats======\n", false));
    System.out.println(eval.toClassDetailsString("====Detailed Result=====\n"));
    System.out.println(eval.toMatrixString("======Confusion Matrix======\n"));
}

From source file:tucil.dua.ai.TucilDuaAi.java

public static void crossValidation() throws Exception {
    Evaluation evaluation = new Evaluation(datas);
    Classifier attr_tree = new J48();
    attr_tree.buildClassifier(datas);
    evaluation.crossValidateModel(attr_tree, datas, 10, new Random(1));
    System.out.println("=====Run Information======");
    System.out.println("======Classifier Model======");
    System.out.println(attr_tree.toString());
    System.out.println(evaluation.toSummaryString("====Stats======\n", false));
    System.out.println(evaluation.toClassDetailsString("====Detailed Result=====\n"));
    System.out.println(evaluation.toMatrixString("======Confusion Matrix======\n"));
}

From source file:tucil2ai.Tucil2AI.java

/**
 * Evaluates a classifier on the given data.
 *
 * @param cls the classifier to evaluate
 * @param data the dataset to evaluate on
 * @param cross if true, run 10-fold cross-validation; otherwise evaluate on the data itself
 * @return the resulting Evaluation
 * @throws Exception
 */
protected static Evaluation evalJ48(Classifier cls, Instances data, boolean cross) throws Exception {
    Evaluation E = new Evaluation(data);
    if (!cross) {
        E.evaluateModel(cls, data);
    } else {
        E.crossValidateModel(cls, data, 10, new Random(0x100));
    }
    }
    return E;
}
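
A hypothetical call site for this helper, assuming it is invoked from the same class with a dataset whose class index is already set; the method name compareEvaluations and the J48 setup are illustrative and not part of the original source.

public static void compareEvaluations(Instances data) throws Exception {
    J48 tree = new J48();
    tree.buildClassifier(data);

    Evaluation trainEval = evalJ48(tree, data, false); // resubstitution on the training data
    Evaluation cvEval = evalJ48(tree, data, true); // 10-fold cross-validation

    System.out.println(trainEval.toSummaryString("== Training set ==\n", false));
    System.out.println(cvEval.toSummaryString("== Cross-validation ==\n", false));
}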

From source file:util.FeatureExtract.java

public static void createArff(String directory) {
    TextDirectoryLoader loader = new TextDirectoryLoader();
    try {
        // convert the directory into a dataset
        loader.setDirectory(new File(directory));
        Instances dataRaw = loader.getDataSet();

        // apply the StringToWordVector and tf-idf weighting
        StringToWordVector filter = new StringToWordVector();
        filter.setIDFTransform(true);
        filter.setInputFormat(dataRaw);
        Instances dataFiltered = Filter.useFilter(dataRaw, filter);

        // output the arff file
        ArffSaver saver = new ArffSaver();
        saver.setInstances(dataFiltered);
        saver.setFile(new File(SpamFilterConfig.getArffFilePath()));
        saver.writeBatch();

        // train with simple cart
        SimpleCart classifier = new SimpleCart();
        classifier.buildClassifier(dataFiltered);
        System.out.println("\n\nClassifier model:\n\n" + classifier.toString());

        // evaluate with 10-fold cross-validation
        Evaluation eval = new Evaluation(dataFiltered);
        eval.crossValidateModel(classifier, dataFiltered, 10, new Random(1));

        System.out.println("\n\nCross fold:\n\n" + eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtract.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:util.Weka.java

public void fijarAprendizaje(Instance casoAdecidir, double resultadoRealObservado) {
    casoAdecidir.setClassValue(resultadoRealObservado);
    casosEntrenamiento.add(casoAdecidir);
    for (int i = 0; i < casosEntrenamiento.numInstances() - this.maximoNumeroCasosEntrenamiento; i++) {
        casosEntrenamiento.delete(0); // if there are too many examples, remove the oldest one
    }
    try {
        conocimiento.buildClassifier(casosEntrenamiento);
        Evaluation evaluador = new Evaluation(casosEntrenamiento);
        evaluador.crossValidateModel(conocimiento, casosEntrenamiento, 10, new Random(1));
    } catch (Exception e) {
        // log the error
    }
}

From source file:uv.datamining.tp2.WekaModeler.java

public static void generarArbol(File file, float cm) throws Exception {
    ArffLoader loader = new ArffLoader();
    loader.setFile(file);
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1); // the last column holds the class attribute
    J48 tree = new J48();
    tree.setConfidenceFactor(cm);
    tree.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString());

    weka.core.SerializationHelper.write(
            file.getAbsolutePath().substring(0, file.getAbsolutePath().lastIndexOf(".")) + ".model", tree);

}
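
The method ends by serializing the trained tree next to the input file with weka.core.SerializationHelper. A possible companion for loading such a .model file back; the method name cargarArbol is an assumption added here for illustration.

public static J48 cargarArbol(File modelFile) throws Exception {
    // SerializationHelper.read returns Object; the cast assumes the file was written by generarArbol
    return (J48) weka.core.SerializationHelper.read(modelFile.getAbsolutePath());
}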

From source file:view.centerPanels.ClassificationPnlCenterPredict.java

private void btnStartActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnStartActionPerformed

    //you already have the instances object and the naiveBayes object in this class
    //you need to pull what was entered in the text fields via the list and build
    //an Instance object (no "s"); once you have the Instance, just add it
    //with instances.add( )..
    //then use that Instances and naiveBayes for the evaluation and print in the textArea
    //which class it belongs to.. The confusion matrix is not needed now, just
    //print a string with the class it belongs to
    //see the class ClusterinPnlCenterPredict, 90% of what is needed here is done there
    //        instances.delete();
    for (int i = 0; i < textFields.size(); i++) {
        String text = textFields.get(i).getText().trim();

        //skips empty fields, since for clustering that means they are ignored;
        //that is not the case for classification
        if (!text.equals("")) {

            if (instances.attribute(i).isNominal()) {
                boolean correct = false;
                for (int j = 0; j < instances.attribute(i).numValues(); j++) {
                    if (text.equals(instances.attribute(i).value(j))) {
                        correct = true;
                    }
                }
                if (!correct) {
                    JOptionPane.showMessageDialog(this,
                            "Incorrect format for attribute " + instances.attribute(i).name());
                    break;
                }
            }

            if (instances.attribute(i).isNumeric()) {
                try {
                    double value = Double.parseDouble(text);
                } catch (Exception e) {
                    JOptionPane.showMessageDialog(this,
                            "Incorrect format for attribute " + instances.attribute(i).name());
                    break;
                }
            }

        }
    }

    int numAttributes = instances.numAttributes();

    Instance instance = new Instance(numAttributes);

    for (int i = 0; i < textFields.size(); i++) {
        String text = textFields.get(i).getText().trim();

        try {
            double value = Double.parseDouble(text);
            instance.setValue(i, value);

        } catch (Exception e) {
            instance.setValue(i, text);
        }
    }

    String value = instances.attribute(numAttributes - 1).value(0);
    Attribute Att = instances.attribute(numAttributes - 1);

    instance.setValue(Att, value);
    //        instances.delete();
    //        instance.setValue((numAttributes - 1), instances.attribute(numAttributes - 1).value(0) );

    instances.add(instance);

    double[][] matrix = null;

    System.out.println(instances);
    System.out.println(naiveBayes);

    try {

        //                instances.setClassIndex(instances.numAttributes() - 1);
        Evaluation eval = new Evaluation(instances);

        eval.evaluateModel(naiveBayes, instances);

        textArea.setText(eval.toMatrixString());
        matrix = eval.confusionMatrix();

    } catch (Exception ex) {
        System.out.println("Greska: " + ex);
    }

    double[] array = new double[matrix.length];

    for (int i = 0; i < array.length; i++) {

        double sum = 0;
        for (int j = 0; j < array.length; j++) {

            sum = sum + matrix[j][i];

        }
        array[i] = sum;

    }

    String className = null;
    for (int i = 0; i < array.length; i++) {
        if (array[i] == 1) {
            className = instances.attribute(numAttributes - 1).value(i);
        }
    }

    textArea.setText("This instance belongs to class: \n\n== " + className + " ==");

    instances.delete();
}

From source file:view.centerPanels.ClassificationPnlCenterTrainNew.java

public ClassificationPnlCenterTrainNew() {
    initComponents();

    Instances data = Data.getInstance().getInstances();

    try {
        nb = new NaiveBayes();

        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);

        nb.buildClassifier(data);

        eval.evaluateModel(nb, data);

        jTextArea1.setText(eval.toMatrixString());

        //                System.out.println(eval.toMatrixString());
    } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, ex);
    }

    setSize(MainGUI.getInstance().getPnlCenter().getWidth(), MainGUI.getInstance().getPnlCenter().getHeight());
    setVisible(true);

}