Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage of weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Document

Evaluates a classifier with the options given in an array of strings. Note that most of the examples below also use the instance method evaluateModel(Classifier classifier, Instances data), which evaluates an already trained classifier on a supplied test set.
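
The static variant shown in the prototype trains and evaluates a classifier entirely from the options array (for example "-t" for the training file and "-x" for the number of cross-validation folds) and returns the textual report, while the instance variant evaluates a classifier you have already built on a data set you supply. Below is a minimal, self-contained sketch of both styles; the file name iris.arff and the choice of J48 are illustrative assumptions and do not come from the examples that follow.

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluateModelSketch {
    public static void main(String[] args) throws Exception {
        // Static variant: Weka parses the options itself ("-t" = training file,
        // "-x" = number of cross-validation folds) and returns the report text.
        String report = Evaluation.evaluateModel(new J48(),
                new String[] { "-t", "iris.arff", "-x", "10" });
        System.out.println(report);

        // Instance variant: train first, then evaluate the trained classifier
        // on a data set (here the training data itself, for brevity).
        Instances data = new DataSource("iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);
        J48 tree = new J48();
        tree.buildClassifier(data);
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(tree, data);
        System.out.println(eval.toSummaryString());
    }
}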

Usage

From source file:eyetracker.MLPProcessor.java

public MLPProcessor() {
    try {
        FileReader fr = new FileReader("trainingData.arff");
        Instances training = new Instances(fr);
        training.setClassIndex(training.numAttributes() - 1);
        mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 5"));
        mlp.buildClassifier(training);

        FileReader tr = new FileReader("trainingData.arff");
        Instances testdata = new Instances(tr);
        inst = testdata;
        testdata.setClassIndex(testdata.numAttributes() - 1);
        Evaluation eval = new Evaluation(training);
        eval.evaluateModel(mlp, testdata);
        System.out.println(eval.toSummaryString("\nResults\n*******\n", false));
        tr.close();
        fr.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:farm_ads.MyClassifier.java

public Evaluation evaluationModel(Instances train, Instances test, Classifier classifier) throws Exception {
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(classifier, test);
    return eval;
}

From source file:ffnn.FFNN.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    FFNNTubesAI cls;
    Scanner scan = new Scanner(System.in);
    System.out.print("new / read? (n/r)");
    if (scan.next().equals("n")) {
        cls = new FFNNTubesAI();
    } else {
        cls = (FFNNTubesAI) TucilWeka.readModel();
    }
    int temp;
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\Team.arff");
    //Display the attribute options
    for (int i = 0; i < data.numAttributes(); i++) {
        System.out.println(i + ". " + data.attribute(i));
    }
    System.out.print("Class Index : ");
    temp = scan.nextInt();
    data.setClassIndex(temp);
    data = preprocess(data);
    System.out.println(data);

    System.out.print("full train? (y/n)");
    if (scan.next().equals("y")) {
        try {
            cls.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(FFNNTubesAI.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    int fold = 10;

    //FFNNTubesAI.printMatrix(cls.weight1, cls.input_layer+1, cls.hidden_layer);
    //FFNNTubesAI.printMatrix(cls.weight2, cls.hidden_layer, cls.output_layer);
    //FFNNTubesAI.printMatrix(cls.bias2, 1, cls.output_layer);
    Evaluation eval = new Evaluation(data);
    System.out.print("eval/10-fold? (e/f)");
    if (scan.next().equals("e")) {
        eval.evaluateModel(cls, data);
    } else {
        eval.crossValidateModel(cls, data, fold, new Random(1));
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:ffnn.FFNNTubesAI.java

@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); //3
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        //            System.out.println(temp_instances);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    //temp_instances.randomize(temp_instances.getRandomNumberGenerator(1));
    //System.out.println(temp_instances);
    input_layer = temp_instances.numAttributes() - output_layer; //4
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); //extra column for the bias term

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;

    double last_update = System.nanoTime();

    // number of training iterations
    //        for( long itr = 0; last_error_rate > 0.001; ++ itr ){
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }
        long time = System.nanoTime();
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);

            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }

            if (accry > 0)
                last_error_rate = eval.errorRate();

            // restart after 30 seconds without improvement
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }

            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // foward !!
            temp_instance = temp_instances.instance(j);

            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }

            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward <<

            // error layer 2
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }

            // back propagation layer 2
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }

            for (int x = 0; x < output_layer; ++x) {
                double correction = learning_rate * error_output.getEntry(0, x); // the bias input is implicitly 1
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // error layer 1
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // back propagation layer 1
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}

From source file:FFNN.MultiplePerceptron.java

public static void main(String args[]) throws Exception {
    //        System.out.println("input jumlah layer 0/1 :");
    //        Scanner input = new Scanner(System.in);
    //        int layer = input.nextInt();
    //        System.out.println("input learning rate");
    //        double rate = input.nextDouble();
    //        int hidden = 0;
    //        if(layer==1){
    //            System.out.println("input jumlah neuron di hidden layer");
    //            hidden = input.nextInt();
    //        }
    //        
    //        System.out.print("Masukkan nama file : ");
    //        String filename = input.next();
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\iris.arff"));
    Instances train = source.getDataSet();
    //        Normalize nm = new Normalize();
    //        nm.setInputFormat(train);
    //        train = Filter.useFilter(train, nm);
    for (int i = 0; i < train.numAttributes(); i++)
        System.out.println(i + ". " + train.attribute(i).name());
    System.out.print("Masukkan indeks kelas : ");
    //int classIdx = input.nextInt();
    train.setClassIndex(train.numAttributes() - 1);
    MultiplePerceptron mlp = new MultiplePerceptron(10000, 1, 13, train);
    mlp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
    //        System.out.println(eval.toMatrixString());
}

From source file:ffnn.TucilWeka.java

public static Evaluation fullTraining(Instances data) {
    //Evaluate on the full data set (train and test are copies of the same data)
    Evaluation eval = null;
    Instances train = new Instances(data);
    Instances test = new Instances(data);
    try {
        Classifier classify = new J48();
        //Build the classifier from the training data
        classify.buildClassifier(train);
        eval = new Evaluation(train);
        eval.evaluateModel(classify, test);
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:focusedCrawler.target.ClassifyBuilder.java

License:Open Source License

public String buildClassifier(String trainFile, String outputModel) throws Exception {
    double max = Double.MIN_NORMAL;
    double cValue = 0;
    for (double c = 1; c > 0.1; c = c - 0.2) {
        SMO classifier = new SMO();
        String[] argum = new String[] { "-t", trainFile, "-C", "" + c, "-v", "-d", outputModel + c };
        String output = Evaluation.evaluateModel(classifier, argum);
        int index = output.indexOf("Correctly Classified Instances");
        if (index >= 0) {
            int end = output.indexOf("%", index);
            String line = (output.substring(index, end)).trim();
            line = line.substring(line.lastIndexOf(" "));
            double accuracy = Double.parseDouble(line.trim());
            if (accuracy > max) {
                max = accuracy;
                cValue = c;
            }
        }
    }
    System.out.println("C:" + cValue);
    return outputModel + cValue;
}

From source file:focusedCrawler.target.ClassifyBuilder.java

License:Open Source License

public void testClassifier(String testFile, String outputModel) throws Exception {
    SMO classifier = new SMO();
    String[] argum = new String[] { "-T", testFile, "-l", outputModel, "-i" };
    String output = Evaluation.evaluateModel(classifier, argum);
    int index = output.indexOf("F-Measure");
    if (index >= 0) {
        index = output.indexOf("\n", index);
        int end = output.indexOf("\n", index + 1);
        String line = (output.substring(index, end)).trim();
        System.out.println(line);
        StringTokenizer tokenizer = new StringTokenizer(line, " ");
        int count = 0;
        while (tokenizer.hasMoreTokens()) {
            String word = tokenizer.nextToken();
            if (count == 2) {
                System.out.println("PRECISION:" + word);
            }
            if (count == 3) {
                System.out.println("RECALL:" + word);
            }
            if (count == 4) {
                System.out.println("F-MEASURE:" + word);
            }
            count++;
        }
    }
}

From source file:focusedCrawler.target.EMClassifier.java

License:Open Source License

private String buildClassifier(String suffix) throws Exception {
    String trainingData = rootDir + File.separator + "trainData_" + suffix;
    //      System.out.println("TRAIN:" +trainingData);
    String trainWekafile = wekaFileDir + "weka_" + suffix;
    String testFileDir = rootDir + File.separator + "testData_" + suffix;
    String outputModel = rootDir + File.separator + "model" + File.separator + "model_" + suffix;
    CreateWekaInput createWekaFile = new CreateWekaInput(new File(trainingData), new File(testFileDir),
            stoplist);
    attributes = createWekaFile.centroid2Weka(trainWekafile);
    double max = Double.MIN_VALUE;
    double cValue = 0;
    int count = 0;
    for (double c = 0.0625; count < 1; c = c * 0.5) {
        SMO classifier = new SMO();
        String[] argum = new String[] { "-t", trainWekafile, "-C", "" + c, "-v", "-M", "-d", outputModel + c };
        String output = Evaluation.evaluateModel(classifier, argum);
        int index = output.indexOf("Correctly Classified Instances");
        if (index >= 0) {
            int end = output.indexOf("%", index);
            String line = (output.substring(index, end)).trim();
            line = line.substring(line.lastIndexOf(" "));
            double accuracy = Double.parseDouble(line.trim());
            System.out.println("C=" + c + " acc=" + accuracy);
            if (accuracy > max) {
                max = accuracy;
                cValue = c;
            }
        }
        count++;
        if (c == 1) {
            testClassifier(trainWekafile + "_test", outputModel + c);
        }
    }
    return outputModel + cValue;
}

From source file:focusedCrawler.target.EMClassifier.java

License:Open Source License

private void testClassifier(String testFile, String outputModel) throws Exception {
    SMO classifier = new SMO();
    //        NaiveBayes classifier = new NaiveBayes();
    //        System.out.println("java -T " + testFile + " -l" + outputModel );
    String[] argum = new String[] { "-T", testFile, "-l", outputModel, "-i" };
    String output = Evaluation.evaluateModel(classifier, argum);
    int index = output.indexOf("F-Measure");
    if (index >= 0) {
        index = output.indexOf("\n", index);
        int end = output.indexOf("\n", index + 1);
        String line = (output.substring(index, end)).trim();
        StringTokenizer tokenizer = new StringTokenizer(line, " ");
        int count = 0;
        while (tokenizer.hasMoreTokens()) {
            String word = tokenizer.nextToken();
            if (count == 2) {
                System.out.println("PRECISION:" + word);
            }
            if (count == 3) {
                System.out.println("RECALL:" + word);
            }
            if (count == 4) {
                System.out.println("F-MEASURE:" + word);
            }
            count++;
        }
    }
    System.out.println("-----------");
}