Example usage for the weka.classifiers.Evaluation(Instances) constructor

Introduction

This page collects example usages of the weka.classifiers.Evaluation(Instances) constructor, drawn from open-source projects.

Prototype

public Evaluation(Instances data) throws Exception 
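
This constructor initializes the evaluation with the supplied training data, which Weka uses to determine the dataset structure and the class priors. Below is a minimal sketch of the typical pattern; the dataset path, the J48 classifier, and the 66/34 split are illustrative assumptions, not part of any example on this page.

import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset (placeholder path) and set the class attribute
        Instances data = DataSource.read("data/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);
        data.randomize(new Random(1));

        // Simple 66/34 train/test split
        int trainSize = (int) Math.round(data.numInstances() * 0.66);
        Instances train = new Instances(data, 0, trainSize);
        Instances test = new Instances(data, trainSize, data.numInstances() - trainSize);

        Classifier cls = new J48();
        cls.buildClassifier(train);

        // Construct the Evaluation from the training data, then evaluate on the test set
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(cls, test);
        System.out.println(eval.toSummaryString());
    }
}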

Usage

From source file:es.upm.dit.gsi.barmas.launcher.WekaClassifiersValidator.java

License:Open Source License

/**
 * Evaluates the given classifier after masking the first {@code leba}
 * attributes of the test data as missing values.
 *
 * @param cls the classifier to evaluate
 * @param trainingData data used to initialize the Evaluation (class priors)
 * @param testData data the model is evaluated on
 * @param leba number of leading attributes to set to missing in the test data
 * @return [0] = pctCorrect, [1] = pctIncorrect (as fractions in [0, 1])
 * @throws Exception if evaluation fails
 */
public double[] getValidation(Classifier cls, Instances trainingData, Instances testData, int leba)
        throws Exception {

    Instances testDataWithLEBA = new Instances(testData);

    // Mask the first leba attributes as missing (the last attribute, the class, is skipped)
    for (int j = 0; j < leba; j++) {
        if (j < testDataWithLEBA.numAttributes() - 1) {
            for (int i = 0; i < testDataWithLEBA.numInstances(); i++) {
                testDataWithLEBA.instance(i).setMissing(j);
            }
        }
    }

    Evaluation eval;
    try {
        eval = new Evaluation(trainingData);
        logger.fine("Evaluating model with leba: " + leba);
        eval.evaluateModel(cls, testDataWithLEBA);

        double[] results = new double[2];
        results[0] = eval.pctCorrect() / 100;
        results[1] = eval.pctIncorrect() / 100;
        return results;
    } catch (Exception e) {
        logger.severe("Problems evaluating model for " + cls.getClass().getSimpleName());
        logger.severe(e.getMessage());
        e.printStackTrace();
        throw e;
    }
}

From source file:examples.Pair.java

License:Open Source License

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {

    if (args.length != 1) {
        System.out.println("Requires path to the dataset as the first and only argument");
        return;
    }

    final String datasetPath = args[0];

    // Create classifiers
    MultiStageCascading msc = new MultiStageCascading();
    J48 classifier1 = new J48();
    IBk knn = new IBk(3);

    // Set sequence of classifiers
    msc.setClassifiers(new Classifier[] { classifier1, new NBTree() });
    msc.setDebug(true);
    // Set the classifier that handles any instance the other classifiers could not classify
    msc.setLastClassifier(knn);

    // First classifier will have confidence threshold 0.95 and the second one 0.97
    msc.setConfidenceThresholds("0.95,0.97");
    // 80% of the training instances will be randomly selected to train each classifier
    msc.setPercentTrainingInstances(0.8);

    Instances dataset = DataSource.read(datasetPath);
    dataset.setClassIndex(dataset.numAttributes() - 1);

    // Create test and training sets
    Pair<Instances, Instances> sets = seprateTestAndTrainingSets(dataset, 0.7);
    Instances trainingSet = sets.getFirst();
    Instances testSet = sets.getSecond();

    // Build cascade classifier
    msc.buildClassifier(trainingSet);

    // Evaluate created classifier
    Evaluation eval = new Evaluation(trainingSet);
    eval.evaluateModel(msc, testSet);
    System.out.println(eval.toSummaryString("\nResults\n\n", false));
}

From source file:experimentalclassifier.ExperimentalClassifier.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {

    DataSource source = new DataSource("data/iris.csv");

    Instances data = source.getDataSet();

    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    data.randomize(new Random()); // shuffle with an unseeded RNG (results vary between runs)

    // RemovePercentage with -P 30 removes 30% of the instances, keeping 70% for training
    String[] options = weka.core.Utils.splitOptions("-P 30");
    RemovePercentage remove = new RemovePercentage();
    remove.setOptions(options);
    remove.setInputFormat(data);
    Instances train = Filter.useFilter(data, remove);

    // Inverting the selection yields the removed 30% as the test set
    remove.setInvertSelection(true);
    remove.setInputFormat(data);
    Instances test = Filter.useFilter(data, remove);

    Classifier classifier = new HardCodedClassifier();
    classifier.buildClassifier(train);//Currently, this does nothing
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(classifier, test);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:expshell.ExpShell.java

/**
 * @param args the command line arguments
 * @throws java.lang.Exception
 */
public static void main(String[] args) throws Exception {
    String file = "C:\\Users\\YH Jonathan Kwok\\Documents\\NetBeansProjects\\ExpShell\\src\\expshell\\iris.csv";

    DataSource source = new DataSource(file);
    Instances data = source.getDataSet();

    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    //Randomize it
    data.randomize(new Random(1));

    // RemovePercentage with percentage 70 removes 70% of the data, so only 30%
    // is kept for training; the inverted selection (70%) becomes the test set
    RemovePercentage rp = new RemovePercentage();
    rp.setPercentage(70);

    rp.setInputFormat(data);
    Instances training = Filter.useFilter(data, rp);

    rp.setInvertSelection(true);
    rp.setInputFormat(data);
    Instances test = Filter.useFilter(data, rp);

    // Standardize the data (zero mean, unit variance); a Weka batch filter
    // computes its statistics from the first batch it processes, so the
    // training set is filtered first and the test set reuses those statistics
    Standardize filter = new Standardize();
    filter.setInputFormat(training);

    Instances newTraining = Filter.useFilter(training, filter);
    Instances newTest = Filter.useFilter(test, filter);

    //Part 5 - build and evaluate the classifier (the variable is named knn, but it holds a NeuralClassifier)
    Classifier knn = new NeuralClassifier();
    knn.buildClassifier(newTraining);
    Evaluation eval = new Evaluation(newTraining);
    eval.evaluateModel(knn, newTest);

    System.out.println(eval.toSummaryString("***** Overall results: *****", false));

}

From source file:eyetracker.MLPProcessor.java

public MLPProcessor() {
    try {
        FileReader fr = new FileReader("trainingData.arff");
        Instances training = new Instances(fr);
        training.setClassIndex(training.numAttributes() - 1);
        mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 5"));
        mlp.buildClassifier(training);

        // Note: the training file is re-read and used as the test set,
        // so the summary below reports accuracy on the training data
        FileReader tr = new FileReader("trainingData.arff");
        Instances testdata = new Instances(tr);
        inst = testdata;
        testdata.setClassIndex(testdata.numAttributes() - 1);
        Evaluation eval = new Evaluation(training);
        eval.evaluateModel(mlp, testdata);
        System.out.println(eval.toSummaryString("\nResults\n*******\n", false));
        tr.close();
        fr.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:farm_ads.MyClassifier.java

public Evaluation evaluationModel(Instances train, Instances test, Classifier classifier) throws Exception {
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(classifier, test);
    return eval;
}
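
A hypothetical call site for this helper; the train/test Instances and the J48 classifier are illustrative assumptions:

    MyClassifier mc = new MyClassifier();
    Evaluation eval = mc.evaluationModel(train, test, new J48());
    System.out.println(eval.toSummaryString("\nResults\n\n", false));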

From source file:ffnn.FFNN.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    FFNNTubesAI cls;
    Scanner scan = new Scanner(System.in);
    System.out.print("new / read? (n/r)");
    if (scan.next().equals("n")) {
        cls = new FFNNTubesAI();
    } else {
        cls = (FFNNTubesAI) TucilWeka.readModel();
    }
    int temp;
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\Team.arff");
    //Display the attributes so the user can choose the class index
    for (int i = 0; i < data.numAttributes(); i++) {
        System.out.println(i + ". " + data.attribute(i));
    }
    System.out.print("Class Index : ");
    temp = scan.nextInt();
    data.setClassIndex(temp);
    data = preprocess(data);
    System.out.println(data);

    System.out.print("full train? (y/n)");
    if (scan.next().equals("y")) {
        try {
            cls.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(FFNNTubesAI.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    int fold = 10;

    //FFNNTubesAI.printMatrix(cls.weight1, cls.input_layer+1, cls.hidden_layer);
    //FFNNTubesAI.printMatrix(cls.weight2, cls.hidden_layer, cls.output_layer);
    //FFNNTubesAI.printMatrix(cls.bias2, 1, cls.output_layer);
    Evaluation eval = new Evaluation(data);
    System.out.print("eval/10-fold? (e/f)");
    if (scan.next().equals("e")) {
        eval.evaluateModel(cls, data);
    } else {
        eval.crossValidateModel(cls, data, fold, new Random(1));
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:ffnn.FFNNTubesAI.java

@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;//  w  ww  .jav a 2  s  .  co m
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); // number of class values (3 here)
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        //            System.out.println(temp_instances);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    //temp_instances.randomize(temp_instances.getRandomNumberGenerator(1));
    //System.out.println(temp_instances);
    input_layer = temp_instances.numAttributes() - output_layer; // number of input attributes (4 here)
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // +1 column for the bias input

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;

    double last_update = System.nanoTime();

    // number of training iterations (the commented-out variant loops until the error rate drops below 0.001)
    //        for( long itr = 0; last_error_rate > 0.001; ++ itr ){
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }
        long time = System.nanoTime();
        // Evaluate progress roughly every 2 seconds
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);

            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }

            if (accry > 0)
                last_error_rate = eval.errorRate();

            // restart if there has been no improvement for 30 seconds
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }

            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // forward pass
            temp_instance = temp_instances.instance(j);

            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }

            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward pass (error backpropagation)

            // error layer 2
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }

            // back propagation layer 2
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }

            for (int x = 0; x < output_layer; ++x) {
                double correction = learning_rate * error_output.getEntry(0, x); // the bias input is treated as 1
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // error layer 1
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // back propagation layer 1
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}

From source file:FFNN.MultiplePerceptron.java

public static void main(String args[]) throws Exception {
    //        System.out.println("input jumlah layer 0/1 :");
    //        Scanner input = new Scanner(System.in);
    //        int layer = input.nextInt();
    //        System.out.println("input learning rate");
    //        double rate = input.nextDouble();
    //        int hidden = 0;
    //        if(layer==1){
    //            System.out.println("input jumlah neuron di hidden layer");
    //            hidden = input.nextInt();
    //        }/* www .j  a v a 2 s  .  com*/
    //        
    //        System.out.print("Masukkan nama file : ");
    //        String filename = input.next();
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\iris.arff"));
    Instances train = source.getDataSet();
    //        Normalize nm = new Normalize();
    //        nm.setInputFormat(train);
    //        train = Filter.useFilter(train, nm);
    for (int i = 0; i < train.numAttributes(); i++)
        System.out.println(i + ". " + train.attribute(i).name());
    System.out.print("Masukkan indeks kelas : ");
    //int classIdx = input.nextInt();
    train.setClassIndex(train.numAttributes() - 1);
    MultiplePerceptron mlp = new MultiplePerceptron(10000, 1, 13, train);
    mlp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    // Note: the model is evaluated on its own training data
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
    //        System.out.println(eval.toMatrixString());
}

From source file:ffnn.TucilWeka.java

public static Evaluation crossValidation(Instances data) {
    //10-fold cross validation
    Evaluation eval = null;
    try {

        eval = new Evaluation(data);
        Classifier cls = new FFNNTubesAI();
        // Note: this check can never trigger, since 'new' either returns an object or throws
        if (cls == null) {
            System.out.println("MODEL CANNOT BE USED");
        } else {
            System.out.println("MODEL IS USED");
        }
        cls.buildClassifier(data);
        //crossValidateModel parameters:
        //param 1 = the classifier (here FFNNTubesAI)
        //param 2 = the Instances data
        //param 3 = number of folds
        //param 4 = random number generator (seed)

        eval.crossValidateModel(cls, data, 10, new Random(1));
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
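
A hypothetical call site for this method; the dataset path is taken from the main method above, and the class-index setup is an assumption:

    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\Team.arff");
    data.setClassIndex(data.numAttributes() - 1);
    Evaluation eval = crossValidation(data);
    System.out.println(eval.toSummaryString());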