Example usage for weka.classifiers Evaluation numInstances

List of usage examples for weka.classifiers Evaluation numInstances

Introduction

On this page you can find example usage for weka.classifiers.Evaluation.numInstances().

Prototype

public final double numInstances() 

Document

Gets the number of test instances that had a known class value (actually the sum of the weights of test instances with known class value).
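
For quick reference, here is a minimal, self-contained sketch of where numInstances() typically appears in an evaluation run; it is not taken from the examples below. The dataset path "data.arff", the choice of J48, and the 10-fold cross-validation setup are placeholder assumptions for illustration.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumInstancesSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and use the last attribute as the class ("data.arff" is a placeholder)
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a classifier; the Evaluation object accumulates the statistics
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // numInstances() is the total weight of test instances with a known class value,
        // so correct() + incorrect() adds up to it (the plain instance count for unweighted data)
        System.out.println("Instances evaluated: " + eval.numInstances());
        System.out.println("Accuracy (%): " + 100.0 * eval.correct() / eval.numInstances());
    }
}

Because numInstances() is a weighted count of the evaluated test instances, rates such as accuracy should be computed against it (as the examples below do with correct() and incorrect()) rather than against the raw size of the dataset.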

Usage

From source file:algoritmogeneticocluster.WekaSimulation.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    //        classifier.buildClassifier(trainset);

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());

    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:cezeri.feature.selection.FeatureSelectionInfluence.java

public static Evaluation getEvaluation(Instances randData, Classifier model, int folds) {
    Evaluation eval = null;
    try {
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(model);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
            //                double[] prediction = eval.evaluateModel(clsCopy, test);
            //                double[] original = getAttributeValues(test);
            //                double[][] d = new double[2][prediction.length];
            //                d[0] = prediction;
            //                d[1] = original;
            //                CMatrix f1 = new CMatrix(d);
        }

        // output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println(
                "Classifier: " + model.getClass().getName() + " " + Utils.joinOptions(model.getOptions()));
        System.out.println("Dataset: " + randData.relationName());
        System.out.println("Folds: " + folds);
        System.out.println();
        System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ==="));
        System.out.println(eval.toMatrixString("Confusion Matrix"));

        double acc = eval.correct() / eval.numInstances() * 100;
        System.out.println("correct:" + eval.correct() + "  " + acc + "%");
    } catch (Exception ex) {

        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:cezeri.feature.selection.FeatureSelectionInfluence.java

public static Influence[] getMostDiscriminativeFeature(String filePath, Classifier model) {
    Influence[] ret = null;
    try {
        Instances data = DataSource.read(filePath);
        ret = new Influence[data.numAttributes() - 1];
        data.setClassIndex(data.numAttributes() - 1);
        // other options
        int seed = 1;
        int folds = 10;
        // randomize data
        Instances randData = new Instances(data);
        Random rand = new Random(seed);
        randData.randomize(rand);
        Evaluation evalBase = getEvaluation(randData, model, folds);
        double accBase = evalBase.correct() / evalBase.numInstances() * 100;
        double nf = randData.numAttributes();

        for (int j = 0; j < nf - 1; j++) {
            ret[j] = new Influence();
            String str = randData.attribute(j).name();
            // Evaluate on a copy with attribute j removed; re-inserting a deleted
            // attribute into randData would only restore missing values, so the
            // original data is left untouched instead.
            Instances reduced = new Instances(randData);
            reduced.deleteAttributeAt(j);
            Evaluation evalTemp = getEvaluation(reduced, model, folds);
            double accTemp = evalTemp.correct() / evalTemp.numInstances() * 100;
            double tempInfluence = accBase - accTemp;
            ret[j].attributeName = str;
            ret[j].infVal = tempInfluence;
        }
        sortInfluenceArray(ret);
    } catch (Exception ex) {
        Logger.getLogger(FeatureSelectionInfluence.class.getName()).log(Level.SEVERE, null, ex);
    }
    return ret;
}

From source file:Controller.CtlDataMining.java

public String redBayesiana(Instances data) {
    try {
        // Create a Naive Bayes classifier
        NaiveBayes nb = new NaiveBayes();

        // Build the classifier on the data
        nb.buildClassifier(data);

        // Create an Evaluation object to validate the Naive Bayes model
        Evaluation evalB = new Evaluation(data);

        /* Run the Naive Bayes classifier through 10-fold cross-validation,
        using a random seed of 1 to shuffle the data before it is split
        into the 10 folds. */
        evalB.crossValidateModel(nb, data, 10, new Random(1));

        String resBay = "<br><br><b><center>Resultados NaiveBayes</center>" + "<br>========<br>"
                + "Modelo generado indica los siguientes resultados:" + "<br>========<br></b>";
        // Collect the results
        resBay = resBay
                + ("<b>1. Numero de instancias clasificadas:</b> " + (int) evalB.numInstances() + "<br>");
        resBay = resBay + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalB.pctCorrect()) + "%<br>");
        resBay = resBay + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b> "
                + (int) evalB.correct() + "<br>");
        resBay = resBay + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalB.pctIncorrect()) + "%<br>");
        resBay = resBay + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalB.incorrect() + "<br>");
        resBay = resBay + ("<b>6. Media del error absoluto:</b> " + formato.format(evalB.meanAbsoluteError())
                + "%<br>");
        resBay = resBay
                + ("<b>7. " + evalB.toMatrixString("Matriz de " + "confusion</b>").replace("\n", "<br>"));

        return resBay;

    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}

From source file:Controller.CtlDataMining.java

public String arbolJ48(Instances data) {
    try {
        // Create a J48 classifier
        J48 j48 = new J48();
        // Build the J48 classifier on the data
        j48.buildClassifier(data);

        // Create an Evaluation object to validate the J48 model
        Evaluation evalJ48 = new Evaluation(data);

        /* Run the J48 classifier through 10-fold cross-validation,
        using a random seed of 1. */
        evalJ48.crossValidateModel(j48, data, 10, new Random(1));
        // Collect the results
        String resJ48 = "<br><b><center>Resultados Arbol de decision J48"
                + "</center><br>========<br>Modelo generado indica los "
                + "siguientes resultados:<br>========<br></b>";

        resJ48 = resJ48
                + ("<b>1. Numero de instancias clasificadas:</b> " + (int) evalJ48.numInstances() + "<br>");
        resJ48 = resJ48 + ("<b>2. Porcentaje de instancias correctamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctCorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>3. Numero de instancias correctamente " + "clasificadas:</b>"
                + (int) evalJ48.correct() + "<br>");
        resJ48 = resJ48 + ("<b>4. Porcentaje de instancias incorrectamente " + "clasificadas:</b> "
                + formato.format(evalJ48.pctIncorrect()) + "<br>");
        resJ48 = resJ48 + ("<b>5. Numero de instancias incorrectamente " + "clasificadas:</b> "
                + (int) evalJ48.incorrect() + "<br>");
        resJ48 = resJ48 + ("<b>6. Media del error absoluto:</b> " + formato.format(evalJ48.meanAbsoluteError())
                + "<br>");
        resJ48 = resJ48
                + ("<b>7. " + evalJ48.toMatrixString("Matriz de" + " confusion</b>").replace("\n", "<br>"));

        // Display the generated tree
        // Create a temporary JFrame
        final javax.swing.JFrame jf = new javax.swing.JFrame("Arbol de decision: J48");
        /* Set the frame size */
        jf.setSize(500, 400);
        /* Use a BorderLayout */
        jf.getContentPane().setLayout(new BorderLayout());
        /* Instantiate the tree visualizer for the J48 model.
        Parameters: (listener, tree in dot format, node placer).
        PlaceNode2 places each node evenly below its parent. */
        TreeVisualizer tv = new TreeVisualizer(null, j48.graph(), new PlaceNode2());
        /* Add the tree, centered */
        jf.getContentPane().add(tv, BorderLayout.CENTER);
        /* Add a window listener to dispose the frame on close */
        jf.addWindowListener(new java.awt.event.WindowAdapter() {
            @Override
            public void windowClosing(java.awt.event.WindowEvent e) {
                jf.dispose();
            }
        });
        /* Show the frame */
        jf.setVisible(true);
        /* Fit the tree to the frame width */
        tv.fitToScreen();

        return resJ48;

    } catch (Exception e) {
        return "El error es" + e.getMessage();

    }
}

From source file:dkpro.similarity.experiments.rte.util.Evaluator.java

License:Open Source License

public static void runClassifier(WekaClassifier wekaClassifier, Dataset trainDataset, Dataset testDataset)
        throws Exception {
    Classifier baseClassifier = ClassifierSimilarityMeasure.getClassifier(wekaClassifier);

    // Set up the random number generator
    long seed = new Date().getTime();
    Random random = new Random(seed);

    // Add IDs to the train instances and get the instances
    AddID.main(new String[] { "-i", MODELS_DIR + "/" + trainDataset.toString() + ".arff", "-o",
            MODELS_DIR + "/" + trainDataset.toString() + "-plusIDs.arff" });
    Instances train = DataSource.read(MODELS_DIR + "/" + trainDataset.toString() + "-plusIDs.arff");
    train.setClassIndex(train.numAttributes() - 1);

    // Add IDs to the test instances and get the instances
    AddID.main(new String[] { "-i", MODELS_DIR + "/" + testDataset.toString() + ".arff", "-o",
            MODELS_DIR + "/" + testDataset.toString() + "-plusIDs.arff" });
    Instances test = DataSource.read(MODELS_DIR + "/" + testDataset.toString() + "-plusIDs.arff");
    test.setClassIndex(test.numAttributes() - 1);

    // Instantiate the Remove filter
    Remove removeIDFilter = new Remove();
    removeIDFilter.setAttributeIndices("first");

    // Randomize the data
    test.randomize(random);

    // Apply log filter
    //       Filter logFilter = new LogFilter();
    //       logFilter.setInputFormat(train);
    //       train = Filter.useFilter(train, logFilter);        
    //       logFilter.setInputFormat(test);
    //       test = Filter.useFilter(test, logFilter);

    // Copy the classifier
    Classifier classifier = AbstractClassifier.makeCopy(baseClassifier);

    // Instantiate the FilteredClassifier
    FilteredClassifier filteredClassifier = new FilteredClassifier();
    filteredClassifier.setFilter(removeIDFilter);
    filteredClassifier.setClassifier(classifier);

    // Build the classifier
    filteredClassifier.buildClassifier(train);

    // Prepare the output buffer 
    AbstractOutput output = new PlainText();
    output.setBuffer(new StringBuffer());
    output.setHeader(test);
    output.setAttributes("first");

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(filteredClassifier, test, output);

    // Convert predictions to CSV
    // Format: inst#, actual, predicted, error, probability, (ID)
    String[] scores = new String[(int) eval.numInstances()];
    double[] probabilities = new double[(int) eval.numInstances()];
    for (String line : output.getBuffer().toString().split("\n")) {
        String[] linesplit = line.split("\\s+");

        // If the prediction is an error, the line contains an extra "+" flag,
        // which shifts the remaining fields (probability and ID) by one index

        int id;
        String expectedValue, classification;
        double probability;

        if (line.contains("+")) {
            id = Integer.parseInt(linesplit[6].substring(1, linesplit[6].length() - 1));
            expectedValue = linesplit[2].substring(2);
            classification = linesplit[3].substring(2);
            probability = Double.parseDouble(linesplit[5]);
        } else {
            id = Integer.parseInt(linesplit[5].substring(1, linesplit[5].length() - 1));
            expectedValue = linesplit[2].substring(2);
            classification = linesplit[3].substring(2);
            probability = Double.parseDouble(linesplit[4]);
        }

        scores[id - 1] = classification;
        probabilities[id - 1] = probability;
    }

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());

    // Output classifications
    StringBuilder sb = new StringBuilder();
    for (String score : scores)
        sb.append(score.toString() + LF);

    FileUtils.writeStringToFile(new File(OUTPUT_DIR + "/" + testDataset.toString() + "/"
            + wekaClassifier.toString() + "/" + testDataset.toString() + ".csv"), sb.toString());

    // Output probabilities
    sb = new StringBuilder();
    for (Double probability : probabilities)
        sb.append(probability.toString() + LF);

    FileUtils.writeStringToFile(new File(OUTPUT_DIR + "/" + testDataset.toString() + "/"
            + wekaClassifier.toString() + "/" + testDataset.toString() + ".probabilities.csv"), sb.toString());

    // Output predictions
    FileUtils.writeStringToFile(new File(OUTPUT_DIR + "/" + testDataset.toString() + "/"
            + wekaClassifier.toString() + "/" + testDataset.toString() + ".predictions.txt"),
            output.getBuffer().toString());

    // Output meta information
    sb = new StringBuilder();
    sb.append(classifier.toString() + LF);
    sb.append(eval.toSummaryString() + LF);
    sb.append(eval.toMatrixString() + LF);

    FileUtils.writeStringToFile(new File(OUTPUT_DIR + "/" + testDataset.toString() + "/"
            + wekaClassifier.toString() + "/" + testDataset.toString() + ".meta.txt"), sb.toString());
}

From source file:ffnn.FFNNTubesAI.java

@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0;
    Scanner scan = new Scanner(System.in);

    output_layer = i.numDistinctValues(i.classIndex()); //3
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        //            System.out.println(temp_instances);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    //temp_instances.randomize(temp_instances.getRandomNumberGenerator(1));
    //System.out.println(temp_instances);
    input_layer = temp_instances.numAttributes() - output_layer; //4
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // one extra column for the bias input

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;

    double last_update = System.nanoTime();

    // how many iterations
    //        for( long itr = 0; last_error_rate > 0.001; ++ itr ){
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break;
        }
        long time = System.nanoTime();
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);

            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }

            if (accry > 0)
                last_error_rate = eval.errorRate();

            // restart after 30 seconds without improvement
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }

            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // forward pass
            temp_instance = temp_instances.instance(j);

            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }

            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward pass

            // error layer 2
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err;
                error_output.setEntry(0, k, err);
            }

            // back propagation layer 2
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }

            for (int x = 0; x < output_layer; ++x) {
                double correction = learning_rate * error_output.getEntry(0, x); // treat the bias input as 1
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // error layer 1
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // back propagation layer 1
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}

From source file:meddle.TrainModelByDomainOS.java

License:Open Source License

/**
 * Do evaluation on the trained classifier/model, including the summary, false
 * positive/negative rate, AUC, and running time.
 *
 * @param classifier
 *            - the trained classifier
 * @param domainOS
 *            - the domain name (plus OS)
 * @param tras
 *            - the training instances used for cross-validation
 * @param mem
 *            - the evaluation measures object to populate
 */
public static MetaEvaluationMeasures doEvaluation(Classifier classifier, String domainOS, Instances tras,
        MetaEvaluationMeasures mem) {
    try {
        Evaluation evaluation = new Evaluation(tras);
        evaluation.crossValidateModel(classifier, tras, 10, new Random(1));
        mem.numInstance = evaluation.numInstances();
        double M = evaluation.numTruePositives(1) + evaluation.numFalseNegatives(1);
        mem.numPositive = (int) M;
        mem.AUC = evaluation.areaUnderROC(1);
        mem.numCorrectlyClassified = (int) evaluation.correct();
        mem.accuracy = 1.0 * mem.numCorrectlyClassified / mem.numInstance;
        mem.falseNegativeRate = evaluation.falseNegativeRate(1);
        mem.falsePositiveRate = evaluation.falsePositiveRate(1);
        mem.fMeasure = evaluation.fMeasure(1);
        double[][] cmMatrix = evaluation.confusionMatrix();
        mem.confusionMatrix = cmMatrix;
        mem.TP = evaluation.numTruePositives(1);
        mem.TN = evaluation.numTrueNegatives(1);
        mem.FP = evaluation.numFalsePositives(1);
        mem.FN = evaluation.numFalseNegatives(1);
    } catch (Exception e) {
        e.printStackTrace();
    }

    return mem;
}

From source file:NaiveBayesPckge.NaiveBayesMain.java

public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of Training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // Summary of 10-fold cross-validation
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;

    //        System.out.println("Accuracy: " + df.format(accuracy) + " %");
    //        System.out.println("Error rate: " + df.format(errorRates) + " %"); // Printing Training Mean root squared error
}

From source file:org.openml.webapplication.io.Output.java

License:Open Source License

public static Map<Metric, MetricScore> evaluatorToMap(Evaluation evaluator, int classes, TaskType task)
        throws Exception {
    Map<Metric, MetricScore> m = new HashMap<Metric, MetricScore>();

    if (task == TaskType.REGRESSION) {

        // here all measures for regression tasks
        m.put(new Metric("mean_absolute_error", "openml.evaluation.mean_absolute_error(1.0)"),
                new MetricScore(evaluator.meanAbsoluteError(), (int) evaluator.numInstances()));
        m.put(new Metric("mean_prior_absolute_error", "openml.evaluation.mean_prior_absolute_error(1.0)"),
                new MetricScore(evaluator.meanPriorAbsoluteError(), (int) evaluator.numInstances()));
        m.put(new Metric("root_mean_squared_error", "openml.evaluation.root_mean_squared_error(1.0)"),
                new MetricScore(evaluator.rootMeanSquaredError(), (int) evaluator.numInstances()));
        m.put(new Metric("root_mean_prior_squared_error",
                "openml.evaluation.root_mean_prior_squared_error(1.0)"),
                new MetricScore(evaluator.rootMeanPriorSquaredError(), (int) evaluator.numInstances()));
        m.put(new Metric("relative_absolute_error", "openml.evaluation.relative_absolute_error(1.0)"),
                new MetricScore(evaluator.relativeAbsoluteError() / 100, (int) evaluator.numInstances()));
        m.put(new Metric("root_relative_squared_error", "openml.evaluation.root_relative_squared_error(1.0)"),
                new MetricScore(evaluator.rootRelativeSquaredError() / 100, (int) evaluator.numInstances()));

    } else if (task == TaskType.CLASSIFICATION || task == TaskType.LEARNINGCURVE
            || task == TaskType.TESTTHENTRAIN) {

        m.put(new Metric("average_cost", "openml.evaluation.average_cost(1.0)"),
                new MetricScore(evaluator.avgCost(), (int) evaluator.numInstances()));
        m.put(new Metric("total_cost", "openml.evaluation.total_cost(1.0)"),
                new MetricScore(evaluator.totalCost(), (int) evaluator.numInstances()));

        m.put(new Metric("mean_absolute_error", "openml.evaluation.mean_absolute_error(1.0)"),
                new MetricScore(evaluator.meanAbsoluteError(), (int) evaluator.numInstances()));
        m.put(new Metric("mean_prior_absolute_error", "openml.evaluation.mean_prior_absolute_error(1.0)"),
                new MetricScore(evaluator.meanPriorAbsoluteError(), (int) evaluator.numInstances()));
        m.put(new Metric("root_mean_squared_error", "openml.evaluation.root_mean_squared_error(1.0)"),
                new MetricScore(evaluator.rootMeanSquaredError(), (int) evaluator.numInstances()));
        m.put(new Metric("root_mean_prior_squared_error",
                "openml.evaluation.root_mean_prior_squared_error(1.0)"),
                new MetricScore(evaluator.rootMeanPriorSquaredError(), (int) evaluator.numInstances()));
        m.put(new Metric("relative_absolute_error", "openml.evaluation.relative_absolute_error(1.0)"),
                new MetricScore(evaluator.relativeAbsoluteError() / 100, (int) evaluator.numInstances()));
        m.put(new Metric("root_relative_squared_error", "openml.evaluation.root_relative_squared_error(1.0)"),
                new MetricScore(evaluator.rootRelativeSquaredError() / 100, (int) evaluator.numInstances()));

        m.put(new Metric("prior_entropy", "openml.evaluation.prior_entropy(1.0)"),
                new MetricScore(evaluator.priorEntropy(), (int) evaluator.numInstances()));
        m.put(new Metric("kb_relative_information_score",
                "openml.evaluation.kb_relative_information_score(1.0)"),
                new MetricScore(evaluator.KBRelativeInformation() / 100, (int) evaluator.numInstances()));

        Double[] precision = new Double[classes];
        Double[] recall = new Double[classes];
        Double[] auroc = new Double[classes];
        Double[] fMeasure = new Double[classes];
        Double[] instancesPerClass = new Double[classes];
        double[][] confusionMatrix = evaluator.confusionMatrix();
        for (int i = 0; i < classes; ++i) {
            precision[i] = evaluator.precision(i);
            recall[i] = evaluator.recall(i);
            auroc[i] = evaluator.areaUnderROC(i);
            fMeasure[i] = evaluator.fMeasure(i);
            instancesPerClass[i] = 0.0;
            for (int j = 0; j < classes; ++j) {
                instancesPerClass[i] += confusionMatrix[i][j];
            }
        }

        m.put(new Metric("predictive_accuracy", "openml.evaluation.predictive_accuracy(1.0)"),
                new MetricScore(evaluator.pctCorrect() / 100, (int) evaluator.numInstances()));
        m.put(new Metric("kappa", "openml.evaluation.kappa(1.0)"),
                new MetricScore(evaluator.kappa(), (int) evaluator.numInstances()));

        m.put(new Metric("number_of_instances", "openml.evaluation.number_of_instances(1.0)"),
                new MetricScore(evaluator.numInstances(), instancesPerClass, (int) evaluator.numInstances()));

        m.put(new Metric("precision", "openml.evaluation.precision(1.0)"),
                new MetricScore(evaluator.weightedPrecision(), precision, (int) evaluator.numInstances()));
        m.put(new Metric("recall", "openml.evaluation.recall(1.0)"),
                new MetricScore(evaluator.weightedRecall(), recall, (int) evaluator.numInstances()));
        m.put(new Metric("f_measure", "openml.evaluation.f_measure(1.0)"),
                new MetricScore(evaluator.weightedFMeasure(), fMeasure, (int) evaluator.numInstances()));
        if (Utils.isMissingValue(evaluator.weightedAreaUnderROC()) == false) {
            m.put(new Metric("area_under_roc_curve", "openml.evaluation.area_under_roc_curve(1.0)"),
                    new MetricScore(evaluator.weightedAreaUnderROC(), auroc, (int) evaluator.numInstances()));
        }
        m.put(new Metric("confusion_matrix", "openml.evaluation.confusion_matrix(1.0)"),
                new MetricScore(confusionMatrix));
    }
    return m;
}