Example usage for weka.classifiers Evaluation Evaluation

List of usage examples for weka.classifiers Evaluation Evaluation

Introduction

On this page you can find an example of usage for weka.classifiers Evaluation Evaluation.

Prototype

public Evaluation(Instances data) throws Exception 

Source Link

Usage

From source file:rdt.WekaDemo.java

License:Open Source License

/**
 * Runs 10-fold cross-validation over the training file.
 *
 * The configured filter is applied to the training data first, the
 * classifier is then built on the entire filtered set (for the final
 * model), and a 10-fold CV seeded with 1 populates {@code m_Evaluation}.
 *
 * @throws Exception if filtering, training, or evaluation fails
 */
public void execute() throws Exception {
    // apply the configured filter to the raw training data
    m_Filter.setInputFormat(m_Training);
    final Instances filteredData = Filter.useFilter(m_Training, m_Filter);

    // build the model on the complete filtered set
    m_Classifier.buildClassifier(filteredData);

    // 10-fold cross-validation; RNG seeded with 1 for reproducibility
    m_Evaluation = new Evaluation(filteredData);
    m_Evaluation.crossValidateModel(m_Classifier, filteredData, 10, m_Training.getRandomNumberGenerator(1));
}

From source file:regression.logisticRegression.LogisticRegressionCorrect.java

/**
 * Loads "weka.arff", trains the logistic model on it, classifies every
 * instance (mapping a prediction of 1.0 to class 0.0 and anything else
 * to 1.0), records the classified points and probabilities, and finally
 * prints an evaluation summary.
 *
 * NOTE(review): the model is evaluated on its own training data, so the
 * printed statistics are optimistic — confirm this is intended.
 *
 * Fixes vs. original: the ARFF reader is now closed via
 * try-with-resources (it leaked on every path), and a dead, body-less
 * loop over {@code eval.predictions()} plus its unused local were removed.
 *
 * @param output text area receiving one probability line per instance
 * @throws Exception if the ARFF file cannot be read or the model fails
 */
public void weka(JTextArea output) throws FileNotFoundException, IOException, Exception {
    this.finalPoints = new ArrayList<>();

    Instances instances;
    try (BufferedReader reader = new BufferedReader(new FileReader("weka.arff"))) {
        instances = new Instances(reader);
    }
    instances.setClassIndex(instances.numAttributes() - 1);

    // -R <ridge>  -M <maxIts> (-1 = iterate until convergence)
    String[] options = new String[4];
    options[0] = "-R";
    options[1] = "1.0E-8";
    options[2] = "-M";
    options[3] = "-1";
    logistic.setOptions(options);

    logistic.buildClassifier(instances);

    for (int i = 0; i < instances.numInstances(); i++) {
        weka.core.Instance inst = instances.instance(i);
        // invert the predicted label: 1.0 -> 0.0, anything else -> 1.0
        Double classifiedClass = 1.0;
        if (logistic.classifyInstance(inst) == 1.0) {
            classifiedClass = 0.0;
        }

        System.out.println("classify: " + inst.attribute(0) + "|" + inst.value(0) + "->" + classifiedClass);
        double[] distributions = logistic.distributionForInstance(inst);
        output.append("Dla x= " + inst.value(0) + " prawdopodobiestwo wystpnienia zdarzenia wynosi: "
                + distributions[0] + " zatem naley on do klasy: " + classifiedClass + "\n");
        this.finalPoints.add(new Point(inst.value(0), classifiedClass));
        this.finalProbPoints.add(new Point(inst.value(0), distributions[0]));
        for (int j = 0; j < distributions.length; j++) {
            System.out.println("distribution: " + inst.value(0) + "->" + distributions[j]);
        }
    }

    // evaluate classifier on the training data and print some statistics
    Evaluation eval = new Evaluation(instances);
    eval.evaluateModel(logistic, instances);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:repro.CECheck.java

/**
 * Creates a checker bound to the given dataset and initializes a Weka
 * Evaluation object from it.
 *
 * @param d dataset used to set up the evaluation
 * @throws Exception if the Evaluation cannot be constructed
 */
public CECheck(Instances d) throws Exception {
    dataset = d;
    eval = new Evaluation(dataset);
}

From source file:sentinets.Prediction.java

License:Open Source License

/**
 * Incrementally updates the wrapped SGD classifier with newly labeled
 * instances from the given input file and reports evaluation statistics
 * before, during, and after the update.
 *
 * Steps: (1) filter the unlabeled pool, (2) evaluate the current model on
 * it ("Initial Evaluation"), (3) for each CV fold, feed the training split
 * to {@code updateClassifier} and evaluate on the test split, (4) feed all
 * instances once more and run a "Final Evaluation", then (5) persist the
 * updated model.
 *
 * NOTE(review): the same {@code Evaluation} object is reused across all
 * folds and the final pass, and Weka's Evaluation accumulates statistics,
 * so later summaries include earlier results — confirm this is intended.
 *
 * @param inputFile path of the file with instances to learn from
 * @param metrics   out-parameter; one {fMeasure(0), fMeasure(1),
 *                  weightedFMeasure} triple is appended per evaluation
 * @return human-readable report of all evaluations, or a message when
 *         there were no new instances; exceptions are printed and result
 *         in a partial report
 */
public String updateModel(String inputFile, ArrayList<Double[]> metrics) {
    String output = "";
    this.setInstances(inputFile);
    FilteredClassifier fcls = (FilteredClassifier) this.cls;
    SGD cls = (SGD) fcls.getClassifier();
    Filter filter = fcls.getFilter();
    Instances insAll; // filtered copy of the unlabeled pool
    try {
        insAll = Filter.useFilter(this.unlabled, filter);
        if (insAll.size() > 0) {
            Random rand = new Random(10);
            // fall back to 2 folds when there are fewer than 10 instances
            int folds = 10 > insAll.size() ? 2 : 10;
            Instances randData = new Instances(insAll);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal()) {
                randData.stratify(folds);
            }
            Evaluation eval = new Evaluation(randData);
            eval.evaluateModel(cls, insAll);
            System.out.println("Initial Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            output += "\n====" + "Initial Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            System.out.println("Cross Validated Evaluation");
            output += "\n====" + "Cross Validated Evaluation" + "====\n";
            for (int n = 0; n < folds; n++) {
                Instances train = randData.trainCV(folds, n);
                Instances test = randData.testCV(folds, n);

                // online update: feed each training instance to SGD
                for (int i = 0; i < train.numInstances(); i++) {
                    cls.updateClassifier(train.instance(i));
                }

                eval.evaluateModel(cls, test);
                System.out.println("Cross Validated Evaluation fold: " + n);
                output += "\n====" + "Cross Validated Evaluation fold (" + n + ")====\n";
                System.out.println(eval.toSummaryString());
                System.out.println(eval.toClassDetailsString());
                output += "\n" + eval.toSummaryString();
                output += "\n" + eval.toClassDetailsString();
                metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            }
            // final pass: update on every instance, then evaluate once more
            for (int i = 0; i < insAll.numInstances(); i++) {
                cls.updateClassifier(insAll.instance(i));
            }
            eval.evaluateModel(cls, insAll);
            System.out.println("Final Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            output += "\n====" + "Final Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            // persist the updated classifier back into the wrapper
            fcls.setClassifier(cls);
            String modelFilePath = outputDir + "/" + Utils.getOutDir(Utils.OutDirIndex.MODELS)
                    + "/updatedClassifier.model";
            weka.core.SerializationHelper.write(modelFilePath, fcls);
            output += "\n" + "Updated Model saved at: " + modelFilePath;
        } else {
            output += "No new instances for training the model.";
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return output;
}

From source file:sentinets.TrainModel.java

License:Open Source License

/**
 * Cross-validates the given classifier (10 folds, seed 1) on the shared
 * instance set, prints the resulting statistics, and saves the model.
 *
 * @param c    classifier to evaluate and persist
 * @param name label used in the report and for the saved model
 */
public void trainModel(Classifier c, String name) {
    try {
        Evaluation evaluation = new Evaluation(ins);
        evaluation.crossValidateModel(c, ins, 10, new Random(1));
        System.out.println("****Results of " + name + "****");
        System.out.println(evaluation.toSummaryString());
        System.out.println(evaluation.toClassDetailsString());
        System.out.println(evaluation.toCumulativeMarginDistributionString());
        System.out.println(evaluation.toMatrixString());
        System.out.println("*********************");
        TrainModel.saveModel(c, name);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:soccer.core.SimpleClassifier.java

/**
 * Builds an EnsembleSelection model over a fixed library of base
 * classifiers and reports 5-fold cross-validation statistics for class
 * index 1 (recall, precision, F-measure) plus a summary.
 *
 * Fixes vs. original: the DecisionStump library entry was listed twice
 * and was missing its "weka." package prefix ("classifiers.trees..."),
 * so it could never be resolved; commented-out dead code was removed.
 *
 * @throws IOException if the underlying data cannot be loaded
 * @throws Exception   if filtering, training, or evaluation fails
 */
public void evaluate() throws IOException, Exception {
    Instances data = loader.buildInstances();

    // convert the selected numeric columns to nominal attributes
    NumericToNominal toNominal = new NumericToNominal();
    toNominal.setOptions(new String[] { "-R", "5,6,8,9" });
    toNominal.setInputFormat(data);
    data = Filter.useFilter(data, toNominal);
    data.setClassIndex(6);

    // library of candidate models for ensemble selection
    EnsembleLibrary ensembleLib = new EnsembleLibrary();
    ensembleLib.addModel("weka.classifiers.trees.J48");
    ensembleLib.addModel("weka.classifiers.bayes.NaiveBayes");
    ensembleLib.addModel("weka.classifiers.functions.SMO");
    ensembleLib.addModel("weka.classifiers.meta.AdaBoostM1");
    ensembleLib.addModel("weka.classifiers.meta.LogitBoost");
    // was duplicated and missing the "weka." prefix in the original
    ensembleLib.addModel("weka.classifiers.trees.DecisionStump");
    EnsembleLibrary.saveLibrary(new File("./ensembleLib.model.xml"), ensembleLib, null);

    EnsembleSelection model = new EnsembleSelection();
    model.setOptions(new String[] { "-L", "./ensembleLib.model.xml", // </path/to/modelLibrary>
            "-B", "10", // <numModelBags>
            "-E", "1.0", // <modelRatio>
            "-V", "0.25", // <validationRatio>
            "-H", "100", // <hillClimbIterations>
            "-I", "1.0", // <sortInitialization>
            "-X", "2", // <numFolds>
            "-P", "roc", // <hillclimbMetric>
            "-A", "forward", // <algorithm>
            "-R", "true", // models may be selected more than once
            "-G", "true", // stop adding models when performance degrades
            "-O", "true", // verbose output
            "-S", "1", // random number seed
            "-D", "true" // run in debug mode
    });

    int FOLDS = 5;
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(model, data, FOLDS, new Random(1), new Object[] {});
    System.out.println(model.getClass().getName() + "\n" + "\tRecall:    " + eval.recall(1) + "\n"
            + "\tPrecision: " + eval.precision(1) + "\n" + "\tF-measure: " + eval.fMeasure(1));
    System.out.println(eval.toSummaryString());
}

From source file:SpamDetector.SpamDetector.java

/**
 * Entry point: builds an ARFF file from the spam/not-spam CSVs, trains a
 * CART tree on it, and prints both a training-set evaluation and a
 * 10-fold cross-validation summary.
 *
 * Fixes vs. original: the ARFF reader is now closed (it leaked), and the
 * 10-fold CV uses a fresh Evaluation — Weka's Evaluation accumulates
 * statistics, so reusing the training-set one contaminated the CV summary.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String[] args) throws IOException, Exception {
    ArrayList<ArrayList<String>> notSpam = processCSV("notspam.csv");
    ArrayList<ArrayList<String>> spam = processCSV("spam.csv");

    // generate attributes & data into data.arff
    FeatureExtraction fe = new FeatureExtraction();
    fe.generateArff(spam, notSpam);

    // load the generated ARFF for the CART tree
    Instances data;
    try (BufferedReader br = new BufferedReader(new FileReader("data.arff"))) {
        ArffReader arff = new ArffReader(br);
        data = arff.getData();
    }
    data.setClassIndex(data.numAttributes() - 1);

    SimpleCart tree = new SimpleCart();
    tree.buildClassifier(data);
    System.out.println(tree.toString());

    // evaluation on the training data itself (optimistic estimate)
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString("\n\n\n\nResults\n======\n", false));

    // fresh Evaluation so CV statistics are not mixed with the above
    Evaluation cvEval = new Evaluation(data);
    cvEval.crossValidateModel(tree, data, 10, new Random());
    System.out.println(cvEval.toSummaryString("\n\n\n\n10-Fold\n======\n", false));
}

From source file:statistics.BinaryStatisticsEvaluator.java

/**
 * Trains the requested classifier on the training set and returns the
 * confusion matrix obtained on the test set; also prints the summary,
 * confusion matrix, and per-class details to stdout.
 *
 * Fixes vs. original: the four copy-pasted build/try/catch branches are
 * collapsed into a single path via {@link #createClassifier(String)},
 * and an unknown classifier name now fails fast instead of reaching
 * {@code evaluateModel} with a null model.
 *
 * @param Training_Instances data used to build the model
 * @param Testing_Instances  data used to evaluate the model
 * @param classifier         one of "NB", "DT", "SVM", "KNN"
 * @return the confusion matrix, or null if the classifier name is
 *         unknown or training/evaluation fails
 */
@Override
public double[][] getConfusionMatrix(Instances Training_Instances, Instances Testing_Instances,
        String classifier) {
    Classifier cModel = createClassifier(classifier);
    if (cModel == null) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE,
                "Unknown classifier: {0}", classifier);
        return null;
    }
    try {
        cModel.buildClassifier(Training_Instances);

        // test the model
        Evaluation eTest = new Evaluation(Training_Instances);
        eTest.evaluateModel(cModel, Testing_Instances);

        // print the result
        System.out.println(eTest.toSummaryString());
        System.out.println(eTest.toMatrixString());
        System.out.println(eTest.toClassDetailsString());

        // get the confusion matrix
        return eTest.confusionMatrix();
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

/** Returns a fresh classifier for the given short name, or null if unknown. */
private Classifier createClassifier(String name) {
    if ("NB".equals(name)) {
        return new NaiveBayes();
    } else if ("DT".equals(name)) {
        return new J48();
    } else if ("SVM".equals(name)) {
        return new SMO();
    } else if ("KNN".equals(name)) {
        return new IBk();
    }
    return null;
}

From source file:Statistics.WekaFunctions.java

/**
 * Configures and trains the regression model on the training parameters,
 * then evaluates it on that same data and prints a summary.
 * Any failure is logged rather than propagated.
 */
public void buildLRcls() {
    try {
        System.out.println("building classifier");
        // set the model options (-S 0 -D)
        model.setOptions(weka.core.Utils.splitOptions("-S 0 -D"));
        model.buildClassifier(trainParameters);

        // evaluate on the training data and report the statistics
        Evaluation evaluation = new Evaluation(trainParameters);
        evaluation.evaluateModel(model, trainParameters);
        System.out.println(evaluation.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(WekaFunctions.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:tcc.FeatureExtraction.java

/**
 * Converts the Tamura/Haralick/moments CSV to ARFF, normalizes the
 * attributes, then runs 10-fold cross-validated k-NN for k = 1..50 and
 * writes the AUC (class index 1) for each k to /root/TCC/Resultados/knn.csv.
 *
 * Fixes vs. original: the ARFF reader and the CSV writer are managed by
 * try-with-resources (the writer leaked if any iteration threw), and the
 * redundant buildClassifier call before crossValidateModel was removed —
 * crossValidateModel trains its own copies per fold.
 *
 * @throws IOException if any of the files cannot be read or written
 */
public void knn() throws IOException {
    // parse CSV to ARFF
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.csv"));
    Instances inst = loader.getDataSet();

    ArffSaver saver = new ArffSaver();
    saver.setInstances(inst);
    saver.setFile(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.setDestination(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.writeBatch();

    // reload the saved ARFF as the working dataset
    Instances data;
    try (BufferedReader reader = new BufferedReader(
            new FileReader("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"))) {
        data = new Instances(reader);
    }
    data.setClassIndex(data.numAttributes() - 1);

    // normalize all attributes
    try {
        Normalize norm = new Normalize();
        norm.setInputFormat(data);
        data = Filter.useFilter(data, norm);
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
    }

    try (BufferedWriter bw = new BufferedWriter(new FileWriter(new File("/root/TCC/Resultados/knn.csv")))) {
        for (int i = 1; i < 51; i++) {
            // instantiate the classifier with k = i
            IBk knn = new IBk();
            knn.setKNN(i);
            try {
                Evaluation eval = new Evaluation(data);
                eval.crossValidateModel(knn, data, 10, new Random(1), new Object[] {});
                double auc = eval.areaUnderROC(1);
                System.out.println(auc);
                bw.write(Double.toString(auc));
                bw.newLine();
            } catch (Exception ex) {
                Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}