Example usage for weka.classifiers Evaluation predictions

List of usage examples for weka.classifiers Evaluation predictions

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.predictions().

Prototype

public ArrayList<Prediction> predictions() 

Source Link

Document

Returns the predictions that have been collected.

Usage

From source file:miRdup.WekaModule.java

License:Open Source License

/**
 * Displays the ROC curve for the supplied evaluation in a Swing frame,
 * labelled with the area under the curve.
 *
 * @param eval an evaluation whose predictions have already been collected
 */
public static void rocCurve(Evaluation eval) {
    try {
        // Build the ROC curve from the collected predictions for class index 0.
        ThresholdCurve tc = new ThresholdCurve();
        int classIndex = 0;
        Instances result = tc.getCurve(eval.predictions(), classIndex);

        // Prepare the visualization panel, labelled with the AUC.
        ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
        vmc.setROCString("(Area under ROC = " + Utils.doubleToString(tc.getROCArea(result), 4) + ")");
        vmc.setName(result.relationName());

        PlotData2D tempd = new PlotData2D(result);
        tempd.setPlotName(result.relationName());
        tempd.addInstanceNumberAttribute();

        // Connect every point except the first so the curve is drawn as a line.
        boolean[] cp = new boolean[result.numInstances()];
        for (int n = 1; n < cp.length; n++) {
            cp[n] = true;
        }
        tempd.setConnectPoints(cp);
        vmc.addPlot(tempd);

        // Show the curve in its own frame; dispose (not exit) on close so the
        // caller's application keeps running.
        String plotName = vmc.getName();
        final javax.swing.JFrame jf = new javax.swing.JFrame("Weka Classifier Visualize: " + plotName);
        jf.setSize(500, 400);
        jf.getContentPane().setLayout(new BorderLayout());
        jf.getContentPane().add(vmc, BorderLayout.CENTER);
        jf.addWindowListener(new java.awt.event.WindowAdapter() {
            @Override
            public void windowClosing(java.awt.event.WindowEvent e) {
                jf.dispose();
            }
        });
        jf.setVisible(true);
    } catch (Exception e) {
        // Visualization is best-effort: report the failure and continue
        // rather than propagate to the caller.
        e.printStackTrace();
    }
}

From source file:org.uclab.mm.kcl.ddkat.modellearner.ModelLearner.java

License:Apache License

/**
* Method to compute the classification accuracy of the chosen tree learner
* via ten-fold cross-validation, persisting the trained model to disk.
*
* @param algo the algorithm name (BFTree, FT, J48, J48graft, RandomTree, REPTree, SimpleCart)
* @param data the data instances
* @param datanature the dataset nature (i.e. original or processed data)
* @return the algorithm name, decision attribute, model string and accuracy
* @throws Exception the exception
*/
protected String[] modelAccuracy(String algo, Instances data, String datanature) throws Exception {

    // Setting class attribute if the data format does not provide this information.
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    // Attribute declarations read "@attribute <name> ..."; token 1 is the name.
    String decisionAttribute = data.attribute(data.numAttributes() - 1).toString();
    String res[] = decisionAttribute.split("\\s+");
    decisionAttribute = res[1];

    // Instantiate and train the requested tree learner (was a seven-way
    // copy-pasted if/else chain that NPE'd on an unknown algorithm name).
    Classifier classifier = buildTreeClassifier(algo, data);
    String modelStr = classifier.toString();

    // Ten-fold cross-validation with a fixed seed for reproducibility.
    Evaluation evaluation = new Evaluation(data);
    int folds = 10;
    evaluation.crossValidateModel(classifier, data, folds, new Random(1));
    System.out.println("\n\n" + datanature + " Evaluation " + evaluation.toMatrixString());

    // Collect every cross-validation prediction and count exact matches.
    FastVector predictions = new FastVector();
    predictions.appendElements(evaluation.predictions());

    double correct = 0;
    for (int i = 0; i < predictions.size(); i++) {
        NominalPrediction np = (NominalPrediction) predictions.elementAt(i);
        if (np.predicted() == np.actual()) {
            correct++;
        }
    }

    double accuracy = 100 * correct / predictions.size();
    String accString = String.format("%.2f%%", accuracy);
    System.out.println(datanature + " Accuracy " + accString);

    // Persist the trained model; try-with-resources guarantees the stream is
    // closed even on failure (the original leaked it on exception).
    String modelFileName = algo + "-DDKA.model";
    try (ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream("D:\\DDKAResources\\" + modelFileName))) {
        oos.writeObject(classifier);
        oos.flush();
    }

    String modelResultSet[] = new String[4];
    modelResultSet[0] = algo;
    modelResultSet[1] = decisionAttribute;
    modelResultSet[2] = modelStr;
    modelResultSet[3] = accString;
    return modelResultSet;
}

/** Instantiates and trains the tree classifier named by {@code algo}. */
private static Classifier buildTreeClassifier(String algo, Instances data) throws Exception {
    Classifier classifier;
    if (algo.equals("BFTree")) {
        classifier = new BFTree();
    } else if (algo.equals("FT")) {
        classifier = new FT();
    } else if (algo.equals("J48")) {
        classifier = new J48();
    } else if (algo.equals("J48graft")) {
        classifier = new J48graft();
    } else if (algo.equals("RandomTree")) {
        classifier = new RandomTree();
    } else if (algo.equals("REPTree")) {
        classifier = new REPTree();
    } else if (algo.equals("SimpleCart")) {
        classifier = new SimpleCart();
    } else {
        // The original fell through with classifier == null and crashed later
        // with a NullPointerException inside crossValidateModel.
        throw new IllegalArgumentException("Unsupported algorithm: " + algo);
    }
    classifier.buildClassifier(data);
    return classifier;
}

From source file:regression.logisticRegression.LogisticRegressionCorrect.java

/**
 * Trains the logistic model on "weka.arff", records the classified points and
 * class-0 probabilities for later plotting, and prints an evaluation summary.
 *
 * @param output text area that receives a per-instance probability report
 * @throws FileNotFoundException if weka.arff cannot be found
 * @throws IOException           if reading the dataset fails
 * @throws Exception             if training or evaluation fails
 */
public void weka(JTextArea output) throws FileNotFoundException, IOException, Exception {
    this.finalPoints = new ArrayList<>();

    // Load the training data; try-with-resources closes the reader
    // (the original leaked it).
    Instances instances;
    try (BufferedReader reader = new BufferedReader(new FileReader("weka.arff"))) {
        instances = new Instances(reader);
    }
    instances.setClassIndex(instances.numAttributes() - 1);

    // -R <ridge>  -M <max iterations>, -1 = iterate until convergence.
    String[] options = new String[4];
    options[0] = "-R";
    options[1] = "1.0E-8";
    options[2] = "-M";
    options[3] = "-1";
    logistic.setOptions(options);

    logistic.buildClassifier(instances);

    for (int i = 0; i < instances.numInstances(); i++) {
        weka.core.Instance inst = instances.instance(i);
        // NOTE(review): the predicted label is inverted here (1.0 -> 0.0,
        // anything else -> 1.0); presumably deliberate for this dataset's
        // class ordering — confirm with the data file.
        Double classifiedClass = 1.0;
        if (logistic.classifyInstance(inst) == 1.0) {
            classifiedClass = 0.0;
        }

        System.out.println("classify: " + inst.attribute(0) + "|" + inst.value(0) + "->" + classifiedClass);
        double[] distributions = logistic.distributionForInstance(inst);
        output.append("Dla x= " + inst.value(0) + " prawdopodobiestwo wystpnienia zdarzenia wynosi: "
                + distributions[0] + " zatem naley on do klasy: " + classifiedClass + "\n");
        this.finalPoints.add(new Point(inst.value(0), classifiedClass));
        this.finalProbPoints.add(new Point(inst.value(0), distributions[0]));
        for (int j = 0; j < distributions.length; j++) {
            System.out.println("distribution: " + inst.value(0) + "->" + distributions[j]);
        }
    }

    // Evaluate the classifier on the training data and print statistics.
    // (The original also copied eval.predictions() into an unused vector and
    // iterated it with an empty loop body — dead code, removed.)
    Evaluation eval = new Evaluation(instances);
    eval.evaluateModel(logistic, instances);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:tubes1.Main.java

/**
 * Loads the "weather" ARFF dataset, evaluates four tree learners with
 * ten-fold cross-validation, saves each trained model, then classifies one
 * user-supplied instance with every saved model.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String[] args) throws IOException, Exception {
    String filename = "weather";

    // LOAD FILE
    BufferedReader datafile = readDataFile("src/" + filename + ".arff");
    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);
    // END OF LOAD FILE

    CustomFilter fil = new CustomFilter();

    // Remove useless attributes before building anything.
    data = fil.removeAttribute(data);
    System.out.println(data);

    // Each learner gets its own preprocessed view of the data.
    Instances[] allData = new Instances[4];
    allData[0] = fil.resampling(fil.convertNumericToNominal(data)); // Id3
    allData[1] = fil.convertNumericToNominal(fil.resampling(data)); // J48
    allData[2] = allData[0]; // myId3
    allData[3] = fil.resampling(fil.convertNumericToNominal(fil.convertNumericRange(data))); // myC4.5

    data = fil.convertNumericToNominal(data);

    // BUILD CLASSIFIERS
    Classifier[] models = { new Id3(), // C4.5
            new J48(), new myID3(), new myC45() };

    for (int j = 0; j < models.length; j++) {
        FastVector predictions = new FastVector();
        // Ten-fold cross-validation: split into training/testing pairs.
        Instances[][] split = crossValidationSplit(allData[j], 10);
        Instances[] trainingSplits = split[0];
        Instances[] testingSplits = split[1];
        System.out.println("\n---------------------------------");
        for (int i = 0; i < trainingSplits.length; i++) {
            try {
                Evaluation validation = classify(models[j], trainingSplits[i], testingSplits[i]);
                predictions.appendElements(validation.predictions());
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
            // Running accuracy over all predictions collected so far.
            double accuracy = calculateAccuracy(predictions);
            System.out.println(String.format("%.2f%%", accuracy));
        }
        // Retrain on the full dataset and persist the model.
        models[j].buildClassifier(allData[j]);
        Model.save(models[j], models[j].getClass().getSimpleName());
    }

    // Build a single test instance from console input.
    Instances trainingSet = new Instances("Rel", getFvWekaAttributes(data), 10);
    trainingSet.setClassIndex(data.numAttributes() - 1);

    Instance testInstance = new Instance(data.numAttributes());
    Scanner in = new Scanner(System.in); // one scanner for all prompts (was re-created per attribute)
    for (int i = 0; i < data.numAttributes() - 1; i++) {
        System.out.print("Masukkan " + data.attribute(i).name() + " : ");
        String att = in.nextLine();
        if (isNumeric(att)) {
            att = fil.convertToFit(att, data, i);
        }
        testInstance.setValue(data.attribute(i), att);
    }

    trainingSet.add(testInstance);

    Classifier Id3 = Model.load("Id3");
    Classifier J48 = Model.load("J48");
    Classifier myID3 = Model.load("myID3");
    Classifier MyC45 = Model.load("myC45");

    // Use independent copies: instance(0) returns a reference, so the
    // original's A/B/C/D all aliased the same object and every
    // setClassValue overwrote the previous model's prediction.
    Instance A = (Instance) trainingSet.instance(0).copy();
    Instance B = (Instance) trainingSet.instance(0).copy();
    Instance C = (Instance) trainingSet.instance(0).copy();
    Instance D = (Instance) trainingSet.instance(0).copy();

    // test with ID3 WEKA
    A.setClassValue(Id3.classifyInstance(trainingSet.instance(0)));
    System.out.println("Id3 Weka : " + A);

    // test with C4.5 WEKA
    B.setClassValue(J48.classifyInstance(trainingSet.instance(0)));
    System.out.println("C4.5 Weka : " + B);

    // test with my C4.5
    C.setClassValue(MyC45.classifyInstance(trainingSet.instance(0)));
    System.out.println("My C4.5 : " + C);

    // test with my ID3
    D.setClassValue(myID3.classifyInstance(trainingSet.instance(0)));
    System.out.println("My ID3 : " + D);
}