Example usage for weka.classifiers Classifier classifyInstance

List of usage examples for weka.classifiers Classifier classifyInstance

Introduction

On this page you can find example usage for weka.classifiers Classifier classifyInstance.

Prototype

public double classifyInstance(Instance instance) throws Exception;

Document

Classifies the given test instance.
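
Before the per-project examples below, here is a minimal, self-contained sketch of the typical call sequence: load an ARFF file, set the class index, build a classifier, and call classifyInstance on a single row. The file name weather.nominal.arff and the choice of J48 are placeholders, not taken from any example on this page.

import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifyInstanceSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and declare the last attribute as the class.
        Instances data = DataSource.read("weather.nominal.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Any weka.classifiers.Classifier works; J48 is only a placeholder here.
        Classifier cls = new J48();
        cls.buildClassifier(data);

        // classifyInstance returns the index of the predicted class label
        // (or the predicted numeric value for regression targets).
        Instance first = data.instance(0);
        double pred = cls.classifyInstance(first);
        System.out.println("predicted: " + data.classAttribute().value((int) pred));
    }
}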

Usage

From source file:general.Util.java

/**
 * Classify a test set using a pre-built model.
 * @param model the pre-built classifier
 * @param test the test set
 */
public static void doClassify(Classifier model, Instances test) {
    test.setClassIndex(test.numAttributes() - 1);
    for (int i = 0; i < test.numInstances(); i++) {
        try {
            double pred = model.classifyInstance(test.instance(i));
            System.out.print("ID: " + test.instance(i));
            System.out.print(", actual: " + test.classAttribute().value((int) test.instance(i).classValue()));
            System.out.println(", predicted: " + test.classAttribute().value((int) pred));
        } catch (Exception ex) {
            Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:gnusmail.learning.ClassifierManager.java

License:Open Source License

/**
 * This method reads the messages in chronological order, and updates the
 * underlying model with each message.
 * 
 * @return a list containing 1.0 for each correctly classified message and 0.0 otherwise
 */
public List<Double> incrementallyTrainModel(DocumentReader reader, String wekaClassifier, FilterManager fm) {
    List<Double> successes = new ArrayList<Double>();
    try {
        Classifier model = null;
        model = (Classifier) Class.forName(wekaClassifier).newInstance();
        try {
            model.buildClassifier(filterManager.getDataset());
        } catch (Exception ex) {
            Logger.getLogger(ClassifierManager.class.getName()).log(Level.SEVERE, null, ex);
        }
        UpdateableClassifier updateableModel = (UpdateableClassifier) model;
        for (Document doc : reader) {
            double predictedClass = 0.0;
            try {
                Instance inst = doc.toWekaInstance(fm);
                predictedClass = model.classifyInstance(inst);
                double trueClass = inst.classValue();
                successes.add((predictedClass == trueClass) ? 1.0 : 0.0);
                updateableModel.updateClassifier(inst);
            } catch (Exception ex) {
                Logger.getLogger(ClassifierManager.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        FileOutputStream f = new FileOutputStream(ConfigManager.MODEL_FILE);
        ObjectOutputStream fis = new ObjectOutputStream(f);
        fis.writeObject(updateableModel);
        fis.close();
    } catch (Exception ex) {
        Logger.getLogger(ClassifierManager.class.getName()).log(Level.SEVERE, null, ex);
    }
    return successes;
}
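
The essential pattern in incrementallyTrainModel is a prequential (test-then-train) loop over an UpdateableClassifier. Below is a stripped-down sketch of just that loop, with serialization and logging removed; NaiveBayesUpdateable stands in for whatever classifier the Class.forName call would produce, the method name prequentialSketch is made up for illustration, and the dataset is assumed to already have its class index set.

import java.util.ArrayList;
import java.util.List;
import weka.classifiers.bayes.NaiveBayesUpdateable;
import weka.core.Instance;
import weka.core.Instances;

public static List<Double> prequentialSketch(Instances dataset) throws Exception {
    // NaiveBayesUpdateable is one concrete UpdateableClassifier; any other would do.
    NaiveBayesUpdateable model = new NaiveBayesUpdateable();
    // Initialize on an empty copy that shares the dataset's header.
    model.buildClassifier(new Instances(dataset, 0));

    List<Double> successes = new ArrayList<Double>();
    for (int i = 0; i < dataset.numInstances(); i++) {
        Instance inst = dataset.instance(i);
        double predicted = model.classifyInstance(inst); // predict first...
        successes.add(predicted == inst.classValue() ? 1.0 : 0.0);
        model.updateClassifier(inst);                    // ...then train on the instance
    }
    return successes;
}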

From source file:gr.uoc.nlp.opinion.analysis.suggestion.AnalyzeSuggestions.java

/**
 * Classifies a set of unlabelled instances with the given classifier.
 *
 * @param classifier the trained classifier
 * @param unclassified the instances to classify
 * @return a copy of the input set with predicted class values filled in
 */
public Instances classify(Classifier classifier, Instances unclassified) {

    unclassified.setClassIndex(unclassified.numAttributes() - 1);

    // new set which will contain the classified instances
    Instances classified = new Instances(unclassified);

    double clsLabel;
    try {
        for (int i = 0; i < unclassified.numInstances(); i++) {
            // classify each unclassified instance
            clsLabel = classifier.classifyInstance(unclassified.instance(i));
            //append result to final set
            classified.instance(i).setClassValue(clsLabel);
        }

    } catch (Exception ex) {
        Logger.getLogger(AnalyzeArguments.class.getName()).log(Level.SEVERE, null, ex);
    }

    return classified;
}

From source file:GroupProject.DMChartUI.java

/**
 * Action for the generate button.
 * It reads the user input from the table and the selected options and performs
 * a classification of the user input.
 * The user can choose linear regression, a naive Bayes classifier, or a J48 tree
 * to classify.
 */
private void generateButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_generateButtonActionPerformed
    //File file = new File("studentTemp.csv");
    CSVtoArff converter = new CSVtoArff();
    Instances students = null;
    Instances students2 = null;
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    try {
        students = new Instances(new BufferedReader(new FileReader("studentTemp.arff")));
        students2 = new Instances(new BufferedReader(new FileReader("studentTemp.arff")));
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    //get column to predict values for 
    //int target=students.numAttributes()-1; 
    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    //set target 
    students.setClassIndex(target);
    students2.setClassIndex(target);

    //case on which radio button is selected 
    //Linear Regressions
    if (LRB.isSelected()) {

        LinearRegression model = null;
        if (Lmodel != null) {
            model = Lmodel;
        } else {
            buildLinearModel();
            model = Lmodel;
        }

        System.out.println("im doing linear regression");

        equationDisplayArea.setText(model.toString());

        System.out.println("im going to get the instance");

        Instance prediction2 = getInstance(true);

        Remove remove = new Remove();
        int[] toremove = { 0, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 16, 17 };
        remove.setAttributeIndicesArray(toremove);

        try {
            remove.setInputFormat(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        Instances instNew = null;
        try {
            instNew = Filter.useFilter(students, remove);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        prediction2.setDataset(instNew);
        System.err.print("i got the instance");
        double result = 0;
        try {
            result = model.classifyInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        System.out.printf("the result : %f \n ", result);
        predictValue.setText(Double.toString(result));
        System.out.println("I'm done with Linear Regression");
    }

    //Naive Bayes
    else if (NBB.isSelected()) {
        Classifier cModel = null;

        if (NBmodel != null) {
            cModel = NBmodel;
        } else {
            buildNBClassifier();
            cModel = NBmodel;
        }

        System.out.println("im doing NB");

        //build test 
        Evaluation eTest = null;
        try {
            eTest = new Evaluation(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("Using NB");

        try {
            eTest.evaluateModel(cModel, students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        //display the test results to console 
        String strSummary = eTest.toSummaryString();
        System.out.println(strSummary);

        //build instance to predict 
        System.out.println("im going to get the instance");

        Instance prediction2 = getInstance(false);

        prediction2.setDataset(students);
        System.err.print("i got the instance");

        //replace with loop stating the class names 
        //fit text based on name of categories 
        double pred = 0;
        try {
            pred = cModel.classifyInstance(prediction2);
            prediction2.setClassValue(pred);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        //get the predicted value and set predictValue to it 
        predictValue.setText(prediction2.classAttribute().value((int) pred));

        System.out.println("I'm done with Naive Bayes");

        double[] fDistribution2 = null;
        try {
            fDistribution2 = cModel.distributionForInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        double max = 0;
        int maxindex = 0;
        max = fDistribution2[0];
        for (int i = 0; i < fDistribution2.length; i++) {
            if (fDistribution2[i] > max) {
                maxindex = i;
                max = fDistribution2[i];
            }
            System.out.println("the value at " + i + " : " + fDistribution2[i]);
            System.out.println("the label at " + i + prediction2.classAttribute().value(i));
        }
        prediction2.setClassValue(maxindex);
        predictValue.setText(prediction2.classAttribute().value(maxindex));

    }
    //J48 Tree
    else if (JB.isSelected()) {

        System.out.println("im doing j48 ");

        Classifier jModel = null;
        if (Jmodel != null) {
            jModel = Jmodel;
        } else {
            buildJClassifier();
            jModel = Jmodel;
        }
        //test model 
        Evaluation eTest2 = null;
        try {
            eTest2 = new Evaluation(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("Using J48 test");
        try {
            eTest2.evaluateModel(jModel, students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        String strSummary2 = eTest2.toSummaryString();
        System.out.println(strSummary2);

        System.out.println("im going to get the instance");

        Instance prediction2 = getInstance(false);

        prediction2.setDataset(students);
        System.err.print("i got the instance\n");

        double pred = 0;
        try {
            pred = jModel.classifyInstance(prediction2);
            prediction2.setClassValue(pred);
            System.out.println("i did a prediction");
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        //get the predicted value and set predictValue to it 
        System.out.println("this was pred:" + pred);
        predictValue.setText(prediction2.classAttribute().value((int) pred));

        System.out.println("I'm done with J48");
        //replace with loop stating the class names 
        //fit text based on name of categories 

        double[] fDistribution2 = null;
        try {
            fDistribution2 = jModel.distributionForInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        double max = 0;
        int maxindex = 0;
        max = fDistribution2[0];
        for (int i = 0; i < fDistribution2.length; i++) {
            if (fDistribution2[i] > max) {
                maxindex = i;
                max = fDistribution2[i];
            }
            System.out.println("the value at " + i + " : " + fDistribution2[i]);
            System.out.println("the label at " + i + " " + prediction2.classAttribute().value(i));
        }
        prediction2.setClassValue(maxindex);
        predictValue.setText(prediction2.classAttribute().value(maxindex));

    }

}

From source file:Helper.ClassifyHelper.java

public static void clasifyInstance(Classifier cls, Instance inst) throws Exception {
    double result = cls.classifyInstance(inst);
    inst.setClassValue(result);
}

From source file:id3j48.WekaAccess.java

public static void classify(String filename, Classifier classifier) throws Exception {
    Instances input = readArff(filename);
    input.setClassIndex(input.numAttributes() - 1);
    for (int i = 0; i < input.numInstances(); i++) {
        double classLabel = classifier.classifyInstance(input.instance(i));
        input.instance(i).setClassValue(classLabel);
        System.out.println("Instance: " + input.instance(i));
        System.out.println("Class: " + input.classAttribute().value((int) classLabel));
    }

    try (BufferedWriter writer = new BufferedWriter(
            new FileWriter(classifiedFolder + File.separator + filename))) {
        writer.write(input.toString());
        writer.newLine();
        writer.flush();
    }
}

From source file:LeerArchivo.Leer.java

public String leerModelo() {
    try {
        String[] valoresAtributos = { "0", "1" };
        Classifier clasificador = (Classifier) weka.core.SerializationHelper.read("./KStar.model");
        ConverterUtils.DataSource source = new ConverterUtils.DataSource("./test.arff");
        Instances data = source.getDataSet();
        data.setClassIndex(5);
        System.out.println(data.instance(0));
        return valoresAtributos[(int) clasificador.classifyInstance(data.instance(0))];
    } catch (Exception ex) {
        Logger.getLogger(Leer.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
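
The example above only reads a model that was previously serialized to ./KStar.model. The counterpart, sketched below, uses weka.core.SerializationHelper.write to produce that file; the method name guardarModelo and the training file ./train.arff are assumptions for illustration, while the KStar classifier and class index 5 are carried over from leerModelo().

import weka.classifiers.lazy.KStar;
import weka.core.Instances;
import weka.core.SerializationHelper;
import weka.core.converters.ConverterUtils;

public void guardarModelo() throws Exception {
    // Assumed training file; leerModelo() above expects class index 5.
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("./train.arff");
    Instances data = source.getDataSet();
    data.setClassIndex(5);

    KStar kstar = new KStar();
    kstar.buildClassifier(data);

    // Serialize the trained model to the path read by leerModelo().
    SerializationHelper.write("./KStar.model", kstar);
}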

From source file:LVCoref.WekaWrapper.java

License:Open Source License

public static void main(String[] args) {
    try {
        List<Document> docs = new LinkedList<Document>();
        Document d = new Document();
        d.readCONLL("data/pipeline/interview_16.lvsem.conll");
        d.addAnnotationMMAX("data/interview_16_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_23.lvsem.conll");
        d.addAnnotationMMAX("data/interview_23_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_27.lvsem.conll");
        d.addAnnotationMMAX("data/interview_27_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_38.lvsem.conll");
        d.addAnnotationMMAX("data/interview_38_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);

        Instances train = toArff2(docs);
        train.setClassIndex(train.numAttributes() - 1);
        String[] options = { "-U" };//, "-C", "0.5"};
        Classifier cls = new J48();
        cls.setOptions(options);
        cls.buildClassifier(train);

        docs = new LinkedList<Document>();
        d = new Document();
        d.readCONLL("data/pipeline/interview_43.lvsem.conll");
        d.addAnnotationMMAX("data/interview_43_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_46.lvsem.conll");
        d.addAnnotationMMAX("data/interview_46_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);

        Evaluation eval = new Evaluation(train);

        Instances data = toArff2(docs);
        data.setClassIndex(data.numAttributes() - 1);
        for (int i = 0; i < data.numInstances(); i++) {
            double clsLabel = cls.classifyInstance(data.instance(i));
            //System.out.println(clsLabel);
            data.instance(i).setClassValue(clsLabel);
            System.out.println(data.instance(i).toString(data.classIndex()));
        }

        //     eval.crossValidateModel(cls, train, 10, new Random(1));
        //            // generate curve
        //     ThresholdCurve tc = new ThresholdCurve();
        //     //int classIndex = test.numAttributes()-1;
        //     Instances result = tc.getCurve(eval.predictions());//, classIndex);
        // 
        //     // plot curve
        //     ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
        //     vmc.setROCString("(Area under ROC = " + 
        //         weka.core.Utils.doubleToString(tc.getROCArea(result), 4) + ")");
        //     vmc.setName(result.relationName());
        //     PlotData2D tempd = new PlotData2D(result);
        //     tempd.setPlotName(result.relationName());
        //     tempd.addInstanceNumberAttribute();
        //     // specify which points are connected
        //     boolean[] cp = new boolean[result.numInstances()];
        //     for (int n = 1; n < cp.length; n++)
        //       cp[n] = true;
        //     tempd.setConnectPoints(cp);
        //     // add plot
        //     vmc.addPlot(tempd);
        // 
        //     // display curve
        //     String plotName = vmc.getName(); 
        //     final javax.swing.JFrame jf = 
        //       new javax.swing.JFrame("Weka Classifier Visualize: "+plotName);
        //     jf.setSize(500,400);
        //     jf.getContentPane().setLayout(new BorderLayout());
        //     jf.getContentPane().add(vmc, BorderLayout.CENTER);
        //     jf.addWindowListener(new java.awt.event.WindowAdapter() {
        //       public void windowClosing(java.awt.event.WindowEvent e) {
        //       jf.dispose();
        //       }
        //     });
        //     jf.setVisible(true);

        //            Instances test = toArff2(docs);
        //            test.setClassIndex(test.numAttributes()-1);
        //            
        //            
        //           Evaluation evals = new Evaluation(train); 
        //
        //            evals.evaluateModel(cls, test);
        //            System.out.println(evals.toSummaryString("\nResults\n======\n", false));
        //             System.out.println(evals.toMatrixString());
        //              System.out.println(evals.toClassDetailsString());
        //            
        //            System.out.println(cls);
        //            //System.out.println(toArff2(docs));

    } catch (Exception ex) {
        Logger.getLogger(WekaWrapper.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:machinelearningcw.MachineLearningCw.java

public static void timingExperiment(Classifier s, Instances[] data) throws Exception {

    /* get the biggest data set */
    Instances largestData = data[0];
    for (int i = 0; i < data.length; i++) {
        if (largestData.numInstances() < data[i].numInstances()) {
            largestData = data[i];
        }
    }
    for (int i = 1; i <= 7; i++) {
        int percent = i * 10;
        int train_size = (int) Math.round(largestData.numInstances() * percent / 100);
        int testSize = largestData.numInstances() - train_size;
        Instances train = new Instances(largestData, 0, train_size);
        Instances test = new Instances(largestData, train_size, testSize);

        long t1 = System.currentTimeMillis();

        s.buildClassifier(train);
        for (Instance ins : test) {
            s.classifyInstance(ins);
        }

        long t2 = System.currentTimeMillis() - t1;
        // elapsed time in milliseconds

        System.out.println("TIME TAKEN " + i + ": " + t2);

    }
    System.out.println("\n");
}

From source file:machinelearning_cw.MachineLearning_CW.java

/**
 * Tests the speed of a classifier in milliseconds and prints it. 
 * This is achieved by checking the time taken for the given classifier to
 * classify some given data.
 * 
 * 
 * @param classifier The classifier to be tested.
 * @param train The data with which to train the classifier.
 * @param test The data the classifier is to be tested against.
 * @param t The number of times the test should be carried out and averaged.
 * @throws Exception 
 */
public static void timeClassifier(Classifier classifier, Instances train, Instances test, int t)
        throws Exception {

    ArrayList<Double> times = new ArrayList<Double>();

    /* Carry out test t+1 times and average.
     * The first run is ignored to offset the effects of
     * the Java Garbage Collector and caching.
     */
    for (int i = 0; i < t + 1; i++) {
        // Time the build and classifyInstance methods
        double t1 = System.nanoTime();

        classifier.buildClassifier(train);
        for (Instance eachInstance : test) {
            classifier.classifyInstance(eachInstance);
        }

        double t2 = System.nanoTime() - t1;

        // Convert to ms
        double timeTaken = t2 / 1000000.0;

        if (i != 0) {
            times.add(timeTaken);
        }
    }

    double averageTime = average(times);
    System.out.println(averageTime);
}