Example usage for weka.classifiers Classifier classifyInstance

Introduction

On this page you can find example usage for weka.classifiers Classifier classifyInstance.

Prototype

public double classifyInstance(Instance instance) throws Exception;

Document

Classifies the given test instance.
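
Before the per-project excerpts below, here is a minimal, self-contained sketch of the typical call pattern. It is illustrative only: the file names model.model and unlabeled.arff are placeholders, and the sketch assumes the class attribute is the last attribute of the loaded data.

import weka.classifiers.Classifier;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifyInstanceExample {
    public static void main(String[] args) throws Exception {
        // Assumption: "model.model" is a previously serialized Weka classifier.
        Classifier cls = (Classifier) weka.core.SerializationHelper.read("model.model");

        // Assumption: "unlabeled.arff" holds the instances to classify, with the
        // class attribute in the last position.
        Instances data = DataSource.read("unlabeled.arff");
        data.setClassIndex(data.numAttributes() - 1);

        for (int i = 0; i < data.numInstances(); i++) {
            // For a nominal class, classifyInstance returns the index of the predicted
            // class value; for a numeric class, it returns the predicted value itself.
            double pred = cls.classifyInstance(data.instance(i));
            System.out.println((i + 1) + " -> " + data.classAttribute().value((int) pred));
        }
    }
}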

Usage

From source file:classifyfromimage.java

private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
    this.name3 = IJ.getImage().getTitle();
    this.name4 = this.name3.replaceFirst("[.][^.]+$", "");
    System.out.println("hola " + this.name4);
    selectWindow(this.name3);
    System.out.println(this.name4);
    System.out.println(this.name3);
    RoiManager rm = RoiManager.getInstance();
    IJ.run("Duplicate...", this.name3);
    IJ.run("Set Measurements...", "area perimeter fit shape limit scientific redirect=None decimal=5");
    selectWindow(this.name3);
    IJ.run("Subtract Background...", "rolling=1.5");
    IJ.run("Enhance Contrast...", "saturated=25 equalize");
    IJ.run("Subtract Background...", "rolling=1.5");
    IJ.run("Convolve...",
            "text1=[-1 -3 -4 -3 -1\n-3 0 6 0 -3\n-4 6 50 6 -4\n-3 0 6 0 -3\n-1 -3 -4 -3 -1\n] normalize");
    IJ.run("8-bit", "");
    IJ.run("Restore Selection", "");
    IJ.run("Make Binary", "");
    Prefs.blackBackground = false;
    IJ.run("Convert to Mask", "");
    IJ.run("Restore Selection", "");
    this.valor1 = this.interval3.getText();
    this.valor2 = this.interval4.getText();
    System.out.println("VECTOR-> punctua: " + this.valor1 + " " + this.valor2);
    this.text = "size=" + this.valor1 + "-" + this.valor2
            + " pixel show=Outlines display include summarize add";
    IJ.run("Analyze Particles...", this.text);
    IJ.saveAs("tif", this.name3 + "_processed");
    String dest_filename1, dest_filename2, full;
    selectWindow("Results");
    //dest_filename1 = this.name2 + "_complete.txt";
    dest_filename2 = this.name3 + "_complete.csv";
    //IJ.saveAs("Results", prova + File.separator + dest_filename1);
    IJ.run("Input/Output...", "jpeg=85 gif=-1 file=.csv copy_row save_column save_row");
    //IJ.saveAs("Results", dir + File.separator + dest_filename2);
    IJ.saveAs("Results", this.name3 + "_complete.csv");
    IJ.run("Restore Selection");
    IJ.run("Clear Results");

    //txtarea.setText("Converting, please wait...  ");
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name3 + "_complete.csv"));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name3 + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }
    //txtdata2.setText(this.name3);

    //txtarea.setText("Succesfully converted " + this.name3);
    //txtarea.setText("Analysing your data, please wait...  ");
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.name3 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name3 + "arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        this.txtarea2.setText(
                "Your file:" + this.name3 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        A_MachineLearning nf1 = new A_MachineLearning();
        A_MachineLearning.txtresults1.setText(this.txtarea2.getText());
        nf1.setVisible(true);

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Clear Results");
    //IJ.RoiManager("Delete");
    IJ.run("Clear Results");
    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }

    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");
    setVisible(false);
    dispose();
}

From source file:A_MachineLearning.java

private void jButton7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton7ActionPerformed
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.file2 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.file2 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.file2 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        txtresults1.setText(
                "Your file:" + this.file2 + ".arff" + "\nhas been analysed, and it is composed by: \npunctua: "
                        + p + ", rods: " + r + ", networks: " + n);

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.showMessage("analysing complete ");
}

From source file:Pair.java

License:Open Source License

/**
 * Sets the weights for the next iteration.
 */
protected double setWeights(Instances trainData, Classifier cls, double sourceFraction, int numSourceInstances,
        boolean isFinal) throws Exception {

    Enumeration enu = trainData.enumerateInstances();
    int instNum = 0;
    double[] errors = new double[trainData.numInstances()];
    double max = 0;
    int i = 0;
    while (enu.hasMoreElements()) {
        Instance instance = (Instance) enu.nextElement();
        errors[i] = Math.abs(cls.classifyInstance(instance) - instance.classValue());
        if (i >= numSourceInstances && errors[i] > max)
            max = errors[i];
        i++;
    }

    if (max == 0)
        return -1;

    //get avg loss
    double loss = 0;
    double initialTWeightSum = 0;
    double allWeightSum = 0;
    for (int j = 0; j < errors.length; j++) {
        errors[j] /= max;
        Instance instance = trainData.instance(j);
        loss += instance.weight() * errors[j];
        if (j >= numSourceInstances) {
            //loss += instance.weight() * errors[j];
            initialTWeightSum += instance.weight();
        }
        allWeightSum += instance.weight();
    }
    //loss /= weightSum;
    loss /= allWeightSum;

    targetWeight = initialTWeightSum / allWeightSum;
    /*
    if (!isFinal){
    System.out.println("Target weight: " + targetWeight);
    System.out.println("max: " + max);
    System.out.println("avg error: " + loss * max);
    System.out.println("Loss: " + loss);
    }
    */

    double beta;

    if (fixedBeta)
        beta = 0.4 / 0.6;
    else {
        if (isFinal && loss > 0.499)//bad, so quit
            //return -1;
            loss = 0.499; //since we're doing CV, no reason to quit

        beta = loss / (1 - loss); //or just use beta = .4/.6, since beta isn't as meaningful in AdaBoost.R2;
    }

    double tWeightSum = 0;
    if (!isFinal) {
        //need to find b so that weight of source be sourceFraction*num source
        //do binary search
        double goal = sourceFraction * errors.length;
        double bMin = .001;
        double bMax = .999;
        double b;
        double sourceSum = 0;
        while (bMax - bMin > .001) {
            b = (bMax + bMin) / 2;
            double sum = 0;
            for (int j = 0; j < numSourceInstances; j++) {
                Instance instance = trainData.instance(j);
                sum += Math.pow(b, errors[j]) * instance.weight();
            }
            if (sum > goal)
                bMax = b;
            else
                bMin = b;
        }
        b = (bMax + bMin) / 2;
        //System.out.println(b);         
        for (int j = 0; j < numSourceInstances; j++) {
            Instance instance = trainData.instance(j);
            instance.setWeight(instance.weight() * Math.pow(bMin, errors[j]));
            sourceSum += instance.weight();
        }

        //now adjust target weights
        goal = errors.length - sourceSum;
        double m = goal / initialTWeightSum;

        for (int j = numSourceInstances; j < errors.length; j++) {
            Instance instance = trainData.instance(j);
            instance.setWeight(instance.weight() * m);
        }
    } else {//final
        if (!doUpsource) { //modify only target weights
            for (int j = numSourceInstances; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * Math.pow(beta, -errors[j]));
                tWeightSum += instance.weight();
            }

            double weightSumInverse = initialTWeightSum / tWeightSum;
            for (int j = numSourceInstances; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * weightSumInverse);
            }
        } else { //modify all weights
            for (int j = 0; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * Math.pow(beta, -errors[j]));
                tWeightSum += instance.weight();
            }

            double weightSumInverse = errors.length / tWeightSum;
            for (int j = 0; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * weightSumInverse);
            }
        }

    }

    return beta;
}

From source file:classificationPLugin.java

private void ClassifyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_ClassifyActionPerformed
    this.name = txtdirecotry2.getText();
    System.out.println(this.name);
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        FileReader reader = new FileReader(this.name + ".arff");
        BufferedReader br = new BufferedReader(reader);
        instance.read(br, null);
        br.close();
        instance.requestFocus();

    } catch (Exception e2) {
        System.out.println(e2);
    }
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.name + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name + ".arff" + "\nhas been analysed, and it is composed by-> \npunctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        classi.setText(
                "Your file:" + this.name + ".arff" + "\nhas been analysed, and it is composed by: \npunctua: "
                        + p + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Clear Results");
    IJ.run("Clear Results");
    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");

}

From source file:dialog1.java

private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed

    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(txtfilename.getText() + "_complete.csv"));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name3 + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        /*URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();*/
        InputStream stream = this.getClass().getResourceAsStream("/" + "Final.model");
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));
            //txtarea2.append(Utils.arrayToString(newData.classAttribute().value((int) pred)));
            //this.target2.add((i + 1) + " -);
            //this.target.add(newData.classAttribute().value((int) pred));
            //for (String s : this.list) {
            //this.target2 += s + ",";

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.name3 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name3 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        //txtarea2.setText("Your file:" + this.name3 + ".arff"
        //+ "\nhas been analysed, and it is composed by-> punctua: " + p + ", rods: " + r + ", networks: " + n
        //+ "\n"
        //+ "\nAnalyse complete");
        //txtarea.setText("Analyse complete");

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Clear Results");
    IJ.run("Clear Results");
    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");
    setVisible(false);
    dispose();
}

From source file:classifyfromimage1.java

private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
    selectWindow(this.name3);
    this.name3 = IJ.getImage().getTitle();
    this.name4 = this.name3.replaceFirst("[.][^.]+$", "");
    RoiManager rm = RoiManager.getInstance();
    IJ.run("Duplicate...", this.name4);
    IJ.run("Set Measurements...", "area perimeter fit shape limit scientific redirect=None decimal=5");
    selectWindow(this.name3);
    IJ.run("Subtract Background...", "rolling=1.5");
    IJ.run("Enhance Contrast...", "saturated=25 equalize");
    IJ.run("Subtract Background...", "rolling=1.5");
    IJ.run("Convolve...",
            "text1=[-1 -3 -4 -3 -1\n-3 0 6 0 -3\n-4 6 50 6 -4\n-3 0 6 0 -3\n-1 -3 -4 -3 -1\n] normalize");
    IJ.run("8-bit", "");
    IJ.run("Restore Selection", "");
    IJ.run("Make Binary", "");
    Prefs.blackBackground = false;
    IJ.run("Convert to Mask", "");
    IJ.run("Restore Selection", "");
    this.valor1 = this.interval3.getText();
    this.valor2 = this.interval4.getText();
    this.text = "size=" + this.valor1 + "-" + this.valor2
            + " pixel show=Outlines display include summarize add";
    IJ.saveAs("tif", this.name3 + "_processed");
    String dest_filename1, dest_filename2, full;
    selectWindow("Results");
    //dest_filename1 = this.name2 + "_complete.txt";
    dest_filename2 = this.name3 + "_complete.csv";
    //IJ.saveAs("Results", prova + File.separator + dest_filename1);
    IJ.run("Input/Output...", "jpeg=85 gif=-1 file=.csv copy_row save_column save_row");
    //IJ.saveAs("Results", dir + File.separator + dest_filename2);
    IJ.saveAs("Results", this.name3 + "_complete.csv");
    IJ.run("Restore Selection");
    IJ.run("Clear Results");

    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name3 + "_complete.csv"));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name3 + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels(txtlabel.getText());
        filter.setAttributeName(txtpath2.getText());
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);

        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(txtpath.getText());
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));
            //txtarea2.append(Utils.arrayToString(dist));
            classif.add(newData.classAttribute().value((int) pred));
        }

        classif.removeAll(Arrays.asList("", null));
        System.out.println(classif);
        String vecstring = "";

        for (Object s : classif) {
            vecstring += s + ",";
            System.out.println("Hola " + vecstring);
        }
        Map<String, Integer> seussCount = new HashMap<String, Integer>();
        for (String t : classif) {
            Integer i = seussCount.get(t);
            if (i == null) {
                i = 0;
            }
            seussCount.put(t, i + 1);
        }
        String s = vecstring;
        int counter = 0;
        for (int i = 0; i < s.length(); i++) {
            if (s.charAt(i) == '$') {
                counter++;
            }
        }
        System.out.println(seussCount);
        System.out.println("hola " + counter++);
        IJ.showMessage("Your file:" + this.name3 + "arff" + "\n is composed by" + seussCount);
        txtpath2.setText("Your file:" + this.name3 + "arff" + "\n is composed by" + seussCount);
        A_MachineLearning nf2 = new A_MachineLearning();
        A_MachineLearning.txtresult2.append(this.txtpath2.getText());
        nf2.setVisible(true);

    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }

    setVisible(false);
    dispose();
}

From source file:MachinLearningInterface.java

private void jButton7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton7ActionPerformed
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.name3 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name3 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.showMessage("analysing complete ");
}

From source file:MachinLearningInterface.java

private void jButton10ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton10ActionPerformed
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels(this.liststring);
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);

        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(this.model);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));
            //txtarea2.append(Utils.arrayToString(dist));

        }
        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls2 = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls2.classifyInstance(newData.instance(i));
            double[] dist = cls2.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null;
            try {

                out = new PrintWriter(this.name3 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name3 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        //txtarea2.setText("Your file:" + this.name3 + ".arff"
        //+ "\nhas been analysed, and it is composed by-> punctua: " + p + ", rods: " + r + ", networks: " + n
        //+ "\n"
        //+ "\nAnalyse complete");
        //txtarea.setText("Analyse complete");

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.showMessage("analysing complete "); // TODO add your handling code here:         // TODO add your handling code here:
}

From source file:A_AdvanceMachineLearning.java

private void jButton10ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton10ActionPerformed
    UIManager.put("OptionPane.yesButtonText", "Confirm");
    UIManager.put("OptionPane.noButtonText", "Cancel");
    int dialogButton = JOptionPane.YES_NO_OPTION;
    int dialogResult = JOptionPane.showConfirmDialog(this, "The labels must be the same used in the weka model",
            "Advance Machine learning", dialogButton, JOptionPane.WARNING_MESSAGE);
    if (dialogResult == 0) {
        this.list.clear();
        //txtcodigo1.setText("hola");
        this.valor = txtcodigo1.getText();
        this.valor1 = txtcodigo2.getText();
        this.valor2 = txtcodigo3.getText();
        this.valor3 = txtcodigo4.getText();
        this.valor4 = txtcodigo5.getText();
        this.valor5 = txtcodigo6.getText();
        //IJ.showMessage("your label 1 is = "+valor+", "+valor1+", "+valor2+", "+valor3+", "+valor4);
        // Array list
        this.list.add(this.valor);
        this.list.add(this.valor1);
        this.list.add(this.valor2);
        this.list.add(this.valor3);
        this.list.add(this.valor4);
        this.list.add(this.valor5);
        this.list.removeAll(Arrays.asList("", null));
        System.out.println(this.list);
        this.liststring = "";

        for (String s : this.list) {
            this.liststring += s + ",";
        }
        txtlabel.setText(this.liststring);

        System.out.println(this.liststring);
        txtarea.setText("Your labels are = " + this.list + "\nThe labels had been saved");
        //txtarea.setText("The labels had been saved");

        System.out.println(label);

    } else {
        System.out.println("No Option");

    }
    Instances data;
    try {
        System.out.println(this.file2 + "arff");
        FileReader reader = new FileReader(this.file2 + ".arff");
        BufferedReader br = new BufferedReader(reader);

        data = new Instances(br);
        System.out.println(data);
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels(this.liststring);
        filter.setAttributeName(txtcodigo7.getText());
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print("hola" + newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(this.model);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

            classif.add(newData.classAttribute().value((int) pred));
        }

        classif.removeAll(Arrays.asList("", null));
        System.out.println(classif);
        String vecstring = "";

        for (Object s : classif) {
            vecstring += s + ",";

        }
        Map<String, Integer> seussCount = new HashMap<String, Integer>();
        for (String t : classif) {
            Integer i = seussCount.get(t);
            if (i == null) {
                i = 0;
            }
            seussCount.put(t, i + 1);
        }
        String s = vecstring;
        String in = vecstring;
        int i = 0;
        Pattern p = Pattern.compile(this.valor1);
        Matcher m = p.matcher(in);
        while (m.find()) {
            i++;
        }

        System.out.println("hola " + i); // Prints 2

        System.out.println(seussCount);
        txtarea2.append("Your file:" + this.file2 + ".arff" + "\n is composed by" + seussCount);
        IJ.showMessage("Your file:" + this.file2 + ".arff" + "\n is composed by" + seussCount);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    //IJ.showMessage("analysing complete ");// TODO add your handling code here:
}

From source file:ann.ANN.java

public void classify(String data_address, Classifier model) {
    try {
        Instances test = ConverterUtils.DataSource.read(data_address);
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println("====================================");
        System.out.println("=== Predictions on user test set ===");
        System.out.println("====================================");
        System.out.println("# - actual - predicted - distribution");
        for (int i = 0; i < test.numInstances(); i++) {
            double pred = model.classifyInstance(test.instance(i));
            double[] dist = model.distributionForInstance(test.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(test.instance(i).toString(test.classIndex()) + " - ");
            System.out.print(test.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
        }
        System.out.println("\n");
    } catch (Exception ex) {
        System.out.println("Tidak berhasil memprediksi hasil\n");
    }
}