Example usage for weka.core Instances setClassIndex

List of usage examples for weka.core Instances setClassIndex

Introduction

On this page you can find example usage for weka.core Instances setClassIndex.

Prototype

public void setClassIndex(int classIndex) 

Source Link

Document

Sets the class index of the set.

Usage

From source file:GroupProject.DMChartUI.java

/**
 * Action handler for the "generate" button.
 *
 * Reads the user input from the table and the selected radio-button option,
 * then classifies that input with linear regression, a naive Bayes
 * classifier, or a J48 tree, showing the prediction in {@code predictValue}.
 */
private void generateButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_generateButtonActionPerformed
    // Convert the CSV snapshot of the table into ARFF so Weka can read it.
    CSVtoArff converter = new CSVtoArff();
    Instances students = null;
    Instances students2 = null;
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Load the converted data twice (students2 only gets a class index set
    // below and is never used afterwards).
    // NOTE(review): if conversion or loading failed above, students stays
    // null and setClassIndex below throws NullPointerException.
    try {
        students = new Instances(new BufferedReader(new FileReader("studentTemp.arff")));
        students2 = new Instances(new BufferedReader(new FileReader("studentTemp.arff")));
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Attribute to predict: combo-box selection shifted by one.
    // NOTE(review): assumes combo entries align with attribute indices 1..n — confirm.
    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    // Mark the chosen attribute as the class attribute on both copies.
    students.setClassIndex(target);
    students2.setClassIndex(target);

    // Dispatch on the selected radio button.
    // Linear regression:
    if (LRB.isSelected()) {

        // Reuse the cached model if one was already built.
        // NOTE(review): buildLinearModel() can return early (non-numeric
        // target selected), leaving Lmodel null; model.toString() below
        // would then throw NullPointerException.
        LinearRegression model = null;
        if (Lmodel != null) {
            model = Lmodel;
        } else {
            buildLinearModel();
            model = Lmodel;
        }

        System.out.println("im doing linear regression");

        equationDisplayArea.setText(model.toString());

        System.out.println("im going to get the instance");

        // Build the instance to predict from the user's table input.
        Instance prediction2 = getInstance(true);

        // Drop the same attributes that were removed at training time so the
        // instance matches the model's input format.
        Remove remove = new Remove();
        int[] toremove = { 0, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 16, 17 };
        remove.setAttributeIndicesArray(toremove);

        try {
            remove.setInputFormat(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        Instances instNew = null;
        try {
            instNew = Filter.useFilter(students, remove);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        prediction2.setDataset(instNew);
        System.err.print("i got the instance");
        // Predict the numeric value and show it in the UI.
        double result = 0;
        try {
            result = model.classifyInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        System.out.printf("the result : %f \n ", result);
        predictValue.setText(Double.toString(result));
        System.out.println("I'm done with Linear Regression");
    }

    // Naive Bayes:
    else if (NBB.isSelected()) {
        Classifier cModel = null;

        // Reuse the cached model if one was already built.
        // NOTE(review): buildNBClassifier() can return early (numeric target
        // selected), leaving NBmodel null — cModel would then NPE below.
        if (NBmodel != null) {
            cModel = NBmodel;
        } else {
            buildNBClassifier();
            cModel = NBmodel;
        }

        System.out.println("im doing NB");

        // Evaluate the model on the training data (resubstitution estimate).
        Evaluation eTest = null;
        try {
            eTest = new Evaluation(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("Using NB");

        try {
            eTest.evaluateModel(cModel, students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        // Dump the evaluation summary to the console.
        String strSummary = eTest.toSummaryString();
        System.out.println(strSummary);

        // Build the instance to predict from the user's table input.
        System.out.println("im going to get the instance");

        Instance prediction2 = getInstance(false);

        prediction2.setDataset(students);
        System.err.print("i got the instance");

        // Predict the class and show its nominal label.
        double pred = 0;
        try {
            pred = cModel.classifyInstance(prediction2);
            prediction2.setClassValue(pred);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        predictValue.setText(prediction2.classAttribute().value((int) pred));

        System.out.println("I'm done with Naive Bayes");

        // Also print the full class distribution and re-set the displayed
        // prediction to the most probable class (argmax of the distribution).
        double[] fDistribution2 = null;
        try {
            fDistribution2 = cModel.distributionForInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        double max = 0;
        int maxindex = 0;
        max = fDistribution2[0];
        for (int i = 0; i < fDistribution2.length; i++) {
            if (fDistribution2[i] > max) {
                maxindex = i;
                max = fDistribution2[i];
            }
            System.out.println("the value at " + i + " : " + fDistribution2[i]);
            System.out.println("the label at " + i + prediction2.classAttribute().value(i));
        }
        prediction2.setClassValue(maxindex);
        predictValue.setText(prediction2.classAttribute().value(maxindex));

    }
    // J48 tree:
    else if (JB.isSelected()) {

        System.out.println("im doing j48 ");

        // Reuse the cached model if one was already built.
        // NOTE(review): same null-model hazard as the other two branches.
        Classifier jModel = null;
        if (Jmodel != null) {
            jModel = Jmodel;
        } else {
            buildJClassifier();
            jModel = Jmodel;
        }
        // Evaluate the model on the training data (resubstitution estimate).
        Evaluation eTest2 = null;
        try {
            eTest2 = new Evaluation(students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("Using J48 test");
        try {
            eTest2.evaluateModel(jModel, students);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }
        String strSummary2 = eTest2.toSummaryString();
        System.out.println(strSummary2);

        System.out.println("im going to get the instance");

        // Build the instance to predict from the user's table input.
        Instance prediction2 = getInstance(false);

        prediction2.setDataset(students);
        System.err.print("i got the instance\n");

        // Predict the class and show its nominal label.
        double pred = 0;
        try {
            pred = jModel.classifyInstance(prediction2);
            prediction2.setClassValue(pred);
            System.out.println("i did a prediction");
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        System.out.println("this was pred:" + pred);
        predictValue.setText(prediction2.classAttribute().value((int) pred));

        System.out.println("I'm done with J48");

        // Also print the full class distribution and re-set the displayed
        // prediction to the most probable class (argmax of the distribution).
        double[] fDistribution2 = null;
        try {
            fDistribution2 = jModel.distributionForInstance(prediction2);
        } catch (Exception ex) {
            Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        }

        double max = 0;
        int maxindex = 0;
        max = fDistribution2[0];
        for (int i = 0; i < fDistribution2.length; i++) {
            if (fDistribution2[i] > max) {
                maxindex = i;
                max = fDistribution2[i];
            }
            System.out.println("the value at " + i + " : " + fDistribution2[i]);
            System.out.println("the label at " + i + " " + prediction2.classAttribute().value(i));
        }
        prediction2.setClassValue(maxindex);
        predictValue.setText(prediction2.classAttribute().value(maxindex));

    }

}

From source file:GroupProject.DMChartUI.java

/**
 * Builds a linear-regression model for the attribute currently selected in
 * {@code dataSelector}, caches it in the {@code Lmodel} field, and shows the
 * model equation in {@code equationDisplayArea}.
 *
 * Only the numeric attributes (indices 14, 15, 18, 19) are valid targets;
 * for any other selection a message is shown and no model is built.
 */
private void buildLinearModel() {
    // Convert the CSV snapshot of the table into ARFF so Weka can read it.
    CSVtoArff converter = new CSVtoArff();
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances students = null;
    // try-with-resources so the reader is always closed (original leaked it).
    try (BufferedReader reader = new BufferedReader(new FileReader("studentTemp.arff"))) {
        students = new Instances(reader);
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
    if (students == null) {
        // Conversion or loading failed (already logged); bail out instead of
        // hitting a NullPointerException on setClassIndex below.
        equationDisplayArea.setText("Could not load studentTemp.arff");
        return;
    }

    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    // Linear regression needs a numeric class attribute; 14/15/18/19 are the
    // numeric columns of this data set.
    if (target != 14 && target != 15 && target != 18 && target != 19) {
        System.out.println("Please select a numerical category");
        equationDisplayArea.setText("Please select a numerical category");
        return;
    }

    // Mark the chosen attribute as the class to predict.
    students.setClassIndex(target);

    // Drop the attributes that should not feed the regression.
    Remove remove = new Remove();
    int[] toremove = { 0, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 16, 17 };
    remove.setAttributeIndicesArray(toremove);

    Instances instNew = null;
    try {
        remove.setInputFormat(students);
        instNew = Filter.useFilter(students, remove);
    } catch (Exception ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        return; // cannot train without the filtered data
    }

    Lmodel = new LinearRegression();
    try {
        System.out.println("im building the model");
        Lmodel.buildClassifier(instNew);
        System.out.println("I finished building the model");
    } catch (Exception ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
    equationDisplayArea.setText(Lmodel.toString());

}

From source file:GroupProject.DMChartUI.java

/**
 * Builds a naive Bayes classifier for the attribute currently selected in
 * {@code dataSelector} and caches it in the {@code NBmodel} field.
 *
 * Naive Bayes needs a nominal class, so the numeric attributes
 * (indices 14, 15, 18, 19) are rejected with a message.
 */
private void buildNBClassifier() {
    // Convert the CSV snapshot of the table into ARFF so Weka can read it.
    CSVtoArff converter = new CSVtoArff();
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances students = null;
    // try-with-resources so the reader is always closed (original leaked it).
    try (BufferedReader reader = new BufferedReader(new FileReader("studentTemp.arff"))) {
        students = new Instances(reader);
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
    if (students == null) {
        // Conversion or loading failed (already logged); bail out instead of
        // hitting a NullPointerException on setClassIndex below.
        equationDisplayArea.setText("Could not load studentTemp.arff");
        return;
    }

    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    // Indices 14/15/18/19 are numeric columns — invalid for naive Bayes.
    if (target == 14 || target == 15 || target == 18 || target == 19) {
        System.out.println("Please select a nominal category");
        equationDisplayArea.setText("Please select a nominal category");
        return;
    }
    // Mark the chosen attribute as the class to predict.
    students.setClassIndex(target);

    System.out.println("im doing NB");
    NBmodel = (Classifier) new NaiveBayes();

    // Train the classifier on the full data set.
    try {
        System.err.println("going to build model ");
        NBmodel.buildClassifier(students);
        System.out.println("I built the model");
    } catch (Exception ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:GroupProject.DMChartUI.java

/**
 * Builds a J48 decision tree for the attribute currently selected in
 * {@code dataSelector} and caches it in the {@code Jmodel} field.
 *
 * J48 needs a nominal class, so the numeric attributes
 * (indices 14, 15, 18, 19) are rejected with a message.
 */
private void buildJClassifier() {
    // Convert the CSV snapshot of the table into ARFF so Weka can read it.
    CSVtoArff converter = new CSVtoArff();
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances students = null;
    // try-with-resources so the reader is always closed (original leaked it).
    try (BufferedReader reader = new BufferedReader(new FileReader("studentTemp.arff"))) {
        students = new Instances(reader);
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
    if (students == null) {
        // Conversion or loading failed (already logged); bail out instead of
        // hitting a NullPointerException on setClassIndex below.
        equationDisplayArea.setText("Could not load studentTemp.arff");
        return;
    }

    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    // Indices 14/15/18/19 are numeric columns — invalid for J48.
    if (target == 14 || target == 15 || target == 18 || target == 19) {
        System.out.println("Please select a nominal category");
        equationDisplayArea.setText("Please select a nominal category");
        return;
    }
    // Mark the chosen attribute as the class to predict.
    students.setClassIndex(target);

    Jmodel = (Classifier) new J48();

    // Train the tree on the full data set.
    try {
        System.out.println("im goin to build the modle");
        Jmodel.buildClassifier(students);
        System.out.println("i finsihed building the mdoel ");
    } catch (Exception ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:Gui.NewClass.java

/**
 * Entry point: lets the user pick an ARFF file and compares four Bayes-net
 * learning approaches on it — random structure search ("MTA_learning"),
 * naive Bayes, K2, and a simple genetic algorithm ("SGA") — printing
 * fitness, time and connectivity results for each, then the structure
 * learned by the best-scoring approach.
 *
 * @param args unused
 * @throws FileNotFoundException if the chosen file cannot be opened
 * @throws IOException           on read errors
 * @throws Exception             on invalid user input (population size &lt;= 1)
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    Graph bestemodel = null;
    int anz;
    int anziter;
    List<BayesNetz> results;
    results = new ArrayList<BayesNetz>();
    int[] tab;
    double maxFitness;
    String verfahren = null; // name of the best approach found so far
    ArrayList<ComplexClassifierZufall> Complexnetze = new ArrayList<>();
    Simpleclassifier Simplenetz = null;
    BayesNetz K2_Netz;
    System.out.println("Datei im ARFF-Format auswhlen");
    JFileChooser dialogue = new JFileChooser();

    dialogue.showOpenDialog(null);

    // The file the user selected.
    String Name = dialogue.getSelectedFile().getName();
    System.out.println("Ausgewhlte Datensatz:" + " " + Name);
    if (Name.contains(".arff") && (dialogue.getSelectedFile().isFile())) {
        BufferedReader reader = new BufferedReader(new FileReader(dialogue.getSelectedFile()));

        // Read the ARFF incrementally: structure first, then row by row.
        ArffReader arff = new ArffReader(reader, 1000);
        Instances data = arff.getStructure();
        data.setClassIndex(data.numAttributes() - 1); // class = last attribute
        Instance inst;
        while ((inst = arff.readInstance(data)) != null) {
            data.add(inst);
        }
        System.out.println(
                "bitte geben sie die  Groee der Startpopulation  ein (die Zahl muss groeer 1 sein!!)");

        Scanner s = new Scanner(System.in);
        // NOTE(review): a constructor never returns null, so this check can
        // never fire; it does not validate the user's actual input.
        if (s == null) {
            System.out.println("keine Zahl ausgewhlt");
            throw new Exception();
        }

        anziter = s.nextInt();
        if (anziter <= 1) {
            System.out.println(
                    "Sie haben eine Zahl eigegeben, die kleiner oder gleich 1 ist. Dies ist nicht zulaessig. Die Startpopulationsgroee muss>1 sein!");
            throw new Exception();

        }

        System.out.println("bitte geben sie die Anzahl Iterationen ein");
        Scanner sc = new Scanner(System.in);
        Collection<BayesNetz> dieNetze = new ArrayList<BayesNetz>();
        anz = sc.nextInt();
        System.out.println("ldt bitte warten...");
        /* BayesNetz struct=new BayesNetz(data,1);
                 
         struct.BerechneFitness();
                  
         struct.getGraph().ToString();
         ;
         tab = new int[anz];*/

        // Build anziter random candidate networks; they also serve as the
        // genetic algorithm's start population (dieNetze).
        int i = 0;
        long anfangszeit_MTA = 0;

        while (i < anziter) {
            ComplexClassifierZufall cc = new ComplexClassifierZufall(data, anziter);
            Complexnetze.add(cc);

            dieNetze.add(new BayesNetz(cc.getinst(), cc.Model));

            i++;
        }

        // Train and score every random candidate.
        for (ComplexClassifierZufall c : Complexnetze) {
            anfangszeit_MTA = System.currentTimeMillis();
            c.BauClassifier();
            // System.out.println("Ergebnisse mit Complexclassifier");
            //System.out.println("********************************");
            c.BewertunginProzent();
            // System.out.println("-----------------------------------------------------------------------------------------------------------------------------");
            // System.out.println("                                    ");

        }

        // Pick the "best" candidate; ties on the quote are broken by the
        // lower mean time.
        // NOTE(review): this keeps the candidate with the SMALLEST
        // Mittlerevalidierungsquote — confirm whether that value is an error
        // rate (minimise) or an accuracy (then the comparison is inverted).
        double max = Complexnetze.get(0).Mittlerevalidierungsquote;
        double zeitmin = Complexnetze.get(0).Mittlerezeit;
        int indexbeste = 0;

        long endzeit_MTA = 0;

        for (ComplexClassifierZufall c : Complexnetze) {

            if (max > c.Mittlerevalidierungsquote) {
                max = c.Mittlerevalidierungsquote;
                bestemodel = c.Model;
                indexbeste = Complexnetze.indexOf(c);
            } else {
                if (max == c.Mittlerevalidierungsquote) {
                    if (zeitmin >= c.Mittlerezeit) {
                        max = c.Mittlerevalidierungsquote;
                        zeitmin = c.Mittlerezeit;
                        bestemodel = c.Model;
                        indexbeste = Complexnetze.indexOf(c);
                    }
                }
            }

        }

        // Report the MTA result; the two branches differ only in how the
        // connectivity percentage is printed when the net has no edges
        // (avoids a divide-by-zero in the first branch's expression).
        if (Complexnetze.get(indexbeste).Model.getAnzahlkanten() != 0) {
            System.out.println(
                    "-----------------------------------------------------------------------------------------------------");
            System.out.println();
            System.out.println("Ergebnisse mit MTA_learning ");
            System.out.println("Fitness:" + ""
                    + (double) (int) (Complexnetze.get(indexbeste).Mittlerevalidierungsquote * 100) / 100
                    + "("
                    + (double) (int) (Complexnetze.get(indexbeste).stadartdeviationvalidierung * 100) / 100
                    + ")" + "%" + " " + "Zeit:" + "" + (int) Complexnetze.get(indexbeste).Mittlerezeit + "("
                    + (int) Complexnetze.get(indexbeste).standartdeviationtime + ")" + "ms");
            System.out.println("Vernetzungsprozent:" + "" + Complexnetze.get(indexbeste).vernetzung + "("
                    + (Complexnetze.get(indexbeste).Model.getMoeglischeAnzahlKanten()
                            * Complexnetze.get(indexbeste).vernetzung)
                            / Complexnetze.get(indexbeste).Model.getAnzahlkanten()
                    + ")" + "%" + "    " + "max Anzahlkanten:"
                    + Complexnetze.get(indexbeste).Model.getAnzahlkanten() + "("
                    + Complexnetze.get(indexbeste).Model.getMoeglischeAnzahlKanten() + " )");
        } else {
            System.out.println(
                    "-----------------------------------------------------------------------------------------------------");
            System.out.println();
            System.out.println("Ergebnisse mit MTA_learning ");
            System.out.println("Fitness:" + ""
                    + (double) (int) (Complexnetze.get(indexbeste).Mittlerevalidierungsquote * 100) / 100
                    + "("
                    + (double) (int) (Complexnetze.get(indexbeste).stadartdeviationvalidierung * 100) / 100
                    + ")" + " " + "Zeit:" + "" + (int) Complexnetze.get(indexbeste).Mittlerezeit + "("
                    + (int) Complexnetze.get(indexbeste).standartdeviationtime + ")" + "ms");
            System.out.println("Vernetzungsprozent:" + "" + Complexnetze.get(indexbeste).vernetzung + "("
                    + (Complexnetze.get(indexbeste).Model.getMoeglischeAnzahlKanten()
                            * Complexnetze.get(indexbeste).vernetzung)
                    + ")" + "%" + "    " + "max Anzahlkanten:"
                    + Complexnetze.get(indexbeste).Model.getAnzahlkanten() + "("
                    + Complexnetze.get(indexbeste).Model.getMoeglischeAnzahlKanten() + " )");
        }

        Complexnetze.get(indexbeste).Model.ToString();
        endzeit_MTA = System.currentTimeMillis();
        System.out.println(
                "Gesamte Ausfhrungszet MTA_learning:" + (endzeit_MTA - anfangszeit_MTA) + " " + "ms");
        results.add(Complexnetze.get(indexbeste).getStruct());
        maxFitness = ((Complexnetze.get(indexbeste).Mittlerevalidierungsquote * 100) / 100);
        verfahren = "MTA_learning";

        System.out.println(
                "---------------------------------------------------------------------------------------------------------------------");

        // --- Naive Bayes baseline -----------------------------------------
        System.out.println();
        long anfangszeit_NB = System.currentTimeMillis();
        long endzeit_NB = 0;

        Simplenetz = new Simpleclassifier(new BayesNetz(data, 0), anz);

        Simplenetz.BauClassifier();
        System.out.println("Ergebnisse mit NaiveBayes");

        System.out.println("Vernetzungsprozent:" + "" + (100 * (Simplenetz.getinst().numAttributes() - 1))
                / Complexnetze.get(indexbeste).Model.getMaxAnzahlkanten() + "%");
        Simplenetz.BewertunginProzent();

        System.out.println("Fitness NaiveBayes:" + ""
                + (double) (int) (Simplenetz.Mittlerevalidierungsquote * 100) / 100 + "("
                + (double) (int) (Simplenetz.stadartdeviationvalidierung * 100) / 100 + ")" + " " + "Zeit:" + ""
                + (int) Simplenetz.Mittlerezeit + "(" + (int) Simplenetz.standartdeviationtime + ")" + "ms");

        Simplenetz.Model.ToString();
        double[] er = new double[3];
        er = Simplenetz.getStruct().getErgebnisse();
        endzeit_NB = System.currentTimeMillis();
        System.out.println("Gesamte Ausfhrungszet NaiveBayes:" + (endzeit_NB - anfangszeit_NB) + " " + "ms");

        results.add(Simplenetz.getStruct());

        // Keep whichever approach has the better (smaller) score so far.
        if (maxFitness > ((Simplenetz.Mittlerevalidierungsquote * 100) / 100)) {
            maxFitness = (Simplenetz.Mittlerevalidierungsquote * 100) / 100;
            verfahren = "NaiveBayes";
        }
        /* for(int l=0;l<er.length;l++)
         System.out.println(er[l]);
         /* Simpleclassifier c = new Simpleclassifier(data, 5);
         c.BauClassifier();
         c.BewertunginProzent();// Get a DescriptiveStatistics instance*/

        // --- K2 structure learning ----------------------------------------
        System.out.println(
                "---------------------------------------------------------------------------------------------------------------------");
        System.out.println();
        System.out.println("Ergebnisse mit K2:");
        System.out.println();
        long anfangszeit_K2 = System.currentTimeMillis();
        long endzeit_K2 = 0;
        K2_Netz = new BayesNetz(new BayesNetz(data, 1));
        er = K2_Netz.getErgebnisse();
        endzeit_K2 = System.currentTimeMillis();
        double f1 = K2_Netz.getFitness();
        System.out.println("Vernetzung:" + " " + K2_Netz.getVernetzung() + "%" + "    " + "Fitness :" + "" + f1
                + "(" + K2_Netz.geterb(3) + ")" + " " + "Zeit:" + (int) K2_Netz.getZeit() + "("
                + K2_Netz.geterb(4) + ")" + "" + "ms");
        K2_Netz.getGraph().ToString();
        System.out.println("Gesamte Ausfhrungszet K2:" + (endzeit_K2 - anfangszeit_K2) + " " + "ms");

        results.add(K2_Netz);
        if (maxFitness > f1) {
            maxFitness = f1;
            verfahren = "K2";
        }

        System.out.println(
                "---------------------------------------------------------------------------------------------------------------------");

        //  System.out.println();
        // System.out.println("Ergebnisse mit Ci:");
        // System.out.println();
        //BayesNetz Ci_Netz=new BayesNetz(new BayesNetz(data,2));
        //System.out.println("Vernetzung:"+Ci_Netz.getVernetzung()+"%"+"    "+"Fitness :" + ""+Ci_Netz.getFitness());
        //Ci_Netz.getGraph().ToString();
        //System.out.println("---------------------------------------------------------------------------------------------------------------------");

        // --- Simple genetic algorithm (Watchmaker framework) ---------------
        Random r;
        r = new Random();
        long anfangszeit_SGA = System.currentTimeMillis();

        AbstractCandidateFactory Factory = new BayesFactory<BayesNetz>();

        // Evolution pipeline: crossover followed by mutation.
        List<EvolutionaryOperator<BayesNetz>> operators = new LinkedList<EvolutionaryOperator<BayesNetz>>();
        operators.add(new Bayescrossover());
        operators.add(new Mutation(new ConstantGenerator<Probability>(new Probability(0.02)),
                new ConstantGenerator(10)));
        EvolutionaryOperator<BayesNetz> pipeline = new EvolutionPipeline<BayesNetz>(operators);
        FitnessEvaluator<BayesNetz> fitness = new BayesFitnessEvaluator();
        SelectionStrategy<BayesNetz> select = new TunrnierSelektion(new Probability(0.6));
        System.out.println("please wait while simulating  evolution...");
        System.out.println();

        long endzeit_SGA = 0;

        EvolutionEngine<BayesNetz> engine = new Geneticsimulation((BayesFactory) Factory, pipeline, fitness,
                select, r);

        // Print fitness/time of the best candidate after every generation.
        engine.addEvolutionObserver(new EvolutionObserver<BayesNetz>() {

            @Override
            public void populationUpdate(PopulationData<? extends BayesNetz> data) {
                System.out.println("Generation" + " " + (data.getGenerationNumber() + 1) + ":");
                System.out.println("Fitness:" + " " + data.getBestCandidate().getFitness() + "," + " "
                        + "Dauer:" + " " + data.getBestCandidate().geterb(3) + " " + "ms");
                ///System.out.println("Beste Netz:");

                //data.getBestCandidate().getGraph().ToString();
                System.out.println(
                        "-------------------------------------------------------------------------------");
            }

        });
        // Evolve from the random start population until the termination
        // condition (anz) is met, keeping one elite individual.
        BayesNetz result = new BayesNetz(engine.evolve(dieNetze.size(), 1, dieNetze, new Terminason(anz)));
        endzeit_SGA = System.currentTimeMillis();
        System.out.println("Ergebnisse der Evolution:");
        double f = result.getFitness();
        if (maxFitness > f) {
            maxFitness = f;
            verfahren = "SGA";
        }

        er = result.getErgebnisse();

        System.out.println("Vernetzung:" + " " + result.getVernetzung() + "%" + "  " + "Fittness:" + " " + f
                + "(" + result.geterb(3) + ")" + " " + "Zeit:" + " " + (int) result.getZeit() + "("
                + result.geterb(4) + ")" + " " + "ms");
        result.getGraph().ToString();
        System.out.println("Gesamte Dauer von SGA:" + " " + (endzeit_SGA - anfangszeit_SGA) + " " + "ms");
        results.add(result);

        System.out.println(
                "---------------------------------------------------------------------------------------------------------------------");

    } else {
        System.out.println("ungltige Dateiformat");

    }

    // Print the winning approach and its learned structure.
    // NOTE(review): if the file was invalid (else-branch above), verfahren is
    // still null here and this switch throws a NullPointerException.
    switch (verfahren) {
    case "MTA_learning": {
        System.out.println("Bester Verfahren:" + " " + verfahren);
        System.out.println("gelernte Struktur: (" + results.get(0).getVernetzung() + "% Vernetzung)" + ":");
        results.get(0).getGraph().ToString();

        break;
    }
    case "NaiveBayes": {
        System.out.println("Bester Verfahren:" + " " + verfahren);
        System.out.println("gelernte Struktur: (" + results.get(1).getVernetzung() + "% Vernetzung)" + " "
                + verfahren + ":");
        results.get(1).getGraph().ToString();
        break;
    }

    case "K2": {
        System.out.println("Bester Verfahren:" + " " + verfahren);
        System.out.println("gelernte Struktur: (" + results.get(2).getVernetzung() + "% Vernetzung)" + " "
                + verfahren + ":");
        results.get(2).getGraph().ToString();
        break;
    }

    case "SGA": {
        System.out.println("Bester Verfahren:" + " " + verfahren);
        System.out.println("gelernte Struktur: (" + results.get(3).getVernetzung() + "% Vernetzung)" + " "
                + verfahren + ":");
        results.get(3).getGraph().ToString();
        break;
    }
    default:
        System.out.println("weiss nicht");
    }

}

From source file:hero.unstable.util.classification.wekaClassifier.java

public Evaluation classify(Instances data) throws Exception {
    data.setClassIndex(0);

    // Randomize data
    Evaluation eval = new Evaluation(data);
    Random rand = new Random(seed);

    // Perform cross-validation
    eval.crossValidateModel(classifier, data, folds, rand);

    // output evaluation
    String result = eval.toClassDetailsString();
    /*/*from   w w  w . j a va2  s  .co m*/
    System.out.println();
    System.out.println("=== Setup ===");
    System.out.println("Clasiffier: " + classifier.toString());
    System.out.println("Dataset: " + data.relationName());
    System.out.println("Folds: " + folds);
    System.out.println("Seed: " + seed);
    System.out.println();
    System.out.println(eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false));
    */
    //System.out.println(result);
    return eval;
}

From source file:hurtowniedanych.FXMLController.java

/**
 * Trains a k-NN (IBk) classifier on 70% of the rows returned by a PostgreSQL
 * query and evaluates it on the remaining 30%, showing the summary in the UI.
 *
 * The neighbour count is read from {@code textFieldKnn} and the distance
 * function (Manhattan or Euclidean) from {@code comboboxOdleglosc}.
 *
 * @throws Exception on database, parsing or Weka errors (propagated unchanged)
 */
public void trainAndTestKNN() throws FileNotFoundException, IOException, Exception {

    InstanceQuery instanceQuery = new InstanceQuery();
    // NOTE(review): credentials are hard-coded; move them to configuration.
    instanceQuery.setUsername("postgres");
    instanceQuery.setPassword("szupek");
    // PostgreSQL connection settings for Weka.
    instanceQuery.setCustomPropsFile(new File("./src/data/DatabaseUtils.props"));

    // Fixed query (no user input concatenated, so no injection risk here).
    // "mia-sto" in the original was an extraction artifact of "miasto".
    String query = "select ks.wydawnictwo,ks.gatunek, kl.miasto\n" + "from zakupy z,ksiazki ks,klienci kl\n"
            + "where ks.id_ksiazka=z.id_ksiazka and kl.id_klient=z.id_klient";

    instanceQuery.setQuery(query);
    Instances data = instanceQuery.retrieveInstances();
    // Last column (the city) is the class to predict.
    data.setClassIndex(data.numAttributes() - 1);

    // Shuffle, then split 70/30 into train and test sets.
    // NOTE(review): unseeded Random makes the split non-reproducible — confirm intended.
    data.randomize(new Random());
    double percent = 70.0;
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances trainData = new Instances(data, 0, trainSize);
    Instances testData = new Instances(data, trainSize, testSize);

    // Number of neighbours chosen by the user.
    int lSasiadow = Integer.parseInt(textFieldKnn.getText());
    System.out.println(lSasiadow);

    IBk ibk = new IBk(lSasiadow);

    // Select the distance function for the nearest-neighbour search.
    EuclideanDistance euclidean = new EuclideanDistance();
    ManhattanDistance manhatan = new ManhattanDistance();

    LinearNNSearch linearNN = new LinearNNSearch();

    if (comboboxOdleglosc.getSelectionModel().getSelectedItem().equals("Manhatan")) {
        linearNN.setDistanceFunction(manhatan);
    } else {
        linearNN.setDistanceFunction(euclidean);
    }

    ibk.setNearestNeighbourSearchAlgorithm(linearNN);

    // Train on the 70% split, evaluate on the held-out 30%.
    ibk.buildClassifier(trainData);

    Evaluation eval = new Evaluation(trainData);
    eval.evaluateModel(ibk, testData);

    // Publish the results to the UI labels.
    spr.setVisible(true);
    labelKnn.setVisible(true);
    labelOdleglosc.setVisible(true);
    labelKnn.setText(textFieldKnn.getText());
    labelOdleglosc.setText(comboboxOdleglosc.getSelectionModel().getSelectedItem().toString());
    spr.setText(eval.toSummaryString("Wynik:", true));
}

From source file:id3.MyID3.java

/**
 * Entry point: trains MyID3 on the nominal weather dataset and prints the
 * class distribution, entropy and information gain per attribute, the
 * induced tree, a full-training evaluation summary, and the confusion
 * matrix.
 *
 * @param args arguments (unused)
 */
public static void main(String[] args) {
    // try-with-resources: the original never closed the reader on any path.
    try (BufferedReader reader = new BufferedReader(
            new FileReader("D:\\Weka-3-6\\data\\weather.nominal.arff"))) {
        Instances instances = new Instances(reader);
        // Last attribute is the class (standard ARFF layout for this dataset).
        instances.setClassIndex(instances.numAttributes() - 1);

        MyID3 id3 = new MyID3();
        // Build once; the original called buildClassifier twice (before and
        // after the entropy/IG dump), which was redundant work.
        id3.buildClassifier(instances);

        // Test class distribution
        double[] classDistribution = id3.classDistribution(instances);
        for (int i = 0; i < classDistribution.length; i++) {
            System.out.println(classDistribution[i]);
        }

        // Test entropy and information gain for each attribute
        System.out.println(id3.computeEntropy(instances));
        Enumeration attributes = instances.enumerateAttributes();
        while (attributes.hasMoreElements()) {
            System.out.println(id3.computeIG(instances, (Attribute) attributes.nextElement()));
        }

        System.out.println(id3.toString());

        // Evaluate the model on the training data itself (resubstitution
        // estimate). A single catch below replaces the original's nested
        // try/catch blocks, which could leave 'eval' null and NPE uncaught.
        Evaluation eval = new Evaluation(instances);
        System.out.println(instances);
        eval.evaluateModel(id3, instances);
        System.out.println(eval.toSummaryString("\nResults Full-Training\n\n", false));

        // Confusion matrix (square: one row/column per class value).
        System.out.println("Confusion Matrix : ");
        double[][] cmMatrix = eval.confusionMatrix();
        for (int row_i = 0; row_i < cmMatrix.length; row_i++) {
            for (int col_i = 0; col_i < cmMatrix.length; col_i++) {
                System.out.print(cmMatrix[row_i][col_i]);
                System.out.print("|");
            }
            System.out.println();
        }
    } catch (Exception e) {
        // Preserve the original behavior of reporting any failure to stderr.
        e.printStackTrace();
    }
}

From source file:id3classifier.Main.java

/**
 * Discretizes, standardizes, shuffles and splits the dataset 70/30, trains
 * an ID3 classifier on the training partition, and prints an evaluation
 * summary against the held-out test partition before exiting.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if loading, filtering, training or evaluation fails
 */
public static void main(String[] args) throws Exception {

    // NOTE(review): 'file' is declared elsewhere in this class — confirm it
    // points at a valid dataset before running.
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(file);
    Instances dataSet = source.getDataSet();

    // Discretize numeric attributes so the (nominal-only) ID3 can use them.
    Discretize filter = new Discretize();
    filter.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, filter);

    // Standardize the dataset.
    Standardize standardizedData = new Standardize();
    standardizedData.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardizedData);

    // Last attribute is the class; shuffle before the ordered split below.
    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Debug.Random());

    // 70/30 train/test split taken from the shuffled data.
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;
    Instances training = new Instances(dataSet, 0, trainingSize);
    Instances test = new Instances(dataSet, trainingSize, testSize);

    // Train ID3 on the training partition only.
    ID3Classifiers classifier = new ID3Classifiers();
    classifier.buildClassifier(training);

    // Initialize Evaluation with the TRAINING partition so class priors are
    // computed from training data only; the original used the full dataset,
    // which leaks test-set information into the priors.
    Evaluation eval = new Evaluation(training);
    eval.evaluateModel(classifier, test);

    // Output, then exit explicitly — important to stop the JavaFX runtime.
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    System.exit(0);
}

From source file:id3j48.WekaAccess.java

/**
 * Loads a dataset from the configured dataset folder and, when the source
 * does not define one, defaults the class attribute to the last attribute.
 *
 * @param filename file name relative to {@code datasetFolder}
 * @return the loaded instances with a class index assigned
 * @throws Exception if the file cannot be read or parsed
 */
public static Instances readArff(String filename) throws Exception {
    String path = datasetFolder + File.separator + filename;
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(path);
    Instances data = source.getDataSet();
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }
    return data;
}