Example usage for weka.classifiers.bayes NaiveBayes buildClassifier

List of usage examples for weka.classifiers.bayes NaiveBayes buildClassifier

Introduction

On this page you can find example usage for weka.classifiers.bayes NaiveBayes buildClassifier.

Prototype

@Override
public void buildClassifier(Instances instances) throws Exception 

Source Link

Document

Generates the classifier.

Usage

From source file:ab.demo.AIAssignment2.java

License:Open Source License

/**
 * Runs one decision cycle of the agent: captures a screenshot, locates the
 * slingshot and pigs, records how the estimated trajectory to a randomly
 * chosen pig intersects wood/ice/stone/pig/TNT objects, consults a Naive
 * Bayes classifier trained on past shots to decide whether to take the shot,
 * and finally executes it if the scene scale has not changed.
 *
 * @return the game state after (or instead of) executing the shot
 */
public GameState solve() {

    // Capture the current screen and run vision processing on it.
    BufferedImage screenshot = ActionRobot.doScreenShot();
    Vision vision = new Vision(screenshot);

    // Find the slingshot; keep retrying while the game is still in PLAYING
    // state (a pop-up or zoom level can hide it).
    Rectangle sling = vision.findSlingshotMBR();
    while (sling == null && aRobot.getState() == GameState.PLAYING) {
        System.out.println("No slingshot detected. Please remove pop up or zoom out");
        ActionRobot.fullyZoomOut();
        screenshot = ActionRobot.doScreenShot();
        vision = new Vision(screenshot);
        sling = vision.findSlingshotMBR();
    }

    // All pigs and blocks currently visible in the scene.
    List<ABObject> pigs = vision.findPigsMBR();
    List<ABObject> blocks = vision.findBlocksMBR();

    GameState state = aRobot.getState();

    // If there is a sling, then play, otherwise just skip.
    if (sling != null) {

        if (!pigs.isEmpty()) { // only shoot if there are pigs in the level

            Point releasePoint = null;
            Shot shot = new Shot();
            int dx, dy;
            {
                // Randomly pick a pig as the target.
                ABObject pig = pigs.get(randomGenerator.nextInt(pigs.size()));
                Point _tpt = pig.getCenter();

                // Estimate the launch trajectory towards the target.
                ArrayList<Point> pts = tp.estimateLaunchPoint(sling, _tpt);

                // Bucket all blocks in the stage by material.
                ArrayList<ABObject> wood = new ArrayList<ABObject>();
                ArrayList<ABObject> stone = new ArrayList<ABObject>();
                ArrayList<ABObject> ice = new ArrayList<ABObject>();
                ArrayList<ABObject> tnt = new ArrayList<ABObject>();

                // Counters for how many trajectory points hit each material.
                int woodCount = 0;
                int stoneCount = 0;
                int iceCount = 0;
                int pigsCount = 0;
                int tntCount = 0;

                for (int i = 0; i < blocks.size(); i++) {
                    if (blocks.get(i).type == ABType.Wood)
                        wood.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Stone)
                        stone.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Ice)
                        ice.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.TNT)
                        tnt.add(blocks.get(i));
                }

                // Count trajectory points that fall inside wood blocks; once
                // the trajectory reaches the target pig, stop scanning the
                // remaining points (jump the inner loop to its last index).
                for (int i = 0; i < wood.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (wood.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some wood");
                            woodCount++;
                        }
                        if (pig.contains(pts.get(j))) // reached the pig
                            j = pts.size() - 1; // stop looking for wood on the trajectory
                    }
                }

                // Same scan for ice blocks.
                for (int i = 0; i < ice.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (ice.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some ice");
                            iceCount++;
                        }
                        if (pig.contains(pts.get(j))) // reached the pig
                            j = pts.size() - 1; // stop looking for ice on the trajectory
                    }
                }

                // Same scan for stone blocks.
                for (int i = 0; i < stone.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (stone.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some stone");
                            stoneCount++;
                        }
                        if (pig.contains(pts.get(j))) // reached the pig
                            j = pts.size() - 1; // stop looking for stone on the trajectory
                    }
                }

                // How many pigs the trajectory intersects (should be >= 1).
                for (int i = 0; i < pigs.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (pigs.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects a pig");
                            pigsCount++;
                        }
                    }
                }

                // How many TNT blocks the trajectory intersects.
                for (int i = 0; i < tnt.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (tnt.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some tnt");
                            tntCount++;
                        }
                        if (pig.contains(pts.get(j))) // reached the pig
                            j = pts.size() - 1; // stop looking for tnt on the trajectory
                    }
                }

                // Append this shot's feature vector (class unknown: "?") to
                // the per-level ARFF data file.
                StringBuilder sb = new StringBuilder();
                sb.append(pigsCount + "," + woodCount + "," + iceCount + "," + stoneCount + "," + tntCount
                        + ",?");
                String dataEntry = sb.toString();
                try (PrintWriter out = new PrintWriter(
                        new BufferedWriter(new FileWriter("dataset/birds.level.arff", true)))) {
                    out.println(dataEntry);
                } catch (IOException e) {
                    System.out.println("Error - dataset/birds.level.arff file not found or in use!");
                }

                // Classifier verdicts: true = take the shot, false = skip it.
                ArrayList<Boolean> takeShot = new ArrayList<Boolean>();

                try {
                    // Training set accumulated over previous levels.
                    DataSource source = new DataSource("dataset/birds.data.arff");
                    Instances data = source.getDataSet();

                    // Set the class attribute if the data format does not
                    // provide it (e.g. plain ARFF, unlike XRFF).
                    if (data.classIndex() == -1)
                        data.setClassIndex(data.numAttributes() - 1);

                    // Feature vectors recorded for the current level.
                    DataSource thisLevel = new DataSource("dataset/birds.level.arff");
                    Instances thisLevelData = thisLevel.getDataSet();
                    if (thisLevelData.classIndex() == -1)
                        // FIX: was data.numAttributes() - set the class index
                        // from thisLevelData's own attribute count.
                        thisLevelData.setClassIndex(thisLevelData.numAttributes() - 1);

                    // Train a Naive Bayes classifier on the historical data.
                    NaiveBayes bayes = new NaiveBayes();
                    bayes.buildClassifier(data);

                    for (int i = 0; i < thisLevelData.numInstances(); i++) {
                        // Classify each instance of the current level and
                        // store the predicted label on it.
                        double label = bayes.classifyInstance(thisLevelData.instance(i));
                        thisLevelData.instance(i).setClassValue(label);
                        System.out.println(thisLevelData.instance(i).stringValue(5));
                        // FIX: compare string content with equals(), not
                        // reference identity (== / !=).
                        String verdict = thisLevelData.instance(i).stringValue(5);
                        if (!"?".equals(verdict)) { // decisive verdict
                            data.add(thisLevelData.instance(i)); // keep for learning
                            takeShot.add("yes".equals(verdict));
                        }
                    }

                    // Persist the augmented learning set (all non-"?" entries).
                    // FIX: try-with-resources so the writer is closed even if
                    // write() throws.
                    try (BufferedWriter writer = new BufferedWriter(
                            new FileWriter("dataset/birds.data.arff"))) {
                        writer.write(data.toString());
                        writer.flush();
                    }

                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("Exception caught - file handle error");
                }

                //TODO: implement a failsafe so the agent does not get stuck randomly choosing pigs which the bayesian classification does not allow.
                // FIX: the original seeded a Random with takeShot.size() and
                // called nextInt() with no bound, which returns an arbitrary
                // (possibly negative/huge) index and crashes get(). Guard the
                // empty case and draw a bounded index instead.
                if (takeShot.isEmpty()) {
                    System.out.println("No classifier verdict available. Finding another random pig.");
                    return state;
                }
                if (takeShot.get(randomGenerator.nextInt(takeShot.size())))
                    System.out.println("Taking this shot.");
                else {
                    System.out.println("Not taking this shot. Finding another random pig.");
                    return state;
                }

                // If the target is very close to the previous one, nudge it
                // to a random nearby point to avoid repeating a failed shot.
                if (prevTarget != null && distance(prevTarget, _tpt) < 10) {
                    double _angle = randomGenerator.nextDouble() * Math.PI * 2;
                    _tpt.x = _tpt.x + (int) (Math.cos(_angle) * 10);
                    _tpt.y = _tpt.y + (int) (Math.sin(_angle) * 10);
                    System.out.println("Randomly changing to " + _tpt);
                }

                prevTarget = new Point(_tpt.x, _tpt.y);

                // Do a high shot when entering a level to calibrate velocity.
                if (firstShot && pts.size() > 1) {
                    releasePoint = pts.get(1);
                } else if (pts.size() == 1)
                    releasePoint = pts.get(0);
                else if (pts.size() == 2) {
                    // Randomly choose between the trajectories, with a 1 in 6
                    // chance of choosing the high one.
                    if (randomGenerator.nextInt(6) == 0)
                        releasePoint = pts.get(1);
                    else
                        releasePoint = pts.get(0);
                } else if (pts.isEmpty()) {
                    System.out.println("No release point found for the target");
                    System.out.println("Try a shot with 45 degree");
                    releasePoint = tp.findReleasePoint(sling, Math.PI / 4);
                }

                // Reference point of the sling for computing the drag vector.
                Point refPoint = tp.getReferencePoint(sling);

                // Calculate the tapping time according to the bird type.
                if (releasePoint != null) {
                    double releaseAngle = tp.getReleaseAngle(sling, releasePoint);
                    System.out.println("Release Point: " + releasePoint);
                    System.out.println("Release Angle: " + Math.toDegrees(releaseAngle));
                    int tapInterval = 0;
                    switch (aRobot.getBirdTypeOnSling()) {

                    case RedBird:
                        tapInterval = 0;
                        break; // start of trajectory
                    case YellowBird:
                        tapInterval = 65 + randomGenerator.nextInt(25);
                        break; // 65-90% of the way
                    case WhiteBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlackBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlueBird:
                        tapInterval = 65 + randomGenerator.nextInt(20);
                        break; // 65-85% of the way
                    default:
                        tapInterval = 60;
                    }

                    int tapTime = tp.getTapTime(sling, releasePoint, _tpt, tapInterval);
                    dx = (int) releasePoint.getX() - refPoint.x;
                    dy = (int) releasePoint.getY() - refPoint.y;
                    shot = new Shot(refPoint.x, refPoint.y, dx, dy, 0, tapTime);
                } else {
                    System.err.println("No Release Point Found");
                    return state;
                }
            }

            // Check whether the slingshot changed; a change indicates the
            // scene scale changed and the computed shot is no longer valid.
            {
                ActionRobot.fullyZoomOut();
                screenshot = ActionRobot.doScreenShot();
                vision = new Vision(screenshot);
                Rectangle _sling = vision.findSlingshotMBR();
                if (_sling != null) {
                    double scale_diff = Math.pow((sling.width - _sling.width), 2)
                            + Math.pow((sling.height - _sling.height), 2);
                    if (scale_diff < 25) {
                        if (dx < 0) {
                            aRobot.cshoot(shot);
                            state = aRobot.getState();
                            if (state == GameState.PLAYING) {
                                // Learn from the observed trajectory.
                                screenshot = ActionRobot.doScreenShot();
                                vision = new Vision(screenshot);
                                List<Point> traj = vision.findTrajPoints();
                                tp.adjustTrajectory(traj, sling, releasePoint);
                                firstShot = false;
                            }
                        }
                    } else
                        System.out.println(
                                "Scale is changed, can not execute the shot, will re-segement the image");
                } else
                    System.out
                            .println("no sling detected, can not execute the shot, will re-segement the image");
            }

        }

    }
    return state;
}

From source file:clasificador.Perceptron.java

/**
 * Trains a Naive Bayes classifier on a training ARFF file, evaluates it on
 * both the training set and a separate test set, and prints summary
 * statistics, confusion matrices, and the per-instance predictions.
 * Errors are logged rather than propagated.
 */
public void naive_Bayes() {
    try {
        // Training instances for the classifier.
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Test instances for evaluating the model.
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Build the classifier (variable kept named "perceptron" to match
        // the class's existing naming).
        NaiveBayes perceptron = new NaiveBayes();
        perceptron.buildClassifier(instancias);

        // Evaluate on the training set itself.
        Evaluation ev = new Evaluation(instancias);
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Evaluate on the held-out test set. Keep the returned predictions:
        // FIX - the original called evaluateModel() again on every loop
        // iteration (and once more for the bound), re-running the whole
        // evaluation O(n) times and double-counting statistics in `ev`.
        double[] predicciones = ev.evaluateModel(perceptron, testInstance);
        System.out.println("\n\nTEST DEL MODELO NAIVE BAYES\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Print each test instance's predicted class index.
        for (int i = 0; i < predicciones.length; i++) {
            System.out.println("Se clasifica como:  " + predicciones[i]);
        }
    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:Control.Classificador.java

/**
 * Runs the classifiers selected in {@code plano} (IB1, J48, IBk/kNN,
 * NaiveBayes, Id3) against the data set in {@code arq}, cross-validates each
 * one, and appends a {@code Resultado} (error/accuracy rate, recall,
 * precision, confusion matrix) per classifier to {@code this.resultados}.
 *
 * NOTE(review): a single {@code Evaluation} object is reused across all
 * enabled classifiers, so crossValidateModel() calls accumulate statistics
 * from one classifier to the next — later Resultado entries mix earlier
 * results. Also, classifiers are built and cross-validated on
 * {@code baseTeste} (the test fold), and {@code baseTreino} is never used
 * after being assigned. Both look like defects but are preserved here;
 * confirm intent before changing.
 *
 * @param plano selection of which classifiers to run and whether to hold out
 * @param arq   wrapper holding the ARFF file to read instances from
 * @return the accumulated list of results (also stored on this object)
 */
public ArrayList<Resultado> classificar(Plano plano, Arquivo arq) {
    try {
        // Load the data set; the class attribute is the last one.
        FileReader leitor = new FileReader(arq.arquivo);
        Instances conjunto = new Instances(leitor);
        conjunto.setClassIndex(conjunto.numAttributes() - 1);
        Evaluation avaliacao = new Evaluation(conjunto);
        // Shuffle with an unseeded Random — results are non-deterministic.
        conjunto = conjunto.resample(new Random());

        Instances baseTreino = null, baseTeste = null;
        Random rand = new Random(1);

        if (plano.eHoldOut) {
            // Hold-out split: fold 0 of 3 as test, the rest as training.
            baseTeste = conjunto.testCV(3, 0);
            baseTreino = conjunto.trainCV(3, 0);
        } else {
            // No hold-out: train and test on the full set.
            baseTeste = baseTreino = conjunto;
        }

        if (plano.IBK) {
            try {
                // 1-nearest-neighbour classifier.
                IB1 vizinho = new IB1();
                vizinho.buildClassifier(baseTeste);
                // 4 folds under hold-out, otherwise leave-one-out.
                avaliacao.crossValidateModel(vizinho, baseTeste,
                        (plano.eHoldOut) ? 4 : baseTeste.numInstances(), rand);
                Resultado resultado = new Resultado("NN",
                        avaliacao.toMatrixString("Algortmo Vizinho Mais Prximo - Matriz de Confuso"),
                        avaliacao.toClassDetailsString("kNN"));
                resultado.setTaxaErro(avaliacao.errorRate());
                resultado.setTaxaAcerto(1 - avaliacao.errorRate());
                resultado.setRevocacao(recallToDouble(avaliacao, baseTeste));
                resultado.setPrecisao(precisionToDouble(avaliacao, baseTeste));
                this.resultados.add(resultado);
            } catch (UnsupportedAttributeTypeException ex) {
                // IB1 cannot handle numeric attributes.
                Mensagem.erro("Algortmo IB1 no suporta atributos numricos!", "MTCS - ERRO");
            }
        }
        if (plano.J48) {
            try {
                // C4.5 decision tree.
                J48 j48 = new J48();
                j48.buildClassifier(baseTeste);
                avaliacao.crossValidateModel(j48, baseTeste, (plano.eHoldOut) ? 4 : baseTeste.numInstances(),
                        rand);
                Resultado resultado = new Resultado("J48",
                        avaliacao.toMatrixString("Algortmo J48 - Matriz de Confuso"),
                        avaliacao.toClassDetailsString("J48"));
                resultado.setTaxaErro(avaliacao.errorRate());
                resultado.setTaxaAcerto(1 - avaliacao.errorRate());
                resultado.setRevocacao(recallToDouble(avaliacao, baseTeste));
                resultado.setPrecisao(precisionToDouble(avaliacao, baseTeste));
                this.resultados.add(resultado);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo J48 no suporta atributos nominais!", "MTCS - ERRO");
            }
        }
        if (plano.KNN) {
            try {
                // k-nearest-neighbours with k = 3.
                IBk knn = new IBk(3);
                knn.buildClassifier(baseTeste);
                avaliacao.crossValidateModel(knn, baseTeste, (plano.eHoldOut) ? 4 : baseTeste.numInstances(),
                        rand);
                Resultado resultado = new Resultado("KNN",
                        avaliacao.toMatrixString("Algortmo KNN - Matriz de Confuso"),
                        avaliacao.toClassDetailsString("kNN"));
                resultado.setTaxaErro(avaliacao.errorRate());
                resultado.setTaxaAcerto(1 - avaliacao.errorRate());
                resultado.setRevocacao(recallToDouble(avaliacao, baseTeste));
                resultado.setPrecisao(precisionToDouble(avaliacao, baseTeste));
                this.resultados.add(resultado);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo KNN no suporta atributos numricos!", "MTCS - ERRO");
            }

        }
        if (plano.Naive) {
            // Naive Bayes — note: unlike the others, not wrapped in a
            // try/catch; an UnsupportedAttributeTypeException here falls
            // through to the outer catch (Exception).
            NaiveBayes naive = new NaiveBayes();
            naive.buildClassifier(baseTeste);
            avaliacao.crossValidateModel(naive, baseTeste, (plano.eHoldOut) ? 4 : baseTeste.numInstances(),
                    rand);
            Resultado resultado = new Resultado("Naive",
                    avaliacao.toMatrixString("Algortmo NaiveBayes - Matriz de Confuso"),
                    avaliacao.toClassDetailsString("kNN"));
            resultado.setTaxaErro(avaliacao.errorRate());
            resultado.setTaxaAcerto(1 - avaliacao.errorRate());
            resultado.setRevocacao(recallToDouble(avaliacao, baseTeste));
            resultado.setPrecisao(precisionToDouble(avaliacao, baseTeste));
            this.resultados.add(resultado);
        }
        if (plano.Tree) {
            try {
                // ID3 decision tree (nominal attributes only).
                Id3 id3 = new Id3();
                id3.buildClassifier(baseTeste);
                avaliacao.crossValidateModel(id3, baseTeste, (plano.eHoldOut) ? 4 : baseTeste.numInstances(),
                        rand);
                Resultado resultado = new Resultado("ID3",
                        avaliacao.toMatrixString("Algortmo ID3 - Matriz de Confuso"),
                        avaliacao.toClassDetailsString("kNN"));
                resultado.setTaxaErro(avaliacao.errorRate());
                resultado.setTaxaAcerto(1 - avaliacao.errorRate());
                resultado.setRevocacao(recallToDouble(avaliacao, baseTeste));
                resultado.setPrecisao(precisionToDouble(avaliacao, baseTeste));
                this.resultados.add(resultado);
            } catch (UnsupportedAttributeTypeException ex) {
                Mensagem.erro("Algortmo Arvore de Deciso no suporta atributos numricos!",
                        "MTCS - ERRO");

            }
        }

    } catch (FileNotFoundException ex) {

        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NullPointerException ex) {
        // No file selected by the user.
        Mensagem.erro("Selecione um arquivo para comear!", "MTCS - ERRO");
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(Classificador.class.getName()).log(Level.SEVERE, null, ex);
    }

    return this.resultados;
}

From source file:controller.BothClassificationsServlet.java

/**
 * Handles the "create" action: converts an uploaded text file to
 * training/test ARFF files, trains both a J48 decision tree and a Naive
 * Bayes classifier on the training set, evaluates both on the test set,
 * writes both reports to a single output file, and redirects to the results
 * view. Any other action yields a 404.
 *
 * @param request  form parameters: action, file, name, range, counter, and
 *                 column-i / type-i / position-i triples
 * @param response redirected to the results page, or to the navigation page
 *                 on failure
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        // Derive the output file names from the input file's base name.
        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathBothClassifications = path + "/" + aux + "-bothClassifications.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        // Collect the selected column names, types and positions from the
        // form; only parameters that were actually submitted are kept.
        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        // Convert the raw input into training and test ARFF files.
        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);
        try {
            J48 j48 = new J48();

            // FIX: readers are now closed via try-with-resources (the
            // originals were never closed and leaked file handles).
            Instances instancesTraining;
            try (BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput))) {
                instancesTraining = new Instances(readerTraining);
            }
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            j48.buildClassifier(instancesTraining);

            Instances instancesTest;
            try (BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput))) {
                instancesTest = new Instances(readerTest);
            }
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            // Count the decision tree's correct classifications by hand.
            int correctsDecisionTree = 0;
            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);

                if (correctValue == classification) {
                    correctsDecisionTree++;
                }
            }

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            NaiveBayes naiveBayes = new NaiveBayes();
            int correctsNaiveBayes = 0;

            // FIX: writer is now closed via try-with-resources even when an
            // exception occurs mid-report.
            try (PrintWriter writer = new PrintWriter(
                    new BufferedWriter(new FileWriter(pathBothClassifications, false)))) {

                writer.println("?rvore de Deciso\n\n");

                writer.println(j48.toString());

                writer.println("");
                writer.println("");
                writer.println("Results");
                writer.println(eval.toSummaryString());

                naiveBayes.buildClassifier(instancesTraining);

                eval = new Evaluation(instancesTraining);
                eval.evaluateModel(naiveBayes, instancesTest);

                // Count Naive Bayes' correct classifications by hand.
                for (int i = 0; i < instancesTest.size(); i++) {
                    Instance instance = instancesTest.get(i);
                    double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                    double classification = naiveBayes.classifyInstance(instance);

                    if (correctValue == classification) {
                        correctsNaiveBayes++;
                    }
                }

                writer.println("Naive Bayes\n\n");

                writer.println(naiveBayes.toString());

                writer.println("");
                writer.println("");
                writer.println("Results");
                writer.println(eval.toSummaryString());
            }

            response.sendRedirect("BothClassifications?action=view&correctsDecisionTree=" + correctsDecisionTree
                    + "&correctsNaiveBayes=" + correctsNaiveBayes + "&totalTest=" + instancesTest.size()
                    + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&fileName=" + aux
                    + "-bothClassifications.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }

        break;
    }
    default:
        response.sendError(404);
    }
}

From source file:Controller.CtlDataMining.java

/**
 * Trains a Naive Bayes classifier on {@code data}, runs 10-fold
 * cross-validation with a fixed seed, and returns an HTML-formatted summary
 * of the evaluation (instance counts, accuracy/error percentages, mean
 * absolute error, and the confusion matrix).
 *
 * @param data labelled instances; the class attribute must already be set
 * @return an HTML fragment with the results, or an error message on failure
 */
public String redBayesiana(Instances data) {
    try {
        // Build the Naive Bayes classifier.
        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(data);

        // Evaluation object for cross-validating the model.
        Evaluation evalB = new Evaluation(data);

        // 10-fold cross-validation; seed 1 makes the fold shuffling
        // reproducible.
        evalB.crossValidateModel(nb, data, 10, new Random(1));

        // FIX: build the report with StringBuilder instead of repeated
        // String reassignment/concatenation; output is byte-identical.
        StringBuilder resBay = new StringBuilder();
        resBay.append("<br><br><b><center>Resultados NaiveBayes</center>").append("<br>========<br>")
                .append("Modelo generado indica los siguientes resultados:").append("<br>========<br></b>");
        resBay.append("<b>1. Numero de instancias clasificadas:</b> ").append((int) evalB.numInstances())
                .append("<br>");
        resBay.append("<b>2. Porcentaje de instancias correctamente ").append("clasificadas:</b> ")
                .append(formato.format(evalB.pctCorrect())).append("%<br>");
        resBay.append("<b>3. Numero de instancias correctamente ").append("clasificadas:</b> ")
                .append((int) evalB.correct()).append("<br>");
        resBay.append("<b>4. Porcentaje de instancias incorrectamente ").append("clasificadas:</b> ")
                .append(formato.format(evalB.pctIncorrect())).append("%<br>");
        resBay.append("<b>5. Numero de instancias incorrectamente ").append("clasificadas:</b> ")
                .append((int) evalB.incorrect()).append("<br>");
        resBay.append("<b>6. Media del error absoluto:</b> ").append(formato.format(evalB.meanAbsoluteError()))
                .append("%<br>");
        resBay.append("<b>7. ")
                .append(evalB.toMatrixString("Matriz de " + "confusion</b>").replace("\n", "<br>"));

        return resBay.toString();

    } catch (Exception e) {
        return "El error es" + e.getMessage();
    }
}

From source file:controller.NaiveBayesServlet.java

/**
 * Handles the servlet's POST actions. For action {@code "create"} it converts the
 * uploaded text file into ARFF train/test splits, trains a Weka NaiveBayes
 * classifier on the training split, evaluates it on the test split, writes the
 * model description plus the evaluation summary to disk, and redirects to the
 * view page with the confusion-matrix counts as query parameters. Any other
 * action yields a 404.
 *
 * @param request  the HTTP request; expected parameters: action, file, name,
 *                 range, counter and per-column column-i / type-i / position-i
 * @param response the HTTP response used for the redirect or error page
 * @throws ServletException kept for the servlet contract
 * @throws IOException if the redirect or error page cannot be written
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        // Derived file names all share the uploaded file's base name.
        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathNaivebayes = path + "/" + aux + "-naiveBayes.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;

        // Collect only the columns the user selected; unselected indices
        // post no "column-i" parameter and are skipped.
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);

        try {
            NaiveBayes naiveBayes = new NaiveBayes();

            // try-with-resources closes the readers even on failure
            // (the original leaked both of them).
            Instances instancesTraining;
            try (BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput))) {
                instancesTraining = new Instances(readerTraining);
            }
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            naiveBayes.buildClassifier(instancesTraining);

            Instances instancesTest;
            try (BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput))) {
                instancesTest = new Instances(readerTest);
            }
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            // Manual confusion-matrix counts for a binary class where the
            // positive class is encoded as 1 and the negative class as 0.
            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);

                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }

            // Persist the model description and the evaluation summary.
            try (PrintWriter writer = new PrintWriter(
                    new BufferedWriter(new FileWriter(pathNaivebayes, false)))) {
                writer.println(naiveBayes.toString());

                writer.println("");
                writer.println("");
                writer.println("Results");
                writer.println(eval.toSummaryString());
            }

            response.sendRedirect(
                    "NaiveBayes?action=view&corrects=" + corrects + "&totalTest=" + instancesTest.size()
                            + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&truePositive="
                            + truePositive + "&trueNegative=" + trueNegative + "&falsePositive=" + falsePositive
                            + "&falseNegative=" + falseNegative + "&fileName=" + aux + "-naiveBayes.txt");

        } catch (Exception e) {
            // Fall back to the NaiveBayes setup page on any training/IO error.
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=naiveBayes");
        }

        break;
    }
    default:
        response.sendError(404);
    }

}

From source file:cs.man.ac.uk.predict.Predictor.java

License:Open Source License

public static void makePredictionsEnsembleNew(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**/*from ww  w.ja va2s.  c o m*/
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();
    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        String line = "";

        // Read the file and display it line by line. 
        BufferedReader in = null;

        // Read in and store each positive prediction in the tree map.
        try {
            //open stream to file
            in = new BufferedReader(new FileReader(testPath));

            while ((line = in.readLine()) != null) {
                if (line.toLowerCase().contains("@data"))
                    break;
            }
        } catch (Exception e) {
        }

        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances avaialable.");

        startTime = System.nanoTime();

        int instanceNumber = 0;

        // label instances
        Instance current;

        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;
            line = in.readLine();

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                if (line != null) {
                    //System.out.println("Instance: "+instanceNumber+"\t"+line);
                    //System.in.read();
                }
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        in.close();

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;

        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}

From source file:cs.man.ac.uk.predict.Predictor.java

License:Open Source License

public static void makePredictionsEnsembleStream(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**/*from  ww  w  .ja  v  a2 s  . co m*/
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();
    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances avaialable.");

        startTime = System.nanoTime();

        int instanceNumber = 0;

        // label instances
        Instance current;

        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;

        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}

From source file:epsi.i5.datamining.Weka.java

/**
 * Loads the ARFF data set for {@code fileOne} and 10-fold cross-validates a
 * battery of Weka models on it: four classifiers against the class attribute
 * in column 0, then two regressors against the attribute in column 1.
 *
 * NOTE(fix): the original additionally called evaluateModel(model, data) on
 * the same Evaluation object AFTER crossValidateModel, which silently mixed
 * resubstitution (training-set) results into the cross-validation statistics
 * and corrupted the printed matrices/percentages. Only the cross-validation
 * results are reported now.
 *
 * @throws FileNotFoundException if the ARFF file does not exist
 * @throws IOException on any read error
 * @throws Exception if a Weka model cannot be built or evaluated
 */
public void excutionAlgo() throws FileNotFoundException, IOException, Exception {
    Instances data;
    // try-with-resources guarantees the reader is closed even if parsing fails.
    try (BufferedReader reader = new BufferedReader(
            new FileReader("src/epsi/i5/data/" + fileOne + ".arff"))) {
        data = new Instances(reader);
    }

    // Classification target: attribute 0.
    data.setClass(data.attribute(0));
    crossValidateClassifier(new NaiveBayes(), "******** Naive Bayes ********", data);
    System.out.println("");
    crossValidateClassifier(new J48(), "************ J48 ************", data);
    System.out.println("");
    crossValidateClassifier(new DecisionTable(), "******* DecisionTable *******", data);
    System.out.println("");
    crossValidateClassifier(new OneR(), "************ OneR ***********", data);

    // Polarity: switch to the regression target in attribute 1.
    data.setClass(data.attribute(1));
    System.out.println("");
    crossValidateRegressor(new M5Rules(), "********** M5Rules **********", data);
    System.out.println("");
    crossValidateRegressor(new LinearRegression(), "********** linearR **********", data);
}

/** 10-fold cross-validates one classifier and prints its confusion matrix and accuracy. */
private void crossValidateClassifier(weka.classifiers.Classifier model, String header, Instances data)
        throws Exception {
    model.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    // Seed 1 keeps the fold split reproducible across runs.
    eval.crossValidateModel(model, data, 10, new Random(1));
    System.out.println("*****************************");
    System.out.println(header);
    System.out.println(eval.toMatrixString());
    System.out.println("*****************************");
    System.out.println("**** Pourcentage Correct ****");
    System.out.println(eval.pctCorrect());
}

/** 10-fold cross-validates one regressor and prints its correlation coefficient. */
private void crossValidateRegressor(weka.classifiers.Classifier model, String header, Instances data)
        throws Exception {
    model.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(model, data, 10, new Random(1));
    System.out.println("*****************************");
    System.out.println(header);
    System.out.println(eval.correlationCoefficient());
}

From source file:kfst.classifier.WekaClassifier.java

License:Open Source License

/**
 * This method builds and evaluates the naive Bayes (NB) classifier.
 * Uses the NB classifier implemented in the Weka software: the model is
 * trained on the train set and evaluated on the held-out test set.
 *
 * @param pathTrainData the path of the train set (ARFF format)
 * @param pathTestData the path of the test set (ARFF format)
 *
 * @return the classification accuracy as a percentage (0 on any failure,
 *         which is logged)
 */
public static double naiveBayes(String pathTrainData, String pathTestData) {
    double resultValue = 0;
    try {
        // try-with-resources guarantees the readers are closed even when
        // ARFF parsing throws (the original only closed them on success).
        Instances dataTrain;
        try (BufferedReader readerTrain = new BufferedReader(new FileReader(pathTrainData))) {
            dataTrain = new Instances(readerTrain);
        }
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        Instances dataTest;
        try (BufferedReader readerTest = new BufferedReader(new FileReader(pathTestData))) {
            dataTest = new Instances(readerTest);
        }
        dataTest.setClassIndex(dataTest.numAttributes() - 1);

        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(dataTrain);
        // Initialize Evaluation with the TRAINING data so priors/cost data
        // come from the training distribution (the original passed the test
        // set; errorRate itself is unaffected, but this is the documented
        // Weka convention and matters for prior-dependent statistics).
        Evaluation eval = new Evaluation(dataTrain);
        eval.evaluateModel(nb, dataTest);
        resultValue = 100 - (eval.errorRate() * 100);
    } catch (Exception ex) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return resultValue;
}