Example usage for weka.classifiers.bayes NaiveBayes classifyInstance

List of usage examples for weka.classifiers.bayes NaiveBayes classifyInstance

Introduction

On this page you can find example usage for weka.classifiers.bayes NaiveBayes classifyInstance.

Prototype

@Override
public double classifyInstance(Instance instance) throws Exception 

Document

Classifies the given test instance.
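
For orientation before the full examples below, here is a minimal, self-contained sketch of the typical call pattern: train the classifier with buildClassifier, then pass each test Instance to classifyInstance, which returns the index of the predicted class value as a double. The file name weather.arff and the assumption that the class is the last attribute are placeholders for illustration only, not part of any example on this page.

    import weka.classifiers.bayes.NaiveBayes;
    import weka.core.Instance;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class NaiveBayesQuickStart {
        public static void main(String[] args) throws Exception {
            // load a dataset (placeholder file name) and mark the last attribute as the class
            Instances data = new DataSource("weather.arff").getDataSet();
            if (data.classIndex() == -1)
                data.setClassIndex(data.numAttributes() - 1);

            // build the Naive Bayes model on the full dataset
            NaiveBayes bayes = new NaiveBayes();
            bayes.buildClassifier(data);

            // classifyInstance returns the predicted class value's index as a double
            for (int i = 0; i < data.numInstances(); i++) {
                Instance inst = data.instance(i);
                double label = bayes.classifyInstance(inst);
                System.out.println(data.classAttribute().value((int) label));
            }
        }
    }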

Usage

From source file:ab.demo.AIAssignment2.java

License:Open Source License

public GameState solve() {

    // capture Image
    BufferedImage screenshot = ActionRobot.doScreenShot();

    // process image
    Vision vision = new Vision(screenshot);

    // find the slingshot
    Rectangle sling = vision.findSlingshotMBR();

    // confirm the slingshot
    while (sling == null && aRobot.getState() == GameState.PLAYING) {
        System.out.println("No slingshot detected. Please remove pop up or zoom out");
        ActionRobot.fullyZoomOut();
        screenshot = ActionRobot.doScreenShot();
        vision = new Vision(screenshot);
        sling = vision.findSlingshotMBR();
    }

    // get all the pigs
    List<ABObject> pigs = vision.findPigsMBR();
    List<ABObject> blocks = vision.findBlocksMBR();

    GameState state = aRobot.getState();

    // if there is a sling, then play, otherwise just skip.
    if (sling != null) {

        if (!pigs.isEmpty()) { //if there are pigs in the level

            Point releasePoint = null;
            Shot shot = new Shot();
            int dx, dy;
            {
                //randomly pick a pig
                ABObject pig = pigs.get(randomGenerator.nextInt(pigs.size()));
                Point _tpt = pig.getCenter();

                // estimate the trajectory
                ArrayList<Point> pts = tp.estimateLaunchPoint(sling, _tpt);

                //lists for the wood, stone, ice and TNT blocks in the stage
                ArrayList<ABObject> wood = new ArrayList<ABObject>();
                ArrayList<ABObject> stone = new ArrayList<ABObject>();
                ArrayList<ABObject> ice = new ArrayList<ABObject>();
                ArrayList<ABObject> tnt = new ArrayList<ABObject>();

                //initialise counters to store how many times the trajectory intersects blocks of these types
                int woodCount = 0;
                int stoneCount = 0;
                int iceCount = 0;
                int pigsCount = 0;
                int tntCount = 0;

                //populate the wood, stone, ice and TNT lists with the matching blocks
                for (int i = 0; i < blocks.size(); i++) {
                    if (blocks.get(i).type == ABType.Wood)
                        wood.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Stone)
                        stone.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.Ice)
                        ice.add(blocks.get(i));
                    if (blocks.get(i).type == ABType.TNT)
                        tnt.add(blocks.get(i));
                }

                //check whether the trajectory intersects any wood blocks
                for (int i = 0; i < wood.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (wood.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some wood");
                            woodCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for wood on the trajectory (escape for loop)
                    }
                }

                //check whether the trajectory intersects any ice blocks
                for (int i = 0; i < ice.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (ice.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some ice");
                            iceCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for ice on the trajectory (escape for loop)
                    }
                }

                //check whether the trajectory intersects any stone blocks            
                for (int i = 0; i < stone.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (stone.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some stone");
                            stoneCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for stone on the trajectory (escape for loop)
                    }
                }

                //how many pigs the trajectory intersects (this should always be at least 1)         
                for (int i = 0; i < pigs.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (pigs.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects a pig");
                            pigsCount++;
                        }
                    }
                }

                //how many tnt blocks the trajectory intersects            
                for (int i = 0; i < tnt.size(); i++) {
                    for (int j = 0; j < pts.size(); j++) {
                        if (tnt.get(i).contains(pts.get(j))) {
                            System.out.println("Trajectory intersects some tnt");
                            tntCount++;
                        }
                        if (pig.contains(pts.get(j))) //if we find the pig on this point
                            j = pts.size() - 1; //stop looking for tnt on the trajectory
                    }
                }

                StringBuilder sb = new StringBuilder();
                sb.append(pigsCount + "," + woodCount + "," + iceCount + "," + stoneCount + "," + tntCount
                        + ",?");
                String dataEntry = sb.toString();
                try (PrintWriter out = new PrintWriter(
                        new BufferedWriter(new FileWriter("dataset/birds.level.arff", true)))) {
                    out.println(dataEntry);
                } catch (IOException e) {
                    System.out.println("Error - dataset/birds.level.arff file not found or in use!");
                }

                //indicates whether the agent should take this shot (populated from the Bayes classifier below)
                ArrayList<Boolean> takeShot = new ArrayList<Boolean>();

                try {
                    DataSource source = new DataSource("dataset/birds.data.arff"); //initialise the learning set for the agent
                    Instances data = source.getDataSet();

                    // setting class attribute if the data format does not provide this information
                    // For example, the XRFF format saves the class attribute information as well
                    if (data.classIndex() == -1)
                        data.setClassIndex(data.numAttributes() - 1);

                    DataSource thisLevel = new DataSource("dataset/birds.level.arff"); //initialise the data retrieved from the current level for the agent
                    Instances thisLevelData = thisLevel.getDataSet();
                    if (thisLevelData.classIndex() == -1)
                        thisLevelData.setClassIndex(thisLevelData.numAttributes() - 1);

                    //build a new NaiveBayes classifier
                    NaiveBayes bayes = new NaiveBayes();
                    bayes.buildClassifier(data);

                    for (int i = 0; i < thisLevelData.numInstances(); i++) { //for all instances in the current level
                        double label = bayes.classifyInstance(thisLevelData.instance(i)); //generate an outcome/classify an instance in the current level
                        thisLevelData.instance(i).setClassValue(label); //store this outcome
                        System.out.println(thisLevelData.instance(i).stringValue(5)); //print it
                        if (!"?".equals(thisLevelData.instance(i).stringValue(5))) { //if there is a decisive choice as to whether a shot should be taken
                            data.add(thisLevelData.instance(i)); //store it
                            if ("yes".equals(thisLevelData.instance(i).stringValue(5))) { //if the classifier classifies a good shot, store it
                                takeShot.add(true);
                            } else { //if no, store this too
                                takeShot.add(false);
                            }
                        }
                    }

                    //write the updated learning set (including the new non-'?' entries) back to disk
                    BufferedWriter writer = new BufferedWriter(new FileWriter("dataset/birds.data.arff"));
                    writer.write(data.toString());
                    writer.flush();
                    writer.close();

                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("Exception caught - file handle error");
                }

                //TODO: roll a random number to decide whether to take this shot,
                //using the Bayesian classification gathered above.
                //If we roll true, continue with the random pig shot as usual;
                //if not, pick a new random pig and try again.
                //TODO: implement a failsafe so the agent does not get stuck repeatedly choosing pigs the Bayesian classification rejects.
                Random rng = new Random();
                if (!takeShot.isEmpty() && takeShot.get(rng.nextInt(takeShot.size())))
                    System.out.println("Taking this shot.");
                else {
                    System.out.println("Not taking this shot. Finding another random pig.");
                    return state;
                }

                // if the target is very close to before, randomly choose a
                // point near it
                if (prevTarget != null && distance(prevTarget, _tpt) < 10) {
                    double _angle = randomGenerator.nextDouble() * Math.PI * 2;
                    _tpt.x = _tpt.x + (int) (Math.cos(_angle) * 10);
                    _tpt.y = _tpt.y + (int) (Math.sin(_angle) * 10);
                    System.out.println("Randomly changing to " + _tpt);
                }

                prevTarget = new Point(_tpt.x, _tpt.y);

                // do a high shot when entering a level to find an accurate velocity
                if (firstShot && pts.size() > 1) {
                    releasePoint = pts.get(1);
                } else if (pts.size() == 1)
                    releasePoint = pts.get(0);
                else if (pts.size() == 2) {
                    // randomly choose between the trajectories, with a 1 in
                    // 6 chance of choosing the high one
                    if (randomGenerator.nextInt(6) == 0)
                        releasePoint = pts.get(1);
                    else
                        releasePoint = pts.get(0);
                } else if (pts.isEmpty()) {
                    System.out.println("No release point found for the target");
                    System.out.println("Try a shot with 45 degree");
                    releasePoint = tp.findReleasePoint(sling, Math.PI / 4);
                }

                // Get the reference point
                Point refPoint = tp.getReferencePoint(sling);

                //Calculate the tapping time according to the bird type
                if (releasePoint != null) {
                    double releaseAngle = tp.getReleaseAngle(sling, releasePoint);
                    System.out.println("Release Point: " + releasePoint);
                    System.out.println("Release Angle: " + Math.toDegrees(releaseAngle));
                    int tapInterval = 0;
                    switch (aRobot.getBirdTypeOnSling()) {

                    case RedBird:
                        tapInterval = 0;
                        break; // start of trajectory
                    case YellowBird:
                        tapInterval = 65 + randomGenerator.nextInt(25);
                        break; // 65-90% of the way
                    case WhiteBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlackBird:
                        tapInterval = 70 + randomGenerator.nextInt(20);
                        break; // 70-90% of the way
                    case BlueBird:
                        tapInterval = 65 + randomGenerator.nextInt(20);
                        break; // 65-85% of the way
                    default:
                        tapInterval = 60;
                    }

                    int tapTime = tp.getTapTime(sling, releasePoint, _tpt, tapInterval);
                    dx = (int) releasePoint.getX() - refPoint.x;
                    dy = (int) releasePoint.getY() - refPoint.y;
                    shot = new Shot(refPoint.x, refPoint.y, dx, dy, 0, tapTime);
                } else {
                    System.err.println("No Release Point Found");
                    return state;
                }
            }

            // check whether the slingshot has changed; a change in the slingshot indicates a change in scale.
            {
                ActionRobot.fullyZoomOut();
                screenshot = ActionRobot.doScreenShot();
                vision = new Vision(screenshot);
                Rectangle _sling = vision.findSlingshotMBR();
                if (_sling != null) {
                    double scale_diff = Math.pow((sling.width - _sling.width), 2)
                            + Math.pow((sling.height - _sling.height), 2);
                    if (scale_diff < 25) {
                        if (dx < 0) {
                            aRobot.cshoot(shot);
                            state = aRobot.getState();
                            if (state == GameState.PLAYING) {
                                screenshot = ActionRobot.doScreenShot();
                                vision = new Vision(screenshot);
                                List<Point> traj = vision.findTrajPoints();
                                tp.adjustTrajectory(traj, sling, releasePoint);
                                firstShot = false;
                            }
                        }
                    } else
                        System.out.println(
                                "Scale has changed, cannot execute the shot, will re-segment the image");
                } else
                    System.out.println(
                            "No sling detected, cannot execute the shot, will re-segment the image");
            }

        }

    }
    return state;
}

From source file:controller.BothClassificationsServlet.java

@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathBothClassifications = path + "/" + aux + "-bothClassifications.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);
        try {
            J48 j48 = new J48();

            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            j48.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            int correctsDecisionTree = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);

                if (correctValue == classification) {
                    correctsDecisionTree++;
                }
            }

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            PrintWriter writer = new PrintWriter(
                    new BufferedWriter(new FileWriter(pathBothClassifications, false)));

            writer.println("?rvore de Deciso\n\n");

            writer.println(j48.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            NaiveBayes naiveBayes = new NaiveBayes();

            naiveBayes.buildClassifier(instancesTraining);

            eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            int correctsNaiveBayes = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);

                if (correctValue == classification) {
                    correctsNaiveBayes++;
                }
            }

            writer.println("Naive Bayes\n\n");

            writer.println(naiveBayes.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            writer.close();

            response.sendRedirect("BothClassifications?action=view&correctsDecisionTree=" + correctsDecisionTree
                    + "&correctsNaiveBayes=" + correctsNaiveBayes + "&totalTest=" + instancesTest.size()
                    + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&fileName=" + aux
                    + "-bothClassifications.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }

        break;
    }
    default:
        response.sendError(404);
    }
}
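
A note on the example above: the manual loop that counts correctsDecisionTree recomputes information the Evaluation object already holds. As a hedged alternative sketch (reusing the j48, instancesTraining and instancesTest variables from the servlet above), the count and the accuracy can be read from Evaluation directly:

    Evaluation eval = new Evaluation(instancesTraining);
    eval.evaluateModel(j48, instancesTest);

    // Evaluation already tracks the quantities the manual loop accumulates
    int correctsDecisionTree = (int) eval.correct(); // correctly classified test instances
    double accuracy = eval.pctCorrect();             // the same figure as a percentage
    System.out.println(eval.toSummaryString());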

From source file:controller.NaiveBayesServlet.java

@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathNaivebayes = path + "/" + aux + "-naiveBayes.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;

        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);

        try {
            NaiveBayes naiveBayes = new NaiveBayes();

            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            naiveBayes.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);

                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }

            PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(pathNaivebayes, false)));

            writer.println(naiveBayes.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            writer.close();

            response.sendRedirect(
                    "NaiveBayes?action=view&corrects=" + corrects + "&totalTest=" + instancesTest.size()
                            + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&truePositive="
                            + truePositive + "&trueNegative=" + trueNegative + "&falsePositive=" + falsePositive
                            + "&falseNegative=" + falseNegative + "&fileName=" + aux + "-naiveBayes.txt");

        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=naiveBayes");
        }

        break;
    }
    default:
        response.sendError(404);
    }

}
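
Similarly, the true/false positive and negative counters tallied by hand in the loop above are available from the Evaluation object. A sketch, not part of the original servlet, assuming class index 1 is treated as the positive class:

    Evaluation eval = new Evaluation(instancesTraining);
    eval.evaluateModel(naiveBayes, instancesTest);

    // per-class counts are exposed directly; here class index 1 is assumed positive
    int truePositive = (int) eval.numTruePositives(1);
    int falseNegative = (int) eval.numFalseNegatives(1);
    int falsePositive = (int) eval.numFalsePositives(1);
    int trueNegative = (int) eval.numTrueNegatives(1);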

From source file:cs.man.ac.uk.predict.Predictor.java

License:Open Source License

public static void makePredictionsEnsembleNew(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();
    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        String line = "";

        // Read the file and display it line by line. 
        BufferedReader in = null;

        // Read in and store each positive prediction in the tree map.
        try {
            //open stream to file
            in = new BufferedReader(new FileReader(testPath));

            while ((line = in.readLine()) != null) {
                if (line.toLowerCase().contains("@data"))
                    break;
            }
        } catch (Exception e) {
        }

        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances avaialable.");

        startTime = System.nanoTime();

        int instanceNumber = 0;

        // label instances
        Instance current;

        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;
            line = in.readLine();

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                if (line != null) {
                    //System.out.println("Instance: "+instanceNumber+"\t"+line);
                    //System.in.read();
                }
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        in.close();

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;

        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}
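
The comment in the loop above flags the unanimous-agreement rule as a very primitive ensemble strategy. One common alternative is majority voting; a hedged sketch, reusing the learner1, learner2, learner3 and learner5 variables and the loop body from the method above (illustration only, not part of the original source):

    // count how many of the four classifiers predict the positive class (index 1)
    int votes = 0;
    if (learner1.classifyInstance(current) == 1) votes++;
    if (learner2.classifyInstance(current) == 1) votes++;
    if (learner3.classifyInstance(current) == 1) votes++;
    if (learner5.classifyInstance(current) == 1) votes++;

    // accept the prediction when a strict majority (3 of 4) agrees
    if (votes >= 3) {
        Writer.append(resultPath, instanceNumber + "\n");
    }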

From source file:cs.man.ac.uk.predict.Predictor.java

License:Open Source License

public static void makePredictionsEnsembleStream(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();
    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances avaialable.");

        startTime = System.nanoTime();

        int instanceNumber = 0;

        // label instances
        Instance current;

        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;

        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}

From source file:lector.Analizador.java

public static void clasificador() {

    BufferedReader reader1;
    BufferedReader reader2;
    try {
        reader1 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos.arff"));

        reader2 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos_inc.arff"));
        Instances train = new Instances(reader1);
        train.setClassIndex(train.numAttributes() - 1);
        System.out.println(train.classIndex() + " " + train.numAttributes());

        Instances test = new Instances(reader2);
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println(test.classIndex() + " " + test.numAttributes());

        NaiveBayes model = new NaiveBayes();
        model.buildClassifier(train);

        //classify
        Instances labeled = new Instances(test);

        for (int i = 0; i < test.numInstances(); i++) {
            double clsLabel = model.classifyInstance(test.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
        }

        // https://youtu.be/JY_x5zKTfyo?list=PLJbE6j2EG1pZnBhOg3_Rb63WLCprtyJag
        Evaluation eval_train = new Evaluation(test);
        eval_train.evaluateModel(model, test);

        reader1.close();
        reader2.close();

        //System.out.println(eval_train.toSummaryString("\nResults\n======\n", false));
        String[] options = new String[4];
        options[0] = "-t"; //name of training file
        options[1] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos.arff";
        options[2] = "-T";
        options[3] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos_inc.arff";
        System.out.println(Evaluation.evaluateModel(model, options));

        try ( // print classification results to file
                BufferedWriter writer = new BufferedWriter(
                        new FileWriter("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                                + "proyecto/compartida/DataSetAnalisisSentimientos_labeled.arff"))) {
            writer.write(labeled.toString());
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:textmining.TextMining.java

/**
 * Main
 *
 * @param args
 * @throws FileNotFoundException
 * @throws IOException
 * @throws Exception
 */
public static void main(String[] args) throws IOException, Exception {

    //        System.out.println("File selected : "+arff);
    /*OPTIONS*/
    String arff = "C:/wamp/www/AllocineHelper/arff/100_commentaires_1_cat.arff";
    // String arff = "/Users/Mathieu/NetBeansProjects/AllocineHelper/arff/20160104.arff";
    boolean showRegression = Boolean.valueOf("false");
    int nb_folds = 9;
    boolean setIDF = true;
    boolean setTF = true;
    String stemmer = "LovinsStemmer";
    String tokenizer = "Alphabetical";

    //        String arff = args[0];
    //        boolean showRegression = Boolean.valueOf(args[1]);
    //        int nb_folds = Integer.valueOf(args[2]);
    //        boolean setIDF = Boolean.valueOf(args[3]);
    //        boolean setTF = Boolean.valueOf(args[4]);
    //        String stemmer = args[5];
    //        String tokenizer = args[6];
    String stopWords = "C:/wamp/www/AllocineHelper/stopwords_fr.txt";
    ////        String stopWords = "/Users/Mathieu/NetBeansProjects/AllocineHelper/stopwords_fr.txt";        

    //        TestAlgo test1 = new TestAlgo(arff);
    //        test1.setStop_words_path_file(stopWords);
    //        try {
    //            test1.buildData(setIDF, setTF, 1, stemmer, tokenizer);//IDF=>true/false , TF=>true/false , Classe 1 => Commentaires
    //        } catch (Exception ex) {
    //            System.out.println("Fichier inconnu");
    //        }
    //        System.out.println("*************CLASSIFICATION********************");
    //        System.out.println("-------OPTIONS--------");
    //        System.out.println("IDF : " + String.valueOf(setIDF));
    //        System.out.println("TF : " + String.valueOf(setTF));
    //        System.out.println("Nb Folds for Cross Validation : " + nb_folds);
    //        System.out.println("Stemmer : " + stemmer);
    //        System.out.println("Tokenizer : " + tokenizer);
    //        System.out.println("-----------------------");
    //        
    //        System.out.println("*******************");
    //        System.out.println("DECISION TABLE");
    //        System.out.println("*******************");
    //        Classifier decisionTable = (Classifier) new DecisionTable();
    //        test1.setAlgo(decisionTable);
    //        String[] options = weka.core.Utils.splitOptions("-X 1 -S \"weka.attributeSelection.BestFirst -D 1 -N 5\"");
    //        System.out.println(test1.evaluate(options, nb_folds));
    //
    //        System.out.println("*******************");
    //        System.out.println("NAIVE BAYES");
    //        System.out.println("*******************");
    //        Classifier naiveBayes = (Classifier) new NaiveBayes();
    //        test1.setAlgo(naiveBayes);
    //        System.out.println(test1.evaluate(weka.core.Utils.splitOptions(""), nb_folds));
    //
    //        System.out.println("*******************");
    //        System.out.println("J 48");
    //        System.out.println("*******************");
    //        Classifier j48 = new J48();
    //        test1.setAlgo(j48);
    //        System.out.println(test1.evaluate(weka.core.Utils.splitOptions(""), nb_folds));
    //        
    //        System.out.println("*******************");
    //        System.out.println("ONE R");
    //        System.out.println("*******************");
    //        Classifier oneR = new OneR();
    //
    //        test1.setAlgo(oneR);
    //        System.out.println(test1.evaluate(weka.core.Utils.splitOptions(""), nb_folds));
    //
    ////        System.out.println("And the winner is : " + test1.getBestAlgo());
    //        if (showRegression) {
    //
    //            HashMap<String, Classifier> regressionClassifiers = new HashMap<String, Classifier>();
    //
    //            regressionClassifiers.put("LinearRegression", (Classifier) new LinearRegression());
    //            regressionClassifiers.put("SMO Reg", (Classifier) new SMOreg());
    //            regressionClassifiers.put("LeastMedSq", new LeastMedSq());
    //            System.out.println("***********************REGRESSION****************************");
    //
    //            for (Map.Entry<String, Classifier> entry : regressionClassifiers.entrySet()) {
    //                System.out.println("Algo : " + entry.getKey());
    //                Classifier algo = entry.getValue();
    //                test1.setAlgo(algo);
    //                test1.buildData(setIDF, setTF, 0, stemmer, tokenizer);
    //                options = weka.core.Utils.splitOptions("");
    //                System.out.println(test1.evaluateRegression(options, nb_folds));
    //                System.out.println(algo);
    //            }
    //
    //        }
    //Tests predictions
    //        TestAlgo prediction = new TestAlgo(arff);
    //        prediction.setStop_words_path_file(stopWords);
    //        Classifier algo =   (Classifier)new NaiveBayes();
    //        prediction.setAlgo(algo);
    //         prediction.buildData(setIDF, setTF, 1, stemmer,tokenizer);
    //        
    //          String[] options = weka.core.Utils.splitOptions("");
    //          prediction.evaluateRegression(options, nb_folds);
    //         weka.core.SerializationHelper.write("naive_bayes.model", algo);
    //System.exit(-1);
    Instances data;
    //            LinearRegression LR = (LinearRegression)weka.core.SerializationHelper.read("linear_reg.model");
    NaiveBayes NB = (NaiveBayes) weka.core.SerializationHelper.read("naive_bayes.model");
    try (BufferedReader reader = new BufferedReader(
            new FileReader("C:/wamp/www/AllocineHelper/arff/supplied_test.arff"))) {
        data = new Instances(reader);
    }
    data.setClassIndex(data.numAttributes() - 1);
    int nb_good = 0;
    for (int i = 0; i < data.numInstances(); i++) {
        double actualValue = data.instance(i).classValue();

        Instance newInst = data.instance(i);
        System.out.println(newInst);
        double predAlgo = NB.classifyInstance(newInst);
        if (actualValue == predAlgo)
            nb_good++;
        System.out.println(data.classAttribute().value((int) actualValue) + "  => "
                + data.classAttribute().value((int) predAlgo));
        System.out.println("***********************");
    }

    System.out.println("Pourcentage russite prdictions : "
            + ((double) nb_good / (double) data.numInstances() * 100) + " %");
    //         for (int i = 0; i < data.numInstances(); i++) {
    //            double actualValue = data.instance(i).classValue();
    //             System.out.println(actualValue);
    //            Instance newInst = data.instance(i);
    //            
    //            double predAlgo = LR.classifyInstance(newInst);
    //             System.out.println(actualValue + "  => "+predAlgo);
    //        }
    //        
    //        System.out.println("OPTIONS FILTER : STOPWORDS ONLY");
    //        // C_DecisionTable
    //        String rep_DecisionTable = C_DecisionTable(dataFiltered);
    //        System.out.println("DECISION TABLE");
    //        System.out.println(rep_DecisionTable);
    //    
    //        // ---------------
    //        System.out.println("\n--------------------\n");
    //        
    //        // C_NaiveBayes
    //        String rep_NaiveBayes = C_NaiveBayes(dataFiltered);
    //        System.out.println("NAIVE BAYES");
    //        System.out.println(rep_NaiveBayes);
    //        //                       
    //        
    //        System.out.println("OPTIONS FILTER : STOPWORDS + IDF/TF TRANSFORM");
    //        
    //        StringToWordVector wordVector2 = new StringToWordVector();
    //        wordVector2.setInputFormat(data);        
    //        wordVector2.setStopwords(new File(stopWords));
    //        wordVector2.setIDFTransform(true);
    //        wordVector2.setTFTransform(true);
    ////        wordVector2.setStemmer(new SnowballStemmer());
    //        Instances dataFilteredWithOptions = Filter.useFilter(data, wordVector2);
    //        dataFilteredWithOptions.setClassIndex(1);
    //        
    //         System.out.println("LINEAR REGRESSION ON POLARITY");
    //         dataFiltered.setClassIndex(0);
    //        String result_Regression = Regression_on_Polarity(dataFiltered);
    //        System.out.println(result_Regression);
    //        
    //        String rep_DecisionTable_with_Options = C_DecisionTable(dataFilteredWithOptions);        
    //        System.out.println("DECISION TABLE");
    //        System.out.println(rep_DecisionTable_with_Options);
    //        
    //        // C_NaiveBayes
    //        String rep_NaiveBayes_with_Options = C_NaiveBayes(dataFilteredWithOptions);
    //        System.out.println("NAIVE BAYES");
    //        System.out.println(rep_NaiveBayes_with_Options);
    //        
}