Example usage for weka.classifiers.bayes NaiveBayes NaiveBayes

List of usage examples for weka.classifiers.bayes NaiveBayes NaiveBayes

Introduction

On this page you can find example usage of weka.classifiers.bayes NaiveBayes NaiveBayes.

Prototype

NaiveBayes

Source Link

Usage

From source file:general.Util.java

/**
 * apply all filter to build the classifier
 * @param train data training/*  w w w . j  av a2  s  .co  m*/
 * @param Classifier model
 */
public static void buildModel(String Classifier, Instances train) {
    try {
        // Membangun model dan melakukan test
        switch (Classifier.toLowerCase()) {
        case "naivebayes":
            classifier = new NaiveBayes();
            break;
        case "j48-prune":
            classifier = new MyJ48(true, confidenceFactor);
            break;
        case "j48-unprune":
            classifier = new MyJ48(false, confidenceFactor);
            break;
        case "id3":
            classifier = new MyID3();
        default:
            break;
        }
        classifier.buildClassifier(train);
    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:general.Util.java

/**
 * show learning statistic result by percentage split
 * @param data training data/*from ww  w.j a  v a2  s.  c o  m*/
 * @param trainPercent percentage of the training data
 * @param Classifier model
 */
public static void PercentageSplit(Instances data, double trainPercent, String Classifier) {
    try {
        int trainSize = (int) Math.round(data.numInstances() * trainPercent / 100);
        int testSize = data.numInstances() - trainSize;

        data.randomize(new Random(1));

        Instances train = new Instances(data, 0, trainSize);
        Instances test = new Instances(data, trainSize, testSize);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        switch (Classifier.toLowerCase()) {
        case "naivebayes":
            classifier = new NaiveBayes();
            break;
        case "j48-prune":
            classifier = new MyJ48(true, 0.25f);
            break;
        case "j48-unprune":
            classifier = new MyJ48(false, 0f);
            break;
        case "id3":
            classifier = new MyID3();
            break;
        default:
            break;
        }
        classifier.buildClassifier(train);

        for (int i = 0; i < test.numInstances(); i++) {
            try {
                double pred = classifier.classifyInstance(test.instance(i));
                System.out.print("ID: " + test.instance(i));
                System.out
                        .print(", actual: " + test.classAttribute().value((int) test.instance(i).classValue()));
                System.out.println(", predicted: " + test.classAttribute().value((int) pred));
            } catch (Exception ex) {
                Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        // Start evaluate model using instances test and print results
        try {
            Evaluation eval = new Evaluation(train);
            eval.evaluateModel(classifier, test);
            System.out.println(eval.toSummaryString("\nResults\n\n", false));
        } catch (Exception e) {
            e.printStackTrace();
        }

    } catch (Exception ex) {
        Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:GroupProject.DMChartUI.java

/**
 * Converts the working CSV to ARFF, loads it, and builds {@code NBmodel}
 * (a Weka NaiveBayes classifier) using the attribute selected in
 * {@code dataSelector} as the class target. Aborts with a message when the
 * selected target is one of the numeric columns.
 */
private void buildNBClassifier() {
    CSVtoArff converter = new CSVtoArff();
    try {
        converter.convert("studentTemp.csv", "studentTemp.arff");
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        // BUG FIX: without the ARFF file there is nothing to load below.
        return;
    }

    Instances students;
    // try-with-resources: the FileReader was previously never closed.
    try (BufferedReader in = new BufferedReader(new FileReader("studentTemp.arff"))) {
        students = new Instances(in);
    } catch (IOException ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
        // BUG FIX: previously fell through and NPE'd on the null Instances.
        return;
    }
    // NOTE(review): a second identical Instances copy (students2) was loaded
    // and never used by this method; it has been removed.

    int target = dataSelector.getSelectedIndex() + 1;
    System.out.printf("this is the target: %d\n", target);
    // These column indices are not nominal, so NB classification is refused.
    if (target == 14 || target == 15 || target == 18 || target == 19) {
        System.out.println("Please select a nominal category");
        equationDisplayArea.setText("Please select a nominal category");
        return;
    }
    //set target 
    students.setClassIndex(target);

    System.out.println("im doing NB");
    NBmodel = (Classifier) new NaiveBayes();

    //build the classifier 
    try {
        System.err.println("going to build model ");
        NBmodel.buildClassifier(students);
        System.out.println("I built the model");
    } catch (Exception ex) {
        Logger.getLogger(DMChartUI.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:id3j48.WekaAccess.java

/**
 * Interactive driver: loads a data set (ARFF or CSV), builds Naive Bayes,
 * ID3 and J48 classifiers, evaluates each with a training-set test, 10-fold
 * cross validation and a 66% split, then saves, reloads and applies each
 * model to a "classify.<filename>" data set.
 */
public static void main(String[] args) {
    initializePath();
    try {
        cin = new Scanner(System.in);
        Instances data = null, tempdata;
        Classifier NBclassifier, ID3classifier, j48classifier;
        Evaluation NBeval, ID3eval, j48eval;
        System.out.println("Enter filename below");
        String filename = cin.nextLine();
        System.out.println("Loading " + filename + "...");
        String extension = "";
        String name = "";
        int i = filename.lastIndexOf('.');
        if (i > 0) {
            extension = filename.substring(i + 1);
            name = filename.substring(0, i);
        }
        // Load according to the file extension.
        if (extension.equalsIgnoreCase("arff")) {
            try {
                data = readArff(filename);
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else if (extension.equalsIgnoreCase("csv")) {
            try {
                data = readCsv(filename);
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            System.out.println("Invalid extension");
            System.exit(0);
        }
        if (data == null) {
            // Loading failed (already logged); previously this fell through
            // and threw a NullPointerException on data.toString().
            return;
        }
        System.out.println(data.toString());
        System.out.println("Resample data? (y for yes) ");
        String resample = cin.nextLine();
        if (resample.equalsIgnoreCase("y")) {
            try {
                tempdata = resampleData(data);
                System.out.println("-- Resampled data --");
                System.out.println(tempdata.toString());
            } catch (Exception ex) {
                Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        tempdata = removeAttribute(data, data.numAttributes());
        System.out.println("-- Remove Attribute --");
        System.out.println(tempdata.toString());

        // Build one classifier of each kind on the full data set.
        NBclassifier = buildClassifier(data, new NaiveBayes());
        System.out.println("-- Naive Bayes Classifier --");
        System.out.println(NBclassifier.toString());
        ID3classifier = buildClassifier(data, new Id3());
        System.out.println("-- ID3 Classifier --");
        System.out.println(ID3classifier.toString());
        j48classifier = buildClassifier(data, new J48());
        System.out.println("-- J48 Classifier --");
        System.out.println(j48classifier.toString());

        Instances test = null;
        if (extension.equalsIgnoreCase("arff"))
            test = readArff("test." + filename);
        else if (extension.equalsIgnoreCase("csv"))
            test = readCsv("test." + filename);

        // Training-set evaluation.
        // BUG FIX: the ID3 and J48 summaries below previously printed NBeval.
        NBeval = testModel(NBclassifier, data, test);
        System.out.println(
                NBeval.toSummaryString("-- Training set evaluation results with Naive Bayes --\n", false));
        ID3eval = testModel(ID3classifier, data, test);
        System.out.println(ID3eval.toSummaryString("-- Training set evaluation results with ID3 --\n", false));
        j48eval = testModel(j48classifier, data, test);
        System.out.println(j48eval.toSummaryString("-- Training set evaluation results with J48 --\n", false));

        // 10-fold cross validation.
        NBeval = tenFoldCrossValidation(data, NBclassifier);
        System.out.println(
                NBeval.toSummaryString("-- 10-fold cross validation results with Naive Bayes --\n", false));
        ID3eval = tenFoldCrossValidation(data, ID3classifier);
        System.out.println(ID3eval.toSummaryString("-- 10-fold cross validation results with ID3 --\n", false));
        j48eval = tenFoldCrossValidation(data, j48classifier);
        System.out.println(j48eval.toSummaryString("-- 10-fold cross validation results with J48 --\n", false));

        // 66% percentage-split validation.
        NBeval = percentageSplit(data, NBclassifier, 66);
        System.out.println(
                NBeval.toSummaryString("-- 66% split validation results with Naive Bayes --\n", false));
        ID3eval = percentageSplit(data, ID3classifier, 66);
        System.out.println(ID3eval.toSummaryString("-- 66% split validation results with ID3 --\n", false));
        j48eval = percentageSplit(data, j48classifier, 66);
        System.out.println(j48eval.toSummaryString("-- 66% split validation results with J48 --\n", false));

        // Save each model once. BUG FIX: the original saved every model twice
        // and labelled the ID3/J48 saves with the "Naive Bayes" banner.
        System.out.println("-- Save Naive Bayes Model --");
        saveModel("nb." + name + ".model", NBclassifier);
        System.out.println("-- Save ID3 Model --");
        saveModel("id3." + name + ".model", ID3classifier);
        System.out.println("-- Save J48 Model --");
        saveModel("j48." + name + ".model", j48classifier);

        System.out.println("-- Load Naive Bayes Model --");
        System.out.println(loadModel("nb." + name + ".model").toString());
        System.out.println("-- Load ID3 Model --");
        System.out.println(loadModel("id3." + name + ".model").toString());
        System.out.println("-- Load J48 Model --");
        System.out.println(loadModel("j48." + name + ".model").toString());

        System.out.println("-- Classify Naive Bayes Model --");
        classify("classify." + filename, NBclassifier);
        System.out.println("-- Classify ID3 Model --");
        classify("classify." + filename, ID3classifier);
        System.out.println("-- Classify J48 Model --");
        classify("classify." + filename, j48classifier);
    } catch (Exception ex) {
        Logger.getLogger(WekaAccess.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:jjj.asap.sas.models1.job.BuildBasicMetaCostModels.java

License:Open Source License

@Override
protected void run() throws Exception {

    // validate args
    if (!Bucket.isBucket("datasets", inputBucket)) {
        throw new FileNotFoundException(inputBucket);
    }//from w  ww  .  j a  v  a2 s .c o m
    if (!Bucket.isBucket("models", outputBucket)) {
        throw new FileNotFoundException(outputBucket);
    }

    // create prototype classifiers
    Map<String, Classifier> prototypes = new HashMap<String, Classifier>();

    // Bagged REPTrees

    Bagging baggedTrees = new Bagging();
    baggedTrees.setNumExecutionSlots(1);
    baggedTrees.setNumIterations(100);
    baggedTrees.setClassifier(new REPTree());
    baggedTrees.setCalcOutOfBag(false);

    prototypes.put("Bagged-REPTrees", baggedTrees);

    // Bagged SMO

    Bagging baggedSVM = new Bagging();
    baggedSVM.setNumExecutionSlots(1);
    baggedSVM.setNumIterations(100);
    baggedSVM.setClassifier(new SMO());
    baggedSVM.setCalcOutOfBag(false);

    prototypes.put("Bagged-SMO", baggedSVM);

    // Meta Cost model for Naive Bayes

    Bagging bagging = new Bagging();
    bagging.setNumExecutionSlots(1);
    bagging.setNumIterations(100);
    bagging.setClassifier(new NaiveBayes());

    CostSensitiveClassifier meta = new CostSensitiveClassifier();
    meta.setClassifier(bagging);
    meta.setMinimizeExpectedCost(true);

    prototypes.put("CostSensitive-MinimizeExpectedCost-NaiveBayes", bagging);

    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();

    // get the input from the bucket
    List<String> names = Bucket.getBucketItems("datasets", this.inputBucket);
    for (String dsn : names) {

        // for each prototype classifier
        for (Map.Entry<String, Classifier> prototype : prototypes.entrySet()) {

            // 
            // speical logic for meta cost
            //

            Classifier alg = AbstractClassifier.makeCopy(prototype.getValue());

            if (alg instanceof CostSensitiveClassifier) {

                int essaySet = Contest.getEssaySet(dsn);

                String matrix = Contest.getRubrics(essaySet).size() == 3 ? "cost3.txt" : "cost4.txt";

                ((CostSensitiveClassifier) alg)
                        .setCostMatrix(new CostMatrix(new FileReader("/asap/sas/trunk/" + matrix)));

            }

            // use InfoGain to discard useless attributes

            AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();

            classifier.setEvaluator(new InfoGainAttributeEval());

            Ranker ranker = new Ranker();
            ranker.setThreshold(0.0001);
            classifier.setSearch(ranker);

            classifier.setClassifier(alg);

            queue.add(Job.submit(
                    new ModelBuilder(dsn, "InfoGain-" + prototype.getKey(), classifier, this.outputBucket)));
        }
    }

    // wait on complete
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();

}

From source file:jjj.asap.sas.models1.job.BuildBasicModels.java

License:Open Source License

@Override
protected void run() throws Exception {

    // validate args
    if (!Bucket.isBucket("datasets", inputBucket)) {
        throw new FileNotFoundException(inputBucket);
    }//from  w ww.j a v  a 2  s. c  o m
    if (!Bucket.isBucket("models", outputBucket)) {
        throw new FileNotFoundException(outputBucket);
    }

    // create prototype classifiers
    Map<String, Classifier> prototypes = new HashMap<String, Classifier>();

    // bayes

    BayesNet net = new BayesNet();
    net.setEstimator(new BMAEstimator());
    prototypes.put("BayesNet", net);

    prototypes.put("NaiveBayes", new NaiveBayes());

    // functions

    prototypes.put("RBFNetwork", new RBFNetwork());
    prototypes.put("SMO", new SMO());

    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();

    // get the input from the bucket
    List<String> names = Bucket.getBucketItems("datasets", this.inputBucket);
    for (String dsn : names) {

        // for each prototype classifier
        for (Map.Entry<String, Classifier> prototype : prototypes.entrySet()) {

            // use InfoGain to discard useless attributes

            AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();

            classifier.setEvaluator(new InfoGainAttributeEval());

            Ranker ranker = new Ranker();
            ranker.setThreshold(0.0001);
            classifier.setSearch(ranker);

            classifier.setClassifier(AbstractClassifier.makeCopy(prototype.getValue()));

            queue.add(Job.submit(
                    new ModelBuilder(dsn, "InfoGain-" + prototype.getKey(), classifier, this.outputBucket)));
        }
    }

    // wait on complete
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();

}

From source file:jjj.asap.sas.models2.job.BuildUnicornEnsemble.java

License:Open Source License

@Override
protected void run() throws Exception {

    // get all the items from the buckets
    Set<String> items = new HashSet<String>();
    for (String inputBucket : inputBuckets) {
        items.addAll(Bucket.getBucketItems("models", inputBucket));
    }// ww w.ja v a  2 s . c om

    // split the buckets up by essay set
    List<String>[] buckets = new List[10];
    for (int i = 0; i < 10; i++) {
        buckets[i] = new ArrayList<String>();
    }

    for (String item : items) {
        int index = Contest.getEssaySet(item) - 1;
        buckets[index].add(item);
    }

    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();

    // Stacking over labels

    for (int k = 0; k < 10; k++) {

        int essaySet = k + 1;

        queue.add(Job.submit(new EnsembleBuilder(essaySet, this.outputBucket, this.outputName + "-naive-bayes",
                new GACommittee(10, 30,
                        new CrossValidatedEnsemble(new StackedClassifier(true, new NaiveBayes()), 5), 60),
                buckets[k], new LoaderL1())));
    }

    // wait on complete
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
            e.printStackTrace(System.err);
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();

}

From source file:kfst.classifier.WekaClassifier.java

License:Open Source License

/**
 * This method builds and evaluates the naiveBayes(NB) classifier.
 * The naiveBayes are used as the NB classifier implemented in the Weka
 * software.
 *
 * @param pathTrainData the path of the train set (ARFF format)
 * @param pathTestData the path of the test set (ARFF format)
 *
 * @return the classification accuracy as a percentage (0 on failure)
 */
public static double naiveBayes(String pathTrainData, String pathTestData) {
    double resultValue = 0;
    try {
        // try-with-resources: the readers were previously leaked whenever
        // the Instances constructor threw before the explicit close().
        Instances dataTrain;
        try (BufferedReader readerTrain = new BufferedReader(new FileReader(pathTrainData))) {
            dataTrain = new Instances(readerTrain);
        }
        // The class attribute is assumed to be the last column.
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        Instances dataTest;
        try (BufferedReader readerTest = new BufferedReader(new FileReader(pathTestData))) {
            dataTest = new Instances(readerTest);
        }
        dataTest.setClassIndex(dataTest.numAttributes() - 1);

        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(dataTrain);
        Evaluation eval = new Evaluation(dataTest);
        eval.evaluateModel(nb, dataTest);
        // accuracy = 100% minus the error rate (as a percentage)
        resultValue = 100 - (eval.errorRate() * 100);
    } catch (Exception ex) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return resultValue;
}

From source file:lector.Analizador.java

/**
 * Trains a NaiveBayes sentiment classifier on the main ARFF data set,
 * labels the "_inc" (incomplete) data set with it, prints Weka's evaluation
 * output, and writes the labeled instances to a new ARFF file.
 */
public static void clasificador() {

    try {
        Instances train;
        Instances test;
        // try-with-resources: the readers were previously leaked whenever an
        // exception fired before the explicit close() calls.
        try (BufferedReader reader1 = new BufferedReader(
                new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                        + "proyecto/compartida/DataSetAnalisisSentimientos.arff"));
                BufferedReader reader2 = new BufferedReader(
                        new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                                + "proyecto/compartida/DataSetAnalisisSentimientos_inc.arff"))) {
            train = new Instances(reader1);
            test = new Instances(reader2);
        }
        train.setClassIndex(train.numAttributes() - 1);
        System.out.println(train.classIndex() + " " + train.numAttributes());

        // BUG FIX: the class index was previously derived from `train`
        // (test.setClassIndex(train.numAttributes() - 1)).
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println(test.classIndex() + " " + test.numAttributes());

        NaiveBayes model = new NaiveBayes();
        model.buildClassifier(train);

        //classify: copy the test set and fill in the predicted labels
        Instances labeled = new Instances(test);

        for (int i = 0; i < test.numInstances(); i++) {
            double clsLabel = model.classifyInstance(test.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
        }

        // https://youtu.be/JY_x5zKTfyo?list=PLJbE6j2EG1pZnBhOg3_Rb63WLCprtyJag
        Evaluation eval_train = new Evaluation(test);
        eval_train.evaluateModel(model, test);

        //System.out.println(eval_train.toSummaryString("\nResults\n======\n", false));
        String[] options = new String[4];
        options[0] = "-t"; //name of training file
        options[1] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos.arff";
        options[2] = "-T";
        options[3] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos_inc.arff";
        System.out.println(Evaluation.evaluateModel(model, options));

        try ( // print classification results to file
                BufferedWriter writer = new BufferedWriter(
                        new FileWriter("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                                + "proyecto/compartida/DataSetAnalisisSentimientos_labeled.arff"))) {
            writer.write(labeled.toString());
        }

    } catch (Exception e) {
        // BUG FIX: the original catch block was empty, silently swallowing
        // every failure (missing files, bad ARFF, model errors).
        e.printStackTrace();
    }
}

From source file:machinelearning_cw.MachineLearning_CW.java

/**
 * Runs the coursework experiments: loads each train/test data-set pair,
 * sanity-checks BasicKNN against the hand exercise, then runs the accuracy
 * and timing experiments.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here

    /* Initializing test datasets */
    ArrayList<Instances> trainData = new ArrayList<Instances>();
    ArrayList<Instances> testData = new ArrayList<Instances>();

    // The Pitcher files use a different naming pattern, and are also needed
    // individually for the hand-exercise check and KNN training below.
    Instances train = WekaLoader.loadData("PitcherTrain.arff");
    Instances test = WekaLoader.loadData("PitcherTest.arff");
    trainData.add(train);
    testData.add(test);

    // All remaining data sets follow the "<name>-train.arff"/"<name>-test.arff"
    // pattern (haberman is 3-dimensional; thyroid onwards are >3-dimensional),
    // so load them in a loop instead of ten copy-pasted stanzas.
    String[] datasetNames = { "banana", "clouds", "concentric", "haberman", "thyroid", "heart", "liver",
            "pendigitis", "phoneme", "yeast" };
    for (String name : datasetNames) {
        trainData.add(WekaLoader.loadData(name + "-train.arff"));
        testData.add(WekaLoader.loadData(name + "-test.arff"));
    }

    /* Test to see that BasicKNN provides the same results obtained from
     * the hand exercise.
     */
    System.out.println(
            "Test to see that BasicKNN provides the same" + " results obtained from the hand exercise:");
    System.out.println("(Ties are settled randomly)");
    BasicKNN basicKNN = new BasicKNN();
    basicKNN.buildClassifier(train);
    for (int i = 0; i < test.size(); i++) {
        Instance inst = test.get(i);
        System.out.println(i + 1 + ": " + basicKNN.classifyInstance(inst));
    }

    /* Initializing alternative classifiers */
    IBk wekaKNN = new IBk();
    NaiveBayes naiveBayes = new NaiveBayes();
    J48 decisionTree = new J48();
    SMO svm = new SMO();

    /* Tests for experiments 1,2 & 3 */
    KNN myKNN = new KNN();
    myKNN.setUseStandardisedAttributes(true);
    myKNN.setAutoDetermineK(false);
    myKNN.setUseWeightedVoting(true);
    myKNN.buildClassifier(train);
    //myKNN.setUseAcceleratedNNSearch(true);
    System.out.println("\nAccuracy Experiments:");
    MachineLearning_CW.performClassifierAccuracyTests(myKNN, trainData, testData, 1);

    /* Timing tests */
    System.out.println("\n\nTiming Experiments:");
    MachineLearning_CW.performClassifierTimingTests(wekaKNN, trainData, testData);
}