Example usage for weka.classifiers Evaluation crossValidateModel

List of usage examples for weka.classifiers Evaluation crossValidateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation crossValidateModel.

Prototype

public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random)
        throws Exception 

Source Link

Document

Performs a (stratified if class is nominal) cross-validation for a classifier on a set of instances.

Usage

From source file:org.wkwk.classifier.Access.java

/**
 * Builds a MyC45 decision tree on the weather.nominal dataset and prints
 * stratified 10-fold cross-validation results.
 *
 * NOTE(review): the original javadoc documented args[0..3] (train/test file,
 * remove attribute, resample bias), but the code reads a hard-coded dataset
 * path and never uses args; the dead filter code has been removed and the
 * documentation now matches actual behavior.
 *
 * @param args unused — the dataset path is currently hard-coded
 * @throws java.lang.Exception if the dataset cannot be read or the classifier fails
 */
public static void main(String[] args) throws Exception {

    // Read dataset (ARFF or CSV) via Weka's DataSource.
    DataSource source = new DataSource("../data/weather.nominal.arff");
    Instances data = source.getDataSet();

    // Default the class attribute to the last column when the format
    // (e.g. ARFF/CSV) does not carry that information.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Build the classifier on the full dataset.
    MyC45 tree = new MyC45();
    tree.buildClassifier(data);

    // Evaluate with stratified 10-fold cross-validation; the fixed seed
    // makes the fold assignment (and thus the results) reproducible.
    Evaluation evalCV = new Evaluation(data);
    evalCV.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(evalCV.toSummaryString("\nResults\n======\n", false));
}

From source file:PEBL.TwoStep.java

/**
 * Trains a NaiveBayes model (with kernel estimation, {@code -K}) on a mixed
 * labeled CSV, reports 10-fold cross-validation results, then labels an
 * unlabeled CSV with the model's most probable class and saves it as ARFF.
 *
 * @param args unused — input/output paths are hard-coded
 * @throws Exception if loading, training, evaluation, or saving fails
 */
public static void main(String[] args) throws Exception {

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "Z:\\\\shared from vm\\\\fourthset\\\\mixed.csv");

    Instances data = source.getDataSet();

    // Default the class attribute to the last column when the data format
    // (unlike e.g. XRFF) does not provide this information.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // CSV columns load as numeric; convert them to nominal for NaiveBayes.
    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build a NaiveBayes classifier with kernel density estimation (-K).
    String[] options = new String[1];
    options[0] = "-K";
    NaiveBayes c = new NaiveBayes();
    c.setOptions(options);
    c.buildClassifier(data);

    // Evaluate with stratified 10-fold cross-validation (fixed seed => reproducible).
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(c, data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
    System.out.println("--- model learned on mixed set ---");

    // Load unlabeled data and apply the same class index / filter treatment.
    ConverterUtils.DataSource s = new ConverterUtils.DataSource(
            "Z:\\\\shared from vm\\\\fourthset\\\\unlabelled.csv");
    Instances unlabeled = s.getDataSet();
    unlabeled.setClassIndex(unlabeled.numAttributes() - 1);

    nmf = new NumericToNominal();
    nmf.setInputFormat(unlabeled);
    unlabeled = Filter.useFilter(unlabeled, nmf);

    // Label each instance with its most probable class.
    // Compute the distribution once per instance (was computed twice).
    for (int i = 0; i < unlabeled.numInstances(); i++) {
        double[] dist = c.distributionForInstance(unlabeled.instance(i));
        double classZero = dist[0];
        double classOne = dist[1];
        System.out.print(
                "classifying: " + unlabeled.instance(i) + " : " + classZero + " - " + classOne + " == class: ");
        if (classZero > classOne) {
            System.out.print("0");
            unlabeled.instance(i).setClassValue("0");
        } else {
            System.out.print("1");
            unlabeled.instance(i).setClassValue("1");
        }
        System.out.println("");
    }

    // Persist the newly labeled instances as ARFF.
    ArffSaver saver = new ArffSaver();
    saver.setInstances(unlabeled);
    saver.setFile(new File("Z:\\shared from vm\\thirdset\\relabelled.arff"));
    saver.writeBatch();

}

From source file:PointAnalyser.Main.java

/**
 * Trains an unpruned J48 (C4.5) decision tree on the shared {@code data}
 * instances and prints 10-fold cross-validation results.
 *
 * FIX(review): the original packed "-C 0.25 -M 2 -U" into a single array
 * element; Weka's setOptions expects one token per element, so that call
 * failed at runtime. The options are now tokenized with Utils.splitOptions.
 * The -C (pruning confidence) flag was dropped because J48 rejects it when
 * -U (unpruned) is set — the original comment says "unpruned tree".
 *
 * @throws Exception if filtering, training, or evaluation fails
 */
public static void trainC45Classifier() throws Exception {

    // Default the class attribute to the last column when the data format
    // does not provide this information.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Convert numeric attributes to nominal before building the tree.
    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Build an unpruned C4.5 tree: -U = unpruned, -M 2 = min 2 instances per leaf.
    String[] options = weka.core.Utils.splitOptions("-U -M 2");
    tree = new J48();
    tree.setOptions(options);
    tree.buildClassifier(data);

    // Evaluate with stratified 10-fold cross-validation (fixed seed => reproducible).
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 10, new Random(1));

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());

}

From source file:PointAnalyser.Main.java

/**
 * Trains a k-nearest-neighbour classifier (IBk, k = 7, KD-tree search) on
 * the shared {@code data} instances and prints 10-fold cross-validation
 * results. (Original comment wrongly called this a "c4.5 classifier".)
 *
 * @throws Exception if filtering, training, or evaluation fails
 */
public static void trainNNClassifier() throws Exception {

    // Default the class attribute to the last column when the data format
    // does not provide this information.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Convert numeric attributes to nominal before training.
    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    // Configure k-NN: k = 7 neighbours, KD-tree neighbour search,
    // no training-window limit (0 = keep all instances).
    nn = new IBk();
    nn.setKNN(7);
    nn.setNearestNeighbourSearchAlgorithm(new weka.core.neighboursearch.KDTree(data));
    nn.setWindowSize(0);

    nn.buildClassifier(data);

    // Evaluate with stratified 10-fold cross-validation (fixed seed => reproducible).
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(nn, data, 10, new Random(1));

    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());

}

From source file:prismcrossvalidation.Classifier.java

/**
 * Cross-validates a PRISM rule learner (wrapped with a discretization
 * filter, since PRISM requires nominal attributes) on the dataset selected
 * in the UI, logs the summary, and returns it.
 *
 * @return the evaluation summary text (titled "Wyniki:" — Polish for "Results")
 * @throws Exception if loading the data or running the evaluation fails
 */
static public String crossValidationPRISM_DISCRET() throws FileNotFoundException, IOException, Exception {
    // Load the dataset chosen in the UI; normalize Windows path separators.
    String source = MainWindow.pathChooseField.getText();
    Instances data = DataLoad.loadData(source.replace("\\", "/"));

    data.setClassIndex(data.numAttributes() - 1);

    // PRISM needs nominal inputs, so chain a Discretize filter in front of it.
    Discretize filter = new Discretize();
    Prism rules = new Prism();

    FilteredClassifier fClassifier = new FilteredClassifier();
    fClassifier.setFilter(filter); // set the current filter
    fClassifier.setClassifier(rules); // set the current classifier

    // Cross-validate with `fold` folds (fixed seed => reproducible).
    Evaluation eval = new MyEvaluation(data);
    eval.crossValidateModel(fClassifier, data, fold, new Random(1));

    System.out.println("amount of folds: " + fold);
    MainWindow.logArea.append("Amount of folds: " + fold);

    // Build the summary once and reuse it (was recomputed three times).
    String summary = eval.toSummaryString("Wyniki:", false);
    System.out.println(summary);
    MainWindow.logArea.append(summary);

    return summary;
}

From source file:qa.experiment.ProcessFeatureVector.java

/**
 * Estimates the accuracy of an SMO (support vector) classifier on the given
 * data via stratified 10-fold cross-validation and prints the result.
 *
 * @param trainingData labeled instances to cross-validate on
 * @throws Exception if the evaluation fails
 */
public void evaluate(Instances trainingData) throws Exception {
    final Classifier smo = new SMO();
    final Evaluation evaluation = new Evaluation(trainingData);
    evaluation.crossValidateModel(smo, trainingData, 10, new Random(1));
    System.out.println("Estimated Accuracy: " + Double.toString(evaluation.pctCorrect()));
}

From source file:sentinets.TrainModel.java

License:Open Source License

/**
 * Cross-validates the given classifier on the instance field {@code ins}
 * (10 folds, fixed seed), prints the evaluation reports, and persists the
 * model under the given name. Any failure is reported via its stack trace.
 *
 * @param c    classifier to evaluate and save
 * @param name label used in the console banner and for saving the model
 */
public void trainModel(Classifier c, String name) {
    try {
        final Evaluation evaluation = new Evaluation(ins);
        evaluation.crossValidateModel(c, ins, 10, new Random(1));

        System.out.println("****Results of " + name + "****");
        System.out.println(evaluation.toSummaryString());
        System.out.println(evaluation.toClassDetailsString());
        System.out.println(evaluation.toCumulativeMarginDistributionString());
        System.out.println(evaluation.toMatrixString());
        System.out.println("*********************");

        TrainModel.saveModel(c, name);
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:SpamDetector.SpamDetector.java

/**
 * Builds a SimpleCart decision tree on spam/not-spam CSV data (converted to
 * ARFF by FeatureExtraction), then reports training-set accuracy and 10-fold
 * cross-validation results.
 *
 * FIX(review): the original reused one Evaluation instance for both the
 * training-set evaluation and the cross-validation; Evaluation accumulates
 * statistics, so the CV summary was contaminated by the training-set
 * predictions. A fresh Evaluation is now used for the CV, and the Random is
 * seeded for reproducibility (matching the other examples on this page).
 *
 * @param args the command line arguments (unused)
 * @throws IOException if a CSV/ARFF file cannot be read
 * @throws Exception if training or evaluation fails
 */
public static void main(String[] args) throws IOException, Exception {
    ArrayList<ArrayList<String>> notSpam = processCSV("notspam.csv");
    ArrayList<ArrayList<String>> spam = processCSV("spam.csv");

    // Generate attributes & data into data.arff.
    FeatureExtraction fe = new FeatureExtraction();
    fe.generateArff(spam, notSpam);

    // Load the generated ARFF and set the class to the last attribute.
    BufferedReader br = new BufferedReader(new FileReader("data.arff"));
    ArffReader arff = new ArffReader(br);
    Instances data = arff.getData();
    data.setClassIndex(data.numAttributes() - 1);

    // Train a CART tree and print its structure.
    SimpleCart tree = new SimpleCart();
    tree.buildClassifier(data);
    System.out.println(tree.toString());

    // Training-set evaluation (optimistic; for reference only).
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString("\n\n\n\nResults\n======\n", false));

    // Separate, uncontaminated 10-fold cross-validation with a fixed seed.
    Evaluation evalCV = new Evaluation(data);
    evalCV.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(evalCV.toSummaryString("\n\n\n\n10-Fold\n======\n", false));

}

From source file:textmining.TextMining.java

/**
 * Decision Table/*from w  ww .  j  a  va  2s . c  om*/
 *
 * @param instances
 * @return string
 * @throws Exception
 */
private static String C_DecisionTable(Instances instances) throws Exception {
    Classifier decisionTable = (Classifier) new DecisionTable();
    String[] options = weka.core.Utils.splitOptions("-X 1 -S \"weka.attributeSelection.BestFirst -D 1 -N 5\"");
    decisionTable.setOptions(options);
    decisionTable.buildClassifier(instances);
    Evaluation eval = new Evaluation(instances);
    //        eval.evaluateModel(decisionTable, instances);
    eval.crossValidateModel(decisionTable, instances, 5, new Random(1));
    String resume = eval.toSummaryString();

    return eval.toMatrixString(resume);
}

From source file:textmining.TextMining.java

/**
 * Configures the given classifier with the supplied options, trains it, and
 * returns its 5-fold cross-validation confusion matrix titled with the
 * evaluation summary (same report shape as C_DecisionTable).
 *
 * FIX(review): the original also ran evaluateModel on the training set with
 * the SAME Evaluation instance; Evaluation accumulates statistics, so the
 * training-set predictions inflated the cross-validation results. The
 * redundant training-set evaluation has been removed.
 *
 * @param classifier classifier to configure and evaluate
 * @param instances  labeled data to evaluate on
 * @param options    Weka option tokens to apply to the classifier
 * @return confusion matrix string titled with the evaluation summary
 * @throws Exception if option parsing, training, or evaluation fails
 */
private static String setOptions(Classifier classifier, Instances instances, String[] options)
        throws Exception {
    classifier.setOptions(options);
    classifier.buildClassifier(instances);

    // Stratified 5-fold cross-validation with a fixed seed.
    Evaluation eval = new Evaluation(instances);
    eval.crossValidateModel(classifier, instances, 5, new Random(1));

    String resume = eval.toSummaryString();
    return eval.toMatrixString(resume);
}