Example usage for weka.classifiers Evaluation toSummaryString

Introduction

On this page you can find example usages of weka.classifiers.Evaluation.toSummaryString().

Prototype

@Override
public String toSummaryString() 

Document

Calls toSummaryString() with no title and no complexity stats.
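
For orientation, the sketch below shows the typical call pattern around toSummaryString(): evaluate a classifier through an Evaluation object, then print the summary. It is a minimal, self-contained sketch; the ARFF path and the J48 classifier are illustrative assumptions, not taken from the examples below.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SummaryStringSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and use the last attribute as the class (illustrative path).
        Instances data = new DataSource("data/iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Evaluate a J48 decision tree with 10-fold cross-validation.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // toSummaryString() returns the summary block with no title and no complexity stats.
        System.out.println(eval.toSummaryString());
    }
}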

Usage

From source file:ml.ann.MainPTR.java

public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    boolean randomWeight;
    double weightawal = 0.0;
    double learningRate = 0.0001;
    double threshold = 0.00;
    double momentum = 0.00;
    int maxEpoch = 100000;
    int nCrossValidate = 2;

    m_nominalToBinaryFilter = new NominalToBinary();
    m_normalize = new Normalize();

    Scanner in = new Scanner(System.in);
    System.out.println("Lokasi file: ");

    String filepath = in.nextLine();
    filepath = "test-arffs/iris.arff";
    System.out.println("--- Algoritma ---");
    System.out.println("1. Perceptron Training Rule");
    System.out.println("2. Delta Rule Incremental");
    System.out.println("3. Delta Rule Batch");
    System.out.println("Pilihan Algoritma (1/2/3) : ");
    int choice = in.nextInt();
    String temp = in.nextLine();//  w  w  w . j  a v  a2  s .co m

    System.out.println("Apakah Anda ingin memasukkan nilai weight awal? (YES/NO)");
    String isRandom = in.nextLine();
    System.out.println("Apakah Anda ingin memasukkan konfigurasi? (YES/NO)");
    String config = in.nextLine();

    if (config.equalsIgnoreCase("yes")) {
        System.out.print("Masukkan nilai learning rate: ");
        learningRate = in.nextDouble();
        System.out.print("Masukkan nilai threshold: ");
        threshold = in.nextDouble();
        System.out.print("Masukkan nilai momentum: ");
        momentum = in.nextDouble();
        System.out.print("Masukkan jumlah epoch: ");
        threshold = in.nextInt();
        System.out.print("Masukkan jumlah folds untuk crossvalidate: ");
        nCrossValidate = in.nextInt();
    }

    randomWeight = isRandom.equalsIgnoreCase("yes");

    if (randomWeight) {
        System.out.print("Masukkan nilai weight awal: ");
        weightawal = Double.valueOf(in.nextLine());
    }

    //print config
    if (isRandom.equalsIgnoreCase("yes")) {
        System.out.print("isRandom | ");
    } else {
        System.out.print("Weight " + weightawal + " | ");
    }

    System.out.print("L.rate " + learningRate + " | ");
    System.out.print("Max Epoch " + maxEpoch + " | ");
    System.out.print("Threshold " + threshold + " | ");
    System.out.print("Momentum " + momentum + " | ");
    System.out.print("Folds " + nCrossValidate + " | ");
    System.out.println();

    FileReader trainreader = new FileReader(filepath);
    Instances train = new Instances(trainreader);
    train.setClassIndex(train.numAttributes() - 1);

    m_nominalToBinaryFilter.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_nominalToBinaryFilter));

    m_normalize.setInputFormat(train);
    train = new Instances(Filter.useFilter(train, m_normalize));

    MultiClassPTR tempMulti = new MultiClassPTR(choice, randomWeight, learningRate, maxEpoch, threshold);
    tempMulti.buildClassifier(train);

    Evaluation eval = new Evaluation(new Instances(train));
    eval.evaluateModel(tempMulti, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:mlpoc.MLPOC.java

/**
 * Uses the meta-classifier.
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classfier");
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    CfsSubsetEval eval = new CfsSubsetEval();
    GreedyStepwise search = new GreedyStepwise();
    search.setSearchBackwards(true);
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}

From source file:model.clasification.klasifikacijaIstanca.java

public static void main(String[] args) throws Exception {

    // load data
    DataSource loader = new DataSource(fileName); // fileName is defined elsewhere in the class
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1);

    // Create the Naive Bayes Classifier
    NaiveBayes bayesClsf = new NaiveBayes();
    bayesClsf.buildClassifier(data);

    // output generated model
    //      System.out.println(bayesClsf);

    // Test the model with the original set
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(bayesClsf, data);

    // Print the result as in Weka explorer
    String strSummary = eval.toSummaryString();

    //      System.out.println("=== Evaluation on training set ===");
    //      System.out.println("=== Summary ===");
    //      System.out.println(strSummary);

    // Get the confusion matrix
    System.out.println(eval.toMatrixString());

}

From source file:naivebayes.NBRun.java

public static void main(String[] args) throws Exception {
    System.out.println("Naive Bayes Classifier");
    Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush_test.arff");
    Scanner scan = new Scanner(System.in);
    Classifier cls;
    Instances train = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush.arff");

    System.out.println("Read or create model? r/c");
    if (scan.next().equals("c")) {
        cls = new NBTubesAI();
        cls.buildClassifier(train);
        TucilWeka.saveModel(train);
    } else {
        cls = (NBTubesAI) TucilWeka.readModel();
    }
    Evaluation eval = new Evaluation(data);
    System.out.println("10 fold cross validate or Full train? c/f");
    if (scan.next().equals("c")) {
        int fold = 10;
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        eval.crossValidateModel(cls, data, fold, new Random(1));
    } else {
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ". " + data.attribute(i));
        }
        data.deleteWithMissingClass();
        try {

            eval.evaluateModel(cls, data);
        } catch (java.lang.Exception ex) {
            eval.crossValidateModel(cls, data, 11, new Random(1));
        }
    }
    //        Classifier cls=new NBTubesAI();
    //        cls.buildClassifier(data);

    System.out.println("Hasil evaluasi: ");
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
    System.out.println(eval.toClassDetailsString());
}

From source file:NaiveBayesPckge.NaiveBayesMain.java

public static void printEvaluationSplit(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);

    System.out.println("Split Test Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of Training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

}

From source file:NaiveBayesPckge.NaiveBayesMain.java

public static void printEvaluation(Instances instance) throws Exception {
    Evaluation eval = new Evaluation(instance);
    Evaluation eval2 = new Evaluation(instance);

    System.out.println("Full training Result :");
    eval.evaluateModel(naive, instance);
    System.out.println(eval.toSummaryString()); // Summary of Training
    //System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    System.out.println("10 cross validation Result :");
    Random rand = new Random(1);
    eval2.crossValidateModel(naive, instance, 10, rand);
    System.out.println(eval2.toSummaryString()); // Summary of Training
    //System.out.println(eval2.toClassDetailsString());
    System.out.println(eval2.toMatrixString());

    double errorRates = eval.incorrect() / eval.numInstances() * 100;
    double accuracy = eval.correct() / eval.numInstances() * 100;

    //        System.out.println("Accuracy: " + df.format(accuracy) + " %");
    //        System.out.println("Error rate: " + df.format(errorRates) + " %"); // Printing Training Mean root squared error
}

From source file:nl.bioinf.roelen.thema11.query_handlers.QueryHandler.java

License:Open Source License

/**
 * test classifier on the training data by using 10-folds
 * @param fileLocation location of the training set ARFF file
 * @param classifier the classifier to use
 * @return String representation of how well it performed
 */
public String testTenFold(String fileLocation, Classifier classifier) {
    Evaluation eval = ClassifierTester.testTenFold(fileLocation, classifier);
    return eval.toSummaryString();
}

From source file:nl.bioinf.roelen.thema11.query_handlers.QueryHandler.java

License:Open Source License

/**
 * test classifier on the training data by a test set
 * @param fileLocation location of the training set ARFF file
 * @param classifier the classifier to use
 * @return String representation of how well it performed
 */
public String testTestSet(String fileLocation, Classifier classifier) {
    // Note: this delegates to the same ten-fold evaluation as testTenFold.
    Evaluation eval = ClassifierTester.testTenFold(fileLocation, classifier);
    return eval.toSummaryString();
}

From source file:nl.uva.expose.classification.WekaClassification.java

private void classifierTrainer(Instances trainData) throws Exception {
    trainData.setClassIndex(0);
    //        classifier.setFilter(filter);
    classifier.setClassifier(new NaiveBayes());
    classifier.buildClassifier(trainData);
    Evaluation eval = new Evaluation(trainData);
    eval.crossValidateModel(classifier, trainData, 5, new Random(1));
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println("===== Evaluating on filtered (training) dataset done =====");
    System.out.println("\n\nClassifier model:\n\n" + classifier);
}

From source file:old.CFS.java

/**
 * Uses the meta-classifier.
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classfier");
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    ChiSquaredAttributeEval eval = new ChiSquaredAttributeEval();
    Ranker search = new Ranker();
    search.setThreshold(-Double.MAX_VALUE); // effectively disables the threshold cut-off
    search.setNumToSelect(1000);
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}