Example usage for weka.classifiers.trees J48 J48

List of usage examples for weka.classifiers.trees J48 J48

Introduction

On this page you can find example usages of the weka.classifiers.trees J48 constructor.

Prototype

J48

Source Link

Usage

From source file:mulan.examples.SubsetLearnerExamples.java

License:Open Source License

/**
 * Demonstrates Mulan's SubsetLearner family of algorithms on a train/test
 * pair of multi-label ARFF files selected on the command line, e.g.
 * {@code -path dataset/ -filestem emotions}.
 *
 * @param args command line arguments (-path and -filestem)
 * @throws Exception if loading the data or running a learner fails
 */
public static void main(String[] args) throws Exception {
    String path = Utils.getOption("path", args); // e.g. -path dataset/
    String filestem = Utils.getOption("filestem", args); // e.g. -filestem emotions
    System.out.println("Loading the training set");
    MultiLabelInstances train = new MultiLabelInstances(path + filestem + "-train.arff",
            path + filestem + ".xml");
    System.out.println("Loading the test set");
    MultiLabelInstances test = new MultiLabelInstances(path + filestem + "-test.arff",
            path + filestem + ".xml");

    /*
     * The demonstrated methods:
     *   "GreedyLabelClustering-U"    - SubsetLearner via GreedyLabelClustering with
     *                                  Unconditional labels dependence identification (LDI)
     *   "EnsembleOfSubsetLearners-U" - EnsembleOfSubsetLearners with Unconditional LDI
     *   "GreedyLabelClustering-C"    - SubsetLearner via GreedyLabelClustering with Conditional LDI
     *   "EnsembleOfSubsetLearners-C" - EnsembleOfSubsetLearners with Conditional LDI
     *   "SubsetLearner"              - SubsetLearner with a random label set partition
     *   "UnconditionalLDI"           - the Unconditional LDI algorithm on its own
     *   "ConditionalLDI"             - the Conditional LDI algorithm on its own
     */
    String[] methodsToCompare = { "GreedyLabelClustering-U", "EnsembleOfSubsetLearners-U",
            "GreedyLabelClustering-C", "EnsembleOfSubsetLearners-C", "SubsetLearner", "UnconditionalLDI",
            "ConditionalLDI" };
    Evaluator eval = new Evaluator();

    for (String method : methodsToCompare) {
        if (method.equals("GreedyLabelClustering-U")) {
            System.out.println(
                    "\nStarting GreedyLabelClustering algorithm using Unconditional labels dependence identification");
            UnconditionalChiSquareIdentifier uncond = new UnconditionalChiSquareIdentifier();
            MultiLabelLearner lp = new LabelPowerset(new J48());
            GreedyLabelClustering clusterer = new GreedyLabelClustering(lp, new J48(), uncond);
            SubsetLearner learner = new SubsetLearner(clusterer, lp, new J48());
            learner.setUseCache(true); // use caching mechanism
            learner.setDebug(true);
            buildEvaluateReport(learner, eval, train, test, false);
        }

        if (method.equals("GreedyLabelClustering-C")) {
            System.out.println(
                    "\nStarting GreedyLabelClustering algorithm using Conditional labels dependence identification");
            ConditionalDependenceIdentifier cond = new ConditionalDependenceIdentifier(new J48());
            MultiLabelLearner lp = new LabelPowerset(new J48());
            GreedyLabelClustering clusterer = new GreedyLabelClustering(lp, new J48(), cond);
            SubsetLearner learner = new SubsetLearner(clusterer, lp, new J48());
            learner.setUseCache(true); // use caching mechanism
            learner.setDebug(true);
            buildEvaluateReport(learner, eval, train, test, false);
        }

        if (method.equals("EnsembleOfSubsetLearners-U")) {
            System.out.println(
                    "\nStarting EnsembleOfSubsetLearners algorithm using Unconditional labels dependence identification");
            UnconditionalChiSquareIdentifier uncond = new UnconditionalChiSquareIdentifier();
            MultiLabelLearner lp = new LabelPowerset(new J48());
            EnsembleOfSubsetLearners learner = new EnsembleOfSubsetLearners(lp, new J48(), uncond, 10);
            learner.setDebug(true);
            learner.setUseSubsetLearnerCache(true);
            buildEvaluateReport(learner, eval, train, test, false);
        }

        if (method.equals("EnsembleOfSubsetLearners-C")) {
            System.out.println(
                    "\nStarting EnsembleOfSubsetLearners algorithm using Conditional labels dependence identification");
            ConditionalDependenceIdentifier cond = new ConditionalDependenceIdentifier(new J48());
            MultiLabelLearner lp = new LabelPowerset(new J48());
            EnsembleOfSubsetLearners learner = new EnsembleOfSubsetLearners(lp, new J48(), cond, 10);
            learner.setDebug(true);
            learner.setUseSubsetLearnerCache(true);
            // use strategy for selecting highly weighted ensemble partitions (without seeking
            // for diverse models)
            learner.setSelectDiverseModels(false);
            buildEvaluateReport(learner, eval, train, test, true);
        }

        if (method.equals("SubsetLearner")) {
            System.out.println("\nStarting SubsetLearner algorithm with random label set partition.");
            EnsembleOfSubsetLearners ensemble = new EnsembleOfSubsetLearners();
            List<int[][]> randomSet = ensemble.createRandomSets(train.getNumLabels(), 1);
            int[][] partition = randomSet.get(0);
            System.out.println("Random partition: " + EnsembleOfSubsetLearners.partitionToString(partition));
            SubsetLearner learner = new SubsetLearner(partition, new J48());
            learner.setDebug(true);
            buildEvaluateReport(learner, eval, train, test, false);
        }

        if (method.equals("UnconditionalLDI")) {
            System.out.println("\nStarting algorithm for Unconditional labels dependence identification.");
            UnconditionalChiSquareIdentifier uncond = new UnconditionalChiSquareIdentifier();
            long start = System.currentTimeMillis();
            LabelsPair[] pairs = uncond.calculateDependence(train);
            long elapsed = System.currentTimeMillis() - start;
            System.out.println("Identified dependency scores of label pairs: \n" + Arrays.toString(pairs));
            System.out.println("Computation time: " + elapsed);
        }

        if (method.equals("ConditionalLDI")) {
            System.out.println("\nStarting algorithm for Conditional labels dependence identification.");
            ConditionalDependenceIdentifier cond = new ConditionalDependenceIdentifier(new J48());
            long start = System.currentTimeMillis();
            LabelsPair[] pairs = cond.calculateDependence(train);
            long elapsed = System.currentTimeMillis() - start;
            System.out.println("Identified dependency scores of label pairs: \n" + Arrays.toString(pairs));
            System.out.println("Computation time: " + elapsed);
        }
    }
}

/**
 * Builds the learner on the training data, evaluates it on the test data and
 * prints the CSV-formatted results followed by the train/test wall-clock
 * times in milliseconds. Extracted to remove the timing/reporting boilerplate
 * that was duplicated across every branch of main.
 *
 * @param learner            the (untrained) multi-label learner
 * @param eval               evaluator computing the measures
 * @param train              training data
 * @param test               test data
 * @param announceEvaluation when true, prints a marker line before evaluation starts
 * @throws Exception if building or evaluating the learner fails
 */
private static void buildEvaluateReport(MultiLabelLearner learner, Evaluator eval, MultiLabelInstances train,
        MultiLabelInstances test, boolean announceEvaluation) throws Exception {
    long s1 = System.currentTimeMillis();
    learner.build(train);
    long s2 = System.currentTimeMillis();
    if (announceEvaluation) {
        System.out.println("Evaluation started. ");
    }
    Evaluation results = eval.evaluate(learner, test);
    long s3 = System.currentTimeMillis();
    System.out.println(results.toCSV());
    System.out.println("Train time: " + (s2 - s1) + " Test time: " + (s3 - s2));
}

From source file:mulan.examples.TrainTestExperiment.java

License:Open Source License

/**
 * Compares a collection of Mulan multi-label learners on a single
 * percentage-based train/test split of one data set. Usage example:
 * {@code -path dataset/ -filestem emotions -percentage 50}.
 *
 * @param args command line arguments (-path, -filestem, -percentage)
 */
public static void main(String[] args) {
    // Algorithms to run; a branch below executes only if its name appears here.
    String[] methodsToCompare = { "HOMER", "BR", "CLR", "MLkNN", "MC-Copy", "IncludeLabels", "MC-Ignore",
            "RAkEL", "LP", "MLStacking" };

    try {
        String path = Utils.getOption("path", args); // e.g. -path dataset/
        String filestem = Utils.getOption("filestem", args); // e.g. -filestem emotions
        String percentage = Utils.getOption("percentage", args); // e.g. -percentage 50 (for 50%)
        System.out.println("Loading the dataset");
        MultiLabelInstances mlDataSet = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        // Split the data set into train and test with RemovePercentage applied twice:
        // the inverted filter keeps the selected share for training, the plain
        // filter keeps the complement for testing.
        Instances dataSet = mlDataSet.getDataSet();
        //dataSet.randomize(new Random(1));
        double splitPercentage = Double.parseDouble(percentage);
        RemovePercentage rmvp = new RemovePercentage();
        rmvp.setInvertSelection(true);
        rmvp.setPercentage(splitPercentage);
        rmvp.setInputFormat(dataSet);
        Instances trainDataSet = Filter.useFilter(dataSet, rmvp);

        rmvp = new RemovePercentage();
        rmvp.setPercentage(splitPercentage);
        rmvp.setInputFormat(dataSet);
        Instances testDataSet = Filter.useFilter(dataSet, rmvp);

        MultiLabelInstances train = new MultiLabelInstances(trainDataSet, path + filestem + ".xml");
        MultiLabelInstances test = new MultiLabelInstances(testDataSet, path + filestem + ".xml");

        Evaluator eval = new Evaluator();

        // Enhanced for-loop for consistency with the other examples in this library.
        for (String method : methodsToCompare) {

            if (method.equals("BR")) {
                System.out.println(method);
                Classifier brClassifier = new NaiveBayes();
                BinaryRelevance br = new BinaryRelevance(brClassifier);
                br.setDebug(true);
                trainAndReport(br, eval, train, test);
            }

            if (method.equals("LP")) {
                System.out.println(method);
                Classifier lpBaseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(lpBaseClassifier);
                lp.setDebug(true);
                trainAndReport(lp, eval, train, test);
            }

            if (method.equals("CLR")) {
                System.out.println(method);
                Classifier clrClassifier = new J48();
                CalibratedLabelRanking clr = new CalibratedLabelRanking(clrClassifier);
                clr.setDebug(true);
                trainAndReport(clr, eval, train, test);
            }

            if (method.equals("RAkEL")) {
                System.out.println(method);
                MultiLabelLearner lp = new LabelPowerset(new J48());
                RAkEL rakel = new RAkEL(lp);
                rakel.setDebug(true);
                trainAndReport(rakel, eval, train, test);
            }

            if (method.equals("MC-Copy")) {
                System.out.println(method);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Copy();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.setDebug(true);
                trainAndReport(mcl, eval, train, test);
            }

            if (method.equals("MC-Ignore")) {
                System.out.println(method);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Ignore();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                trainAndReport(mcl, eval, train, test);
            }

            if (method.equals("IncludeLabels")) {
                System.out.println(method);
                Classifier ilClassifier = new J48();
                IncludeLabelsClassifier il = new IncludeLabelsClassifier(ilClassifier);
                il.setDebug(true);
                trainAndReport(il, eval, train, test);
            }

            if (method.equals("MLkNN")) {
                System.out.println(method);
                int numOfNeighbors = 10;
                double smooth = 1.0;
                MLkNN mlknn = new MLkNN(numOfNeighbors, smooth);
                mlknn.setDebug(true);
                trainAndReport(mlknn, eval, train, test);
            }

            // NOTE(review): "HMC" does not appear in methodsToCompare above, so this
            // branch is currently dead code; add "HMC" to the array to enable it.
            if (method.equals("HMC")) {
                System.out.println(method);
                Classifier baseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(baseClassifier);
                RAkEL rakel = new RAkEL(lp);
                HMC hmc = new HMC(rakel);
                trainAndReport(hmc, eval, train, test);
            }

            if (method.equals("HOMER")) {
                System.out.println(method);
                Classifier baseClassifier = new SMO();
                CalibratedLabelRanking learner = new CalibratedLabelRanking(baseClassifier);
                learner.setDebug(true);
                HOMER homer = new HOMER(learner, 3, HierarchyBuilder.Method.Random);
                homer.setDebug(true);
                trainAndReport(homer, eval, train, test);
            }

            if (method.equals("MLStacking")) {
                System.out.println(method);
                int numOfNeighbors = 10;
                Classifier baseClassifier = new IBk(numOfNeighbors);
                Classifier metaClassifier = new Logistic();
                MultiLabelStacking mls = new MultiLabelStacking(baseClassifier, metaClassifier);
                mls.setMetaPercentage(1.0);
                mls.setDebug(true);
                trainAndReport(mls, eval, train, test);
            }

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

/**
 * Builds the learner on the training data, evaluates it on the test data and
 * prints the evaluation results. Extracted to remove the build/evaluate/print
 * boilerplate that was duplicated in every branch of main.
 *
 * @param learner the (untrained) multi-label learner
 * @param eval    evaluator computing the measures
 * @param train   training data
 * @param test    test data
 * @throws Exception if building or evaluating the learner fails
 */
private static void trainAndReport(MultiLabelLearner learner, Evaluator eval, MultiLabelInstances train,
        MultiLabelInstances test) throws Exception {
    learner.build(train);
    Evaluation results = eval.evaluate(learner, test);
    System.out.println(results);
}

From source file:mulan.experiments.ICTAI2010.java

License:Open Source License

/**
 * Entry point for the ICTAI 2010 thresholding experiments: cross-validates
 * four multi-label learners (MLkNN, CLR, bagged-J48 BR, BPMLL) under several
 * thresholding/calibration strategies (OneThreshold, RCut, SCut, MetaLabeler,
 * ThresholdPrediction) and prints Hamming-loss results as semicolon-separated
 * lines of the form "learner;strategy;variant;...;csv".
 *
 * @param args command line arguments (-path and -filestem select the data set)
 */
public static void main(String[] args) {

    try {
        String path = Utils.getOption("path", args);
        String filestem = Utils.getOption("filestem", args);

        System.out.println("Loading the data set");
        MultiLabelInstances dataset = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        Evaluator eval = new Evaluator();
        MultipleEvaluation results;
        // Only Hamming loss is reported in this experiment.
        List<Measure> measures = new ArrayList<Measure>(1);
        measures.add(new HammingLoss());

        int numFolds = 10;

        // The four base learners; learnerName is kept parallel to learner
        // so results can be labelled by index.
        MultiLabelLearner[] learner = new MultiLabelLearner[4];
        String[] learnerName = new String[learner.length];

        learner[0] = new MLkNN(10, 1.0);
        learnerName[0] = "MLkNN";
        learner[1] = new CalibratedLabelRanking(new J48());
        learnerName[1] = "CLR";
        Bagging bagging = new Bagging();
        bagging.setClassifier(new J48());
        learner[2] = new BinaryRelevance(bagging);
        learnerName[2] = "BR";
        learner[3] = new BPMLL();
        learnerName[3] = "BPMLL";

        // loop over learners; every strategy gets a fresh copy (makeCopy) of
        // the base learner so runs do not contaminate each other
        for (int i = 0; i < learner.length; i++) {
            // Default (no thresholding strategy)
            results = eval.crossValidate(learner[i].makeCopy(), dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";default;-;" + results.toCSV());

            // One Threshold: single global threshold, tuned on the train set
            // (first variant) or by internal 5-fold CV (second variant)
            OneThreshold ot;
            ot = new OneThreshold(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(ot, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";one threshold;train;" + results.toCSV());
            ot = new OneThreshold(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(ot, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";one threshold;5-cv;" + results.toCSV());

            // RCut: rank cut by label cardinality, train-tuned, or 5-fold CV
            RCut rcut;
            rcut = new RCut(learner[i].makeCopy());
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;cardinality;" + results.toCSV());
            rcut = new RCut(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;train;" + results.toCSV());
            rcut = new RCut(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;5-cv;" + results.toCSV());

            // SCut: per-label score cut, train-tuned or 5-fold CV
            SCut scut;
            scut = new SCut(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(scut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";scut;train;" + results.toCSV());
            scut = new SCut(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(scut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";scut;5-cv;" + results.toCSV());

            // MetaLabeler: predicts the number of labels with a meta-model
            // (M5P regression or J48 classification) over content/score/rank
            // features; setFolds(1) trains the meta-model on the train set.
            MetaLabeler ml;
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Content-Based", "Numeric-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;train;content;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Score-Based", "Numeric-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;train;scores;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Rank-Based", "Numeric-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;train;ranks;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Content-Based", "Nominal-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;j48;train;content;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Score-Based", "Nominal-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;j48;train;scores;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Rank-Based", "Nominal-Class");
            ml.setFolds(1);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            // NOTE(review): this label says "cv" although setFolds(1) was used;
            // sibling folds=1 lines print "train" - likely a copy/paste slip in
            // the output label. Left unchanged to preserve the published output.
            System.out.println(learnerName[i] + ";metalabeler;j48;cv;ranks;" + results.toCSV());

            // Same six MetaLabeler variants, but meta-model tuned by 5-fold CV.
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Content-Based", "Numeric-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;cv;content;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Score-Based", "Numeric-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;cv;scores;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), "Rank-Based", "Numeric-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;m5p;cv;ranks;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Content-Based", "Nominal-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;j48;cv;content;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Score-Based", "Nominal-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;j48;cv;scores;" + results.toCSV());
            ml = new MetaLabeler(learner[i].makeCopy(), new J48(), "Rank-Based", "Nominal-Class");
            ml.setFolds(5);
            results = eval.crossValidate(ml, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";metalabeler;j48;cv;ranks;" + results.toCSV());

            // ThresholdPrediction: learns an instance-specific threshold with
            // an M5P meta-model; last constructor argument is the number of
            // internal folds (1 = train on the training set, 5 = 5-fold CV).
            ThresholdPrediction tp;
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Content-Based", 1);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;train;content;" + results.toCSV());
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Score-Based", 1);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;train;scores;" + results.toCSV());
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Rank-Based", 1);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;train;ranks;" + results.toCSV());
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Content-Based", 5);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;5-cv;content;" + results.toCSV());
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Score-Based", 5);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;5-cv;scores;" + results.toCSV());
            tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(), "Rank-Based", 5);
            results = eval.crossValidate(tp, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";tp;m5p;5-cv;ranks;" + results.toCSV());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:myclassifier.wekaCode.java

/**
 * Creates and trains a Weka classifier of the requested kind.
 *
 * @param dataSet        training instances (class attribute already set by the caller)
 * @param classifierType one of the type constants (BAYES, ID3, J48, MyID3, MyJ48)
 * @param prune          whether MyJ48 should prune its tree (ignored by the other types)
 * @return the trained classifier, or null when classifierType matches no known constant
 * @throws Exception if training the classifier fails
 */
public static Classifier buildClassifier(Instances dataSet, int classifierType, boolean prune)
        throws Exception {
    Classifier classifier = null;
    if (classifierType == BAYES) {
        classifier = new NaiveBayes();
    } else if (classifierType == ID3) {
        classifier = new Id3();
    } else if (classifierType == J48) {
        classifier = new J48();
    } else if (classifierType == MyID3) {
        classifier = new MyID3();
    } else if (classifierType == MyJ48) {
        MyJ48 tree = new MyJ48();
        tree.setPruning(prune); // pruning flag only applies to the custom J48
        classifier = tree;
    }
    // Train once at the end instead of inside each branch; an unknown
    // type still yields null, exactly as before.
    if (classifier != null) {
        classifier.buildClassifier(dataSet);
    }
    return classifier;
}

From source file:myid3andc45classifier.Main.java

/**
 * @param args the command line arguments
 */
/**
 * Trains Weka's built-in J48 tree and the custom MyC45 implementation on the
 * iris ARFF file, printing both models plus a 10-fold cross-validation
 * summary for the custom tree.
 *
 * @param args the command line arguments (unused)
 * @throws Exception if reading the ARFF file or training a model fails
 */
public static void main(String[] args) throws Exception {
    WekaAccessor accessor = new WekaAccessor();
    // NOTE(review): hard-coded absolute path - runs only on the author's machine.
    Instances trainset = accessor.readARFF(
            "D:\\Semester VII\\ML\\myID3andC45classifier\\myID3andC45classifier\\resources\\iris.arff");

    // Reference model: Weka's stock C4.5 implementation.
    Classifier j48 = new J48();
    Classifier model = accessor.train(trainset, j48);
    //accessor.saveModel(model, "C:\\Users\\Julio Savigny\\Desktop\\myID3andC45classifier\\myID3andC45classifier\\some.model");
    //Classifier loadedModel = accessor.loadModel("C:\\Users\\Julio Savigny\\Desktop\\myID3andC45classifier\\myID3andC45classifier\\some.model");
    System.out.println(model);
    //System.out.println(loadedModel);

    // Custom C4.5 tree (the original variable was misleadingly named customID3).
    Classifier customC45 = new MyC45();
    Classifier customModel = accessor.train(trainset, customC45);
    Instances resampledTrainset = accessor.resample(trainset); // currently unused; kept for the commented-out print below
    System.out.println("WOY");
    System.out.println(customModel);
    System.out.println(accessor.tenFoldCrossValidation(trainset, customC45).toSummaryString());
    Evaluation eval = new Evaluation(trainset);
    eval.evaluateModel(customModel, trainset); // resubstitution evaluation; summary print is disabled
    //System.out.println(eval.toSummaryString());

    //        System.out.println(trainset);
    //        System.out.println(resampledTrainset);
}

From source file:newclassifier.NewClassifier.java

/**
 * Configures this object to classify with a J48 decision tree and marks the
 * last attribute of the loaded data as the class attribute. Training is
 * deferred (the buildClassifier call is intentionally left commented out).
 *
 * @throws Exception declared for API compatibility; not thrown by the current body
 */
public void setClassifierTree() throws Exception {
    // The class label is assumed to be the last attribute of the data set.
    data.setClassIndex(data.numAttributes() - 1);
    cls = new J48();
    //cls.buildClassifier(data);
}

From source file:new_pack.repro.java

public static void main(String[] args) throws Exception {
    System.out.println("count:");
    J48 tree = new J48();
    // NOTE(review): passing null is presumably intentional - this class is named
    // "repro", and the call below reproduces a failure (J48 cannot build from a
    // null Instances reference). Confirm the intent before "fixing" it.
    tree.buildClassifier(null);
}

From source file:nl.bioinf.roelen.thema11.classifier_tools.BoundaryClassifier.java

License:Open Source License

/**
 * Builds a classifier from the attribute data in the given ARFF file.
 * @param fileLocation the ARFF file our attributes are in
 * @param method the method to use for building our classifier ("J48", "OneR" or "ZeroR", case-insensitive)
 * @return the classifier object that was built
 */
/**
 * Builds and trains a classifier from the attribute data in the given ARFF file.
 * The name and position attributes are dropped first, and the last remaining
 * attribute is used as the class.
 *
 * @param fileLocation the ARFF file our attributes are in
 * @param method the classifier to build: "J48", "OneR" or "ZeroR" (case-insensitive);
 *               anything else falls back to OneR
 * @return the trained classifier, or null if loading or training failed
 */
public static Classifier build(String fileLocation, String method) {
    //init classifier object
    Classifier classifier;
    classifier = null;

    try {
        //get data
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(fileLocation);
        //SET DATA AND OPTIONS
        Instances data = source.getDataSet();

        //remove the name and position entries, these are not important for classifying
        data.deleteAttributeAt(data.numAttributes() - 2);
        data.deleteAttributeAt(data.numAttributes() - 2);
        data.setClassIndex(data.numAttributes() - 1);

        //see what method was given
        switch (method.toUpperCase()) {
        case "J48": // BUG FIX: case labels must be upper-case to ever match
                    // the toUpperCase()'d input; "OneR"/"ZeroR" were dead branches.
            //Build J48 classifier, grown unpruned.
            J48 tree = new J48();
            // FIX: "-U" (unpruned) is a J48-specific option; applying it to
            // OneR/ZeroR via setOptions made them reject the option, so it is
            // now set only on the J48 branch.
            tree.setOptions(new String[] { "-U" }); // unpruned tree
            classifier = tree;
            break;
        case "ONER":
            //Build OneR classifier
            classifier = new OneR();
            break;
        case "ZEROR":
            //build (useless) ZeroR classifier
            classifier = new ZeroR();
            break;
        default:
            //default is building OneR
            classifier = new OneR();
            break;
        }
        //train the selected classifier
        classifier.buildClassifier(data); // build classifier

    } catch (Exception ex) {
        Logger.getLogger(BoundaryClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return classifier;
}

From source file:old.CFS.java

/**
 * uses the meta-classifier//from w  w w .j  a va2s  .  c  om
 */
/**
 * Runs a meta-classifier that performs chi-squared attribute selection
 * (keeping the 1000 best-ranked attributes) before training a J48 tree,
 * and prints the 10-fold cross-validation summary.
 *
 * @param data the training instances (class attribute must already be set)
 * @throws Exception if cross-validation fails
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classifier"); // FIX: typo "classfier" in the original message
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    ChiSquaredAttributeEval eval = new ChiSquaredAttributeEval();
    Ranker search = new Ranker();
    // FIX: the magic literal -1.7976931348623157E308 is exactly -Double.MAX_VALUE,
    // i.e. "no threshold - rank every attribute"; spell it out for readability.
    search.setThreshold(-Double.MAX_VALUE);
    search.setNumToSelect(1000); // keep only the 1000 top-ranked attributes
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    // fixed seed for a reproducible fold split
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}

From source file:org.conqat.engine.commons.machine_learning.BaseWekaClassifier.java

License:Apache License

/**
 * Returns a new classifier based on the given algorithm.
 *///  w w  w.java2 s.  c  o  m
/**
 * Maps a classification algorithm constant to a freshly constructed,
 * untrained Weka classifier.
 *
 * @param algorithm the algorithm to instantiate
 * @return a new classifier for the given algorithm
 */
protected weka.classifiers.Classifier getClassifier(EClassificationAlgorithm algorithm) {
    if (algorithm == EClassificationAlgorithm.DECISION_TREE_REP) {
        return new REPTree();
    }
    if (algorithm == EClassificationAlgorithm.SUPPORT_VECTOR_MACHINE_SMO) {
        return new SMO();
    }
    if (algorithm == EClassificationAlgorithm.COST_SENSITIVE_CLASSIFIER) {
        return new CostSensitiveClassifier();
    }
    if (algorithm == EClassificationAlgorithm.DECISION_TREE_J48) {
        return new J48();
    }
    // Unreachable for a fully-covered enum; kept to mirror the original contract.
    throw new AssertionError("Cannot create a classifier without a specified algorithm.");
}