Example usage for weka.classifiers.trees M5P M5P

List of usage examples for weka.classifiers.trees M5P M5P

Introduction

In this page you can find the example usage for weka.classifiers.trees M5P M5P.

Prototype

public M5P() 

Source Link

Document

Creates a new M5P instance.

Usage

From source file:cn.ict.zyq.bestConf.COMT2.COMT2.java

License:Open Source License

/**
 * Trains an M5P model tree on the given instances.
 *
 * @param modelInstances      training data (class attribute must already be set)
 * @param numOfInstanceInLeaf minimum number of instances per leaf (M5P -M option)
 * @return the trained M5P model
 * @throws Exception if the option string is malformed or training fails
 */
private static M5P buildModel(Instances modelInstances, int numOfInstanceInLeaf) throws Exception {
    M5P retval = new M5P();
    // Apply options first: setOptions() resets the classifier configuration,
    // so any setter invoked before it would be silently overwritten.
    // -N: unpruned tree, -L: save instances at the nodes, -M: minimum leaf size.
    retval.setOptions(Utils.splitOptions("-N -L -M " + numOfInstanceInLeaf));
    // Explicitly keep instances at the nodes (redundant with -L, but makes the
    // intent survive future edits of the option string).
    retval.setSaveInstances(true);
    retval.buildClassifier(modelInstances);
    return retval;
}

From source file:lu.lippmann.cdb.dt.ModelTreeFactory.java

License:Open Source License

/**
 * Main method./*  www.  ja  v  a2s .  co m*/
 * @param args command line arguments
 */
public static void main(final String[] args) {
    try {
        //final String f="./samples/csv/uci/winequality-red-simplified.csv";
        final String f = "./samples/csv/uci/winequality-white.csv";
        //final String f="./samples/arff/UCI/crimepredict.arff";
        final Instances dataSet = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(new File(f));
        System.out.println(dataSet.classAttribute().isNumeric());

        final M5P rt = new M5P();
        //rt.setUnpruned(true);
        rt.setMinNumInstances(1000);
        rt.buildClassifier(dataSet);

        System.out.println(rt);

        System.out.println(rt.graph());

        final GraphWithOperations gwo = GraphUtil.buildGraphWithOperationsFromWekaRegressionString(rt.graph());
        System.out.println(gwo);
        System.out.println(new ASCIIGraphDsl().getDslString(gwo));

        final Evaluation eval = new Evaluation(dataSet);

        /*Field privateStringField = Evaluation.class.getDeclaredField("m_CoverageStatisticsAvailable");
        privateStringField.setAccessible(true);
        //privateStringField.get
        boolean fieldValue = privateStringField.getBoolean(eval);
        System.out.println("fieldValue = " + fieldValue);*/

        double[] d = eval.evaluateModel(rt, dataSet);
        System.out.println("PREDICTED -> " + FormatterUtil.buildStringFromArrayOfDoubles(d));

        System.out.println(eval.errorRate());
        System.out.println(eval.sizeOfPredictedRegions());

        System.out.println(eval.toSummaryString("", true));

        System.out.println(new DecisionTree(gwo, eval.errorRate()));
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:lu.lippmann.cdb.ext.hydviga.gaps.GapFillerFactory.java

License:Open Source License

/**
 * Builds the gap-filling strategy matching the given algorithm identifier.
 * The boolean passed to each implementation selects whether the time axis
 * is treated as discretized.
 *
 * @param algo the algorithm identifier
 * @return the corresponding {@link GapFiller}
 * @throws Exception if the identifier is not handled
 */
public static GapFiller getGapFiller(final Algo algo) throws Exception {
    if (algo == null)
        throw new Exception("Algo not managed -> " + algo);
    switch (algo) {
    case EM_WITH_DISCR_TIME:
        return new GapFillerEM(true);
    case EM:
        return new GapFillerEM(false);
    case Interpolation:
        return new GapFillerInterpolation(false);
    case ZeroR:
        return new GapFillerClassifier(false, new ZeroR());
    case REG_WITH_DISCR_TIME:
        return new GapFillerRegressions(true);
    case REG:
        return new GapFillerRegressions(false);
    case M5P_WITH_DISCR_TIME:
        return new GapFillerClassifier(true, new M5P());
    case M5P:
        return new GapFillerClassifier(false, new M5P());
    case ANN_WITH_DISCR_TIME:
        return new GapFillerClassifier(true, new MultilayerPerceptron());
    case ANN:
        return new GapFillerClassifier(false, new MultilayerPerceptron());
    case NEARESTNEIGHBOUR_WITH_DISCR_TIME:
        return new GapFillerClassifier(true, new IBk());
    case NEARESTNEIGHBOUR:
        return new GapFillerClassifier(false, new IBk());
    default:
        throw new Exception("Algo not managed -> " + algo);
    }
}

From source file:lu.lippmann.cdb.ext.hydviga.gaps.GapFillerFactory.java

License:Open Source License

/**
 * Builds the gap-filling strategy matching the given algorithm name.
 *
 * @param algoname           the algorithm name ("EM", "Interpolation", "ZeroR",
 *                           "REG", "M5P", "ANN" or "NEARESTNEIGHBOUR")
 * @param useDiscretizedTime whether the time axis is treated as discretized
 * @return the corresponding {@link GapFiller}
 * @throws Exception if the name is not handled
 */
public static GapFiller getGapFiller(final String algoname, final boolean useDiscretizedTime) throws Exception {
    if (algoname.equals("EM"))
        return new GapFillerEM(useDiscretizedTime);
    if (algoname.equals("Interpolation"))
        return new GapFillerInterpolation(useDiscretizedTime);
    if (algoname.equals("ZeroR"))
        return new GapFillerClassifier(useDiscretizedTime, new ZeroR());
    if (algoname.equals("REG"))
        return new GapFillerRegressions(useDiscretizedTime);
    if (algoname.equals("M5P"))
        return new GapFillerClassifier(useDiscretizedTime, new M5P());
    if (algoname.equals("ANN"))
        return new GapFillerClassifier(useDiscretizedTime, new MultilayerPerceptron());
    if (algoname.equals("NEARESTNEIGHBOUR"))
        return new GapFillerClassifier(useDiscretizedTime, new IBk());
    throw new Exception("Algo name not managed -> " + algoname);
}

From source file:mulan.experiments.ICTAI2010.java

License:Open Source License

/**
 * Reproduces the ICTAI 2010 thresholding experiments: for each of four
 * multi-label learners, evaluates several thresholding strategies
 * (default, OneThreshold, RCut, SCut, MetaLabeler, ThresholdPrediction)
 * with 10-fold cross-validation and prints Hamming-loss results as
 * semicolon-separated CSV lines.
 *
 * @param args command line arguments: -path &lt;dir&gt; -filestem &lt;name&gt;
 *             locating the data set's ARFF/XML file pair
 */
public static void main(String[] args) {

    try {
        String path = Utils.getOption("path", args);
        String filestem = Utils.getOption("filestem", args);

        System.out.println("Loading the data set");
        MultiLabelInstances dataset = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        Evaluator eval = new Evaluator();
        MultipleEvaluation results;
        List<Measure> measures = new ArrayList<Measure>(1);
        measures.add(new HammingLoss());

        int numFolds = 10;

        // The four base multi-label learners under comparison.
        MultiLabelLearner[] learner = new MultiLabelLearner[4];
        String[] learnerName = new String[learner.length];

        learner[0] = new MLkNN(10, 1.0);
        learnerName[0] = "MLkNN";
        learner[1] = new CalibratedLabelRanking(new J48());
        learnerName[1] = "CLR";
        Bagging bagging = new Bagging();
        bagging.setClassifier(new J48());
        learner[2] = new BinaryRelevance(bagging);
        learnerName[2] = "BR";
        learner[3] = new BPMLL();
        learnerName[3] = "BPMLL";

        // Meta-model input representations and the labels used in the CSV output.
        String[] approaches = { "Content-Based", "Score-Based", "Rank-Based" };
        String[] approachLabels = { "content", "scores", "ranks" };
        // 1 = tune the meta-model on the training data, 5 = internal 5-fold CV.
        int[] foldSettings = { 1, 5 };

        // loop over learners
        for (int i = 0; i < learner.length; i++) {
            // Default thresholding of the learner itself.
            results = eval.crossValidate(learner[i].makeCopy(), dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";default;-;" + results.toCSV());

            // One Threshold
            OneThreshold ot;
            ot = new OneThreshold(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(ot, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";one threshold;train;" + results.toCSV());
            ot = new OneThreshold(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(ot, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";one threshold;5-cv;" + results.toCSV());

            // RCut
            RCut rcut;
            rcut = new RCut(learner[i].makeCopy());
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;cardinality;" + results.toCSV());
            rcut = new RCut(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;train;" + results.toCSV());
            rcut = new RCut(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(rcut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";rcut;5-cv;" + results.toCSV());

            // SCut
            SCut scut;
            scut = new SCut(learner[i].makeCopy(), new HammingLoss());
            results = eval.crossValidate(scut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";scut;train;" + results.toCSV());
            scut = new SCut(learner[i].makeCopy(), new HammingLoss(), 5);
            results = eval.crossValidate(scut, dataset, measures, numFolds);
            System.out.println(learnerName[i] + ";scut;5-cv;" + results.toCSV());

            // MetaLabeler: an M5P (numeric meta-class) or J48 (nominal meta-class)
            // meta-model predicts how many labels to assign.
            // NOTE(fix): the original code mislabelled the folds=1 J48 rank-based
            // run as "cv"; every folds=1 run is now consistently reported "train".
            for (int folds : foldSettings) {
                String foldLabel = (folds == 1) ? "train" : "cv";
                for (int a = 0; a < approaches.length; a++) {
                    MetaLabeler ml = new MetaLabeler(learner[i].makeCopy(), new M5P(), approaches[a],
                            "Numeric-Class");
                    ml.setFolds(folds);
                    results = eval.crossValidate(ml, dataset, measures, numFolds);
                    System.out.println(learnerName[i] + ";metalabeler;m5p;" + foldLabel + ";"
                            + approachLabels[a] + ";" + results.toCSV());
                }
                for (int a = 0; a < approaches.length; a++) {
                    MetaLabeler ml = new MetaLabeler(learner[i].makeCopy(), new J48(), approaches[a],
                            "Nominal-Class");
                    ml.setFolds(folds);
                    results = eval.crossValidate(ml, dataset, measures, numFolds);
                    System.out.println(learnerName[i] + ";metalabeler;j48;" + foldLabel + ";"
                            + approachLabels[a] + ";" + results.toCSV());
                }
            }

            // ThresholdPrediction: an M5P meta-model predicts the threshold itself.
            for (int folds : foldSettings) {
                String foldLabel = (folds == 1) ? "train" : "5-cv";
                for (int a = 0; a < approaches.length; a++) {
                    ThresholdPrediction tp = new ThresholdPrediction(learner[i].makeCopy(), new M5P(),
                            approaches[a], folds);
                    results = eval.crossValidate(tp, dataset, measures, numFolds);
                    System.out.println(learnerName[i] + ";tp;m5p;" + foldLabel + ";" + approachLabels[a]
                            + ";" + results.toCSV());
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:util.Weka.java

/**
 * Builds the knowledge base: loads the training file, marks the last
 * attribute as the class and trains an M5P model tree (use J48 instead
 * for classification problems).
 *
 * @param FicheroEntrenamiento path to the training (ARFF) file
 */
public Weka(String FicheroEntrenamiento) {
    // try-with-resources: the original never closed the FileReader (leak).
    try (BufferedReader lector = new BufferedReader(new FileReader(FicheroEntrenamiento))) {
        casosEntrenamiento = new Instances(lector);
        // By convention the class attribute is the last attribute in the file.
        casosEntrenamiento.setClassIndex(casosEntrenamiento.numAttributes() - 1);
        conocimiento = new M5P(); // M5P for regression, or J48 for classification
        conocimiento.buildClassifier(casosEntrenamiento);
    } catch (Exception e) {
        e.printStackTrace();
    }
}