Example usage for weka.classifiers.trees.J48: the J48() constructor

List of usage examples for the J48() constructor of weka.classifiers.trees.J48

Introduction

On this page you can find example usage for the J48() constructor of weka.classifiers.trees.J48.

Prototype

public J48()
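The no-argument constructor creates a C4.5 decision tree learner with Weka's defaults (pruned tree, confidence factor 0.25, at least two instances per leaf). Below is a minimal sketch of the usual construct, configure, train and evaluate cycle; the class name J48ConstructorDemo and the file iris.arff are placeholders, not part of any of the projects listed further down.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ConverterUtils.DataSource;

public class J48ConstructorDemo {

    public static void main(String[] args) throws Exception {
        // Load a dataset and use the last attribute as the class (placeholder file name).
        Instances data = new DataSource("iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Default constructor; options are supplied as separate tokens.
        // "-C 0.25 -M 2" matches the defaults and is shown only to illustrate the syntax.
        J48 tree = new J48();
        tree.setOptions(Utils.splitOptions("-C 0.25 -M 2"));
        tree.buildClassifier(data);

        // 10-fold cross-validation summary.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(tree, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}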

Usage

From source file:Learning.WekaWrapper.java

public double[] evaluate(String fn) throws Exception {

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(fn);

    Instances data = source.getDataSet();

    // setting class attribute if the data format does not provide this information
    // For example, the XRFF format saves the class attribute information as well
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    tree = new J48(); // new instance of tree

    // Options must be passed as separate tokens; a single "-C 0.25 -M 2" string
    // is not parsed by setOptions().
    String[] options = { "-C", "0.25", "-M", "2" };
    tree.setOptions(options);
    tree.buildClassifier(data); // build classifier

    // eval
    eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 5, new Random(1));

    // System.out.println("corr: " + eval.pctCorrect());
    // System.out.println("inco: " + eval.pctIncorrect());
    // System.out.println(eval.toSummaryString());
    // System.out.println(eval.toMatrixString());
    //  System.out.println(eval.toClassDetailsString());
    double[] results = new double[2];
    results[0] = eval.pctCorrect();
    results[1] = eval.pctIncorrect();
    return results;
}
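A possible call site for evaluate() (a hedged sketch: the WekaWrapper no-argument constructor is assumed, and iris.arff stands in for any ARFF file whose last attribute can serve as the class). The returned array holds the cross-validated percentages of correctly and incorrectly classified instances:

// Hypothetical usage of the method above, from code that declares throws Exception.
WekaWrapper wrapper = new WekaWrapper();
double[] results = wrapper.evaluate("iris.arff");
System.out.printf("correct: %.2f%%, incorrect: %.2f%%%n", results[0], results[1]);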

From source file:LVCoref.WekaWrapper.java

License:Open Source License

public static void main(String[] args) {
    try {
        List<Document> docs = new LinkedList<Document>();
        Document d = new Document();
        d.readCONLL("data/pipeline/interview_16.lvsem.conll");
        d.addAnnotationMMAX("data/interview_16_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_23.lvsem.conll");
        d.addAnnotationMMAX("data/interview_23_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_27.lvsem.conll");
        d.addAnnotationMMAX("data/interview_27_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_38.lvsem.conll");
        d.addAnnotationMMAX("data/interview_38_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);

        Instances train = toArff2(docs);
        train.setClassIndex(train.numAttributes() - 1);
        String[] options = { "-U" };//, "-C", "0.5"};
        Classifier cls = new J48();
        cls.setOptions(options);
        cls.buildClassifier(train);

        docs = new LinkedList<Document>();
        d = new Document();
        d.readCONLL("data/pipeline/interview_43.lvsem.conll");
        d.addAnnotationMMAX("data/interview_43_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);
        d = new Document();
        d.readCONLL("data/pipeline/interview_46.lvsem.conll");
        d.addAnnotationMMAX("data/interview_46_coref_level.xml");
        d.useGoldMentions();
        docs.add(d);

        Evaluation eval = new Evaluation(train);

        Instances data = toArff2(docs);
        data.setClassIndex(data.numAttributes() - 1);
        for (int i = 0; i < data.numInstances(); i++) {
            double clsLabel = cls.classifyInstance(data.instance(i));
            //System.out.println(clsLabel);
            data.instance(i).setClassValue(clsLabel);
            System.out.println(data.instance(i).toString(data.classIndex()));
        }

        //     eval.crossValidateModel(cls, train, 10, new Random(1));
        //            // generate curve
        //     ThresholdCurve tc = new ThresholdCurve();
        //     //int classIndex = test.numAttributes()-1;
        //     Instances result = tc.getCurve(eval.predictions());//, classIndex);
        // 
        //     // plot curve
        //     ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
        //     vmc.setROCString("(Area under ROC = " + 
        //         weka.core.Utils.doubleToString(tc.getROCArea(result), 4) + ")");
        //     vmc.setName(result.relationName());
        //     PlotData2D tempd = new PlotData2D(result);
        //     tempd.setPlotName(result.relationName());
        //     tempd.addInstanceNumberAttribute();
        //     // specify which points are connected
        //     boolean[] cp = new boolean[result.numInstances()];
        //     for (int n = 1; n < cp.length; n++)
        //       cp[n] = true;
        //     tempd.setConnectPoints(cp);
        //     // add plot
        //     vmc.addPlot(tempd);
        // 
        //     // display curve
        //     String plotName = vmc.getName(); 
        //     final javax.swing.JFrame jf = 
        //       new javax.swing.JFrame("Weka Classifier Visualize: "+plotName);
        //     jf.setSize(500,400);
        //     jf.getContentPane().setLayout(new BorderLayout());
        //     jf.getContentPane().add(vmc, BorderLayout.CENTER);
        //     jf.addWindowListener(new java.awt.event.WindowAdapter() {
        //       public void windowClosing(java.awt.event.WindowEvent e) {
        //       jf.dispose();
        //       }
        //     });
        //     jf.setVisible(true);

        //            Instances test = toArff2(docs);
        //            test.setClassIndex(test.numAttributes()-1);
        //            
        //            
        //           Evaluation evals = new Evaluation(train); 
        //
        //            evals.evaluateModel(cls, test);
        //            System.out.println(evals.toSummaryString("\nResults\n======\n", false));
        //             System.out.println(evals.toMatrixString());
        //              System.out.println(evals.toClassDetailsString());
        //            
        //            System.out.println(cls);
        //            //System.out.println(toArff2(docs));

    } catch (Exception ex) {
        Logger.getLogger(WekaWrapper.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:machinelearning_cw.MachineLearning_CW.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here

    /* Initializing test datasets */
    ArrayList<Instances> trainData = new ArrayList<Instances>();
    ArrayList<Instances> testData = new ArrayList<Instances>();

    Instances train = WekaLoader.loadData("PitcherTrain.arff");
    Instances test = WekaLoader.loadData("PitcherTest.arff");
    trainData.add(train);
    testData.add(test);

    Instances bananaTrain = WekaLoader.loadData("banana-train.arff");
    Instances bananaTest = WekaLoader.loadData("banana-test.arff");
    trainData.add(bananaTrain);
    testData.add(bananaTest);

    Instances cloudTrain = WekaLoader.loadData("clouds-train.arff");
    Instances cloudTest = WekaLoader.loadData("clouds-test.arff");
    trainData.add(cloudTrain);
    testData.add(cloudTest);

    Instances concentricTrain = WekaLoader.loadData("concentric-train.arff");
    Instances concentricTest = WekaLoader.loadData("concentric-test.arff");
    trainData.add(concentricTrain);
    testData.add(concentricTest);

    // 3 dimensional data set
    Instances habermanTrain = WekaLoader.loadData("haberman-train.arff");
    Instances habermanTest = WekaLoader.loadData("haberman-test.arff");
    trainData.add(habermanTrain);
    testData.add(habermanTest);

    // >3 dimensional data sets
    Instances thyroidTrain = WekaLoader.loadData("thyroid-train.arff");
    Instances thyroidTest = WekaLoader.loadData("thyroid-test.arff");
    trainData.add(thyroidTrain);
    testData.add(thyroidTest);

    Instances heartTrain = WekaLoader.loadData("heart-train.arff");
    Instances heartTest = WekaLoader.loadData("heart-test.arff");
    trainData.add(heartTrain);
    testData.add(heartTest);

    Instances liverTrain = WekaLoader.loadData("liver-train.arff");
    Instances liverTest = WekaLoader.loadData("liver-test.arff");
    trainData.add(liverTrain);
    testData.add(liverTest);

    Instances pendigitisTrain = WekaLoader.loadData("pendigitis-train.arff");
    Instances pendigitisTest = WekaLoader.loadData("pendigitis-test.arff");
    trainData.add(pendigitisTrain);
    testData.add(pendigitisTest);

    Instances phonemeTrain = WekaLoader.loadData("phoneme-train.arff");
    Instances phonemeTest = WekaLoader.loadData("phoneme-test.arff");
    trainData.add(phonemeTrain);
    testData.add(phonemeTest);

    Instances yeastTrain = WekaLoader.loadData("yeast-train.arff");
    Instances yeastTest = WekaLoader.loadData("yeast-test.arff");
    trainData.add(yeastTrain);
    testData.add(yeastTest);

    /* Test to see that BasicKNN provides the same results obtained from
     * the hand exercise.
     */
    System.out.println(
            "Test to see that BasicKNN provides the same" + " results obtained from the hand exercise:");
    System.out.println("(Ties are settled randomly)");
    BasicKNN basicKNN = new BasicKNN();
    basicKNN.buildClassifier(train);
    for (int i = 0; i < test.size(); i++) {
        Instance inst = test.get(i);
        System.out.println(i + 1 + ": " + basicKNN.classifyInstance(inst));
    }

    /* Initializing alternative classifiers */
    IBk wekaKNN = new IBk();
    NaiveBayes naiveBayes = new NaiveBayes();
    J48 decisionTree = new J48();
    SMO svm = new SMO();

    /* Tests for experiments 1,2 & 3 */
    KNN myKNN = new KNN();
    myKNN.setUseStandardisedAttributes(true);
    myKNN.setAutoDetermineK(false);
    myKNN.setUseWeightedVoting(true);
    myKNN.buildClassifier(train);
    //myKNN.setUseAcceleratedNNSearch(true);
    System.out.println("\nAccuracy Experiments:");
    MachineLearning_CW.performClassifierAccuracyTests(myKNN, trainData, testData, 1);

    /* Timing tests */
    System.out.println("\n\nTiming Experiments:");
    MachineLearning_CW.performClassifierTimingTests(wekaKNN, trainData, testData);
}

From source file:matres.MatResUI.java

private void doClassification() {
    J48 m_treeResiko;
    J48 m_treeAksi;
    NaiveBayes m_nbResiko;
    NaiveBayes m_nbAksi;
    FastVector m_fvInstanceRisks;
    FastVector m_fvInstanceActions;

    InputStream isRiskTree = getClass().getResourceAsStream("data/ResikoTree.model");
    InputStream isRiskNB = getClass().getResourceAsStream("data/ResikoNB.model");
    InputStream isActionTree = getClass().getResourceAsStream("data/AksiTree.model");
    InputStream isActionNB = getClass().getResourceAsStream("data/AksiNB.model");

    m_treeResiko = new J48();
    m_treeAksi = new J48();
    m_nbResiko = new NaiveBayes();
    m_nbAksi = new NaiveBayes();
    try {
        //m_treeResiko = (J48) weka.core.SerializationHelper.read("ResikoTree.model");
        m_treeResiko = (J48) weka.core.SerializationHelper.read(isRiskTree);
        //m_nbResiko = (NaiveBayes) weka.core.SerializationHelper.read("ResikoNB.model");
        m_nbResiko = (NaiveBayes) weka.core.SerializationHelper.read(isRiskNB);
        //m_treeAksi = (J48) weka.core.SerializationHelper.read("AksiTree.model");
        m_treeAksi = (J48) weka.core.SerializationHelper.read(isActionTree);
        //m_nbAksi = (NaiveBayes) weka.core.SerializationHelper.read("AksiNB.model");
        m_nbAksi = (NaiveBayes) weka.core.SerializationHelper.read(isActionNB);
    } catch (Exception ex) {
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("Setting up an Instance...");
    // Values for LIKELIHOOD OF OCCURRENCE
    FastVector fvLO = new FastVector(5);
    fvLO.addElement("> 10 in 1 year");
    fvLO.addElement("1 - 10 in 1 year");
    fvLO.addElement("1 in 1 year to 1 in 10 years");
    fvLO.addElement("1 in 10 years to 1 in 100 years");
    fvLO.addElement("1 in more than 100 years");
    // Values for SAFETY
    FastVector fvSafety = new FastVector(5);
    fvSafety.addElement("near miss");
    fvSafety.addElement("first aid injury, medical aid injury");
    fvSafety.addElement("lost time injury / temporary disability");
    fvSafety.addElement("permanent disability");
    fvSafety.addElement("fatality");
    // Values for EXTRA FUEL COST
    FastVector fvEFC = new FastVector(5);
    fvEFC.addElement("< 100 million rupiah");
    fvEFC.addElement("0,1 - 1 billion rupiah");
    fvEFC.addElement("1 - 10 billion rupiah");
    fvEFC.addElement("10 - 100  billion rupiah");
    fvEFC.addElement("> 100 billion rupiah");
    // Values for SYSTEM RELIABILITY
    FastVector fvSR = new FastVector(5);
    fvSR.addElement("< 100 MWh");
    fvSR.addElement("0,1 - 1 GWh");
    fvSR.addElement("1 - 10 GWh");
    fvSR.addElement("10 - 100 GWh");
    fvSR.addElement("> 100 GWh");
    // Values for EQUIPMENT COST
    FastVector fvEC = new FastVector(5);
    fvEC.addElement("< 50 million rupiah");
    fvEC.addElement("50 - 500 million rupiah");
    fvEC.addElement("0,5 - 5 billion rupiah");
    fvEC.addElement("5 -50 billion rupiah");
    fvEC.addElement("> 50 billion rupiah");
    // Values for CUSTOMER SATISFACTION SOCIAL FACTOR
    FastVector fvCSSF = new FastVector(5);
    fvCSSF.addElement("Complaint from the VIP customer");
    fvCSSF.addElement("Complaint from industrial customer");
    fvCSSF.addElement("Complaint from community");
    fvCSSF.addElement("Complaint from community that have potential riot");
    fvCSSF.addElement("High potential riot");
    // Values for RISK
    FastVector fvRisk = new FastVector(4);
    fvRisk.addElement("Low");
    fvRisk.addElement("Moderate");
    fvRisk.addElement("High");
    fvRisk.addElement("Extreme");
    // Values for ACTION
    FastVector fvAction = new FastVector(3);
    fvAction.addElement("Life Extension Program");
    fvAction.addElement("Repair/Refurbish");
    fvAction.addElement("Replace/Run to Fail + Investment");

    // Defining Attributes, including Class(es) Attributes
    Attribute attrLO = new Attribute("LO", fvLO);
    Attribute attrSafety = new Attribute("Safety", fvSafety);
    Attribute attrEFC = new Attribute("EFC", fvEFC);
    Attribute attrSR = new Attribute("SR", fvSR);
    Attribute attrEC = new Attribute("EC", fvEC);
    Attribute attrCSSF = new Attribute("CSSF", fvCSSF);
    Attribute attrRisk = new Attribute("Risk", fvRisk);
    Attribute attrAction = new Attribute("Action", fvAction);

    m_fvInstanceRisks = new FastVector(7);
    m_fvInstanceRisks.addElement(attrLO);
    m_fvInstanceRisks.addElement(attrSafety);
    m_fvInstanceRisks.addElement(attrEFC);
    m_fvInstanceRisks.addElement(attrSR);
    m_fvInstanceRisks.addElement(attrEC);
    m_fvInstanceRisks.addElement(attrCSSF);
    m_fvInstanceRisks.addElement(attrRisk);

    m_fvInstanceActions = new FastVector(7);
    m_fvInstanceActions.addElement(attrLO);
    m_fvInstanceActions.addElement(attrSafety);
    m_fvInstanceActions.addElement(attrEFC);
    m_fvInstanceActions.addElement(attrSR);
    m_fvInstanceActions.addElement(attrEC);
    m_fvInstanceActions.addElement(attrCSSF);
    m_fvInstanceActions.addElement(attrAction);

    Instances dataRisk = new Instances("A-Risk-instance-to-classify", m_fvInstanceRisks, 0);
    Instances dataAction = new Instances("An-Action-instance-to-classify", m_fvInstanceActions, 0);
    double[] riskValues = new double[dataRisk.numAttributes()];
    double[] actionValues = new double[dataAction.numAttributes()];

    String strLO = (String) m_cmbLO.getSelectedItem();
    String strSafety = (String) m_cmbSafety.getSelectedItem();
    String strEFC = (String) m_cmbEFC.getSelectedItem();
    String strSR = (String) m_cmbSR.getSelectedItem();
    String strEC = (String) m_cmbEC.getSelectedItem();
    String strCSSF = (String) m_cmbCSSF.getSelectedItem();

    Instance instRisk = new DenseInstance(7);
    Instance instAction = new DenseInstance(7);

    if (strLO.equals("-- none --")) {
        instRisk.setMissing(0);
        instAction.setMissing(0);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(0), strLO);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(0), strLO);
    }
    if (strSafety.equals("-- none --")) {
        instRisk.setMissing(1);
        instAction.setMissing(1);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(1), strSafety);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(1), strSafety);
    }
    if (strEFC.equals("-- none --")) {
        instRisk.setMissing(2);
        instAction.setMissing(2);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(2), strEFC);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(2), strEFC);
    }
    if (strSR.equals("-- none --")) {
        instRisk.setMissing(3);
        instAction.setMissing(3);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(3), strSR);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(3), strSR);
    }
    if (strEC.equals("-- none --")) {
        instRisk.setMissing(4);
        instAction.setMissing(4);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(4), strEC);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(4), strEC);
    }
    if (strCSSF.equals("-- none --")) {
        instRisk.setMissing(5);
        instAction.setMissing(5);
    } else {
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(5), strCSSF);
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(5), strCSSF);
    }
    instRisk.setMissing(6);
    instAction.setMissing(6);

    dataRisk.add(instRisk);
    instRisk.setDataset(dataRisk);
    dataRisk.setClassIndex(dataRisk.numAttributes() - 1);

    dataAction.add(instAction);
    instAction.setDataset(dataAction);
    dataAction.setClassIndex(dataAction.numAttributes() - 1);

    System.out.println("Instance Resiko: " + dataRisk.instance(0));
    System.out.println("\tNum Attributes : " + dataRisk.numAttributes());
    System.out.println("\tNum instances  : " + dataRisk.numInstances());
    System.out.println("Instance Action: " + dataAction.instance(0));
    System.out.println("\tNum Attributes : " + dataAction.numAttributes());
    System.out.println("\tNum instances  : " + dataAction.numInstances());

    int classIndexRisk = 0;
    int classIndexAction = 0;
    String strClassRisk = null;
    String strClassAction = null;

    try {
        //classIndexRisk = (int) m_treeResiko.classifyInstance(dataRisk.instance(0));
        classIndexRisk = (int) m_treeResiko.classifyInstance(instRisk);
        classIndexAction = (int) m_treeAksi.classifyInstance(instAction);
    } catch (Exception ex) {
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    strClassRisk = (String) fvRisk.elementAt(classIndexRisk);
    strClassAction = (String) fvAction.elementAt(classIndexAction);
    System.out.println("[Risk  Class Index: " + classIndexRisk + " Class Label: " + strClassRisk + "]");
    System.out.println("[Action  Class Index: " + classIndexAction + " Class Label: " + strClassAction + "]");
    if (strClassRisk != null) {
        m_txtRisk.setText(strClassRisk);
    }

    double[] riskDist = null;
    double[] actionDist = null;
    try {
        riskDist = m_nbResiko.distributionForInstance(dataRisk.instance(0));
        actionDist = m_nbAksi.distributionForInstance(dataAction.instance(0));
        String strProb;
        // set up RISK progress bars
        m_jBarRiskLow.setValue((int) (100 * riskDist[0]));
        m_jBarRiskLow.setString(String.format("%6.3f%%", 100 * riskDist[0]));
        m_jBarRiskModerate.setValue((int) (100 * riskDist[1]));
        m_jBarRiskModerate.setString(String.format("%6.3f%%", 100 * riskDist[1]));
        m_jBarRiskHigh.setValue((int) (100 * riskDist[2]));
        m_jBarRiskHigh.setString(String.format("%6.3f%%", 100 * riskDist[2]));
        m_jBarRiskExtreme.setValue((int) (100 * riskDist[3]));
        m_jBarRiskExtreme.setString(String.format("%6.3f%%", 100 * riskDist[3]));
    } catch (Exception ex) {
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    double predictedProb = 0.0;
    String predictedClass = "";

    // Loop over all the prediction labels in the distribution.
    for (int predictionDistributionIndex = 0; predictionDistributionIndex < riskDist.length; predictionDistributionIndex++) {
        // Get this distribution index's class label.
        String predictionDistributionIndexAsClassLabel = dataRisk.classAttribute()
                .value(predictionDistributionIndex);
        int classIndex = dataRisk.classAttribute().indexOfValue(predictionDistributionIndexAsClassLabel);
        // Get the probability.
        double predictionProbability = riskDist[predictionDistributionIndex];

        if (predictionProbability > predictedProb) {
            predictedProb = predictionProbability;
            predictedClass = predictionDistributionIndexAsClassLabel;
        }

        System.out.printf("[%2d %10s : %6.3f]", classIndex, predictionDistributionIndexAsClassLabel,
                predictionProbability);
    }
    m_txtRiskNB.setText(predictedClass);
}

From source file:meddle.TrainModelByDomainOS.java

License:Open Source License

/**
 * Given the classifierName, return a classifier
 *
 * @param classifierName
 *            e.g. J48, Bagging etc.
 */
public static Classifier getClassifier(String classifierName) {
    Classifier classifier = null;
    if (classifierName.equals("J48")) {
        J48 j48 = new J48();
        j48.setUnpruned(true);
        classifier = j48;
    } else if (classifierName.equals("AdaBoostM1")) {
        AdaBoostM1 adm = new AdaBoostM1();
        adm.setNumIterations(10);
        J48 j48 = new J48();
        adm.setClassifier(j48);
        classifier = adm;
    } else if (classifierName.equals("Bagging")) {
        Bagging bagging = new Bagging();
        bagging.setNumIterations(10);
        J48 j48 = new J48();
        bagging.setClassifier(j48);
        classifier = bagging;
    } else if (classifierName.equals("Stacking")) {
        Stacking stacking = new Stacking();
        stacking.setMetaClassifier(new Logistic());
        Classifier cc[] = new Classifier[2];
        cc[0] = new J48();
        cc[1] = new IBk();
        stacking.setClassifiers(cc);
        classifier = stacking;
    } else if (classifierName.equals("AdditiveRegression")) {
        AdditiveRegression ar = new AdditiveRegression();
        ar.setClassifier(new J48());
        classifier = ar;
    } else if (classifierName.equals("LogitBoost")) {
        LogitBoost lb = new LogitBoost();
        lb.setClassifier(new J48());
        classifier = lb;
    }
    return classifier;
}
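A minimal usage sketch for this factory method (assuming weka.core.Instances and weka.core.converters.ConverterUtils are imported, the calling code declares throws Exception, and train.arff is a placeholder training file with a nominal class); the "Bagging" branch above returns an ensemble of ten J48 trees:

// Hypothetical usage; train.arff is a stand-in for a real training file.
Instances train = new ConverterUtils.DataSource("train.arff").getDataSet();
train.setClassIndex(train.numAttributes() - 1);

Classifier cls = getClassifier("Bagging"); // bagging over ten default J48 trees
cls.buildClassifier(train);
System.out.println(cls);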

From source file:meka.classifiers.multilabel.ProblemTransformationMethod.java

License:Open Source License

public ProblemTransformationMethod() {
    // default classifier for GUI
    this.m_Classifier = new J48();
}

From source file:meka.classifiers.multitarget.BCC.java

License:Open Source License

public BCC() {
    // default classifier for GUI
    this.m_Classifier = new J48();
}

From source file:meka.classifiers.multitarget.NSR.java

License:Open Source License

public NSR() {
    // default classifier for GUI
    this.m_Classifier = new J48();
}

From source file:mlpoc.MLPOC.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        // TODO code application logic here
        BufferedReader br;
        br = new BufferedReader(
                new FileReader("D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff"));
        Instances training_data = new Instances(br);
        br.close();
        training_data.setClassIndex(training_data.numAttributes() - 1);
        br = new BufferedReader(new FileReader("D:/Extra/B.E Project/agrodeploy/webapp/Data/TestFinal.arff"));
        Instances testing_data = new Instances(br);
        br.close();
        testing_data.setClassIndex(testing_data.numAttributes() - 1);
        String summary = training_data.toSummaryString();
        int number_samples = training_data.numInstances();
        int number_attributes_per_sample = training_data.numAttributes();
        System.out.println("Number of attributes in model = " + number_attributes_per_sample);
        System.out.println("Number of samples = " + number_samples);
        System.out.println("Summary: " + summary);
        System.out.println();

        J48 j48 = new J48();
        FilteredClassifier fc = new FilteredClassifier();
        fc.setClassifier(j48);
        fc.buildClassifier(training_data);
        System.out.println("Testing instances: " + testing_data.numInstances());
        for (int i = 0; i < testing_data.numInstances(); i++) {
            double pred = fc.classifyInstance(testing_data.instance(i));
            String s1 = testing_data.classAttribute().value((int) pred);
            System.out.println(testing_data.instance(i) + " Predicted value: " + s1);
        }
        Evaluation crossValidate = crossValidate(
                "D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff");

        DataSource source = new DataSource(
                "D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff");
        Instances data = source.getDataSet();
        System.out.println(data.numInstances());
        data.setClassIndex(data.numAttributes() - 1);

        // 1. meta-classifier
        useClassifier(data);

        // 2. filter
        useFilter(data);
    } catch (Exception ex) {
        Logger.getLogger(MLPOC.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:mlpoc.MLPOC.java

/**
 * uses the meta-classifier
 */
protected static void useClassifier(Instances data) throws Exception {
    System.out.println("\n1. Meta-classfier");
    AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
    CfsSubsetEval eval = new CfsSubsetEval();
    GreedyStepwise search = new GreedyStepwise();
    search.setSearchBackwards(true);
    J48 base = new J48();
    classifier.setClassifier(base);
    classifier.setEvaluator(eval);
    classifier.setSearch(search);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(classifier, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString());
}