Example usage for weka.classifiers.trees J48 J48

List of usage examples for weka.classifiers.trees J48 J48

Introduction

On this page you can find example usage for weka.classifiers.trees J48.

Prototype

J48

Source Link

Usage

From source file:homemadeWEKA.java

/**
 * Trains a J48 (C4.5) decision tree on the given data set and persists it.
 *
 * @param data training instances; the class index is assumed to be set by the caller
 * @throws Exception if the tree cannot be built or the model cannot be saved
 */
public static void treeLearning(Instances data) throws Exception {
    // J48 already implements Classifier — no cast needed.
    Classifier model_tree = new J48();
    model_tree.buildClassifier(data); // builds a pruned C4.5 tree
    save_model(model_tree); // persisted by a sibling helper in this file
}

From source file:homemadeWEKA.java

/**
 * Runs 10-fold cross-validation of a J48 decision tree over {@code data}
 * and stores the model together with its evaluation results.
 */
public static void treeLearning_crossVal(Instances data) throws Exception {
    J48 decisionTree = new J48();
    Evaluation evaluation = new Evaluation(data);
    // Fixed seed (1) keeps the fold assignment reproducible.
    evaluation.crossValidateModel(decisionTree, data, 10, new Random(1));
    save_modelWithEval(decisionTree, evaluation);
}

From source file:ClassificationClass.java

/**
 * Builds a C4.5 (J48) decision tree on {@code data} and evaluates it on the
 * same instances.
 *
 * @param data data set; the last attribute is used as the class attribute
 * @return the evaluation, or {@code null} if training/evaluation failed
 */
public Evaluation cls_c4_5(Instances data) {
    Evaluation eval = null;
    try {
        // The last attribute is the class to predict.
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new J48();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        // NOTE(review): evaluating on the training data yields resubstitution
        // accuracy, which is optimistic; consider cross-validation instead.
        eval.evaluateModel(classifier, data);

        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}

From source file:adams.ml.model.classification.WekaClassifier.java

License:Open Source License

/**
 * Adds options to the internal list of options.
 *///from  w w  w  . j  av a 2 s  . c  o  m
@Override
@Override
public void defineOptions() {
    super.defineOptions();

    // Registers a "classifier" option (property name and flag are both
    // "classifier") whose default value is an unconfigured J48 decision tree.
    m_OptionManager.add("classifier", "classifier", new J48());
}

From source file:algoritmogeneticocluster.NewClass.java

/**
 * Compares five classifiers on "tabela10.arff" via 10-fold cross-validation
 * and prints the overall accuracy of each one.
 */
public static void main(String[] args) throws Exception {
    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // 10-fold split: index [0] holds the training folds, [1] the test folds.
    Instances[][] split = crossValidationSplit(data, 10);
    Instances[] trainingSplits = split[0];
    Instances[] testingSplits = split[1];

    // Candidate classifiers: SVM (SMO), C4.5 tree (J48), rule learner (PART),
    // decision-table majority classifier, and a one-level decision stump.
    Classifier[] models = { new SMO(), new J48(), new PART(), new DecisionTable(), new DecisionStump() };

    for (Classifier model : models) {
        // Accumulate the predictions from every fold for this model.
        FastVector predictions = new FastVector();
        for (int fold = 0; fold < trainingSplits.length; fold++) {
            Evaluation validation = classify(model, trainingSplits[fold], testingSplits[fold]);
            predictions.appendElements(validation.predictions());
            // Uncomment to see the summary for each training-testing pair:
            //System.out.println(model.toString());
        }

        // Overall accuracy of this classifier across all folds.
        double accuracy = calculateAccuracy(predictions);
        System.out.println("Accuracy of " + model.getClass().getSimpleName() + ": "
                + String.format("%.2f%%", accuracy) + "\n---------------------------------");
    }

}

From source file:aprendizadodemaquina.classificadores.ClassificadorDT.java

License:Open Source License

@Override
/**
 * Trains a J48 decision tree (wrapped in a FilteredClassifier) from an ARFF
 * training file.
 *
 * @param arquivoTreinamento ARFF file containing the training instances
 * @param parametros         classifier options (currently unused — see TODO)
 * @return 0 on success, 1 on failure
 */
@Override
public int treinar(File arquivoTreinamento, String parametros) {

    Logger.getLogger("ARS logger").info("Treinamento de classificador J48");
    // TODO: find out why the classifier parameters are not being applied
    // (the "parametros" argument is received but never used).

    try {
        ConverterUtils.DataSource fonte = new ConverterUtils.DataSource(arquivoTreinamento.getAbsolutePath());
        dadosTreinamento = fonte.getDataSet();

        // The class of interest is the last attribute (which relation it is).
        dadosTreinamento.setClassIndex(dadosTreinamento.numAttributes() - 1);

        // Wrap J48 in a classifier that accepts preprocessing filters, so a
        // Remove filter can later strip identification-only attributes
        // (sentence ID, termo1, termo2) before classification if needed.
        classificador = new FilteredClassifier();
        classificador.setClassifier(new J48());

        // Train the classifier.
        classificador.buildClassifier(dadosTreinamento);

        return 0;

    } catch (IllegalArgumentException ex) {
        // Weka surfaces an invalid ARFF file as IllegalArgumentException;
        // log it and tell the user explicitly.
        Logger.getLogger("ARS logger").log(Level.SEVERE, null, ex);
        javax.swing.JOptionPane.showMessageDialog(null,
                "O arquivo de treinamento fornecido no  um arquivo ARFF vlido", "Erro",
                javax.swing.JOptionPane.ERROR_MESSAGE);
        return 1;
    } catch (Exception ex) {
        // Any other failure: log and report failure to the caller.
        Logger.getLogger("ARS logger").log(Level.SEVERE, null, ex);
        return 1;
    }

}

From source file:at.aictopic1.sentimentanalysis.machinelearning.impl.J48Classifier.java

/**
 * sets classifier/*  w  ww .j ava 2  s  .c o m*/
 */
@Override
/**
 * Configures the concrete learner for this wrapper: a J48 (C4.5) decision
 * tree with default options, installed into {@code fcClassifier}
 * (presumably a FilteredClassifier — confirm against the enclosing class).
 */
@Override
protected void setClassifier() {

    // Base classifier with default J48 settings; further options could be set here.
    this.usedClassifier = new J48();
    // Hand the tree to the wrapping classifier used for training/prediction.
    this.fcClassifier.setClassifier(this.usedClassifier);
}

From source file:au.edu.usyd.it.yangpy.sampling.BPSO.java

License:Open Source License

/**
 * the target function in fitness form/*w w  w . ja  v a 2 s  .c o m*/
 * 
 * @return   classification accuracy
 */
/**
 * The fitness function: trains five different classifiers on the reduced
 * data set ("reduced.arff") and returns their mean classification score.
 *
 * @return mean score of the five classifiers (0.0 if the data could not be read)
 */
public double ensembleClassify() {
    double fitnessValue = 0.0;

    // try-with-resources closes the reader; the original leaked it.
    try (BufferedReader reader = new BufferedReader(new FileReader("reduced.arff"))) {
        // Load the modified data set; the last attribute is the class.
        Instances reducedSet = new Instances(reader);
        reducedSet.setClassIndex(reducedSet.numAttributes() - 1);

        // Score each classifier on the reduced set against the internal test set.
        printClassifierBanner("----------J4.8-----------");
        fitnessValue += classify(new J48(), reducedSet, internalTest);

        printClassifierBanner("-----3NearestNeighbor----");
        fitnessValue += classify(new IBk(3), reducedSet, internalTest);

        printClassifierBanner("--------NaiveBayes-------");
        fitnessValue += classify(new NaiveBayes(), reducedSet, internalTest);

        printClassifierBanner("-------RandomForest------");
        RandomForest rf5 = new RandomForest();
        rf5.setNumTrees(5);
        fitnessValue += classify(rf5, reducedSet, internalTest);

        printClassifierBanner("---------Logistic--------");
        fitnessValue += classify(new Logistic(), reducedSet, internalTest);

    } catch (IOException ioe) {
        ioe.printStackTrace();
    }

    // Average over the five classifiers.
    fitnessValue /= 5;

    if (verbose == true) {
        System.out.println();
        System.out.println("Fitness: " + fitnessValue);
        System.out.println("---------------------------------------------------");
    }

    return fitnessValue;
}

/** Prints the verbose-mode header shown before each classifier's evaluation. */
private void printClassifierBanner(String title) {
    if (verbose) {
        System.out.println();
        System.out.println(" |" + title + "|");
        System.out.println(" |            |            |");
    }
}

From source file:au.edu.usyd.it.yangpy.snp.Ensemble.java

License:Open Source License

/**
 * Evaluates the five member classifiers (unpruned J48, KStar, and cover-tree
 * k-NN with k = 1, 3, 5) on the test set, storing each one's AUC.
 *
 * @param mode "v" prints the per-classifier AUC values
 */
public void ensemble(String mode) throws Exception {

    numInstances = test.numInstances();
    numClasses = test.numClasses();
    givenValue = new double[numInstances];
    predictDistribution = new double[numClassifiers][numInstances][numClasses];
    predictValue = new double[numClassifiers][numInstances];
    voteValue = new double[numInstances][numClasses];

    // Record the true class label of every test instance.
    for (int idx = 0; idx < numInstances; idx++) {
        givenValue[idx] = test.instance(idx).classValue();
    }

    // Classifier slot 0: an unpruned C4.5 tree.
    J48 unprunedTree = new J48();
    unprunedTree.setUnpruned(true);
    aucClassifiers[0] = classify(unprunedTree, 0);

    // Slot 1: KStar instance-based learner.
    aucClassifiers[1] = classify(new KStar(), 1);

    // Slots 2-4: IBk with k = 1, 3, 5; all three share one CoverTree
    // nearest-neighbour search instance.
    CoverTree sharedSearch = new CoverTree();
    int slot = 2;
    for (int k : new int[] { 1, 3, 5 }) {
        IBk knn = new IBk(k);
        knn.setNearestNeighbourSearchAlgorithm(sharedSearch);
        aucClassifiers[slot] = classify(knn, slot);
        slot++;
    }

    // Print the classification results if in verbose ("v") mode.
    if (mode.equals("v")) {
        System.out.println("J48   AUC: " + aucClassifiers[0]);
        System.out.println("KStar AUC: " + aucClassifiers[1]);
        System.out.println("CTNN1 AUC: " + aucClassifiers[2]);
        System.out.println("CTNN3 AUC: " + aucClassifiers[3]);
        System.out.println("CTNN5 AUC: " + aucClassifiers[4]);
        System.out.println("   -         -   ");
    }
}

From source file:boa.aggregators.DecisionTreeAggregator.java

License:Apache License

/** {@inheritDoc} */
@Override//from  www . j a v  a  2s  .c  o  m
public void finish() throws IOException, InterruptedException {
    int NumOfAttributes = this.getVectorSize();
    List<Attribute> attributes = new ArrayList<Attribute>();
    FastVector fvAttributes = new FastVector(NumOfAttributes);

    for (int i = 0; i < NumOfAttributes; i++) {
        attributes.add(new Attribute("Attribute" + i));
        fvAttributes.addElement(attributes.get(i));
    }

    Instances trainingSet = new Instances("DecisionTree", fvAttributes, 1);
    trainingSet.setClassIndex(NumOfAttributes - 1);

    for (List<Double> vector : this.vectors.values()) {
        Instance instance = new Instance(NumOfAttributes);
        for (int i = 0; i < vector.size(); i++) {
            instance.setValue((Attribute) fvAttributes.elementAt(i), vector.get(i));
        }
        trainingSet.add(instance);
    }

    try {
        this.model = new J48();
        this.model.setOptions(options);
        this.model.buildClassifier(trainingSet);
    } catch (Exception ex) {
    }

    this.saveModel(this.model);
}