Example usage for weka.classifiers.trees J48 buildClassifier

List of usage examples for weka.classifiers.trees J48 buildClassifier

Introduction

On this page you can find example usage for weka.classifiers.trees J48 buildClassifier.

Prototype

@Override
public void buildClassifier(Instances instances) throws Exception 

Document

Generates the classifier.
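
For orientation, here is a minimal, self-contained sketch of the typical call sequence. The dataset path and the choice of the last attribute as the class are assumptions for illustration only.

import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class J48BuildExample {
    public static void main(String[] args) throws Exception {
        // Load an ARFF file (path is illustrative)
        Instances data = new DataSource("data/train.arff").getDataSet();
        // buildClassifier requires the class attribute to be set;
        // using the last attribute is a common convention
        data.setClassIndex(data.numAttributes() - 1);
        J48 tree = new J48();
        tree.buildClassifier(data); // generates the C4.5 decision tree
        System.out.println(tree);   // textual rendering of the tree
    }
}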

Usage

From source file:function.BuildClassifier.java

public static void buildClassifier(Instances inst) throws Exception {
    String[] options = new String[] { "-U" }; // -U = unpruned tree
    J48 tree = new J48();
    tree.setOptions(options);
    tree.buildClassifier(inst);
}
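
A hedged way to call this helper, assuming the data comes from an ARFF file whose last attribute is the class (both the path and that convention are assumptions):

// Hypothetical caller: the helper expects the class attribute to be set
DataSource source = new DataSource("data/train.arff");
Instances inst = source.getDataSet();
inst.setClassIndex(inst.numAttributes() - 1);
BuildClassifier.buildClassifier(inst);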

From source file:ia02classificacao.IA02Classificacao.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {

    // open the ARFF database and print the number of instances (rows)
    DataSource arquivo = new DataSource("data/zoo.arff");
    Instances dados = arquivo.getDataSet();
    System.out.println("Instancias lidas: " + dados.numInstances());

    // FILTER: remove the animal-name attribute from the classification
    String[] parametros = new String[] { "-R", "1" };
    Remove filtro = new Remove();
    filtro.setOptions(parametros);
    filtro.setInputFormat(dados);
    dados = Filter.useFilter(dados, filtro);
    // the class attribute (the last column in zoo.arff) must be set before selection and training
    dados.setClassIndex(dados.numAttributes() - 1);

    AttributeSelection selAtributo = new AttributeSelection();
    InfoGainAttributeEval avaliador = new InfoGainAttributeEval();
    Ranker busca = new Ranker();
    selAtributo.setEvaluator(avaliador);
    selAtributo.setSearch(busca);
    selAtributo.SelectAttributes(dados);
    int[] indices = selAtributo.selectedAttributes();
    System.out.println("Selected attributes: " + Utils.arrayToString(indices));

    // Use the J48 algorithm and print the classification of the data textually
    String[] opcoes = new String[] { "-U" }; // -U = unpruned
    J48 arvore = new J48();
    arvore.setOptions(opcoes);
    arvore.buildClassifier(dados);
    System.out.println(arvore);

    // Use the J48 algorithm and show the classification of the data graphically
    /*
    TreeVisualizer tv = new TreeVisualizer(null, arvore.graph(), new PlaceNode2());
    JFrame frame = new javax.swing.JFrame("Knowledge Tree");
    frame.setSize(800,500);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.getContentPane().add(tv);
    frame.setVisible(true);
    tv.fitToScreen();
    */

    /*
     * Classifying new data
     */

    System.out.println("\n\nCLASSIFYING NEW DATA");
    // create the attribute values
    double[] vals = new double[dados.numAttributes()];
    vals[0] = 1.0; // hair
    vals[1] = 0.0; // feathers
    vals[2] = 0.0; // eggs
    vals[3] = 1.0; // milk
    vals[4] = 1.0; // airborne
    vals[5] = 0.0; // aquatic
    vals[6] = 0.0; // predator
    vals[7] = 1.0; // toothed
    vals[8] = 1.0; // backbone
    vals[9] = 1.0; // breathes
    vals[10] = 0.0; // venomous
    vals[11] = 0.0; // fins
    vals[12] = 4.0; // legs
    vals[13] = 1.0; // tail
    vals[14] = 1.0; // domestic
    vals[15] = 1.0; // catsize

    // Create an instance based on these attributes
    Instance meuUnicornio = new DenseInstance(1.0, vals);

    // Associate the instance with the dataset (provides the attribute information)
    meuUnicornio.setDataset(dados);

    // Classify this new instance
    double label = arvore.classifyInstance(meuUnicornio);

    // Print the classification result
    System.out.println("New animal: Unicorn");
    System.out.println("classification: " + dados.classAttribute().value((int) label));

    /*
     * Evaluation and error metrics
     */
    System.out.println("\n\nEVALUATION AND ERROR METRICS");
    Classifier cl = new J48();
    Evaluation eval_roc = new Evaluation(dados);
    eval_roc.crossValidateModel(cl, dados, 10, new Random(1));
    System.out.println(eval_roc.toSummaryString());

    /*
     * Confusion matrix
     */
    System.out.println("\n\nCONFUSION MATRIX");
    System.out.println(eval_roc.toMatrixString());

}

From source file:ia03classificador.jFrClassificador.java

public void doClassificate() throws Exception {

    // When a button is selected the variable gets 1, otherwise 0
    v00 = ((btn00.isSelected()) ? ((double) 1) : ((double) 0));
    v01 = ((btn01.isSelected()) ? ((double) 1) : ((double) 0));
    v02 = ((btn02.isSelected()) ? ((double) 1) : ((double) 0));
    v03 = ((btn03.isSelected()) ? ((double) 1) : ((double) 0));
    v04 = ((btn04.isSelected()) ? ((double) 1) : ((double) 0));
    v05 = ((btn05.isSelected()) ? ((double) 1) : ((double) 0));
    v06 = ((btn06.isSelected()) ? ((double) 1) : ((double) 0));
    v07 = ((btn07.isSelected()) ? ((double) 1) : ((double) 0));
    v08 = ((btn08.isSelected()) ? ((double) 1) : ((double) 0));
    v09 = ((btn09.isSelected()) ? ((double) 1) : ((double) 0));
    v10 = ((btn10.isSelected()) ? ((double) 1) : ((double) 0));
    v11 = ((btn11.isSelected()) ? ((double) 1) : ((double) 0));
    v13 = ((btn13.isSelected()) ? ((double) 1) : ((double) 0));
    v14 = ((btn14.isSelected()) ? ((double) 1) : ((double) 0));
    v15 = ((btn15.isSelected()) ? ((double) 1) : ((double) 0));
    legs = txtLegs.getText();
    legs = ((legs == null || legs.trim().isEmpty() ? "2" : legs));
    name = txtName.getText();

    // open the ARFF database and store the records in the dados object
    ConverterUtils.DataSource arquivo = new ConverterUtils.DataSource("data/zoo.arff");
    Instances dados = arquivo.getDataSet();

    // FILTER: remove the animal-name attribute from the classification
    String[] parametros = new String[] { "-R", "1" };
    Remove filtro = new Remove();
    filtro.setOptions(parametros);
    filtro.setInputFormat(dados);
    dados = Filter.useFilter(dados, filtro);
    // the class attribute must be set before selection and training
    dados.setClassIndex(dados.numAttributes() - 1);

    AttributeSelection selAtributo = new AttributeSelection();
    InfoGainAttributeEval avaliador = new InfoGainAttributeEval();
    Ranker busca = new Ranker();
    selAtributo.setEvaluator(avaliador);
    selAtributo.setSearch(busca);
    selAtributo.SelectAttributes(dados);
    int[] indices = selAtributo.selectedAttributes();
    //System.out.println("Selected attributes: " + Utils.arrayToString(indices));

    // Use the J48 algorithm to build the decision tree
    String[] opcoes = new String[] { "-U" }; // -U = unpruned
    J48 arvore = new J48();
    arvore.setOptions(opcoes);
    arvore.buildClassifier(dados);

    // create the new element to classify
    double[] vals = new double[dados.numAttributes()];
    vals[0] = v00; // hair
    vals[1] = v01; // feathers
    vals[2] = v02; // eggs
    vals[3] = v03; // milk
    vals[4] = v04; // airborne
    vals[5] = v05; // aquatic
    vals[6] = v06; // predator
    vals[7] = v07; // toothed
    vals[8] = v08; // backbone
    vals[9] = v09; // breathes
    vals[10] = v10; // venomous
    vals[11] = v11; // fins
    vals[12] = Double.parseDouble(legs); // legs
    vals[13] = v13; // tail
    vals[14] = v14; // domestic
    vals[15] = v15; // catsize

    // Create an instance based on these attributes
    Instance newAnimal = new DenseInstance(1.0, vals);

    // Associate the instance with the dataset (provides the attribute information)
    newAnimal.setDataset(dados);

    // Classify this new instance
    double label = arvore.classifyInstance(newAnimal);

    // Display the classification result
    lblClassification.setText(dados.classAttribute().value((int) label));

}

From source file:kfst.classifier.WekaClassifier.java

License:Open Source License

/**
 * This method builds and evaluates a decision tree (DT) classifier.
 * J48 is used as the DT classifier, as implemented in the Weka software.
 *
 * @param pathTrainData the path of the train set
 * @param pathTestData the path of the test set
 * @param confidenceValue the confidence factor used for pruning
 * @param minNumSampleInLeaf the minimum number of instances per leaf
 *
 * @return the classification accuracy
 */
public static double dTree(String pathTrainData, String pathTestData, double confidenceValue,
        int minNumSampleInLeaf) {
    double resultValue = 0;
    try {
        BufferedReader readerTrain = new BufferedReader(new FileReader(pathTrainData));
        Instances dataTrain = new Instances(readerTrain);
        readerTrain.close();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        BufferedReader readerTest = new BufferedReader(new FileReader(pathTestData));
        Instances dataTest = new Instances(readerTest);
        readerTest.close();
        dataTest.setClassIndex(dataTest.numAttributes() - 1);

        J48 decisionTree = new J48();
        decisionTree.setConfidenceFactor((float) confidenceValue);
        decisionTree.setMinNumObj(minNumSampleInLeaf);
        decisionTree.buildClassifier(dataTrain);
        Evaluation eval = new Evaluation(dataTest);
        eval.evaluateModel(decisionTree, dataTest);
        resultValue = 100 - (eval.errorRate() * 100);
    } catch (Exception ex) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return resultValue;
}
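
A sketch of how dTree might be invoked (the file paths are placeholders; 0.25 and 2 mirror J48's default confidence factor and minimum leaf size):

// Hypothetical invocation with placeholder paths
double accuracy = WekaClassifier.dTree("data/train.arff", "data/test.arff", 0.25, 2);
System.out.println("Decision tree accuracy: " + accuracy + "%");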

From source file:KFST.featureSelection.embedded.TreeBasedMethods.DecisionTreeBasedMethod.java

License:Open Source License

/**
 * {@inheritDoc }
 */
@Override
protected String buildClassifier(Instances dataTrain) {
    try {
        if (TREE_TYPE == TreeType.C45) {
            J48 decisionTreeC45 = new J48();
            decisionTreeC45.setConfidenceFactor((float) confidenceValue);
            decisionTreeC45.setMinNumObj(minNumSampleInLeaf);
            decisionTreeC45.buildClassifier(dataTrain);
            return decisionTreeC45.toString();
        } else if (TREE_TYPE == TreeType.RANDOM_TREE) {
            RandomTree decisionTreeRandomTree = new RandomTree();
            decisionTreeRandomTree.setKValue(randomTreeKValue);
            decisionTreeRandomTree.setMaxDepth(randomTreeMaxDepth);
            decisionTreeRandomTree.setMinNum(randomTreeMinNum);
            decisionTreeRandomTree.setMinVarianceProp(randomTreeMinVarianceProp);
            decisionTreeRandomTree.buildClassifier(dataTrain);
            return decisionTreeRandomTree.toString();
        }
    } catch (Exception ex) {
        Logger.getLogger(DecisionTreeBasedMethod.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "";
}

From source file:new_pack.repro.java

public static void main(String[] args) throws Exception {
    System.out.println("count:");
    J48 tree = new J48();
    // Note: passing null here throws a NullPointerException; this snippet
    // appears to be a bug reproduction rather than a working example.
    tree.buildClassifier(null);
}

From source file:org.scify.NewSumServer.Server.MachineLearning.labelTagging.java

License:Apache License

/**
 * Finds the recommended labels from the classifier.
 *
 * @return the recommended labels
 */
public static String recommendation(INSECTDB file, String text) {

    String labelList = "-none-";
    //create IVector
    String Ivector = vector.labellingVector(text, file); // the similarity vector for each class graph

    try {

        Instances dataTrainSet = dataSets.trainingSet(file); // load the training dataset
        Instances dataLabelSet = dataSets.labelingSet(file, Ivector); // load the labeling dataset
        ArffSaver saver = new ArffSaver();
        saver.setInstances(dataTrainSet);
        saver.setFile(new File("./data/dataTrainSet.arff"));
        saver.writeBatch();

        ArffSaver saver2 = new ArffSaver();
        saver2.setInstances(dataLabelSet);
        saver2.setFile(new File("./data/dataLabelSet.arff"));
        saver2.writeBatch();

        File temp = File.createTempFile("exportFile", null);
        //TODO: create classifier

        //            String option = "-S 2 -K 2 -D 3 -G 0.0 -R 0.0 -N 0.5 -M 40.0 -C 1.0 -E 0.001 -P 0.1"; // classifier options
        //            String[] options = option.split("\\s+");

        if (dataTrainSet.classIndex() == -1) {
            dataTrainSet.setClassIndex(dataTrainSet.numAttributes() - 1);
        }

        // Create the classifier (J48; LibSVM options and other learners are left commented out)

        //            NaiveBayes nb = new NaiveBayes();
        //            RandomForest nb = new RandomForest();
        J48 nb = new J48();
        //            nb.setOptions(options);
        nb.buildClassifier(dataTrainSet);

        // End train method

        if (dataLabelSet.classIndex() == -1) {
            dataLabelSet.setClassIndex(dataLabelSet.numAttributes() - 1);
        }

        StringBuffer writer = new StringBuffer();

        PlainText output = new PlainText();
        output.setBuffer(writer);
        output.setHeader(dataLabelSet);
        output.printClassifications(nb, dataLabelSet);

        //            PrintStream ps2 = new PrintStream(classGname);
        //            ps2.print(writer.toString());
        //            ps2.close();
        PrintStream ps = new PrintStream(temp); // write the classification results to the temp file
        ps.print(writer.toString());
        ps.close();

        //TODO: export result
        labelList = result(temp) + " --------->> " + text; // if result is true, add the current class graph name to the label list
        Utilities.appendToFile(labelList);

    } catch (Exception ex) {
        Logger.getLogger(labelTagging.class.getName()).log(Level.SEVERE, null, ex);
    }

    return labelList;
}

From source file:org.uclab.mm.kcl.ddkat.modellearner.ModelLearner.java

License:Apache License

/**
 * Method to compute the classification accuracy.
 *
 * @param algo the algorithm name
 * @param data the data instances
 * @param datanature the dataset nature (i.e. original or processed data)
 * @return the algorithm name, decision attribute, model string, and accuracy
 * @throws Exception the exception
 */
protected String[] modelAccuracy(String algo, Instances data, String datanature) throws Exception {

    String modelResultSet[] = new String[4];
    String modelStr = "";
    Classifier classifier = null;

    // setting class attribute if the data format does not provide this information           
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    String decisionAttribute = data.attribute(data.numAttributes() - 1).toString();
    String res[] = decisionAttribute.split("\\s+");
    decisionAttribute = res[1];

    if (algo.equals("BFTree")) {

        // Use BFTree classifiers
        BFTree BFTreeclassifier = new BFTree();
        BFTreeclassifier.buildClassifier(data);
        modelStr = BFTreeclassifier.toString();
        classifier = BFTreeclassifier;

    } else if (algo.equals("FT")) {

        // Use FT classifiers
        FT FTclassifier = new FT();
        FTclassifier.buildClassifier(data);
        modelStr = FTclassifier.toString();
        classifier = FTclassifier;

    } else if (algo.equals("J48")) {

        // Use J48 classifiers
        J48 J48classifier = new J48();
        J48classifier.buildClassifier(data);
        modelStr = J48classifier.toString();
        classifier = J48classifier;
        System.out.println("Model String: " + modelStr);

    } else if (algo.equals("J48graft")) {

        // Use J48graft classifiers
        J48graft J48graftclassifier = new J48graft();
        J48graftclassifier.buildClassifier(data);
        modelStr = J48graftclassifier.toString();
        classifier = J48graftclassifier;

    } else if (algo.equals("RandomTree")) {

        // Use RandomTree classifiers
        RandomTree RandomTreeclassifier = new RandomTree();
        RandomTreeclassifier.buildClassifier(data);
        modelStr = RandomTreeclassifier.toString();
        classifier = RandomTreeclassifier;

    } else if (algo.equals("REPTree")) {

        // Use REPTree classifiers
        REPTree REPTreeclassifier = new REPTree();
        REPTreeclassifier.buildClassifier(data);
        modelStr = REPTreeclassifier.toString();
        classifier = REPTreeclassifier;

    } else if (algo.equals("SimpleCart")) {

        // Use SimpleCart classifiers
        SimpleCart SimpleCartclassifier = new SimpleCart();
        SimpleCartclassifier.buildClassifier(data);
        modelStr = SimpleCartclassifier.toString();
        classifier = SimpleCartclassifier;

    }

    modelResultSet[0] = algo;
    modelResultSet[1] = decisionAttribute;
    modelResultSet[2] = modelStr;

    // Collect every group of predictions for J48 model in a FastVector
    FastVector predictions = new FastVector();

    Evaluation evaluation = new Evaluation(data);
    int folds = 10; // 10-fold cross-validation
    evaluation.crossValidateModel(classifier, data, folds, new Random(1));
    // System.out.println("Evaluation" + evaluation.toSummaryString());
    System.out.println("\n\n" + datanature + " Evaluation " + evaluation.toMatrixString());

    // ArrayList<Prediction> predictions = evaluation.predictions();
    predictions.appendElements(evaluation.predictions());

    System.out.println("\n\n 11111");
    // Calculate overall accuracy of current classifier on all splits
    double correct = 0;

    for (int i = 0; i < predictions.size(); i++) {
        NominalPrediction np = (NominalPrediction) predictions.elementAt(i);
        if (np.predicted() == np.actual()) {
            correct++;
        }
    }

    System.out.println("\n\n 22222");
    double accuracy = 100 * correct / predictions.size();
    String accString = String.format("%.2f%%", accuracy);
    modelResultSet[3] = accString;
    System.out.println(datanature + " Accuracy " + accString);

    String modelFileName = algo + "-DDKA.model";

    System.out.println("\n\n 33333");

    ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream("D:\\DDKAResources\\" + modelFileName));
    oos.writeObject(classifier);
    oos.flush();
    oos.close();

    return modelResultSet;

}
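
A sketch of how this method might be used from within ModelLearner or a subclass (the Instances object and its loading are assumptions; indices follow the result array populated above):

// Hypothetical call: evaluate J48 on an already-loaded dataset
String[] result = modelAccuracy("J48", data, "original");
System.out.println(result[0] + " accuracy: " + result[3]);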

From source file:personality_prediction.Classifier.java

/**
 * @param args the command line arguments
 */
void run_classifier() {
    try {
        //csv2arff();
        System.out.println("Enter the class for which you want to classify");
        System.out.println("1..Openness");
        System.out.println("2..Neuroticism");
        System.out.println("3..Agreeableness");
        System.out.println("4..Conscientiousness");
        System.out.println("5..Extraversion");
        System.out.println();
        Scanner sc = new Scanner(System.in);
        int choice = sc.nextInt();
        String filename = "";
        if (choice == 1) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\Training_data_open.arff";
        } else if (choice == 2) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\Training_data_neur.arff";
        } else if (choice == 3) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\Training_data_agr.arff";
        } else if (choice == 4) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\Training_data_con.arff";
        } else if (choice == 5) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\Training_data_extr.arff";
        }
        BufferedReader reader = new BufferedReader(new FileReader(filename));
        Instances data = new Instances(reader);
        reader.close();
        // set the class attribute
        data.setClassIndex(data.numAttributes() - 1);


        // build an unpruned J48 classifier
        String[] options = new String[1];
        options[0] = "-U"; // unpruned tree
        J48 tree = new J48(); // new instance of tree
        tree.setOptions(options); // set the options
        tree.buildClassifier(data); // build classifier

        if (choice == 1) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_open.arff";
        } else if (choice == 2) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_neur.arff";
        } else if (choice == 3) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_agr.arff";
        } else if (choice == 4) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_con.arff";
        } else if (choice == 5) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_extr.arff";
        }
        FileReader fr = new FileReader(filename);
        BufferedReader br = new BufferedReader(fr);
        Instances unlabeled = new Instances(br);
        // set the class attribute
        unlabeled.setClassIndex(unlabeled.numAttributes() - 1);
        // create copy
        Instances labeled = new Instances(unlabeled);
        // label instances
        for (int i = 0; i < unlabeled.numInstances(); i++) {
            double clsLabel = tree.classifyInstance(unlabeled.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
        }
        // save labeled data

        // write each labeled set to its own "_labelled" file so the
        // unlabeled test files are not overwritten
        if (choice == 1) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_open_labelled.arff";
        } else if (choice == 2) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_neur_labelled.arff";
        } else if (choice == 3) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_agr_labelled.arff";
        } else if (choice == 4) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_con_labelled.arff";
        } else if (choice == 5) {
            filename = "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Labelling\\Testing_data_extr_labelled.arff";
        }
        FileWriter fr1 = new FileWriter(filename);
        BufferedWriter writer = new BufferedWriter(fr1);
        writer.write(labeled.toString());
        writer.newLine();
        writer.flush();
        writer.close();
    } catch (Exception e) {
        System.out.println(e.getLocalizedMessage());
    }
}

From source file:rdfsystem.data.DataMining.java

public static String classify(RdfManager manager) throws Exception {
    Instances ins = transformData(manager, true);
    // J48 requires a nominal class, so the "year" attribute must be nominal here
    ins.setClassIndex(ins.attribute("year").index());
    J48 tree = new J48();
    tree.buildClassifier(ins);
    return tree.graph();
}