List of usage examples for weka.classifiers.Evaluation.toSummaryString()
@Override
public String toSummaryString()
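A minimal, self-contained sketch of typical usage (the dataset path and the choice of J48 are placeholders, not taken from the examples below):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SummaryDemo {
    public static void main(String[] args) throws Exception {
        // Load any ARFF/CSV dataset; the path is a placeholder.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a classifier and print the summary block
        // (accuracy, kappa, error measures, instance counts).
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}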
From source file:Statistics.WekaFunctions.java
public void buildLRcls() {
    try {
        System.out.println("building classifier");
        model.setOptions(weka.core.Utils.splitOptions("-S 0 -D")); // set options
        model.buildClassifier(trainParameters);
        // evaluate classifier and print some statistics
        Evaluation eval = new Evaluation(trainParameters);
        eval.evaluateModel(model, trainParameters);
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(WekaFunctions.class.getName()).log(Level.SEVERE, null, ex);
    }
}
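This method relies on `model` and `trainParameters` fields defined elsewhere in WekaFunctions and not shown on this page. A hypothetical sketch of that setup follows; the choice of LibLINEAR is only an inference from the method name ("LR") and the `-S 0` option (L2-regularized logistic regression in that wrapper), not something the snippet confirms:

// Hypothetical fields assumed by buildLRcls(); names mirror the snippet above.
private LibLINEAR model = new LibLINEAR(); // assumption: any option-handling classifier would fit
private Instances trainParameters;

// Possible loader for the training data (the path is a placeholder).
public void loadTrainingData(String path) throws Exception {
    trainParameters = new ConverterUtils.DataSource(path).getDataSet();
    trainParameters.setClassIndex(trainParameters.numAttributes() - 1);
}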
From source file:textmining.TextMining.java
/**
 * Decision Table
 *
 * @param instances
 * @return string
 * @throws Exception
 */
private static String C_DecisionTable(Instances instances) throws Exception {
    Classifier decisionTable = (Classifier) new DecisionTable();
    String[] options = weka.core.Utils
            .splitOptions("-X 1 -S \"weka.attributeSelection.BestFirst -D 1 -N 5\"");
    decisionTable.setOptions(options);
    decisionTable.buildClassifier(instances);
    Evaluation eval = new Evaluation(instances);
    // eval.evaluateModel(decisionTable, instances);
    eval.crossValidateModel(decisionTable, instances, 5, new Random(1));
    String resume = eval.toSummaryString();
    return eval.toMatrixString(resume);
}
From source file:textmining.TextMining.java
private static String setOptions(Classifier classifier, Instances instances, String[] options)
        throws Exception {
    classifier.setOptions(options);
    classifier.buildClassifier(instances);
    Evaluation eval = new Evaluation(instances);
    eval.crossValidateModel(classifier, instances, 5, new Random(1));
    // Do not also call eval.evaluateModel(classifier, instances) here: that would add
    // training-set predictions on top of the cross-validation results and skew the
    // statistics, as the sibling method above avoids by commenting that call out.
    String resume = eval.toSummaryString();
    return eval.toMatrixString(resume);
}
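The helper is private, so a call site would live in the same class; a hypothetical invocation with J48 and its default options (the dataset path is a placeholder):

// Hypothetical call site for the setOptions(...) helper above.
Instances instances = new ConverterUtils.DataSource("reviews.arff").getDataSet();
instances.setClassIndex(instances.numAttributes() - 1);
String[] options = weka.core.Utils.splitOptions("-C 0.25 -M 2"); // J48 defaults
System.out.println(setOptions(new J48(), instances, options));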
From source file:Tubes.Classification.java
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    StringToWordVector filter = new StringToWordVector();
    File training = new File(classTrain);
    File testing = new File(classTest);
    BufferedReader readTrain = new BufferedReader(new FileReader(training));
    BufferedReader readTest = new BufferedReader(new FileReader(testing));
    Instances dataTrain = new Instances(readTrain);
    Instances dataTest = new Instances(readTest);
    filter.setInputFormat(dataTrain);
    dataTrain = Filter.useFilter(dataTrain, filter);
    // The test set must pass through the same filter so its attributes match the
    // training set; the original omitted this step, which fails at evaluation time.
    dataTest = Filter.useFilter(dataTest, filter);
    dataTrain.setClassIndex(dataTrain.numAttributes() - 1);
    dataTest.setClassIndex(dataTest.numAttributes() - 1);

    Classification classify = new Classification();
    NaiveBayes bayes = new NaiveBayes();
    // RandomForest rf = new RandomForest();
    // BayesNet bayesNet = new BayesNet();
    LibSVM libSVM = new LibSVM();

    System.out.println("==========================Naive Bayes Evaluation===========================");
    Evaluation eval = classify.runClassifier(bayes, dataTrain, dataTest);
    System.out.println(eval.toSummaryString() + "\n");
    System.out.println(eval.toClassDetailsString() + "\n");
    System.out.println(eval.toMatrixString() + "\n");
    System.out.println("===========================================================================");

    // System.out.println("==============================Random Forest================================");
    // Evaluation eval2 = classify.runClassifier(rf, dataTrain, dataTest);
    // System.out.println(eval2.toSummaryString() + "\n");
    // System.out.println(eval2.toClassDetailsString() + "\n");
    // System.out.println(eval2.toMatrixString() + "\n");
    // System.out.println("=======================================================================");

    // System.out.println("==============================Bayesian Network================================");
    // Evaluation eval3 = classify.runClassifier(bayesNet, dataTrain, dataTest);
    // System.out.println(eval3.toSummaryString() + "\n");
    // System.out.println(eval3.toClassDetailsString() + "\n");
    // System.out.println(eval3.toMatrixString() + "\n");
    // System.out.println("===========================================================================");

    System.out.println("==============================LibSVM================================");
    libSVM.setCacheSize(512); // MB
    libSVM.setNormalize(true);
    libSVM.setShrinking(true);
    libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_LINEAR, LibSVM.TAGS_KERNELTYPE));
    libSVM.setDegree(3);
    libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
    Evaluation eval4 = classify.runClassifier(libSVM, dataTrain, dataTest);
    System.out.println(eval4.toSummaryString() + "\n");
    System.out.println(eval4.toClassDetailsString() + "\n");
    System.out.println(eval4.toMatrixString() + "\n");
    System.out.println("===========================================================================");
}
From source file:tubes.ml.pkg1.TubesML1.java
public void akses() throws Exception {
    Discretize filter;
    int fold = 10;
    int fold3 = 3;
    int trainNum, testNum;
    PrintWriter file = new PrintWriter("model.txt");

    /*** dataset 1 ***/
    file.println("***DATASET 1***");
    fileReader tets = new fileReader("./src/data/iris.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    Instances data = tets.getData();
    filter = new Discretize();
    try {
        filter.setInputFormat(data);
    } catch (Exception ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }

    /* ID3 */
    Instances discreteData;
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;
    for (int i = 0; i < fold; i++) {
        try {
            Instances train = discreteData.trainCV(fold, i);
            Instances test = discreteData.testCV(fold, i);
            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /* J48 */
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;
    J48 jKT = new J48();
    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                // Train on the current fold; the original called buildClassifier(data),
                // which leaks the test fold into training.
                jKT.buildClassifier(train);
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
            // System.out.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /* dataset 2 */
    file.println("***DATASET 2***");
    tets.setFilepath("./src/data/weather.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    data = new Instances(tets.getData());

    /* ID3 */
    // Re-initialise the filter: its input format was set for the previous dataset.
    filter = new Discretize();
    filter.setInputFormat(data);
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;
    for (int i = 0; i < fold3; i++) {
        try {
            // trainCV/testCV expect a fold count; the original passed trainNum/testNum.
            Instances train = discreteData.trainCV(fold3, i);
            Instances test = discreteData.testCV(fold3, i);
            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                //file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println("Validation " + (i + 1));
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    System.out.println(testNum);
    file.println("Test Number");
    file.println(testNum);

    /* J48 */
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;
    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                jKT.buildClassifier(train); // train on the fold, not the full dataset
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
            System.out.println(jKT.toString());
            file.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /* dataset 3 */
    file.println("***DATASET 3***");
    tets.setFilepath("./src/data/weather.nominal.arff");
    try {
        tets.read();
    } catch (IOException ex) {
        Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
    }
    data = new Instances(tets.getData());

    /* ID3 */
    filter = new Discretize();
    filter.setInputFormat(data);
    discreteData = Filter.useFilter(data, filter);
    trainNum = discreteData.numInstances() * 3 / 4;
    testNum = discreteData.numInstances() / 4;
    for (int i = 0; i < fold3; i++) {
        try {
            Instances train = discreteData.trainCV(fold3, i);
            Instances test = discreteData.testCV(fold3, i);
            Id3 iTiga = new Id3();
            Evaluation validation = new Evaluation(train);
            try {
                iTiga.buildClassifier(train);
                System.out.println(iTiga.toString());
                file.println(iTiga.toString());
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(iTiga, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    System.out.println(testNum);
    file.println("Test Number");
    file.println(testNum);

    /* J48 */
    trainNum = data.numInstances() * 3 / 4;
    testNum = data.numInstances() / 4;
    for (int i = 0; i < fold; i++) {
        Instances train = data.trainCV(fold, i);
        Instances test = data.testCV(fold, i);
        try {
            Evaluation validation = new Evaluation(train);
            try {
                jKT.buildClassifier(train); // train on the fold, not the full dataset
            } catch (Exception ex) {
                Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
            }
            validation.evaluateModel(jKT, test);
            System.out.println(validation.toSummaryString());
            file.println(validation.toSummaryString());
            System.out.println(jKT.toString());
            file.println(jKT.toString());
        } catch (Exception ex) {
            Logger.getLogger(TubesML1.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /* RESULT */
    System.out.println(jKT.toString());
    file.println("RESULT");
    file.println(jKT.toString());
    file.close();
}
From source file:tubes2ai.DriverNB.java
public static void run(String data) throws Exception {
    //System.out.println("test driver");
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(data);
    Instances dataTrain = source.getDataSet();
    //if (dataTrain.classIndex() == -1)
    dataTrain.setClassIndex(0);
    ArffSaver saver = new ArffSaver();
    // dataTrain.setClassIndex();
    Discretize discretize = new Discretize();
    discretize.setInputFormat(dataTrain);
    Instances dataTrainDisc = Filter.useFilter(dataTrain, discretize);

    //NaiveBayes NB = new NaiveBayes();
    AIJKNaiveBayes NB = new AIJKNaiveBayes();
    NB.buildClassifier(dataTrainDisc);

    Evaluation eval = new Evaluation(dataTrainDisc);
    eval.evaluateModel(NB, dataTrainDisc);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());

    /*Instance inst = new DenseInstance(5);
    inst.setDataset(dataTrain);
    inst.setValue(0, "sunny");
    inst.setValue(1, "hot");
    inst.setValue(2, "high");
    inst.setValue(3, "FALSE");
    inst.setValue(4, "yes");
    double a = NB.classifyInstance(inst);
    String hasil = "";
    if (a == 0.0) {
        hasil = "YES";
    } else {
        hasil = "NO";
    }
    //double[] b = NB.distributionForInstance(inst);
    System.out.println("Classification result: " + hasil);
    //System.out.println(b);*/
}
From source file:util.FeatureExtract.java
public static void createArff(String directory) {
    TextDirectoryLoader loader = new TextDirectoryLoader();
    try {
        // convert the directory into a dataset
        loader.setDirectory(new File(directory));
        Instances dataRaw = loader.getDataSet();

        // apply the StringToWordVector and tf-idf weighting
        StringToWordVector filter = new StringToWordVector();
        filter.setIDFTransform(true);
        filter.setInputFormat(dataRaw);
        Instances dataFiltered = Filter.useFilter(dataRaw, filter);

        // output the arff file
        ArffSaver saver = new ArffSaver();
        saver.setInstances(dataFiltered);
        saver.setFile(new File(SpamFilterConfig.getArffFilePath()));
        saver.writeBatch();

        // train with SimpleCart
        SimpleCart classifier = new SimpleCart();
        classifier.buildClassifier(dataFiltered);
        System.out.println("\n\nClassifier model:\n\n" + classifier.toString());

        // evaluate with 10-fold cross-validation
        Evaluation eval = new Evaluation(dataFiltered);
        eval.crossValidateModel(classifier, dataFiltered, 10, new Random(1));
        System.out.println("\n\nCross fold:\n\n" + eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtract.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:uv.datamining.tp2.WekaModeler.java
public static void generarArbol(File file, float cm) throws Exception {
    ArffLoader loader = new ArffLoader();
    loader.setFile(file);
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1); // column holding the class attribute
    J48 tree = new J48();
    tree.setConfidenceFactor(cm);
    tree.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString());
    weka.core.SerializationHelper.write(
            file.getAbsolutePath().substring(0, file.getAbsolutePath().lastIndexOf(".")) + ".model",
            tree);
}
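The tree serialized on the last line can be restored later with the matching read call; a minimal sketch (the .model path is a placeholder):

// Reload the model saved by generarArbol(...).
J48 restored = (J48) weka.core.SerializationHelper.read("datos.model");
System.out.println(restored); // prints the trained tree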