List of usage examples for weka.classifiers.Evaluation crossValidateModel
public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random, Object... forPredictionsPrinting) throws Exception
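Before the examples from third-party sources, here is a minimal sketch of the typical call pattern (the dataset path iris.arff and the choice of J48 are illustrative assumptions; the class attribute is assumed to be the last one):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidationSketch {
    public static void main(String[] args) throws Exception {
        // Load the dataset and mark the last attribute as the class (illustrative path).
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation: a fresh copy of the classifier is trained on
        // each training fold, so no prior buildClassifier() call is needed.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString());
        System.out.println(eval.toMatrixString());
    }
}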
From source file:classif.Prototyper.java
License:Open Source License
/**
 * Estimate the performance of the prototypes on the learning set, using
 * cross-validation to obtain the estimate.
 *
 * @param nbFolds
 *            the number of folds for the cross-validation
 * @return the cross-validated error rate (1 - accuracy)
 */
public double predictAccuracyXVal(int nbFolds) throws Exception {
    Evaluation eval = new Evaluation(trainingData);
    eval.crossValidateModel(this, trainingData, nbFolds, new Random(), new Object[] {});
    return eval.errorRate();
}
From source file:ia02classificacao.IA02Classificacao.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // Open the ARFF data file and print the number of instances (rows)
    DataSource arquivo = new DataSource("data/zoo.arff");
    Instances dados = arquivo.getDataSet();
    System.out.println("Instances read: " + dados.numInstances());

    // FILTER: remove the animal-name attribute from the classification
    String[] parametros = new String[] { "-R", "1" };
    Remove filtro = new Remove();
    filtro.setOptions(parametros);
    filtro.setInputFormat(dados);
    dados = Filter.useFilter(dados, filtro);

    // Attribute selection with information gain and a ranking search
    AttributeSelection selAtributo = new AttributeSelection();
    InfoGainAttributeEval avaliador = new InfoGainAttributeEval();
    Ranker busca = new Ranker();
    selAtributo.setEvaluator(avaliador);
    selAtributo.setSearch(busca);
    selAtributo.SelectAttributes(dados);
    int[] indices = selAtributo.selectedAttributes();
    System.out.println("Selected attributes: " + Utils.arrayToString(indices));

    // Use the J48 algorithm (unpruned) and print the classification model as text
    String[] opcoes = new String[1];
    opcoes[0] = "-U";
    J48 arvore = new J48();
    arvore.setOptions(opcoes);
    arvore.buildClassifier(dados);
    System.out.println(arvore);

    // Use the J48 algorithm and show the classification graphically
    /*
    TreeVisualizer tv = new TreeVisualizer(null, arvore.graph(), new PlaceNode2());
    JFrame frame = new javax.swing.JFrame("Knowledge Tree");
    frame.setSize(800, 500);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.getContentPane().add(tv);
    frame.setVisible(true);
    tv.fitToScreen();
    */

    /*
     * Classification of new data
     */
    System.out.println("\n\nCLASSIFICATION OF NEW DATA");

    // Create the attribute values
    double[] vals = new double[dados.numAttributes()];
    vals[0] = 1.0;  // hair
    vals[1] = 0.0;  // feathers
    vals[2] = 0.0;  // eggs
    vals[3] = 1.0;  // milk
    vals[4] = 1.0;  // airborne
    vals[5] = 0.0;  // aquatic
    vals[6] = 0.0;  // predator
    vals[7] = 1.0;  // toothed
    vals[8] = 1.0;  // backbone
    vals[9] = 1.0;  // breathes
    vals[10] = 0.0; // venomous
    vals[11] = 0.0; // fins
    vals[12] = 4.0; // legs
    vals[13] = 1.0; // tail
    vals[14] = 1.0; // domestic
    vals[15] = 1.0; // catsize

    // Create an instance based on these attribute values
    Instance meuUnicornio = new DenseInstance(1.0, vals);
    // Attach the instance to the dataset
    meuUnicornio.setDataset(dados);

    // Classify this new instance
    double label = arvore.classifyInstance(meuUnicornio);

    // Print the classification result
    System.out.println("New animal: unicorn");
    System.out.println("Classification: " + dados.classAttribute().value((int) label));

    /*
     * Evaluation and error metrics
     */
    System.out.println("\n\nEVALUATION AND ERROR METRICS");
    Classifier cl = new J48();
    Evaluation eval_roc = new Evaluation(dados);
    eval_roc.crossValidateModel(cl, dados, 10, new Random(1), new Object[] {});
    System.out.println(eval_roc.toSummaryString());

    /*
     * Confusion matrix
     */
    System.out.println("\n\nCONFUSION MATRIX");
    double[][] confusionMatrix = eval_roc.confusionMatrix();
    System.out.println(eval_roc.toMatrixString());
}
From source file:soccer.core.SimpleClassifier.java
public void evaluate() throws IOException, Exception {
    Instances data = loader.buildInstances();

    // Convert selected numeric attributes to nominal ones
    NumericToNominal toNominal = new NumericToNominal();
    toNominal.setOptions(new String[] { "-R", "5,6,8,9" });
    toNominal.setInputFormat(data);
    data = Filter.useFilter(data, toNominal);
    data.setClassIndex(6);
    // DataSink.write(ARFF_STRING, data);

    // Build the model library used by ensemble selection
    EnsembleLibrary ensembleLib = new EnsembleLibrary();
    ensembleLib.addModel("weka.classifiers.trees.J48");
    ensembleLib.addModel("weka.classifiers.bayes.NaiveBayes");
    ensembleLib.addModel("weka.classifiers.functions.SMO");
    ensembleLib.addModel("weka.classifiers.meta.AdaBoostM1");
    ensembleLib.addModel("weka.classifiers.meta.LogitBoost");
    ensembleLib.addModel("weka.classifiers.trees.DecisionStump");
    EnsembleLibrary.saveLibrary(new File("./ensembleLib.model.xml"), ensembleLib, null);

    EnsembleSelection model = new EnsembleSelection();
    model.setOptions(new String[] {
        "-L", "./ensembleLib.model.xml", // </path/to/modelLibrary>
        // "-W", path + "esTmp",         // </path/to/working/directory>
        "-B", "10",      // <numModelBags>
        "-E", "1.0",     // <modelRatio>
        "-V", "0.25",    // <validationRatio>
        "-H", "100",     // <hillClimbIterations>
        "-I", "1.0",     // <sortInitialization>
        "-X", "2",       // <numFolds>
        "-P", "roc",     // <hillClimbMetric>
        "-A", "forward", // <algorithm>
        "-R", "true",    // flag: a model may be selected more than once
        "-G", "true",    // stop adding models when performance degrades
        "-O", "true",    // verbose output
        "-S", "1",       // <num> - random number seed
        "-D", "true"     // run in debug mode
    });

    // double resES[] = evaluate(ensambleSel);
    // System.out.println("Ensemble Selection\n"
    //         + "\tchurn: " + resES[0] + "\n"
    //         + "\tappetency: " + resES[1] + "\n"
    //         + "\tup-sell: " + resES[2] + "\n"
    //         + "\toverall: " + resES[3] + "\n");

    // models.add(new J48());
    // models.add(new RandomForest());
    // models.add(new NaiveBayes());
    // models.add(new AdaBoostM1());
    // models.add(new Logistic());
    // models.add(new MultilayerPerceptron());

    int FOLDS = 5;
    Evaluation eval = new Evaluation(data);

    // for (Classifier model : models) {
    eval.crossValidateModel(model, data, FOLDS, new Random(1), new Object[] {});
    System.out.println(model.getClass().getName() + "\n"
            + "\tRecall: " + eval.recall(1) + "\n"
            + "\tPrecision: " + eval.precision(1) + "\n"
            + "\tF-measure: " + eval.fMeasure(1));
    System.out.println(eval.toSummaryString());
    // }

    // LogitBoost cl = new LogitBoost();
    // cl.setOptions(new String[] {
    //         "-Q", "-I", "100", "-Z", "4", "-O", "4", "-E", "4"
    // });
    // cl.buildClassifier(data);
    // Evaluation eval = new Evaluation(data);
    // eval.crossValidateModel(cl, data, 6, new Random(1), new Object[] {});
    // System.out.println(eval.weightedFMeasure());
    // System.out.println(cl.graph());
    // System.out.println(cl.globalInfo());
}
From source file:tcc.FeatureExtraction.java
public void knn() throws IOException {
    // Convert the CSV file to ARFF
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.csv"));
    Instances inst = loader.getDataSet();

    ArffSaver saver = new ArffSaver();
    saver.setInstances(inst);
    saver.setFile(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.setDestination(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.writeBatch();

    BufferedReader reader = new BufferedReader(
            new FileReader("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    Instances data = new Instances(reader);
    reader.close();
    data.setClassIndex(data.numAttributes() - 1);

    // Normalize the attributes
    try {
        Normalize norm = new Normalize();
        norm.setInputFormat(data);
        data = Filter.useFilter(data, norm);
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
    }

    File csv = new File("/root/TCC/Resultados/knn.csv");
    FileWriter fw = new FileWriter(csv);
    BufferedWriter bw = new BufferedWriter(fw);

    // Evaluate k-NN for k = 1..50 and write the AUC of each run to the CSV file
    for (int i = 1; i < 51; i++) {
        // Instantiate the classifier
        IBk knn = new IBk();
        knn.setKNN(i);
        try {
            knn.buildClassifier(data);
            Evaluation eval = new Evaluation(data);
            // System.out.println(eval.toSummaryString("\nResults\n======\n", false));
            eval.crossValidateModel(knn, data, 10, new Random(1), new Object[] {});
            double auc = eval.areaUnderROC(1);
            System.out.println(auc);
            bw.write(Double.toString(auc));
            bw.newLine();
        } catch (Exception ex) {
            Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    bw.close();
}
From source file:tcc.FeatureExtraction.java
public void rbf() throws IOException {
    // Convert the CSV file to ARFF
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.csv"));
    Instances inst = loader.getDataSet();

    ArffSaver saver = new ArffSaver();
    saver.setInstances(inst);
    saver.setFile(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.setDestination(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.writeBatch();

    BufferedReader reader = new BufferedReader(
            new FileReader("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    Instances data = new Instances(reader);
    reader.close();
    data.setClassIndex(data.numAttributes() - 1);

    // Normalize the attributes
    try {
        Normalize norm = new Normalize();
        norm.setInputFormat(data);
        data = Filter.useFilter(data, norm);
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
    }

    File csv = new File("/root/TCC/Resultados/rbf.csv");
    FileWriter fw = new FileWriter(csv);
    BufferedWriter bw = new BufferedWriter(fw);

    // Evaluate the RBF network for 1..50 clusters and write the AUC of each run to the CSV file
    for (int i = 1; i < 51; i++) {
        // Instantiate the classifier
        RBFNetwork rbf = new RBFNetwork();
        rbf.setNumClusters(i);
        try {
            rbf.buildClassifier(data);
            Evaluation eval = new Evaluation(data);
            // System.out.println(eval.toSummaryString("\nResults\n======\n", false));
            eval.crossValidateModel(rbf, data, 10, new Random(1), new Object[] {});
            double auc = eval.areaUnderROC(1);
            System.out.println(auc);
            bw.write(Double.toString(auc));
            bw.newLine();
        } catch (Exception ex) {
            Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    bw.close();
}