List of usage examples for weka.classifiers.Evaluation.crossValidateModel
public void crossValidateModel(Classifier classifier, Instances data, int numFolds, Random random) throws Exception
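Before the per-project examples below, here is a minimal, self-contained sketch of a typical call. The ARFF path ("iris.arff") and the J48 classifier are placeholders chosen for illustration; crossValidateModel shuffles (and, for a nominal class, stratifies) the data, evaluates a fresh copy of the classifier on each fold, and accumulates the statistics in the Evaluation object.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class CrossValidateModelExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset; "iris.arff" is a placeholder path.
        Instances data = new DataSource("iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation with a fixed seed for reproducibility.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        System.out.println(eval.toSummaryString("\n=== 10-fold cross-validation ===\n", false));
    }
}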
From source file:DocClassifier.java
public Evaluation cvClassify(Classifier classifier, int k) throws Exception {
    docPredList.clear();
    Instances trainInstances = createInstances(trainFiles);
    Evaluation ev = new Evaluation(trainInstances);
    ev.crossValidateModel(classifier, trainInstances, k, new Random(1));
    return ev;
}
From source file:PreparingSteps.java
public double getSourceFitnessValue(int foldnumber, int N, Debug.Random rand, Instances data,
        double[] food_source, Evaluation eval, Classifier classifier) {
    double fitness = 0;
    int girildi = 0;
    // work on a copy so that deleting attributes does not modify the caller's dataset
    Instances data1 = new Instances(data);
    for (int j = 0; j < N - 1; j++) {
        if (food_source[j] == 0) {
            data1.deleteAttributeAt(j - girildi);
            girildi += 1;
        }
    }
    try {
        eval.crossValidateModel(classifier, data1, foldnumber, rand);
        fitness = eval.weightedFMeasure();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return fitness;
}
From source file:homemadeWEKA.java
public static void treeLearning_crossVal(Instances data) throws Exception {
    Evaluation eval = new Evaluation(data);
    J48 tree = new J48();
    // crossValidateModel evaluates copies of the classifier, so `tree` itself is not trained here
    eval.crossValidateModel(tree, data, 10, new Random(1));
    save_modelWithEval(tree, eval);
    // System.out.println(eval.toSummaryString("\nResult of tree learning with cross validation 10 folds\n \n", false));
}
From source file:GrowTree.java
public void crossvalidaion() throws Exception {
    DataSource source = new DataSource(
            "F:\\backup\\BTH\\#6DV2542 Machine Learning\\WEKA experiments\\UCI\\iris.arff");
    Instances newData = source.getDataSet();
    // the class index must be set before constructing the Evaluation
    newData.setClassIndex(newData.numAttributes() - 1);
    Evaluation eval = new Evaluation(newData);
    eval.crossValidateModel(tree, newData, 10, new Random(1));
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}
From source file:adams.opt.cso.AbstractClassifierBasedSimpleCatSwarmOptimization.java
License:Open Source License
/**
 * Evaluates the classifier on the dataset and returns the metric.
 *
 * @param cls    the classifier to evaluate
 * @param data   the data to use for evaluation
 * @param folds  the number of folds to use
 * @param seed   the seed for the randomization
 * @return       the metric
 * @throws Exception if the evaluation fails
 */
protected double evaluateClassifier(Classifier cls, Instances data, int folds, int seed) throws Exception {
    Evaluation evaluation;

    evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, folds, new Random(seed));

    return getMeasure().extract(evaluation, true);
}
From source file:adams.opt.optimise.genetic.fitnessfunctions.AttributeSelection.java
License:Open Source License
public double evaluate(OptData opd) {
    init();
    int cnt = 0;
    int[] weights = getWeights(opd);
    Instances newInstances = new Instances(getInstances());
    for (int i = 0; i < getInstances().numInstances(); i++) {
        Instance in = newInstances.instance(i);
        cnt = 0;
        for (int a = 0; a < getInstances().numAttributes(); a++) {
            if (a == getInstances().classIndex())
                continue;
            if (weights[cnt++] == 0) {
                in.setValue(a, 0);
            } else {
                in.setValue(a, in.value(a));
            }
        }
    }
    Classifier newClassifier = null;
    try {
        newClassifier = (Classifier) OptionUtils.shallowCopy(getClassifier());
        // evaluate classifier on data
        Evaluation evaluation = new Evaluation(newInstances);
        evaluation.crossValidateModel(newClassifier, newInstances, getFolds(),
                new Random(getCrossValidationSeed()));
        // obtain measure
        double measure = 0;
        if (getMeasure() == Measure.ACC)
            measure = evaluation.pctCorrect();
        else if (getMeasure() == Measure.CC)
            measure = evaluation.correlationCoefficient();
        else if (getMeasure() == Measure.MAE)
            measure = evaluation.meanAbsoluteError();
        else if (getMeasure() == Measure.RAE)
            measure = evaluation.relativeAbsoluteError();
        else if (getMeasure() == Measure.RMSE)
            measure = evaluation.rootMeanSquaredError();
        else if (getMeasure() == Measure.RRSE)
            measure = evaluation.rootRelativeSquaredError();
        else
            throw new IllegalStateException("Unhandled measure '" + getMeasure() + "'!");
        measure = getMeasure().adjust(measure);
        return (measure); // process fitness
    } catch (Exception e) {
        getLogger().log(Level.SEVERE, "Error evaluating", e);
    }
    return 0;
}
From source file:algoritmogeneticocluster.Cromossomo.java
private void classifica() {
    //SMO classifier = new SMO();
    //HyperPipes classifier = new HyperPipes();
    IBk classifier = new IBk(5);
    BufferedReader datafile = readDataFile(inId + ".arff");
    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        //this.fitness = eval.pctCorrect();
        //fitness = new BigDecimal(fitness).setScale(2, RoundingMode.HALF_UP).doubleValue(); // round to two decimal places
        pctAcerto = eval.pctCorrect();
        pctAcerto = new BigDecimal(pctAcerto).setScale(2, RoundingMode.HALF_UP).doubleValue();
        microAverage = getMicroAverage(eval, data);
        microAverage = new BigDecimal(microAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();
        macroAverage = getMacroAverage(eval, data);
        macroAverage = new BigDecimal(macroAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();
    } catch (Exception ex) {
        System.out.println("Erro ao tentar fazer a classificacao");
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }
    switch (metodoFitness) {
    case 1:
        fitness = pctAcerto;
        break;
    case 2:
        fitness = microAverage;
        break;
    case 3:
        fitness = macroAverage;
        break;
    default:
        break;
    }
}
From source file:algoritmogeneticocluster.WekaSimulation.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    // classifier.buildClassifier(trainset);
    BufferedReader datafile = readDataFile("tabela10.arff");
    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());
    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:ann.ANN.java
public static void crossValidation(Classifier model, Instances data) {
    try {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(model, data, 10, new Random(1));
        System.out.println("================================");
        System.out.println("========Cross Validation========");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        System.out.println(ex.toString());
    }
}
From source file:ANN_single2.MultilayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    MultilayerPerceptron slp = new MultilayerPerceptron(train, 13, 0.1, 0.5);
    // slp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.crossValidateModel(slp, train, 10, new Random(1));
    // eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
}