List of usage examples for weka.classifiers.Evaluation.evaluateModel
public static String evaluateModel(Classifier classifier, String[] options) throws Exception
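This is the static overload that drives a full evaluation from command-line style options and returns the result summary as a String; note that most of the examples below instead use the instance method evaluateModel(Classifier, Instances). A minimal sketch of the static form (the ARFF file names are placeholders, and J48 stands in for any classifier; if -T is omitted, Weka falls back to cross-validation on the training file):

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;

public class StaticEvaluateDemo {
    public static void main(String[] args) throws Exception {
        // -t names the training ARFF, -T the test ARFF (placeholder paths).
        String[] options = { "-t", "train.arff", "-T", "test.arff" };
        String summary = Evaluation.evaluateModel(new J48(), options);
        System.out.println(summary);
    }
}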
From source file:MLP.java
MLP() {
    try {
        // The same ARFF file is loaded for both training and testing.
        FileReader trainreader = new FileReader("C:\\new.arff");
        FileReader testreader = new FileReader("C:\\new.arff");
        Instances train = new Instances(trainreader);
        Instances test = new Instances(testreader);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        // Learning rate 0.3, momentum 0.2, 500 epochs, one hidden layer of 4 nodes.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
        mlp.buildClassifier(train);

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(mlp, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

        trainreader.close();
        testreader.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:activeSegmentation.learning.WekaClassifier.java
License:Open Source License
/**
 * Evaluates the classifier using the test dataset and stores the evaluation.
 *
 * @param instances The instances to test
 * @return The evaluation
 */
@Override
public double[] testModel(IDataSet instances) {
    try {
        // Test the current classifier with the test set.
        Evaluation evaluator = new Evaluation(new Instances(instances.getDataset(), 0));
        double[] predict = evaluator.evaluateModel(classifier, instances.getDataset());
        System.out.println(evaluator.toSummaryString());
        return predict;
    } catch (Exception e) {
        Logger.getLogger(WekaClassifier.class.getName()).log(Level.SEVERE, null, e);
    }
    return null;
}
From source file:algoritmogeneticocluster.NewClass.java
public static Evaluation classify(Classifier model, Instances trainingSet, Instances testingSet)
        throws Exception {
    Evaluation evaluation = new Evaluation(trainingSet);
    model.buildClassifier(trainingSet);
    evaluation.evaluateModel(model, testingSet);
    return evaluation;
}
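A hypothetical caller for the helper above; it assumes a local iris.arff file and uses J48 as a stand-in classifier:

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifyDemo {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);
        // Simple ordered split: first 100 instances train, the rest test.
        Instances train = new Instances(data, 0, 100);
        Instances test = new Instances(data, 100, data.numInstances() - 100);
        Evaluation eval = NewClass.classify(new J48(), train, test);
        System.out.println(eval.toSummaryString());
    }
}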
From source file:ann.ANN.java
public void percentageSplit(Classifier model, double percent, Instances data) {
    try {
        // Split the data into train and test partitions by row order.
        int trainSize = (int) Math.round(data.numInstances() * percent / 100);
        int testSize = data.numInstances() - trainSize;
        Instances train = new Instances(data, trainSize);
        Instances test = new Instances(data, testSize);
        for (int i = 0; i < trainSize; i++) {
            train.add(data.instance(i));
        }
        for (int i = trainSize; i < data.numInstances(); i++) {
            test.add(data.instance(i));
        }
        // Note: the classifier passed in is assumed to be trained already;
        // this method only evaluates it on the held-out portion.
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(model, test);
        System.out.println("================================");
        System.out.println("========Percentage Split=======");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        System.out.println("File could not be loaded");
    }
}
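Hypothetical usage, assuming the classifier is trained before evaluation (percentageSplit itself never calls buildClassifier); the file name and classifier are illustrative only:

import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SplitDemo {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);
        J48 tree = new J48();
        tree.buildClassifier(data); // trained on all data, for illustration only
        new ANN().percentageSplit(tree, 80.0, data);
    }
}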
From source file:ANN.MultilayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();

    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    MultilayerPerceptron slp = new MultilayerPerceptron(train, 0.1, 5000, 14);
    slp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN.MultiplePerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();

    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    MultiplePerceptron mlp = new MultiplePerceptron(train, 20, 0.3);
    mlp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN_Single.SinglelayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\diabetes.arff");
    Instances train = source.getDataSet();

    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    SinglelayerPerceptron slp = new SinglelayerPerceptron(train, 0.1, 5000);
    slp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    // eval.crossValidateModel(slp, train, 10, new Random(1));
    eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}
From source file:ANN_single2.SinglelayerPerceptron.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\Team.arff");
    Instances train = source.getDataSet();

    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);

    // Sweep two hyper-parameter settings of the custom perceptron,
    // evaluating each configuration on the training data.
    for (int i = 100; i < 3000; i += 100) {
        for (double j = 0.01; j < 1; j += 0.01) {
            System.out.println(i + " " + j);
            SinglelayerPerceptron slp = new SinglelayerPerceptron(i, j, 0.00);
            slp.buildClassifier(train);
            Evaluation eval = new Evaluation(train);
            // eval.crossValidateModel(slp, train, 10, new Random(1));
            eval.evaluateModel(slp, train);
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toMatrixString());
        }
    }
}
From source file:asap.CrossValidation.java
/**
 * Runs stratified k-fold cross-validation on the given dataset and
 * optionally serializes the final model.
 *
 * @param dataInput path of the dataset to load
 * @param classIndex class attribute position ("first", "last", a 1-based
 *        index, or an attribute name)
 * @param removeIndices attribute indices to remove before training
 * @param cls the classifier to evaluate
 * @param seed random seed used for shuffling
 * @param folds number of cross-validation folds
 * @param modelOutputFile file to serialize the trained model to (skipped if empty)
 * @return a textual summary of the evaluation
 * @throws Exception
 */
public static String performCrossValidation(String dataInput, String classIndex, String removeIndices,
        AbstractClassifier cls, int seed, int folds, String modelOutputFile) throws Exception {
    PerformanceCounters.startTimer("cross-validation ST");
    PerformanceCounters.startTimer("cross-validation init ST");

    // Load data and set the class index.
    Instances data = DataSource.read(dataInput);
    String clsIndex = classIndex;
    switch (clsIndex) {
    case "first":
        data.setClassIndex(0);
        break;
    case "last":
        data.setClassIndex(data.numAttributes() - 1);
        break;
    default:
        try {
            data.setClassIndex(Integer.parseInt(clsIndex) - 1);
        } catch (NumberFormatException e) {
            data.setClassIndex(data.attribute(clsIndex).index());
        }
        break;
    }

    Remove removeFilter = new Remove();
    removeFilter.setAttributeIndices(removeIndices);
    removeFilter.setInputFormat(data);
    data = Filter.useFilter(data, removeFilter);

    // Randomize the data and stratify if the class is nominal.
    Random rand = new Random(seed);
    Instances randData = new Instances(data);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }

    // Prepare cross-validation folds and one classifier copy per fold.
    Evaluation eval = new Evaluation(randData);
    Instances trainSets[] = new Instances[folds];
    Instances testSets[] = new Instances[folds];
    Classifier foldCls[] = new Classifier[folds];
    for (int n = 0; n < folds; n++) {
        trainSets[n] = randData.trainCV(folds, n);
        testSets[n] = randData.testCV(folds, n);
        foldCls[n] = AbstractClassifier.makeCopy(cls);
    }

    PerformanceCounters.stopTimer("cross-validation init ST");
    PerformanceCounters.startTimer("cross-validation folds+train ST");

    // TODO: parallelize this loop.
    for (int n = 0; n < folds; n++) {
        Instances train = trainSets[n];
        Instances test = testSets[n];
        // trainCV(folds, n) above matches the StratifiedRemoveFolds filter;
        // the Explorer/Experimenter would instead use:
        // Instances train = randData.trainCV(folds, n, rand);

        // Build and evaluate the classifier for this fold.
        Classifier clsCopy = foldCls[n];
        clsCopy.buildClassifier(train);
        eval.evaluateModel(clsCopy, test);
    }
    // Finally, train the classifier on the full dataset.
    cls.buildClassifier(data);

    PerformanceCounters.stopTimer("cross-validation folds+train ST");
    PerformanceCounters.startTimer("cross-validation post ST");

    // Output the evaluation.
    String out = "\n" + "=== Setup ===\n"
            + "Classifier: " + cls.getClass().getName() + " " + Utils.joinOptions(cls.getOptions()) + "\n"
            + "Dataset: " + data.relationName() + "\n"
            + "Folds: " + folds + "\n"
            + "Seed: " + seed + "\n" + "\n"
            + eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false) + "\n";

    if (!modelOutputFile.isEmpty()) {
        SerializationHelper.write(modelOutputFile, cls);
    }

    PerformanceCounters.stopTimer("cross-validation post ST");
    PerformanceCounters.stopTimer("cross-validation ST");
    return out;
}
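A hypothetical invocation of the method above; the file name, the removed attribute index, and the classifier choice are illustrative only:

import weka.classifiers.trees.J48;

public class CrossValidationDemo {
    public static void main(String[] args) throws Exception {
        String report = CrossValidation.performCrossValidation(
                "data.arff", // dataset to load
                "last",      // class attribute position
                "1",         // remove the first attribute before training
                new J48(),   // classifier to cross-validate
                1,           // random seed
                10,          // number of folds
                "");         // empty string: skip model serialization
        System.out.println(report);
    }
}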
From source file:asap.NLPSystem.java
private String _buildClassifier() {
    Evaluation eval;
    try {
        eval = new Evaluation(trainingSet);
    } catch (Exception ex) {
        Logger.getLogger(NLPSystem.class.getName()).log(Level.SEVERE, null, ex);
        return "Error creating evaluation instance for given data!";
    }
    try {
        classifier.buildClassifier(trainingSet);
    } catch (Exception ex) {
        Logger.getLogger(NLPSystem.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        trainingPredictions = eval.evaluateModel(classifier, trainingSet);
        trainingPearsonsCorrelation = eval.correlationCoefficient();
    } catch (Exception ex) {
        Logger.getLogger(NLPSystem.class.getName()).log(Level.SEVERE, null, ex);
    }
    classifierBuilt = true;
    return "Classifier built (" + trainingPearsonsCorrelation + ").";
}