List of usage examples for weka.classifiers.Evaluation: the Evaluation(Instances) constructor
public Evaluation(Instances data) throws Exception
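For orientation before the real-world examples below, here is a minimal, self-contained sketch of the pattern they all follow: construct an Evaluation from the dataset that defines the attribute and class structure, then cross-validate or evaluate against test data. The file name mydata.arff and the choice of J48 are placeholder assumptions, not taken from any of the examples.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluationExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset; "mydata.arff" is a placeholder file name.
        Instances data = new DataSource("mydata.arff").getDataSet();
        // Weka does not guess the class attribute; here we use the last one.
        data.setClassIndex(data.numAttributes() - 1);

        // The constructor argument supplies the header (attributes and class)
        // that the Evaluation object uses to accumulate statistics.
        Evaluation eval = new Evaluation(data);

        // 10-fold cross-validation; crossValidateModel copies and trains the
        // classifier internally, so an unbuilt J48 is fine here.
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString("\nResults:\n", false));
    }
}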
From source file:com.Machine_learning.model.MyNaiveBayes.java
public MyNaiveBayes(Instances data) {
    dataInstances = data;
    try {
        classifier = new NaiveBayes();
        classifier.buildClassifier(dataInstances);
        eval = new Evaluation(dataInstances);
    } catch (Exception ex) {
        Logger.getLogger(MyNaiveBayes.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.Machine_learning.model.MyNaiveBayes.java
public void applyMethod(String method) {
    try {
        List<Instances> datasets = new ArrayList<>();
        if (method.equals("cross-validation")) {
            eval.crossValidateModel(classifier, dataInstances, 4, new Random(1));
            return;
        } else if (method.equals("test-set")) {
            Preprocessing preprocessTestSet = new Preprocessing(null);
            datasets = preprocessTestSet.getDataSets(
                    MyNaiveBayes.class.getResource("/data/categories-per-train.arff").getPath(),
                    MyNaiveBayes.class.getResource("/data/2017-articles-correct.arff").getPath());
        } else if (method.equals("percentage")) {
            Preprocessing preprocessTestSet = new Preprocessing(null);
            datasets = preprocessTestSet.getDataSets(
                    MyNaiveBayes.class.getResource("/data/categories-per-train.arff").getPath(),
                    MyNaiveBayes.class.getResource("/data/categories-per-test.arff").getPath());
        } else {
            return;
        }
        classifier = new NaiveBayes();
        classifier.buildClassifier(datasets.get(0));
        eval = new Evaluation(datasets.get(0));
        eval.evaluateModel(classifier, datasets.get(1));
    } catch (Exception ex) {
        Logger.getLogger(MyNaiveBayes.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.Machine_learning.model.MySupportVectorMachine.java
public MySupportVectorMachine(Instances data) {
    dataInstances = data;
    try {
        classifier = new LibSVM();
        classifier.setOptions(splitOptions(options));
        classifier.buildClassifier(data);
        eval = new Evaluation(dataInstances);
    } catch (Exception ex) {
        System.out.println("THROWN " + ex.getMessage());
        Logger.getLogger(MySupportVectorMachine.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.Machine_learning.model.MySupportVectorMachine.java
public void applyMethod(String method) {
    try {
        List<Instances> datasets = new ArrayList<>();
        if (method.equals("cross-validation")) {
            eval.crossValidateModel(classifier, dataInstances, 4, new Random(1));
            return;
        } else if (method.equals("test-set")) {
            Preprocessing preprocessTestSet = new Preprocessing(null);
            datasets = preprocessTestSet.getDataSets(
                    MyNaiveBayes.class.getResource("/data/categories-per-train.arff").getPath(),
                    MyNaiveBayes.class.getResource("/data/2017-articles-correct.arff").getPath());
        } else if (method.equals("percentage")) {
            Preprocessing preprocessTestSet = new Preprocessing(null);
            datasets = preprocessTestSet.getDataSets(
                    MyNaiveBayes.class.getResource("/data/categories-per-train.arff").getPath(),
                    MyNaiveBayes.class.getResource("/data/categories-per-test.arff").getPath());
        } else {
            return;
        }
        classifier = new LibSVM();
        classifier.setOptions(splitOptions(options));
        classifier.buildClassifier(datasets.get(0));
        eval = new Evaluation(datasets.get(0));
        eval.evaluateModel(classifier, datasets.get(1));
    } catch (Exception ex) {
        // Fixed: the original logged to the MyNaiveBayes logger by mistake.
        Logger.getLogger(MySupportVectorMachine.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.mycompany.id3classifier.ID3Shell.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("lensesData.csv");
    Instances dataSet = source.getDataSet();

    Discretize filter = new Discretize();
    filter.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, filter);

    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);

    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); // It's over 9000!!

    // Perform cross-validation
    int folds = 10;
    Evaluation eval = new Evaluation(dataSet);
    for (int n = 0; n < folds; n++) {
        Instances trainingData = dataSet.trainCV(folds, n);
        Instances testData = dataSet.testCV(folds, n);
        ID3Classifier classifier = new ID3Classifier();
        // Id3 classifier = new Id3();
        classifier.buildClassifier(trainingData);
        eval.evaluateModel(classifier, testData);
    }
    System.out.println(eval.toSummaryString("\nResults:\n", false));
}
From source file:com.mycompany.knnclassifier.kNNShell.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("carData.csv");
    Instances dataSet = source.getDataSet();

    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);

    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); // It's over 9000!!

    // 70/30 train/test split
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;
    Instances trainingData = new Instances(dataSet, 0, trainingSize);
    Instances testData = new Instances(dataSet, trainingSize, testSize);

    kNNClassifier classifier = new kNNClassifier(3);
    classifier.buildClassifier(trainingData);

    // Used to compare to Weka's built-in kNN algorithm:
    // Classifier classifier = new IBk(1);
    // classifier.buildClassifier(trainingData);

    Evaluation eval = new Evaluation(trainingData);
    eval.evaluateModel(classifier, testData);
    System.out.println(eval.toSummaryString("\nResults:\n", false));
}
From source file:com.mycompany.neuralnetwork.NeuralNetworkShell.java
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("irisData.csv");
    Instances dataSet = source.getDataSet();

    Standardize standardize = new Standardize();
    standardize.setInputFormat(dataSet);
    dataSet = Filter.useFilter(dataSet, standardize);

    dataSet.setClassIndex(dataSet.numAttributes() - 1);
    dataSet.randomize(new Random(9001)); // It's over 9000!!

    // 70/30 train/test split
    int trainingSize = (int) Math.round(dataSet.numInstances() * .7);
    int testSize = dataSet.numInstances() - trainingSize;
    Instances trainingData = new Instances(dataSet, 0, trainingSize);
    Instances testData = new Instances(dataSet, trainingSize, testSize);

    // MultilayerPerceptron classifier = new MultilayerPerceptron();
    NeuralNetworkClassifier classifier = new NeuralNetworkClassifier(3, 20000, 0.1);
    classifier.buildClassifier(trainingData);

    Evaluation eval = new Evaluation(trainingData);
    eval.evaluateModel(classifier, testData);
    System.out.println(eval.toSummaryString("\nResults:\n", false));
}
From source file:com.reactivetechnologies.analytics.core.eval.AdaBoostM1WithBuiltClassifiers.java
License:Open Source License
@Override
protected void buildClassifierWithWeights(Instances data) throws Exception {
    Instances training;
    double epsilon, reweight;
    Evaluation evaluation;
    int numInstances = data.numInstances();

    // Initialize data
    m_Betas = new double[m_Classifiers.length];
    m_NumIterationsPerformed = 0;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);

    // Do bootstrap iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; m_NumIterationsPerformed++) {
        if (m_Debug) {
            System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
        }

        // Select instances to train the classifier on
        if (m_WeightThreshold < 100) {
            selectWeightQuantile(training, (double) m_WeightThreshold / 100);
        } else {
            // Note: the result is discarded; nothing is built in this variant.
            new Instances(training, 0, numInstances);
        }

        /** Changed here: DO NOT build the classifier! */
        /*
        if (m_Classifiers[m_NumIterationsPerformed] instanceof Randomizable)
            ((Randomizable) m_Classifiers[m_NumIterationsPerformed]).setSeed(randomInstance.nextInt());
        m_Classifiers[m_NumIterationsPerformed].buildClassifier(trainData);
        */
        /** End change */

        // Evaluate the classifier
        evaluation = new Evaluation(data);
        evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], training);
        epsilon = evaluation.errorRate();

        // Stop if the error is too small, or too big (and ignore this model)
        if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
            if (m_NumIterationsPerformed == 0) {
                m_NumIterationsPerformed = 1; // If we're the first we have to use it
            }
            break;
        }

        // Determine the weight to assign to this model
        m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
        reweight = (1 - epsilon) / epsilon;
        if (m_Debug) {
            System.err.println("\terror rate = " + epsilon + " beta = " + m_Betas[m_NumIterationsPerformed]);
        }

        // Update instance weights
        setWeights(training, reweight);
    }
}
From source file:com.reactivetechnologies.analytics.core.eval.AdaBoostM1WithBuiltClassifiers.java
License:Open Source License
@Override
protected void buildClassifierUsingResampling(Instances data) throws Exception {
    Instances trainData, training;
    double epsilon, reweight, sumProbs;
    Evaluation evaluation;
    int numInstances = data.numInstances();
    int resamplingIterations = 0;

    // Initialize data
    m_Betas = new double[m_Classifiers.length];
    m_NumIterationsPerformed = 0;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);
    sumProbs = training.sumOfWeights();
    for (int i = 0; i < training.numInstances(); i++) {
        training.instance(i).setWeight(training.instance(i).weight() / sumProbs);
    }

    // Do bootstrap iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; m_NumIterationsPerformed++) {
        if (m_Debug) {
            System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
        }

        // Select instances to train the classifier on
        if (m_WeightThreshold < 100) {
            trainData = selectWeightQuantile(training, (double) m_WeightThreshold / 100);
        } else {
            trainData = new Instances(training);
        }

        // Resample
        resamplingIterations = 0;
        double[] weights = new double[trainData.numInstances()];
        for (int i = 0; i < weights.length; i++) {
            weights[i] = trainData.instance(i).weight();
        }
        do {
            /** Changed here: DO NOT build the classifier! */
            // Build and evaluate classifier
            // m_Classifiers[m_NumIterationsPerformed].buildClassifier(sample);
            /** End change */
            evaluation = new Evaluation(data);
            evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], training);
            epsilon = evaluation.errorRate();
            resamplingIterations++;
        } while (Utils.eq(epsilon, 0) && (resamplingIterations < 10));

        // Stop if the error is too big or 0
        if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
            if (m_NumIterationsPerformed == 0) {
                m_NumIterationsPerformed = 1; // If we're the first we have to use it
            }
            break;
        }

        // Determine the weight to assign to this model
        m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
        reweight = (1 - epsilon) / epsilon;
        if (m_Debug) {
            System.err.println("\terror rate = " + epsilon + " beta = " + m_Betas[m_NumIterationsPerformed]);
        }

        // Update instance weights
        setWeights(training, reweight);
    }
}
From source file:com.sliit.rules.RuleContainer.java
public Map<String, String> evaluateModel() {
    Map<String, String> evaluationSummary = new HashMap<String, String>();
    try {
        instances.setClassIndex(instances.numAttributes() - 1);
        Evaluation evaluation = new Evaluation(instances);
        evaluation.evaluateModel(ruleMoldel, instances);
        ArrayList<Rule> rulesList = ruleMoldel.getRuleset();
        String rules = ruleMoldel.toString();
        evaluationSummary.put("rules", rules);
        evaluationSummary.put("summary", evaluation.toSummaryString());
        evaluationSummary.put("confusion_matrix", evaluation.toMatrixString());
    } catch (Exception e) {
        log.error("Error occurred: " + e.getLocalizedMessage());
    }
    return evaluationSummary;
}