List of usage examples for `weka.classifiers.Evaluation#toSummaryString(String, boolean)`.
public String toSummaryString(String title, boolean printComplexityStatistics)
From source file:myclassifier.naiveBayes.java
public void CrossValidation() throws Exception { if (data != null) { Instances train = data;// w w w. j av a 2s . c o m // train classifier NBClassifier.buildClassifier(train); // evaluate classifier and print some statistics Evaluation eval = new Evaluation(train); eval.crossValidateModel(NBClassifier, train, 10, new Random(1)); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString()); } else { System.out.println("Data is null"); } }
From source file:myclassifier.naiveBayes.java
public void PercentageSplit(double percent) throws Exception { // Percent split int trainSize = (int) Math.round(data.numInstances() * percent / 100); int testSize = data.numInstances() - trainSize; Instances train = new Instances(data, 0, trainSize); Instances test = new Instances(data, trainSize, testSize); // train classifier NBClassifier.buildClassifier(train); // evaluate classifier and print some statistics Evaluation eval = new Evaluation(train); eval.evaluateModel(NBClassifier, test); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString("\n=== Detailed Accuracy By Class ===\n")); System.out.println(eval.toMatrixString()); }
From source file:myclassifier.wekaCode.java
public static void testingTestSet(Instances dataSet, Classifier classifiers, Instances testSet) throws Exception { Evaluation evaluation = new Evaluation(dataSet); evaluation.evaluateModel(classifiers, testSet); //Evaluates the classifier on a given set of instances. System.out.println(evaluation.toSummaryString("\n Testing Model given Test Set ", false)); System.out.println(evaluation.toClassDetailsString()); }
From source file:myclassifier.wekaCode.java
public static void foldValidation(Instances dataSet, Classifier classifiers) throws Exception { Evaluation evaluation = new Evaluation(dataSet); evaluation.crossValidateModel(classifiers, dataSet, 10, new Random(1)); //Evaluates the classifier on a given set of instances. System.out.println(evaluation.toSummaryString("\n 10-fold cross validation", false)); System.out.println(evaluation.toMatrixString("\n Confusion Matrix")); }
From source file:neuralnetwork.NeuralNetwork.java
/**
 * Loads the iris dataset, partitions it 30/70 via {@code RemovePercentage},
 * standardizes both partitions with statistics learned from the training
 * split, trains a neural network on the training split and prints its
 * evaluation on the held-out split.
 *
 * @param args the command line arguments (unused)
 * @throws java.lang.Exception if loading, filtering, training or evaluation fails
 */
public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source;
    source = new ConverterUtils.DataSource("C:\\Users\\Harvey\\Documents\\iris.csv");
    Instances data = source.getDataSet();
    // Default the class attribute to the last column when unset.
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }
    data.randomize(new Debug.Random(1));
    // RemovePercentage drops the given share of instances; rerunning it
    // with inverted selection keeps the complementary share, producing
    // two disjoint partitions of the shuffled data.
    RemovePercentage trainFilter = new RemovePercentage();
    trainFilter.setPercentage(70);
    trainFilter.setInputFormat(data);
    Instances train = Filter.useFilter(data, trainFilter);
    trainFilter.setInvertSelection(true);
    trainFilter.setInputFormat(data);
    Instances test = Filter.useFilter(data, trainFilter);
    // Learn standardization statistics from the training split only, then
    // apply the same transform to both splits.
    Standardize filter = new Standardize();
    filter.setInputFormat(train);
    // BUG FIX: the original assigned the standardized TEST split to
    // newTrain and the standardized TRAIN split to newTest, so the model
    // was trained on the test data and evaluated on the training data.
    Instances newTrain = Filter.useFilter(train, filter);
    Instances newTest = Filter.useFilter(test, filter);
    Classifier nNet = new NeuralNet();
    nNet.buildClassifier(newTrain);
    // BUG FIX: prime Evaluation with the training set (class priors) per
    // standard Weka usage; the original used the test set.
    Evaluation eval = new Evaluation(newTrain);
    eval.evaluateModel(nNet, newTest);
    System.out.println(eval.toSummaryString("\nResults\n-------------\n", false));
}
From source file:newclassifier.NewClassifier.java
/**
 * Fits {@code cls} on the loaded data, then runs 10-fold cross-validation
 * and prints summary statistics, per-class details and the confusion
 * matrix.
 *
 * @throws Exception if training or evaluation fails
 */
public void crossValidation() throws Exception {
    // Fit once up front; crossValidateModel retrains per-fold copies.
    cls.buildClassifier(data);
    Evaluation evaluation = new Evaluation(data);
    evaluation.crossValidateModel(cls, data, 10, new Random(1));
    System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));
    System.out.println(evaluation.toClassDetailsString());
    System.out.println(evaluation.toMatrixString());
}
From source file:newclassifier.NewClassifier.java
public void givenTestSet(String path) throws Exception { Instances test = DataSource.read(path); test.setClassIndex(test.numAttributes() - 1); cls.buildClassifier(data);//w w w . j av a 2 s .com Evaluation eval = new Evaluation(data); eval.evaluateModel(cls, test); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); System.out.println(eval.toClassDetailsString()); System.out.println(eval.toMatrixString()); }
From source file:newsclassifier.NewsClassifier.java
public void CrossValidation(Classifier cls, int n) throws Exception { data.setClassIndex(0);//from w w w .j av a2 s . co m Evaluation eval = new Evaluation(data); cls.buildClassifier(data); eval.crossValidateModel(cls, data, n, new Random(1)); System.out.println(eval.toSummaryString("Results", false)); //System.out.println(eval.toClassDetailsString()); //System.out.println(eval.toMatrixString()); }
From source file:org.opentox.jaqpot3.qsar.trainer.MlrRegression.java
License:Open Source License
/**
 * Trains a Multiple Linear Regression (MLR) model on the given dataset.
 *
 * <p>Pipeline as implemented below: record progress comments on the task
 * (flushed via {@code UpdateTask}), validate that the target feature exists
 * and is numeric, reorder attributes so the target is last, build the model
 * metadata, publish the predicted feature remotely, train Weka's
 * {@code LinearRegression}, and attach a self-evaluation summary to the
 * resulting {@code ActualModel}.
 *
 * @param data training instances retrieved from the dataset service
 * @return the trained model with metadata, features and serialized classifier
 * @throws JaqpotException on DB update failure, bad/missing target feature,
 *         dataset reordering failure, or Weka training/serialization errors
 */
@Override
public Model train(Instances data) throws JaqpotException {
    try {
        getTask().getMeta().addComment(
                "Dataset successfully retrieved and converted " + "into a weka.core.Instances object");
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            // Close in finally so the DB handle is released even when
            // update() fails.
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }
        Instances trainingSet = data;
        getTask().getMeta().addComment("The downloaded dataset is now preprocessed");
        // Same update/close dance as above to persist the new comment.
        firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }
        /* SET CLASS ATTRIBUTE */
        Attribute target = trainingSet.attribute(targetUri.toString());
        if (target == null) {
            throw new BadParameterException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        trainingSet.setClass(target);
        /* Very important: place the target feature at the end!
           (target = last) */
        int numAttributes = trainingSet.numAttributes();
        int classIndex = trainingSet.classIndex();
        Instances orderedTrainingSet = null;
        // Build the attribute-name ordering with every non-class attribute
        // first and the class (target) appended last.
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(trainingSet.attribute(j).name());
            }
        }
        properOrder.add(trainingSet.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, trainingSet, -1);
        } catch (JaqpotException ex) {
            logger.error("Improper dataset - training will stop", ex);
            throw ex;
        }
        // Re-resolve the class attribute on the reordered copy.
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));
        /* START CONSTRUCTION OF MODEL */
        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));
        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        m.addDependentFeatures(dependentFeature);
        try {
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            // Best effort: a missing remote title only degrades metadata.
            Logger.getLogger(MlrRegression.class.getName()).log(Level.SEVERE, null, ex);
        }
        Set<LiteralValue> depFeatTitles = null;
        if (dependentFeature.getMeta() != null) {
            depFeatTitles = dependentFeature.getMeta().getTitles();
        }
        // Prefer the feature's human-readable title; fall back to its URI.
        String depFeatTitle = dependentFeature.getUri().toString();
        if (depFeatTitles != null) {
            depFeatTitle = depFeatTitles.iterator().next().getValueAsString();
            m.getMeta().addTitle("MLR model for " + depFeatTitle)
                    .addDescription("MLR model for the prediction of " + depFeatTitle + " (uri: "
                            + dependentFeature.getUri() + " ).");
        } else {
            m.getMeta().addTitle("MLR model for the prediction of the feature with URI " + depFeatTitle)
                    .addComment("No name was found for the feature " + depFeatTitle);
        }
        /*
         * COMPILE THE LIST OF INDEPENDENT FEATURES with the exact order in which
         * these appear in the Instances object (training set).
         */
        m.setIndependentFeatures(independentFeatures);
        /* CREATE PREDICTED FEATURE AND POST IT TO REMOTE SERVER */
        String predictionFeatureUri = null;
        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Predicted " + depFeatTitle + " by MLR model", datasetUri, featureService);
        m.addPredictedFeatures(predictedFeature);
        predictionFeatureUri = predictedFeature.getUri().toString();
        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");
        // Persist the task comment about the new prediction feature.
        firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }
        /* ACTUAL TRAINING OF THE MODEL USING WEKA */
        LinearRegression linreg = new LinearRegression();
        // "-S 1": attribute-selection method; "-C": presumably eliminate
        // colinear attributes — confirm against Weka LinearRegression docs.
        String[] linRegOptions = { "-S", "1", "-C" };
        try {
            linreg.setOptions(linRegOptions);
            linreg.buildClassifier(orderedTrainingSet);
        } catch (final Exception ex) {// illegal options or could not build the classifier!
            String message = "MLR Model could not be trained";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        }
        try {
            // evaluate classifier and print some statistics
            // NOTE(review): this evaluates on the training set itself, so
            // the stored statistics are resubstitution (training) error.
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(linreg, orderedTrainingSet);
            String stats = eval.toSummaryString("\nResults\n======\n", false);
            ActualModel am = new ActualModel(linreg);
            am.setStatistics(stats);
            m.setActualModel(am);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {// illegal options or could not build the classifier!
            String message = "MLR Model could not be trained";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        }
        m.getMeta().addPublisher("OpenTox").addComment("This is a Multiple Linear Regression Model");
        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());
        return m;
    } catch (QSARException ex) {
        String message = "QSAR Exception: cannot train MLR model";
        logger.error(message, ex);
        throw new JaqpotException(message, ex);
    }
}
From source file:org.opentox.jaqpot3.qsar.trainer.PLSTrainer.java
License:Open Source License
/**
 * Trains a PLS (Partial Least Squares) model on the given dataset.
 *
 * <p>As implemented below: sets the class attribute to the target URI,
 * ensures the target is listed among the independent features, configures
 * and trains a {@code PLSFilter}/{@code PLSClassifier} pair, attaches a
 * self-evaluation summary, records the algorithm parameters, and publishes
 * one predicted feature per PLS component.
 *
 * @param data training instances; the attribute named by {@code targetUri}
 *             becomes the class attribute
 * @return the trained PLS model with parameters and predicted features
 * @throws JaqpotException if classifier training or model serialization fails
 */
@Override
public Model train(Instances data) throws JaqpotException {
    Model model = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));
    data.setClass(data.attribute(targetUri.toString()));
    // Ensure the target URI appears in the independent-feature list
    // exactly once (PLS keeps the target — see note at the end).
    Boolean targetURIIncluded = false;
    for (Feature tempFeature : independentFeatures) {
        if (StringUtils.equals(tempFeature.getUri().toString(), targetUri.toString())) {
            targetURIIncluded = true;
            break;
        }
    }
    if (!targetURIIncluded) {
        independentFeatures.add(new Feature(targetUri));
    }
    model.setIndependentFeatures(independentFeatures);
    /*
     * Train the PLS filter
     */
    PLSFilter pls = new PLSFilter();
    try {
        pls.setInputFormat(data);
        pls.setOptions(new String[] { "-C", Integer.toString(numComponents), "-A", pls_algorithm, "-P",
                preprocessing, "-U", doUpdateClass });
        PLSFilter.useFilter(data, pls);
    } catch (Exception ex) {
        // NOTE(review): filter-setup failures are only logged here; the
        // subsequent classifier build decides whether training aborts.
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
    }
    PLSModel actualModel = new PLSModel(pls);
    try {
        PLSClassifier cls = new PLSClassifier();
        cls.setFilter(pls);
        cls.buildClassifier(data);
        // evaluate classifier and print some statistics
        // NOTE(review): evaluation runs on the training data itself, so the
        // stored statistics are resubstitution (training) error.
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(cls, data);
        String stats = eval.toSummaryString("", false);
        ActualModel am = new ActualModel(actualModel);
        am.setStatistics(stats);
        model.setActualModel(am);
    } catch (NotSerializableException ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    } catch (Exception ex) {
        Logger.getLogger(PLSTrainer.class.getName()).log(Level.SEVERE, null, ex);
        throw new JaqpotException(ex);
    }
    model.setDataset(datasetUri);
    model.setAlgorithm(Algorithms.plsFilter());
    model.getMeta().addTitle("PLS Model for " + datasetUri);
    // Record every tunable of this run as RDF parameters on the model.
    Set<Parameter> parameters = new HashSet<Parameter>();
    Parameter targetPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "target", new LiteralValue(targetUri.toString(), XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter nComponentsPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "numComponents", new LiteralValue(numComponents, XSDDatatype.XSDpositiveInteger))
                    .setScope(Parameter.ParameterScope.MANDATORY);
    Parameter preprocessingPrm = new Parameter(
            Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()), "preprocessing",
            new LiteralValue(preprocessing, XSDDatatype.XSDstring)).setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter algorithmPrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "algorithm", new LiteralValue(pls_algorithm, XSDDatatype.XSDstring))
                    .setScope(Parameter.ParameterScope.OPTIONAL);
    Parameter doUpdatePrm = new Parameter(Configuration.getBaseUri().augment("parameter", RANDOM.nextLong()),
            "doUpdateClass", new LiteralValue(doUpdateClass, XSDDatatype.XSDboolean))
                    .setScope(Parameter.ParameterScope.OPTIONAL);
    parameters.add(targetPrm);
    parameters.add(nComponentsPrm);
    parameters.add(preprocessingPrm);
    parameters.add(doUpdatePrm);
    parameters.add(algorithmPrm);
    model.setParameters(parameters);
    // One predicted feature per extracted PLS component.
    for (int i = 0; i < numComponents; i++) {
        Feature f = publishFeature(model, "", "PLS-" + i, datasetUri, featureService);
        model.addPredictedFeatures(f);
    }
    //save the instances being predicted to abstract trainer for calculating DoA
    predictedInstances = data;
    //in pls target is not excluded
    return model;
}