List of usage examples for weka.classifiers Classifier buildClassifier
public abstract void buildClassifier(Instances data) throws Exception;
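The examples below are mined from real projects. As a baseline, the typical call pattern is simply: load a dataset, set its class attribute, and hand it to buildClassifier. A minimal sketch, not taken from any of the examples below; the ARFF path and the choice of J48 are placeholders.

import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class BuildClassifierExample {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("training.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);      // buildClassifier requires a class attribute
        Classifier cls = new J48();                         // any concrete Classifier works here
        cls.buildClassifier(data);                          // learns the model from the training Instances
        System.out.println(cls);                            // prints the learned model
    }
}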
From source file:org.knime.knip.suise.node.pixclassmodel.PixClassModelNodeModel.java
License:Open Source License
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(PortObject[] inObjects, ExecutionContext exec) throws Exception {
    BufferedDataTable inTable = (BufferedDataTable) inObjects[0];
    int imgColIdx = getImgColumnIndex(inTable.getDataTableSpec());
    int labColIdx = getLabelingColumnIndex(inTable.getDataTableSpec());

    // retrieve all available labels
    RowIterator it = inTable.iterator();
    DataRow row;
    Set<String> labels = new HashSet<String>();
    Instances trainingSet = null;
    int rowCount = inTable.getRowCount();
    int i = 0;
    while (it.hasNext()) {
        row = it.next();
        if (row.getCell(labColIdx).isMissing() || row.getCell(imgColIdx).isMissing()) {
            setWarningMessage("Errors occurred while execution! See console for details.");
            LOGGER.warn("Missing cell in row " + row.getKey() + ". Row skipped!");
            continue;
        }
        RandomAccessibleInterval<LabelingType<L>> lab = ((LabelingValue<L>) row.getCell(labColIdx))
                .getLabeling();
        ImgPlus<T> img = ((ImgPlusValue<T>) row.getCell(imgColIdx)).getImgPlus();

        // collect available labels
        LabelRegions<L> regions = KNIPGateway.regions().regions(lab);
        labels.addAll(regions.getExistingLabels().stream().map(l -> l.toString())
                .collect(Collectors.toList()));

        int[] tmp = m_featDimSelection.getSelectedDimIndices(img.numDimensions(), img);
        if (tmp.length == 0) {
            setWarningMessage("Errors occurred while execution! See console for details.");
            LOGGER.warn("Feature dimensions doesn't exist in image in row " + row.getKey()
                    + ". Row skipped!");
            continue;
        }
        int featDim = tmp[0];

        int[] dimIndices = m_dimSelection.getSelectedDimIndices(img.numDimensions(), img);
        List<String> classLabels = new ArrayList<String>();
        for (L label : regions.getExistingLabels()) {
            classLabels.add(label.toString());
        }
        BuildTrainingData<L, T> btd = new BuildTrainingData<L, T>(classLabels, dimIndices, featDim,
                m_resampleRate.getDoubleValue(), m_balanceClassInstances.getBooleanValue());
        if (trainingSet == null) {
            trainingSet = btd.bufferFactory().instantiate(lab, img);
        }

        exec.setProgress("Building training set for row " + row.getKey());
        try {
            btd.compute(lab, img, trainingSet);
        } catch (KNIPRuntimeException e) {
            setWarningMessage("Errors occurred while execution! See console for details.");
            LOGGER.warn("Row " + row.getKey() + " skipped. " + e.getLocalizedMessage());
        }
        exec.checkCanceled();
        exec.setProgress((double) i / rowCount);
        i++;
    }

    // build classifier
    exec.setProgress("Build classifier ...");
    if (trainingSet == null) {
        throw new IllegalStateException(
                "No training set could be created due to the lack of training samples. Maybe wrong (i.e. non-existent) feature dimension selected!?");
    }

    // count instances per class for debugging purposes
    double[] classDistr = new double[trainingSet.numClasses()];
    for (Instance instance : trainingSet) {
        classDistr[(int) instance.classValue()]++;
    }

    Classifier classifier = m_classifierSelection.getClassifier();
    classifier.buildClassifier(trainingSet);

    return new PortObject[] { new WekaClassifierPortObject(classifier, trainingSet,
            new WekaClassifierPortObjectSpec(labels.toArray(new String[labels.size()]))) };
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.BuildClassifierPerActor.java
License:Open Source License
public void execute(Graph g) {
    // construct the queries to be used
    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);
    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);
    LinkByRelation groundTruth = (LinkByRelation) LinkQueryFactory.newInstance().create("LinkByRelation");
    groundTruth.buildQuery((String) parameter.get("Relation").get(), false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the training data
    for (Actor i : artists) {
        TreeSet<Actor> artist = new TreeSet<Actor>();
        artist.add(i);
        Classifier classifier = createClassifier();
        Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
        Instances dataSet = null;
        boolean firstRun = true;
        while (users.hasNext()) {
            TreeSet<Actor> user = new TreeSet<Actor>();
            user.add(users.next());
            Property property = user.first().getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
            if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
                List values = property.getValue();
                if (!values.isEmpty()) {
                    // get the existing instance
                    Instance object = (Instance) values.get(0);
                    if (firstRun) {
                        firstRun = false;
                        Instances current = object.dataset();
                        FastVector attributes = new FastVector();
                        for (int j = 0; j < current.numAttributes(); ++j) {
                            attributes.addElement(current.attribute(j));
                        }
                        Attribute classValue = new Attribute(i.getID());
                        attributes.addElement(classValue);
                        dataSet = new Instances(i.getID(), attributes, 1000);
                        dataSet.setClassIndex(dataSet.numAttributes() - 1);
                    }
                    // for every artist, create a temporary artist classifier;
                    // copy the existing attribute values, the last slot is reserved for the class value
                    double[] content = new double[object.numAttributes() + 1];
                    for (int j = 0; j < object.numAttributes(); ++j) {
                        content[j] = object.value(j);
                    }
                    Iterator<Link> link = null;
                    if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                        link = AlgorithmMacros.filterLink(parameter, g, groundTruth, user, artist, null);
                    } else {
                        link = AlgorithmMacros.filterLink(parameter, g, groundTruth, artist, user, null);
                    }
                    if (link.hasNext()) {
                        content[content.length - 1] = link.next().getStrength();
                    } else if ((Boolean) parameter.get("AbsenceIsMissing").get()) {
                        content[content.length - 1] = Double.NaN;
                    } else {
                        content[content.length - 1] = 0.0;
                    }
                    Instance base = new Instance(1.0, content);
                    base.setDataset(dataSet);
                    dataSet.add(base);
                }
            }
        }
        try {
            classifier.buildClassifier(dataSet);
            Property classifierProperty = PropertyFactory.newInstance().create(
                    AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("ClassifierProperty").get()),
                    (String) parameter.get("ClassifierProperty").getType(),
                    weka.classifiers.Classifier.class);
            classifierProperty.add(classifier);
            i.add(classifierProperty);
            Property instancesProperty = PropertyFactory.newInstance().create(
                    AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("InstancesProperty").get()),
                    (String) parameter.get("InstancesProperty").getType(),
                    weka.core.Instances.class);
            instancesProperty.add(dataSet); // store the training data alongside the classifier
            i.add(instancesProperty);
        } catch (Exception ex) {
            Logger.getLogger(BuildClassifierPerActor.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.BuildClassifierSingleAttribute.java
License:Open Source License
public void execute(Graph g) {
    // construct the queries to be used
    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);
    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);
    LinkByRelation groundTruth = (LinkByRelation) LinkQueryFactory.newInstance().create("LinkByRelation");
    groundTruth.buildQuery((String) parameter.get("Relation").get(), false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the training data
    Classifier classifier = createClassifier();
    Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
    Instances dataSet = null;
    boolean firstEntry = true;
    while (users.hasNext()) {
        TreeSet<Actor> user = new TreeSet<Actor>();
        user.add(users.next());
        Property property = user.first().getProperty(
                AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
        if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
            List values = property.getValue();
            if (!values.isEmpty()) {
                // get the existing instance
                Instance object = (Instance) values.get(0);
                if (firstEntry) {
                    firstEntry = false;
                    Instances current = object.dataset();
                    FastVector attributes = new FastVector();
                    for (int j = 0; j < current.numAttributes(); ++j) {
                        attributes.addElement(current.attribute(j));
                    }
                    FastVector targetNames = new FastVector();
                    Iterator<Actor> artistIt = targetMode.executeIterator(g, null, null);
                    while (artistIt.hasNext()) {
                        targetNames.addElement(artistIt.next().getID());
                    }
                    Attribute classValue = new Attribute("TargetID", targetNames);
                    attributes.addElement(classValue);
                    dataSet = new Instances("Training", attributes, 1000);
                    dataSet.setClassIndex(dataSet.numAttributes() - 1);
                }
                // copy the existing attribute values; the last slot is reserved for the class value
                double[] content = new double[object.numAttributes() + 1];
                for (int j = 0; j < object.numAttributes(); ++j) {
                    content[j] = object.value(j);
                }
                Iterator<Link> link = null;
                if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                    link = AlgorithmMacros.filterLink(parameter, g, groundTruth, user, null, null);
                } else {
                    link = AlgorithmMacros.filterLink(parameter, g, groundTruth, null, user, null);
                }
                if (link.hasNext()) {
                    double strength = Double.NEGATIVE_INFINITY;
                    Actor target = null;
                    while (link.hasNext()) {
                        Link l = link.next();
                        if (l.getStrength() > strength) {
                            strength = l.getStrength();
                            if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                                target = l.getDestination();
                            } else {
                                target = l.getSource();
                            }
                        }
                    }
                    content[content.length - 1] = dataSet.attribute(dataSet.numAttributes() - 1)
                            .indexOfValue(target.getID());
                } else {
                    content[content.length - 1] = Double.NaN;
                }
                Instance base = new Instance(1.0, content);
                base.setDataset(dataSet);
                dataSet.add(base);
            }
        }
    }
    try {
        classifier.buildClassifier(dataSet);
        Property classifierProperty = PropertyFactory.newInstance().create("BasicProperty",
                AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("ClassifierProperty").get()),
                weka.classifiers.Classifier.class);
        classifierProperty.add(classifier);
        g.add(classifierProperty);
        Property instancesProperty = PropertyFactory.newInstance().create("BasicProperty",
                AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("InstancesProperty").get()),
                weka.core.Instances.class);
        instancesProperty.add(dataSet); // store the training data alongside the classifier
        g.add(instancesProperty);
    } catch (Exception ex) {
        Logger.getLogger(BuildClassifierSingleAttribute.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.WekaClassifierMultiAttribute.java
License:Open Source License
@Override
public void execute(Graph g) {
    Actor[] source = g.getActor((String) parameter[1].getValue());
    if (source != null) {
        // create the attributes for each artist
        FastVector sourceTypes = new FastVector();
        Actor[] dest = g.getActor((String) parameter[3].getValue());
        if (dest != null) {
            // create the Instances set backing this object
            Instances masterSet = null;
            Instance[] trainingData = new Instance[source.length];
            for (int i = 0; i < source.length; ++i) {
                // First, acquire the instance objects for each actor
                Property p = null;
                if ((Boolean) parameter[10].getValue()) {
                    p = source[i].getProperty((String) parameter[2].getValue() + g.getID());
                } else {
                    p = source[i].getProperty((String) parameter[2].getValue());
                }
                if (p != null) {
                    Object[] values = p.getValue();
                    if (values.length > 0) {
                        sourceTypes.addElement(source[i].getID());
                        trainingData[i] = (Instance) ((Instance) values[0]).copy();
                        // assume that this Instance has a backing dataset
                        // that contains all Instance objects to be tested
                        if (masterSet == null) {
                            masterSet = new Instances(trainingData[i].dataset(), source.length);
                        }
                        masterSet.add(trainingData[i]);
                    } else {
                        trainingData[i] = null;
                        Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                                "Actor " + source[i].getType() + ":" + source[i].getID()
                                        + " does not have an Instance value of property ID " + p.getType());
                    }
                } else {
                    trainingData[i] = null;
                    Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                            "Actor " + source[i].getType() + ":" + source[i].getID()
                                    + " does not have a property of ID " + (String) parameter[2].getValue());
                }
            }
            Vector<Attribute> destVector = new Vector<Attribute>();
            for (int i = 0; i < dest.length; ++i) {
                FastVector type = new FastVector();
                type.addElement("false");
                type.addElement("true");
                Attribute tmp = new Attribute(dest[i].getID(), type);
                destVector.add(tmp);
                masterSet.insertAttributeAt(tmp, masterSet.numAttributes());
            }
            Attribute sourceID = new Attribute("sourceID", sourceTypes);
            masterSet.insertAttributeAt(sourceID, masterSet.numAttributes());

            // set ground truth for evaluation
            for (int i = 0; i < masterSet.numInstances(); ++i) {
                Instance inst = masterSet.instance(i);
                Actor user = g.getActor((String) parameter[1].getValue(),
                        sourceID.value((int) inst.value(sourceID)));
                if (user != null) {
                    for (int j = 0; j < dest.length; ++j) {
                        // mark the ground-truth attribute for this destination actor
                        if (g.getLink((String) parameter[4].getValue(), user, dest[j]) != null) {
                            inst.setValue(destVector.get(j), "true");
                        } else {
                            if ((Boolean) parameter[9].getValue()) {
                                inst.setValue(destVector.get(j), "false");
                            } else {
                                inst.setValue(destVector.get(j), Double.NaN);
                            }
                        }
                    }
                } else {
                    Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE,
                            "Actor " + sourceID.value((int) inst.value(sourceID)) + " does not exist in graph");
                }
            }

            // perform cross fold evaluation of each classifier in turn
            String[] opts = ((String) parameter[9].getValue()).split("\\s+");
            Properties props = new Properties();
            if ((Boolean) parameter[11].getValue()) {
                props.setProperty("LinkType", (String) parameter[5].getValue() + g.getID());
            } else {
                props.setProperty("LinkType", (String) parameter[5].getValue());
            }
            props.setProperty("LinkClass", "Basic");
            try {
                for (int destCount = 0; destCount < dest.length; ++destCount) {
                    masterSet.setClass(destVector.get(destCount));
                    for (int i = 0; i < (Integer) parameter[8].getValue(); ++i) {
                        Instances test = masterSet.testCV((Integer) parameter[8].getValue(), i);
                        Instances train = masterSet.trainCV((Integer) parameter[8].getValue(), i);
                        Classifier classifier = (Classifier) ((Class) parameter[7].getValue()).newInstance();
                        classifier.setOptions(opts);
                        classifier.buildClassifier(train);
                        for (int j = 0; j < test.numInstances(); ++j) {
                            String sourceName = sourceID.value((int) test.instance(j).value(sourceID));
                            double result = classifier.classifyInstance(test.instance(j));
                            String predicted = masterSet.classAttribute().value((int) result);
                            Link derived = LinkFactory.newInstance().create(props);
                            derived.set(g.getActor((String) parameter[2].getValue(), sourceName), 1.0,
                                    g.getActor((String) parameter[3].getValue(), predicted));
                            g.add(derived);
                        }
                    }
                }
            } catch (InstantiationException ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (Exception ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else { // dest==null
            Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                    "Ground truth mode '" + (String) parameter[3].getValue() + "' has no actors");
        }
    } else { // source==null
        Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                "Source mode '" + (String) parameter[2].getValue() + "' has no actors");
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.WekaClassifierOneAttribute.java
License:Open Source License
@Override
public void execute(Graph g) {
    Actor[] source = g.getActor((String) parameter[1].getValue());
    if (source != null) {
        // create the Instance sets for each actor
        FastVector classTypes = new FastVector();
        FastVector sourceTypes = new FastVector();
        Actor[] dest = g.getActor((String) parameter[3].getValue());
        if (dest != null) {
            for (int i = 0; i < dest.length; ++i) {
                classTypes.addElement(dest[i].getID());
            }
            Attribute classAttribute = new Attribute((String) parameter[5].getValue(), classTypes);
            Instance[] trainingData = new Instance[source.length];
            Instances masterSet = null;
            for (int i = 0; i < source.length; ++i) {
                // First, acquire the instance objects for each actor
                Property p = null;
                if ((Boolean) parameter[9].getValue()) {
                    p = source[i].getProperty((String) parameter[2].getValue() + g.getID());
                } else {
                    p = source[i].getProperty((String) parameter[2].getValue());
                }
                if (p != null) {
                    Object[] values = p.getValue();
                    if (values.length > 0) {
                        sourceTypes.addElement(source[i].getID());
                        trainingData[i] = (Instance) ((Instance) values[0]).copy();
                        // assume that this Instance has a backing dataset
                        // that contains all Instance objects to be tested
                        if (masterSet == null) {
                            masterSet = new Instances(trainingData[i].dataset(), source.length);
                        }
                        masterSet.add(trainingData[i]);
                    } else {
                        trainingData[i] = null;
                        Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                                "Actor " + source[i].getType() + ":" + source[i].getID()
                                        + " does not have an Instance value of property ID " + p.getType());
                    }
                } else {
                    trainingData[i] = null;
                    Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                            "Actor " + source[i].getType() + ":" + source[i].getID()
                                    + " does not have a property of ID " + (String) parameter[2].getValue());
                }
            }

            // for every actor, fix the instance
            Attribute sourceID = new Attribute("sourceID", sourceTypes);
            masterSet.insertAttributeAt(sourceID, masterSet.numAttributes());
            masterSet.insertAttributeAt(classAttribute, masterSet.numAttributes());
            masterSet.setClass(classAttribute);
            for (int i = 0; i < source.length; ++i) {
                if (trainingData[i] != null) {
                    trainingData[i].setValue(sourceID, source[i].getID());
                    Link[] link = g.getLinkBySource((String) parameter[4].getValue(), source[i]);
                    if (link == null) {
                        trainingData[i].setClassValue(Double.NaN);
                    } else {
                        trainingData[i].setClassValue(link[0].getDestination().getID());
                    }
                }
            }

            String[] opts = ((String) parameter[7].getValue()).split("\\s+");
            Properties props = new Properties();
            if ((Boolean) parameter[10].getValue()) {
                props.setProperty("LinkType", (String) parameter[5].getValue() + g.getID());
            } else {
                props.setProperty("LinkType", (String) parameter[5].getValue());
            }
            props.setProperty("LinkClass", "Basic");
            try {
                for (int i = 0; i < (Integer) parameter[8].getValue(); ++i) {
                    Instances test = masterSet.testCV((Integer) parameter[8].getValue(), i);
                    Instances train = masterSet.trainCV((Integer) parameter[8].getValue(), i);
                    Classifier classifier = (Classifier) ((Class) parameter[6].getValue()).newInstance();
                    classifier.setOptions(opts);
                    classifier.buildClassifier(train);
                    for (int j = 0; j < test.numInstances(); ++j) {
                        String sourceName = sourceID.value((int) test.instance(j).value(sourceID));
                        double result = classifier.classifyInstance(test.instance(j));
                        String predicted = masterSet.classAttribute().value((int) result);
                        Link derived = LinkFactory.newInstance().create(props);
                        derived.set(g.getActor((String) parameter[2].getValue(), sourceName), 1.0,
                                g.getActor((String) parameter[3].getValue(), predicted));
                        g.add(derived);
                    }
                }
            } catch (InstantiationException ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (Exception ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else { // dest==null
            Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                    "Ground truth mode '" + (String) parameter[3].getValue() + "' has no actors");
        }
    } else { // source==null
        Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                "Source mode '" + (String) parameter[2].getValue() + "' has no actors");
    }
}
From source file:org.wikipedia.miner.annotation.Disambiguator.java
License:Open Source License
/**
 * Builds a classifier of the given type using the previously built (or loaded) training data.
 *
 * @param classifier a configured classifier, that is ready to be built.
 * @throws Exception if there is no training data
 */
public void buildClassifier(Classifier classifier) throws Exception {
    System.out.println("Disambiguator: Building classifier...");
    weightTrainingInstances();
    if (trainingData == null) {
        throw new WekaException(
                "You must load training data or train on a set of articles before building the classifier.");
    } else {
        this.classifier = classifier;
        classifier.buildClassifier(trainingData);
    }
}
From source file:org.wikipedia.miner.annotation.weighting.LinkDetector.java
License:Open Source License
/**
 * Builds a classifier of the given type using the previously built (or loaded) training data.
 *
 * @param classifier a configured classifier, that is ready to be built.
 * @throws Exception if there is no training data
 */
public void buildClassifier(Classifier classifier) throws Exception {
    System.out.println("LinkDetector: Building classifier...");
    weightTrainingInstances();
    if (trainingData == null) {
        throw new WekaException(
                "You must load training data or train on a set of articles before building the classifier.");
    } else {
        this.classifier = classifier;
        classifier.buildClassifier(trainingData);
    }
}
From source file:personality_prediction.Evaluation_Result.java
void eval_result() {
    try {
        DataSource source_train = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\training_data_neur.csv");
        Instances train = source_train.getDataSet();
        DataSource source_test = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Testing dataset\\Testing_data_neur.csv");
        Instances test = source_test.getDataSet();
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        // train classifier
        Classifier cls = new J48();
        cls.buildClassifier(train);

        // evaluate on the held-out test set
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(cls, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    } catch (Exception e) {
        System.out.println(e.getLocalizedMessage());
    }
}
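When no separate test file is available, the same J48 evaluation can be run as k-fold cross-validation on a single dataset. A minimal sketch, not from the original source; it assumes the same Weka imports as the example above plus java.util.Random, and the file path is a placeholder.

static void crossValidateJ48() throws Exception {
    Instances data = new DataSource("training_data_neur.csv").getDataSet(); // placeholder path
    data.setClassIndex(data.numAttributes() - 1);
    Evaluation eval = new Evaluation(data);
    // crossValidateModel calls buildClassifier internally on each training fold
    eval.crossValidateModel(new J48(), data, 10, new java.util.Random(1));
    System.out.println(eval.toSummaryString("\nCross-validation results\n======\n", false));
}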
From source file:predictforex.ANN.java
static void FullTraining_aNN() throws Exception {
    Classifier cls = new MultilayerPerceptron();
    cls.buildClassifier(dataset);
    eval = new Evaluation(dataset);
    eval.evaluateModel(cls, dataset);
    System.out.println(eval.toSummaryString("Results ANN full training \n", false));
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.fMeasure(1) + " " + eval.precision(1) + " " + eval.recall(1));
    System.out.println(eval.toMatrixString());
}
From source file:predictor.Predictor.java
public static void multilayerPerceptron() throws Exception {
    DataSource train = new DataSource(configuration.getWorkspace() + "train_common.arff");
    DataSource test = new DataSource(configuration.getWorkspace() + "test_common.arff");
    Instances trainInstances = train.getDataSet();
    Instances testInstances = test.getDataSet();

    // the last attribute is the class attribute
    trainInstances.setClassIndex(trainInstances.numAttributes() - 1);
    testInstances.setClassIndex(testInstances.numAttributes() - 1);

    // Classifier cModel = (Classifier) new MultilayerPerceptron();
    // cModel.buildClassifier(trainInstances);
    //
    // weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);
    //
    // Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");
    //
    // // Test the model
    // Evaluation eTest = new Evaluation(trainInstances);
    // eTest.evaluateModel(cls, testInstances);

    // configure the network before training, then build it
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setHiddenLayers(configuration.getHiddenLayers());
    mlp.setLearningRate(configuration.getLearningRate());
    mlp.setTrainingTime(configuration.getEpocs());
    mlp.setMomentum(configuration.getMomentum());
    mlp.buildClassifier(trainInstances);

    // evaluate the configured classifier and print some statistics
    Evaluation eval = new Evaluation(trainInstances);
    eval.evaluateModel(mlp, testInstances);
    System.out.println(eval.toSummaryString());
}
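The commented-out lines in the example above hint at persisting the trained model. A minimal sketch of that step, continuing from the mlp and testInstances variables of the method above; the model file name is a placeholder, not taken from the original source.

// Persist the trained model, then reload it for later predictions.
weka.core.SerializationHelper.write("mlp.model", mlp);
Classifier restored = (Classifier) weka.core.SerializationHelper.read("mlp.model");
double firstPrediction = restored.classifyInstance(testInstances.instance(0));
System.out.println("Predicted class index: " + firstPrediction);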