List of usage examples for weka.classifiers Evaluation toSummaryString
@Override
public String toSummaryString()
From source file:ca.uottawa.balie.WekaLearner.java
License:Open Source License
/**
 * Tests the learned model on the held-out testing set.
 *
 * @return a summary string of the classifier's performance followed by the
 *         confusion matrix (which is also cached in {@code m_ConfusionMatrix})
 * @throws IllegalStateException if the evaluation fails
 */
public String TestModel() {
    if (DEBUG)
        DebugInfo.Out("Testing on " + m_TestingSet.numInstances() + " instances");
    Evaluation evaluation;
    try {
        evaluation = new Evaluation(m_TrainingSet);
        evaluation.evaluateModel(m_Scheme, m_TestingSet);
    } catch (Exception e) {
        // The original swallowed the exception (printing only the message),
        // which left 'evaluation' null and caused an NPE below. Fail fast
        // instead and preserve the cause.
        throw new IllegalStateException("Model evaluation failed", e);
    }
    // StringBuilder instead of repeated String concatenation in the loops.
    StringBuilder summary = new StringBuilder(evaluation.toSummaryString());
    summary.append("\n\nConfusion Matrix: \n\n");
    m_ConfusionMatrix = evaluation.confusionMatrix();
    for (int i = 0; i != m_ConfusionMatrix.length; ++i) {
        for (int j = 0; j != m_ConfusionMatrix[i].length; ++j) {
            // append(double) formats identically to String.valueOf(double)
            summary.append(m_ConfusionMatrix[i][j]).append("\t");
        }
        summary.append("\n");
    }
    return summary.toString();
}
From source file:Clases.RedNeuronal.RedNeuronal.java
/**
 * Classifies the player's performance (score, time, errors) with a Weka
 * multilayer perceptron trained from an ARFF file, then maps the predicted
 * class label ("0".."7") to a result message and image names stored in the
 * fields {@code resultado}, {@code imgResultado} and {@code imgREDneuronal}.
 *
 * NOTE(review): the ARFF path is hard-coded to a Windows user directory —
 * consider externalizing it. The helper methods (obtnerPuntaje, obtenerTiempo,
 * obtenerErrores) are defined elsewhere in this class; presumably they
 * discretize the raw values into the network's nominal inputs — verify there.
 *
 * @param puntaje raw score (comment in code: >= 200 means "learned")
 * @param tiempo  raw time in seconds (<= 240 means "learned")
 * @param error   raw error count (<= 3 means "learned")
 * @throws Exception propagated from Weka data loading / training / evaluation
 */
public void redNeuronal(int puntaje, int tiempo, int error) throws Exception { //si puntaje >= 200 entonces aprendido //si tiempo <= 240 (4 minutos) entonces aprendido //si errores <= 3 entonces aprendido String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) }; ConverterUtils.DataSource con = new ConverterUtils.DataSource( "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff"); // ConverterUtils.DataSource con = new ConverterUtils.DataSource("E:\\Unl\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff"); Instances instances = con.getDataSet(); System.out.println(instances); instances.setClassIndex(instances.numAttributes() - 1); MultilayerPerceptron mp = new MultilayerPerceptron(); mp.buildClassifier(instances);/* w w w . ja v a2 s. c o m*/ Evaluation evalucion = new Evaluation(instances); evalucion.evaluateModel(mp, instances); System.out.println(evalucion.toSummaryString()); System.out.println(evalucion.toMatrixString()); String datosEntrada = null; String datosSalida = "no se puede predecir"; for (int i = 0; i < instances.numInstances(); i++) { double predecido = mp.classifyInstance(instances.instance(i)); datosEntrada = dato[0] + " " + dato[1] + " " + dato[2]; if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0]) && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1]) && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) { datosSalida = instances.classAttribute().value((int) predecido); } } System.out.println("DATOS DE ENTRADA: " + datosEntrada); System.out.println("SALIDA PREDECIDA: " + datosSalida); switch (datosSalida) { case "0": resultado = "Excelente ha aprendido"; imgResultado = "Excelente.jpg"; imgREDneuronal = "0.png"; System.out.println("Excelente ha aprendido"); break; case "1": resultado = "Disminuir Errores"; imgResultado = "Bueno.jpg"; imgREDneuronal = "1.png"; 
// Continuation of the switch over the predicted class label: each case sets
// the result message plus the two image file names and echoes the message.
System.out.println("Disminuir Errores"); break; case "2": resultado = "Disminuir Tiempo"; imgResultado = "Bueno.jpg"; imgREDneuronal = "2.png"; System.out.println("Disminuir Tiempo"); break; case "3": resultado = "Disminuir Errores y tiempo"; imgResultado = "Bueno.jpg"; imgREDneuronal = "3.png"; System.out.println("Disminuir Errores y tiempo"); break; case "4": resultado = "Subir Puntaje"; imgResultado = "pensando.jpg"; imgREDneuronal = "4.png"; System.out.println("Subir Puntaje"); break; case "5": resultado = "Subir Puntaje y disminuir Errores"; imgResultado = "pensando.jpg"; imgREDneuronal = "5.png"; System.out.println("Subir Puntaje y disminuir Errores"); break; case "6": resultado = "Subir Puntaje y disminuir Tiempo"; imgResultado = "pensando.jpg"; imgREDneuronal = "6.png"; System.out.println("Subir Puntaje y disminuir Tiempo"); break; case "7": resultado = "Ponle mas Empeo"; imgResultado = "pensando.jpg"; imgREDneuronal = "7.png"; System.out.println("Ponle mas Empeo"); break; default: resultado = "Verifique entradas, no se puede predecir"; imgResultado = "Error.jpg"; System.out.println("Verifique entradas, no se puede predecir"); break; } }
From source file:clases.Resultados.java
/**
 * Prints the figures of merit of the optimal classifier — evaluation summary,
 * per-class details and confusion matrix — to standard output. Any Weka
 * exception is caught and reported rather than propagated.
 *
 * @param evaluador the completed Weka {@link Evaluation} to report on
 */
public static void imprimirResultados(Evaluation evaluador) {
    final String separador = "==================================================";
    try {
        System.out.println(separador);
        System.out.println("Las figuras de mrito del clasificador ptimo son:");
        System.out.println(separador);
        System.out.println(evaluador.toSummaryString());
        System.out.println(evaluador.toClassDetailsString());
        System.out.println(evaluador.toMatrixString());
    } catch (Exception ex) {
        System.out.println("Error al mostrar los resultados: " + ex);
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
/**
 * Runs the Euclidean K-means prototype experiment: for each prototype count j
 * from 1 up to train.numInstances()/train.numClasses(), repeats nbExp runs
 * (only 1 run when j == 1), trains an EUCKNNClassifierKMeans with j prototypes
 * per class, evaluates it on the test set, prints the summary and test error,
 * and appends a CSV row (dataset, algo, totalPrototypes, testError) to
 * KEUC_All_results.csv.
 *
 * NOTE(review): the PrintStream 'out' (a field) is opened here in append mode
 * but never closed in this method — potential resource leak if nothing else
 * closes it; verify the owner. 'nbExp' is temporarily overridden to 1 for
 * j == 1 and restored from 'tmp' afterwards.
 */
public void launchKMeansEUC() { try {/*from ww w. j a v a 2 s . c om*/ out = new PrintStream(new FileOutputStream(rep + "/KEUC_All_results.csv", true)); // out.println("dataset;algorithm;nbPrototypes;execTime;trainErrorRate;testErrorRate;prototypesPerClassDistribution"); String algo = "KMEANSEUC"; System.out.println(algo); // PrintStream outProto = new PrintStream(new FileOutputStream(rep + "/" + dataName + "_KMEANS.proto", append)); nbPrototypesMax = this.train.numInstances() / this.train.numClasses(); // if(nbPrototypesMax>20) nbPrototypesMax = 10; // if (nbPrototypesMax > 100) // nbPrototypesMax = 100; int tmp; tmp = nbExp; for (int j = 1; j <= nbPrototypesMax; j++) { if (j == 1) nbExp = 1; else nbExp = tmp; System.out.println("nbPrototypes=" + j); for (int n = 0; n < nbExp; n++) { EUCKNNClassifierKMeans classifierKMeansEUC = new EUCKNNClassifierKMeans(); classifierKMeansEUC.setNbPrototypesPerClass(j); classifierKMeansEUC.setFillPrototypes(true); startTime = System.currentTimeMillis(); classifierKMeansEUC.buildClassifier(train); endTime = System.currentTimeMillis(); duration = endTime - startTime; // int[] classDistrib = PrototyperUtil.getPrototypesPerClassDistribution(classifierKMeansEUC.prototypes, train); Evaluation eval = new Evaluation(train); eval.evaluateModel(classifierKMeansEUC, test); System.out.println(eval.toSummaryString()); // Evaluation evaltrain = new Evaluation(train); // evaltrain.evaluateModel(classifierKMeansEUC, train); double testError = eval.errorRate(); // double trainError = evaltrain.errorRate(); // System.out.println("TrainError:"+trainError+"\n"); System.out.println("TestError:" + testError + "\n"); // PrototyperUtil.savePrototypes(classifierKMeansEUC.prototypes, rep + "/" + dataName + "_KMEANSEUC[" + j + "]_XP" + n + ".proto"); out.format("%s,%s,%d,%.4f\n", dataName, algo, (j * train.numClasses()), testError); out.flush(); } } // outProto.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchDT() { if (train.numClasses() == 2) { try {// w ww .j a va 2 s . c o m String algo = "DecisionTree"; System.out.println(algo); double testError = 0.0; ClassifyDT dt = new ClassifyDT(); dt.buildClassifier(train); System.out.println("\nClassify test sets:\n"); Evaluation eval = new Evaluation(train); eval.evaluateModel(dt, test); testError = eval.errorRate(); System.out.println("TestError:" + testError + "\n"); System.out.println(eval.toSummaryString()); } catch (Exception e) { e.printStackTrace(); } } }
From source file:classif.ExperimentsLauncher.java
License:Open Source License
/**
 * Trains a Weka J48 tree on the training set, evaluates it on the test set,
 * and prints the test error rate, the tree's own summary, its DOT graph,
 * and the evaluation summary.
 */
public void launchJ48() {
    try {
        String algo = "J48";
        System.out.println(algo);
        J48 tree = new J48();
        tree.buildClassifier(train);
        Evaluation evaluation = new Evaluation(train);
        evaluation.evaluateModel(tree, test);
        double testError = evaluation.errorRate();
        System.out.println("TestError:" + testError + "\n");
        System.out.println(tree.toSummaryString());
        System.out.println(tree.graph());
        System.out.println(evaluation.toSummaryString());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:classif.ExperimentsLauncher.java
License:Open Source License
public void launchStaticEnsemble() { try {// www.j a v a 2 s . c o m String algo = "StaticEnsemble"; System.out.println(algo); double testError = 0.0; double testError_DT = 0.0; double testError_FKM_4 = 0.0; double testError_FKM_10 = 0.0; // double testError_KMeans = 0.0; startTime = System.currentTimeMillis(); StaticEnsembleClassify staticensembleClassify = new StaticEnsembleClassify(); staticensembleClassify.buildClassifier(train); endTime = System.currentTimeMillis(); duration = endTime - startTime; // Duration traintime = Duration.ofMillis(duration); // System.out.println(traintime); Evaluation eval_FKM_4 = new Evaluation(train); eval_FKM_4.evaluateModel(staticensembleClassify.getFkm_4(), test); testError_FKM_4 = eval_FKM_4.errorRate(); staticensembleClassify.setWeight_fkm_4(testError_FKM_4); System.out.println("TestError of FKM_4:" + testError_FKM_4 + "\n"); // Evaluation eval_KMeans = new Evaluation(train); // eval_KMeans.evaluateModel(ensembleClassify.getKMeans(), test); // testError_KMeans = eval_KMeans.errorRate(); // ensembleClassify.setWeight_kmeans(testError_KMeans); // System.out.println("TestError of KMeans:" + testError_KMeans + "\n"); Evaluation eval_FKM_10 = new Evaluation(train); eval_FKM_10.evaluateModel(staticensembleClassify.getFkm_10(), test); testError_FKM_10 = eval_FKM_10.errorRate(); staticensembleClassify.setWeight_fkm_10(testError_FKM_10); System.out.println("TestError of FKM_10:" + testError_FKM_10 + "\n"); Evaluation eval_DT = new Evaluation(train); eval_DT.evaluateModel(staticensembleClassify.getDt(), test); testError_DT = eval_DT.errorRate(); staticensembleClassify.setWeight_dt(testError_DT); System.out.println("TestError of DT:" + testError_DT + "\n"); Evaluation eval = new Evaluation(train); eval.evaluateModel(staticensembleClassify, test); testError = eval.errorRate(); System.out.println("TestError of Ensemble:" + testError + "\n"); System.out.println(eval.toSummaryString()); } catch (Exception e) { e.printStackTrace(); } }
From source file:classif.ExperimentsLauncher.java
License:Open Source License
/**
 * Runs one of two dynamic-ensemble experiments, selected by the local
 * 'method' variable: case 0 trains and evaluates a BDTEClassifier
 * ("BigDTDynamicEnsemble"), case 1 an FKMDEClassifier ("FKMDynamicEnsemble").
 * Each case prints the algorithm name, test error rate and evaluation summary.
 *
 * NOTE(review): 'method' is hard-coded to 0, so the FKMDEClassifier branch
 * (case 1) is currently dead code — consider promoting the selector to a
 * parameter or field if both variants are meant to be runnable.
 */
public void launchDynamicEnsemble() { int method = 0; switch (method) { case 0:/*from w w w . j av a2s . c o m*/ try { String algo = "BigDTDynamicEnsemble"; System.out.println(algo); double testError = 0.0; BDTEClassifier dynamicEnsembleClassify = new BDTEClassifier(); dynamicEnsembleClassify.buildClassifier(train); Evaluation eval = new Evaluation(train); eval.evaluateModel(dynamicEnsembleClassify, test); testError = eval.errorRate(); System.out.println("TestError:" + testError + "\n"); System.out.println(eval.toSummaryString()); } catch (Exception e) { e.printStackTrace(); } break; case 1: try { String algo = "FKMDynamicEnsemble"; System.out.println(algo); double testError = 0.0; FKMDEClassifier dynamicEnsembleClassify = new FKMDEClassifier(); dynamicEnsembleClassify.buildClassifier(train); Evaluation eval = new Evaluation(train); eval.evaluateModel(dynamicEnsembleClassify, test); testError = eval.errorRate(); System.out.println("TestError:" + testError + "\n"); System.out.println(eval.toSummaryString()); } catch (Exception e) { e.printStackTrace(); } break; } }
From source file:com.actelion.research.orbit.imageAnalysis.tasks.ObjectTrainWorker.java
License:Open Source License
/**
 * Builds and trains the object-classification model: collects per-shape
 * feature vectors from all image frames (verifying every frame with training
 * data uses the same image/mip layer as the segmentation model), assembles a
 * Weka Instances training set, trains an SMO classifier, stores it on
 * modelToBuild, and evaluates it on the training set — warning the user
 * (dialog if GUI mode) when training accuracy falls below
 * OrbitUtils.ACCURACY_WARNING.
 *
 * NOTE(review): evaluation is on the TRAINING set, so pctCorrect measures fit,
 * not generalization — intentional here as a "are the marked objects diverse
 * enough" check.
 */
@Override protected void doWork() { if (dontRun) { dontRun = false;/* w ww.jav a 2 s . c o m*/ return; } trainSet = null; if (modelToBuild != null && modelToBuild.getClassifier() != null) modelToBuild.getClassifier().setBuild(false); List<double[]> trainData = new ArrayList<double[]>(); int mipLayer = -1; // used for checking if all iFrames (with trainData) have the same mapLayer (otherwise the model cannot be trained) for (ImageFrame iFrame : iFrames) { int sampleSize = Math.min(3, iFrame.recognitionFrame.bimg.getImage().getSampleModel().getNumBands()); // was always 1 before! (max 3 because alpha should be ignored) for (int i = 0; i < iFrame.recognitionFrame.getClassShapes().size(); i++) { checkPaused(); List<Shape> shapes = iFrame.recognitionFrame.getClassShapes().get(i).getShapeList(); if (shapes != null && shapes.size() > 0) { if (mipLayer < 0) { mipLayer = iFrame.getMipLayer(); logger.trace("iFrame candidate mipLayer {} from iFrame with width {}", mipLayer, iFrame.recognitionFrame.bimg.getWidth()); } else { if (mipLayer != iFrame.getMipLayer()) { logger.error( "Cell classifier cannot be trained on different image layers. Please use only training data of the same image layer."); return; } } if (mipLayer != modelToBuild.getMipLayer()) { // only same layer as segmentation allowed. Otherwise the cell features must be scaled, too (which is not yet the case). logger.error("Cell classifier must be trained on same layer as segmentation"); return; } } trainData.addAll(new ObjectFeatureBuilderTiled(null).buildFeatures(shapes, i + 1, iFrame.recognitionFrame, iFrame.recognitionFrame.getClassImage(), sampleSize, 0, 0)); // classes 1.0, 2.0, ... 
// --- Build the Instances header on first use: one numeric attribute per
// feature (tail attributes used only for segmentation are stripped via
// SkipTailForClassification) plus a nominal class attribute "1.0", "2.0", ...
} } logger.trace("train levelNum: {}", mipLayer); if (trainData.size() == 0) { logger.error("trainset is empty, classifier cannot be trained."); trainSet = null; return; } if (isCancelled()) { cleanUp(); return; } timeEst = 1000 * 10L; setProgress(10); logger.debug("trainData contains " + trainData.size() + " samples"); Attribute classAttr = null; // create the first time a new trainSet. All further trainings will append new instances. if (trainSet == null) { // build traindata header double[] firstRowAll = trainData.get(0); double[] firstRow = Arrays.copyOfRange(firstRowAll, 0, firstRowAll.length - ObjectFeatureBuilderTiled.SkipTailForClassification); ArrayList<Attribute> attrInfo = new ArrayList<Attribute>(firstRow.length); for (int a = 0; a < firstRow.length - 1; a++) { Attribute attr = new Attribute("a" + a); // if (a<firstRow.length-2) attr.setWeight(0.1d); else attr.setWeight(1.0d); attrInfo.add(attr); } List<String> classValues = new ArrayList<String>( iFrames.get(0).recognitionFrame.getClassShapes().size()); for (int i = 0; i < iFrames.get(0).recognitionFrame.getClassShapes().size(); i++) { classValues.add((i + 1) + ".0"); // "1.0", "2.0", ... 
// --- Convert each raw feature row into a DenseInstance (mapping the class
// value to its nominal index), then train the SMO classifier and evaluate it.
} classAttr = new Attribute("class", classValues); attrInfo.add(classAttr); trainSet = new Instances("trainSet pattern classes", attrInfo, trainData.size()); trainSet.setClassIndex(firstRow.length - 1); } else classAttr = trainSet.attribute("class"); // add instances for (double[] valsAll : trainData) { // skip some non relevant attributes like centerX/Y double[] vals = Arrays.copyOfRange(valsAll, 0, valsAll.length - ObjectFeatureBuilderTiled.SkipTailForClassification); vals[vals.length - 1] = valsAll[valsAll.length - 1]; // class value double classV = classAttr.indexOfValue(Double.toString(vals[vals.length - 1])); vals[vals.length - 1] = classV; Instance inst = new DenseInstance(1.0d, vals); trainSet.add(inst); } // trainSet = trainSet.resample(rand); logger.debug("trainSet contains " + trainSet.numInstances() + " instances"); if (logger.isTraceEnabled()) logger.trace(trainSet.toString()); // building classifier if (isCancelled()) { cleanUp(); return; } checkPaused(); timeEst = 1000 * 5L; setProgress(20); logger.info("Start training classifier... 
"); classifier = new ClassifierWrapper(new weka.classifiers.functions.SMO()); try { classifier.buildClassifier(trainSet); classifier.setBuild(true); modelToBuild.setClassifier(classifier); modelToBuild.setStructure(trainSet.stringFreeStructure()); modelToBuild.setCellClassification(true); modelToBuild.setMipLayer(mipLayer); setProgress(85); // evaluation StringBuilder cnamesInfo = new StringBuilder( "Evaluation for object classification model with classes: "); for (int i = 0; i < modelToBuild.getClassShapes().size(); i++) { cnamesInfo.append(modelToBuild.getClassShapes().get(i).getName()); if (i < modelToBuild.getClassShapes().size() - 1) cnamesInfo.append(", "); } logger.info(cnamesInfo.toString()); Evaluation evaluation = new Evaluation(trainSet); evaluation.evaluateModel(classifier.getClassifier(), trainSet); logger.info(evaluation.toSummaryString()); if (evaluation.pctCorrect() < OrbitUtils.ACCURACY_WARNING) { String w = "Warning: The model classifies the training objects only with an accuracy of " + evaluation.pctCorrect() + "%.\nThat means that the marked objects are not diverse enough.\nYou might want to remove some marked objects and mark some more representative ones.\nHowever, you can still use this model if you want (check the object classification)."; logger.warn(w); if (withGUI && !ScaleoutMode.SCALEOUTMODE.get()) { JOptionPane.showMessageDialog(null, w, "Warning: Low accuracy", JOptionPane.WARNING_MESSAGE); } } } catch (Exception e) { classifier = null; logger.error("error training classifier: ", e); } logger.info("training done."); timeEst = 0L; setProgress(100); }
From source file:com.actelion.research.orbit.imageAnalysis.tasks.TrainWorker.java
License:Open Source License
/**
 * Trains the pixel/region classification model: gathers feature vectors from
 * the class shapes of all image frames (enforcing a single common image/mip
 * layer across frames with training data), caps the training set at MAXINST
 * samples (shuffled subset), builds a Weka Instances set, trains an SMO
 * classifier, stores it on modelToBuild, and evaluates on the training set —
 * warning the user (dialog on the EDT in GUI mode) when training accuracy is
 * below OrbitUtils.ACCURACY_WARNING.
 *
 * NOTE(review): evaluation is on the TRAINING set by design — it is a
 * "are the drawn class shapes diverse enough" sanity check, not a
 * generalization estimate.
 *
 * @throws OrbitImageServletException propagated from feature extraction
 */
private void trainClassifier() throws OrbitImageServletException { logger.debug("start trainClassifier"); if (modelToBuild != null && modelToBuild.getClassifier() != null) modelToBuild.getClassifier().setBuild(false); trainSet = null;/*w w w . j av a 2s. co m*/ List<double[]> trainData = new ArrayList<double[]>(); int mipLayer = -1; // used for checking if all iFrames (with trainData) have the same mapLayer (otherwise the model cannot be trained) for (ImageFrame iFrame : iFrames) { if (logger.isTraceEnabled()) logger.trace( iFrame.getTitle() + ": #ClassShapes: " + iFrame.recognitionFrame.getClassShapes().size()); for (int i = 0; i < iFrame.recognitionFrame.getClassShapes().size(); i++) { // checkPaused(); if (iFrame.recognitionFrame.getClassShapes().get(i).getShapeList().size() > 0) { // set and check mip level only for iFrames with shapes (training data) if (mipLayer < 0) { mipLayer = iFrame.getMipLayer(); logger.trace("iFrame candidate mipLayer {} from iFrame with width {}", mipLayer, iFrame.recognitionFrame.bimg.getWidth()); } else { if (mipLayer != iFrame.getMipLayer()) { logger.error( "Model cannot be trained on different image layers. Please use only training data of the same image layer."); return; } } } List<Shape> shapes = iFrame.recognitionFrame.getClassShapes().get(i).getShapeList(); trainData.addAll(getFeatures(shapes, i + 1, iFrame.recognitionFrame.bimg)); // classes 1.0, 2.0, ... 
// --- Cap the sample count, then build the Instances header on first use:
// one numeric attribute per feature plus a nominal class attribute
// with values "1.0", "2.0", ... (one per class shape).
} } logger.trace("train levelNum: {}", mipLayer); if (trainData.size() == 0) { logger.error("trainset is empty, classifier cannot be trained."); trainSet = null; return; } if (isCancelled()) { logger.debug("canceled"); cleanUp(); return; } timeEst = 1000 * 10L; setProgress(10); logger.debug("trainData contains " + trainData.size() + " samples"); // limit training instances if (trainData.size() > MAXINST) { Collections.shuffle(trainData, rand); trainData = trainData.subList(0, MAXINST); logger.debug("trainSet shirked to " + trainData.size() + " instances"); } Attribute classAttr = null; // create the first time a new trainSet. All further trainings will append new instances. if (trainSet == null) { // build traindata header double[] firstRow = trainData.get(0); ArrayList<Attribute> attrInfo = new ArrayList<Attribute>(firstRow.length); for (int a = 0; a < firstRow.length - 1; a++) { Attribute attr = new Attribute("a" + a); // if (a<firstRow.length-2) attr.setWeight(0.1d); else attr.setWeight(1.0d); attrInfo.add(attr); } List<String> classValues = new ArrayList<String>( iFrames.get(0).recognitionFrame.getClassShapes().size()); for (int i = 0; i < iFrames.get(0).recognitionFrame.getClassShapes().size(); i++) { classValues.add((i + 1) + ".0"); // "1.0", "2.0", ... 
// --- Map each row's class value to its nominal index, wrap rows as
// DenseInstances, resample, then train the SMO classifier (the commented
// block documents abandoned deep-learning experiments — not for production).
} classAttr = new Attribute("class", classValues); attrInfo.add(classAttr); trainSet = new Instances("trainSet pattern classes", attrInfo, trainData.size()); trainSet.setClassIndex(firstRow.length - 1); } else classAttr = trainSet.attribute("class"); // add instances for (double[] vals : trainData) { double classV = classAttr.indexOfValue(Double.toString(vals[vals.length - 1])); vals[vals.length - 1] = classV; //Instance inst = new Instance(1.0d, vals); Instance inst = new DenseInstance(1.0d, vals); trainSet.add(inst); } trainSet = trainSet.resample(rand); logger.debug("trainSet contains " + trainSet.numInstances() + " instances"); // building classifier if (isCancelled()) { cleanUp(); return; } checkPaused(); timeEst = 1000 * 5L; setProgress(20); logger.info("Start training classifier... "); Classifier c; /* // experiments with deep learning... do not use in production. if (AparUtils.DEEPORBIT) { FeatureDescription fd = modelToBuild!=null? modelToBuild.getFeatureDescription(): new FeatureDescription(); TissueFeatures tissueFeaturre = AparUtils.createTissueFeatures(fd, null); int numOutNeurons = modelToBuild.getClassShapes().size(); int numInNeurons = tissueFeaturre.prepareDoubleArray().length-1; logger.debug("numNeuronsIn:"+numInNeurons+" numNeuronsOut:"+numOutNeurons); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(numInNeurons,100, numOutNeurons); for (int a=0; a<numOutNeurons; a++) { neuralNet.getOutputNeurons()[a].setLabel("class"+a); } neuralNet.connectInputsToOutputs(); MomentumBackpropagation mb = new MomentumBackpropagation(); mb.setLearningRate(0.2d); mb.setMomentum(0.7d); //mb.setMaxIterations(20); mb.setMaxError(0.12); neuralNet.setLearningRule(mb); c = new WekaNeurophClassifier(neuralNet); } else { c = new weka.classifiers.functions.SMO(); } */ c = new weka.classifiers.functions.SMO(); //weka.classifiers.functions.LibSVM c = new weka.classifiers.functions.LibSVM(); //Classifier c = new weka.classifiers.trees.J48(); classifier = new 
ClassifierWrapper(c); //classifier = new weka.classifiers.bayes.BayesNet(); //classifier = new weka.classifiers.functions.MultilayerPerceptron(); //((weka.classifiers.functions.SMO)classifier).setKernel(new weka.classifiers.functions.supportVector.RBFKernel()); try { classifier.buildClassifier(trainSet); classifier.setBuild(true); modelToBuild.setClassifier(classifier); modelToBuild.setStructure(trainSet.stringFreeStructure()); modelToBuild.setCellClassification(false); modelToBuild.setMipLayer(mipLayer); logger.debug("training done"); // evaluation StringBuilder cnamesInfo = new StringBuilder("Evaluation for model with classes: "); for (int i = 0; i < modelToBuild.getClassShapes().size(); i++) { cnamesInfo.append(modelToBuild.getClassShapes().get(i).getName()); if (i < modelToBuild.getClassShapes().size() - 1) cnamesInfo.append(", "); } logger.info(cnamesInfo.toString()); Evaluation evaluation = new Evaluation(trainSet); evaluation.evaluateModel(classifier.getClassifier(), trainSet); logger.info(evaluation.toSummaryString()); if (evaluation.pctCorrect() < OrbitUtils.ACCURACY_WARNING) { final String w = "Warning: The model classifies the training shapes only with an accuracy of " + evaluation.pctCorrect() + "%.\nThat means that the drawn class shapes are not diverse enough.\nYou might want to remove some class shapes and mark some more representative regions.\nHowever, you can still use this model if you want (check the classification)."; logger.warn(w); if (withGUI && !ScaleoutMode.SCALEOUTMODE.get()) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { JOptionPane.showMessageDialog(null, w, "Warning: Low accuracy", JOptionPane.WARNING_MESSAGE); } }); } } } catch (Exception e) { classifier = null; logger.error("error training classifier", e); } // logger.trace(classifier.toString()); }