List of usage examples for weka.core Instance toString
public String toString(Attribute att);
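The examples that follow use both this overload and the index-based toString(int attIndex) to read a single attribute value as text. A minimal sketch of the basic call pattern is shown below; the dataset path "weather.arff" and the class name are illustrative assumptions only, not taken from any example on this page.

import java.io.FileReader;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;

public class ToStringDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset (the file name is a placeholder assumption)
        Instances data = new Instances(new FileReader("weather.arff"));
        Instance first = data.instance(0);

        // Index-based overload: textual value of attribute 0
        String byIndex = first.toString(0);

        // Attribute-based overload: same value, resolved through the dataset header
        Attribute att = data.attribute(0);
        String byAttribute = first.toString(att);

        System.out.println(byIndex + " / " + byAttribute);
    }
}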
From source file:dataMining.kNN.java
/**
 * Finds the most frequent decision value among the neighbours in the list
 * passed as a parameter.
 *
 * @param list the list of neighbours
 * @return the decision value
 */
private double selectValue(ArrayList<Instance> list) {
    HashMap<Double, Integer> valuesMap = new HashMap<>();
    for (Instance i : list) {
        if (valuesMap.containsKey(Double.parseDouble(i.toString(numAtt)))) {
            int values = valuesMap.get(Double.parseDouble(i.toString(numAtt))) + 1;
            valuesMap.replace(Double.parseDouble(i.toString(numAtt)), values);
        } else {
            valuesMap.put(Double.parseDouble(i.toString(numAtt)), 1);
        }
    }
    Collection c = valuesMap.values();
    Iterator it = c.iterator();
    int max = Integer.parseInt(it.next().toString());
    while (it.hasNext()) {
        int isMax = Integer.parseInt(it.next().toString());
        if (max < isMax) {
            max = isMax;
        }
    }
    double value = 0;
    for (Double d : valuesMap.keySet()) {
        if (valuesMap.get(d).equals(max)) {
            value = d;
        }
    }
    return value;
}
From source file:dataMining.kNN.java
/**
 * Computes the distance between two objects using the Manhattan metric.
 *
 * @param a the first object
 * @param b the second object
 * @return the distance
 */
private int manhattan(Instance a, Instance b) {
    int sum = 0;
    for (int i = 0; i < a.numAttributes(); i++) {
        if (i == numAtt) {
            continue;
        } else {
            int tmp = 0;
            try {
                tmp = Math.abs(Integer.parseInt(a.toString(i)) - Integer.parseInt(b.toString(i)));
            } catch (NumberFormatException ex) {
                tmp = 0;
            }
            sum = sum + tmp;
        }
    }
    return sum;
}
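The method above round-trips every value through toString(i) and Integer.parseInt, falling back to 0 whenever parsing fails. For purely numeric attributes the same distance can be read directly from the instance with value(i), which avoids the string parsing entirely. A hedged sketch of that alternative (the method name and the classIdx parameter are illustrative, not from the source):

// Manhattan distance over numeric attributes, reading raw doubles instead of parsed strings
private double manhattanNumeric(Instance a, Instance b, int classIdx) {
    double sum = 0.0;
    for (int i = 0; i < a.numAttributes(); i++) {
        if (i == classIdx || a.isMissing(i) || b.isMissing(i)) {
            continue; // skip the class attribute and missing values
        }
        sum += Math.abs(a.value(i) - b.value(i));
    }
    return sum;
}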
From source file:dataMining.Szacowanie.java
/**
 * Reviews the data and estimates the classification correctness.
 *
 * @return the classification evaluation result
 */
public String reviewData() {
    float correctCount = 0;
    for (int i = 0; i < testKNNData.numInstances(); i++) {
        Instance kNN = testKNNData.instance(i);
        Instance test = testData.instance(i);
        // Compare the textual value of attribute 10 (presumably the class) in both datasets
        if (kNN.toString(10).equals(test.toString(10))) {
            correctCount++;
        }
    }
    // Note: the denominator is hard-coded to 99 test instances in the original source
    float correct = (correctCount / 99) * 100;
    DecimalFormat df = new DecimalFormat();
    df.setMaximumFractionDigits(2);
    df.setMinimumFractionDigits(2);
    String s = "%\tWynik Klasyfikacji:\n%\t\tSklasyfikowanych obiektów: " + testKNNData.numInstances()
            + "\n%\t\tPoprawnie Sklasyfikowanych: " + (int) correctCount
            + "\n%\t\tPoprawna Klasyfikacja na poziomie: " + df.format(correct) + "\n";
    return s;
}
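The check above compares the textual form of attribute 10 in the two datasets. If both datasets share the same header (so nominal value indices line up), the same comparison can be made on the numeric values, which skips the string conversion. This variant is a sketch under that assumption, not part of the original class:

// Equivalent check on the numeric value of attribute 10, assuming identical headers
if (kNN.value(10) == test.value(10)) {
    correctCount++;
}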
From source file:GClass.EvaluationInternal.java
License:Open Source License
/**
 * Prints the predictions for the given dataset into a String variable.
 */
protected static String printClassifications(Classifier classifier, Instances train, String testFileName,
        int classIndex, Range attributesToOutput) throws Exception {
    StringBuffer text = new StringBuffer();
    if (testFileName.length() != 0) {
        BufferedReader testReader = null;
        try {
            testReader = new BufferedReader(new FileReader(testFileName));
        } catch (Exception e) {
            throw new Exception("Can't open file " + e.getMessage() + '.');
        }
        Instances test = new Instances(testReader, 1);
        if (classIndex != -1) {
            test.setClassIndex(classIndex - 1);
        } else {
            test.setClassIndex(test.numAttributes() - 1);
        }
        int i = 0;
        while (test.readInstance(testReader)) {
            Instance instance = test.instance(0);
            Instance withMissing = (Instance) instance.copy();
            withMissing.setDataset(test);
            double predValue = ((Classifier) classifier).classifyInstance(withMissing);
            if (test.classAttribute().isNumeric()) {
                if (Instance.isMissingValue(predValue)) {
                    text.append(i + " missing ");
                } else {
                    text.append(i + " " + predValue + " ");
                }
                if (instance.classIsMissing()) {
                    text.append("missing");
                } else {
                    text.append(instance.classValue());
                }
                text.append(" " + attributeValuesString(withMissing, attributesToOutput) + "\n");
            } else {
                if (Instance.isMissingValue(predValue)) {
                    text.append(i + " missing ");
                } else {
                    text.append(i + " " + test.classAttribute().value((int) predValue) + " ");
                }
                if (Instance.isMissingValue(predValue)) {
                    text.append("missing ");
                } else {
                    text.append(classifier.distributionForInstance(withMissing)[(int) predValue] + " ");
                }
                text.append(instance.toString(instance.classIndex()) + " "
                        + attributeValuesString(withMissing, attributesToOutput) + "\n");
            }
            test.delete(0);
            i++;
        }
        testReader.close();
    }
    return text.toString();
}
From source file:kmeans.MyKMeans.java
void updateCentroidForNominal(int numCentroid, int numAttr) {
    // System.out.println("Update centroid " + numCentroid + " attr " + dataSource.attribute(numAttr) + "|" + numAttr);
    int distinctValue = dataSource.attribute(numAttr).numValues();
    int[] countInst = new int[distinctValue];
    for (int i = 0; i < distinctValue; i++)
        countInst[i]++;
    Attribute attr = dataSource.attribute(numAttr);
    List<Integer> listInst = listClusteredInstance.get(numCentroid);
    // Find the most frequent attribute value within one cluster
    for (int i = 0; i < listInst.size(); i++) {
        Instance inst = dataSource.get(listInst.get(i));
        if (!inst.isMissing(attr)) {
            String attrValue = inst.toString(attr);
            int indexValue = attr.indexOfValue(attrValue);
            // System.out.println(inst + "|" + attrValue + "|" + indexValue);
            countInst[indexValue]++;
        }
    }
    int max = -1, idxMax = -1;
    for (int i = 0; i < distinctValue; i++) {
        if (countInst[i] > max) {
            idxMax = i;
            max = countInst[i];
        }
    }
    String newValue = attr.value(idxMax);
    Instance tempCentroid = centroid.get(numCentroid);
    tempCentroid.setValue(attr, newValue);
    centroid.set(numCentroid, tempCentroid);
}
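For a nominal attribute, toString(attr) returns the value's label, which attr.indexOfValue(...) then maps back to a position in the attribute's value list, exactly as the counting loop above does. A small hedged sketch of that round trip (variable names follow the example; the instance index 0 is arbitrary):

Attribute attr = dataSource.attribute(numAttr);
Instance inst = dataSource.get(0);
if (attr.isNominal() && !inst.isMissing(attr)) {
    String label = inst.toString(attr);        // the nominal label stored in this cell
    int indexValue = attr.indexOfValue(label); // position of that label within the attribute
    System.out.println(label + " -> " + indexValue);
}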
From source file:milk.gui.experiment.MIResultsPanel.java
License:Open Source License
/**
 * Queries the user enough to make a database query to retrieve experiment results.
 */
protected void setInstancesFromDBaseQuery() {
    try {
        if (m_InstanceQuery == null) {
            m_InstanceQuery = new MIInstanceQuery();
        }
        String dbaseURL = m_InstanceQuery.getDatabaseURL();
        dbaseURL = (String) JOptionPane.showInputDialog(this, "Enter the database URL", "Query Database",
                JOptionPane.PLAIN_MESSAGE, null, null, dbaseURL);
        if (dbaseURL == null) {
            m_FromLab.setText("Cancelled");
            return;
        }
        m_InstanceQuery.setDatabaseURL(dbaseURL);
        m_InstanceQuery.connectToDatabase();
        if (!m_InstanceQuery.experimentIndexExists()) {
            m_FromLab.setText("No experiment index");
            return;
        }
        m_FromLab.setText("Getting experiment index");
        Instances index = m_InstanceQuery.retrieveInstances("SELECT * FROM " + MIInstanceQuery.EXP_INDEX_TABLE);
        if (index.numInstances() == 0) {
            m_FromLab.setText("No experiments available");
            return;
        }
        m_FromLab.setText("Got experiment index");
        DefaultListModel lm = new DefaultListModel();
        for (int i = 0; i < index.numInstances(); i++) {
            lm.addElement(index.instance(i).toString());
        }
        JList jl = new JList(lm);
        ListSelectorDialog jd = new ListSelectorDialog(null, jl);
        int result = jd.showDialog();
        if (result != ListSelectorDialog.APPROVE_OPTION) {
            m_FromLab.setText("Cancelled");
            return;
        }
        Instance selInst = index.instance(jl.getSelectedIndex());
        Attribute tableAttr = index.attribute(MIInstanceQuery.EXP_RESULT_COL);
        String table = MIInstanceQuery.EXP_RESULT_PREFIX + selInst.toString(tableAttr);
        setInstancesFromDatabaseTable(table);
    } catch (Exception ex) {
        m_FromLab.setText("Problem reading database");
    }
}
From source file:mulan.transformations.ColumnSubsetSelection.java
License:Open Source License
public MultiLabelInstances transform(MultiLabelInstances data, int kappa, long seed) {
    try {
        if (kappa >= data.getNumLabels()) {
            throw new MulanRuntimeException(
                    "Dimensionality reduction parameter should not exceed or be equal to the total count of labels!");
        }
        // integer indices of physical label assignments
        int[] labelIndices = data.getLabelIndices();
        int[] indices = new int[labelIndices.length];
        System.arraycopy(labelIndices, 0, indices, 0, labelIndices.length);
        // load label indicator matrix in a Matrix object
        double[][] datmatrix = new double[data.getDataSet().numInstances()][labelIndices.length];
        Matrix mat = new Matrix(datmatrix);
        for (int i = 0; i < data.getDataSet().numInstances(); i++) {
            Instance instance = data.getDataSet().instance(i);
            for (int j = 0; j < labelIndices.length; j++) {
                mat.set(i, j, Double.parseDouble(instance.toString(labelIndices[j])));
                //DEBUG: System.out.print("" + Double.parseDouble(instance.toString(labelIndices[j])) + ",");
            }
        }
        // make private copy of the label matrix
        this.Y = mat;
        // compute eigenvalue analysis of label indicator matrix
        SingularValueDecomposition svd = new SingularValueDecomposition(mat);
        //DEBUG: System.out.println("rows = " + svd.getV().getRowDimension() + ", cols = " + svd.getV().getColumnDimension());
        assert (svd.getV().getRowDimension() == svd.getV().getColumnDimension());
        Matrix rVec = svd.getV();
        Matrix Vk = new Matrix(new double[svd.getV().getRowDimension()][kappa]);
        // snippet (2)
        for (int i = 0; i < kappa; i++) {
            for (int j = 0; j < svd.getV().getColumnDimension(); j++) {
                Vk.set(j, i, rVec.get(i, j));
            }
        }
        // compute column selection probabilities
        double[] selectionProbabilities = new double[Vk.getRowDimension()];
        double[] selectionProbabilitiesCDF = new double[Vk.getRowDimension()];
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] = 0.0;
            for (int j = 0; j < kappa; j++) {
                selectionProbabilities[i] += Math.pow(Vk.get(i, j), 2);
            }
            selectionProbabilities[i] = Math.sqrt(selectionProbabilities[i]);
        }
        // normalize probabilities
        double psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            //System.out.println("psum = " + psum);
        }
        //System.out.println("psum = " + psum);
        //assert (psum != 0 && psum == 1.0); // must be non-zero and unitary
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            selectionProbabilities[i] /= psum;
        }
        psum = 0.0;
        for (int i = 0; i < Vk.getRowDimension(); i++) {
            psum += selectionProbabilities[i];
            selectionProbabilitiesCDF[i] = psum;
        }
        // add selected columns on a linked list
        sampledIndiceSet = new java.util.HashSet();
        // run column-sampling loop
        int sampling_count = 0;
        Random generator = new Random(seed);
        while (sampledIndiceSet.size() < kappa) // ...loop until knapsack gets filled...
        {
            // pick a random number
            //DEBUG:
            //double roulette = generator.nextDouble() * 0.5;
            double roulette = generator.nextDouble();
            // seek closest match according to sampling probabilities
            int closest_match = -1;
            // iterate label cols
            for (int i = 0; i < Vk.getRowDimension(); i++) {
                if (roulette < selectionProbabilitiesCDF[i]) // ...spot a possible match...
                {
                    // ...if so, select and quit scope...
                    closest_match = i; // BEWARE! "i" is an index over the label enumeration, not an ordering index!
                    break;
                }
            }
            // if we stepped on the flag, something serious is going on!
            assert (closest_match != -1);
            // see if column was selected; if not, add it
            if (!sampledIndiceSet.contains((Object) closest_match)) {
                sampledIndiceSet.add((Object) closest_match);
                //System.out.println("DEBUG(CSSP): Added column " + closest_match + " to the sampled column set!");
            }
            sampling_count += 1;
        }
        System.out.println("Sampling loop completed in " + sampling_count + " runs.");
        // compute indices-to-remove array
        indicesToRemove = new int[labelIndices.length - sampledIndiceSet.size()];
        // compute all **PHYSICAL** (not VIRTUAL) indices of label columns for CSSP to remove
        int idx = 0;
        for (int i = 0; i < labelIndices.length; i++) {
            if (!sampledIndiceSet.contains((Object) i)) {
                indicesToRemove[idx] = indices[i];
                idx += 1;
            }
        }
        // apply CSSP: select columns to remove
        int[] selectedIndicesObj = indicesToRemove.clone();
        selectedIndicesInt = new int[selectedIndicesObj.length];
        for (int i = 0; i < selectedIndicesObj.length; i++) {
            selectedIndicesInt[i] = (int) selectedIndicesObj[i];
        }
        // compute Moore-Penrose pseudo-inverse matrix of the column-reduced label indicator matrix
        double[][] datmatrix2 = new double[data.getDataSet().numInstances()][labelIndices.length
                - selectedIndicesInt.length];
        Matrix matC = new Matrix(datmatrix2);
        //DEBUG:
        //System.out.println("Selecting only " + matC.getColumnDimension() + " columns; removing " + selectedIndicesInt.length + " columns out of an original total of " + data.getLabelIndices().length + " labels!");
        // compute indices to keep
        java.util.LinkedList<Integer> indicesToKeep = new java.util.LinkedList();
        for (int i = 0; i < labelIndices.length; i++) {
            boolean keep = true;
            // see if this col has to be removed
            for (int k = 0; k < selectedIndicesInt.length; k++) {
                if (selectedIndicesInt[k] == labelIndices[i]) {
                    keep = false;
                    break;
                }
            }
            // add if we actually should keep this...
            if (keep) {
                indicesToKeep.add(labelIndices[i]);
            }
        }
        assert (indicesToKeep.size() == matC.getColumnDimension());
        for (int i = 0; i < matC.getRowDimension(); i++) {
            // get data instance
            Instance instance = data.getDataSet().instance(i);
            // replicate data from ALL columns that WOULD not be removed by CSSP
            for (int j = 0; j < matC.getColumnDimension(); j++) {
                // get label index
                int corrIdx = (int) indicesToKeep.get(j);
                // update matC
                matC.set(i, j, Double.parseDouble(instance.toString(corrIdx)));
            }
        }
        //DEBUG: System.out.println("matC rows = " + matC.getRowDimension() + ", cols = " + matC.getColumnDimension() + "\n data original label cols # = " + data.getLabelIndices().length);
        // make private copy of projection matrices
        // Moore-Penrose pseudo-inverse of the label matrix matC
        // see http://robotics.caltech.edu/~jwb/courses/ME115/handouts/pseudo.pdf for an SVD-based workaround for MP-inverse
        // Moore-Penrose pseudoinverse computation based on Singular Value Decomposition (SVD)
        /*
        SingularValueDecomposition decomp = Vk.svd();
        Matrix S = decomp.getS();
        Matrix Scross = new Matrix(selectedIndicesInt.length, selectedIndicesInt.length);
        for (int i = 0; i < selectedIndicesInt.length; i++) {
            for (int j = 0; j < selectedIndicesInt.length; j++) {
                if (i == j) {
                    if (S.get(i, j) == 0) {
                        Scross.set(i, j, 0.0);
                    } else {
                        Scross.set(i, j, 1 / S.get(i, j));
                    }
                } else {
                    Scross.set(i, j, 0.0);
                }
            }
        }
        this.Yc = decomp.getV().times(Scross).times(decomp.getU().transpose());
        */
        // DEBUG: traditional way of computing the Moore-Penrose pseudoinverse
        if (matC.getRowDimension() >= matC.getColumnDimension()) {
            this.Yc = ((matC.transpose().times(matC)).inverse()).times(matC.transpose());
        } else {
            this.Yc = matC.transpose().times((matC.times(matC.transpose()).inverse()));
        }
        //System.out.println("Yc rows: " + Yc.getRowDimension() + "\nYc cols: " + Yc.getColumnDimension() + "\n Y rows: " + Y.getRowDimension() + "\nY cols: " + Y.getColumnDimension());
        this.ProjectionMatrix = Yc.times(Y); // compute projection matrix
        // add sampled indices to Remove object
        remove = new Remove();
        remove.setAttributeIndicesArray(selectedIndicesInt);
        remove.setInvertSelection(false);
        remove.setInputFormat(data.getDataSet());
        // apply remove filter on the labels
        transformed = Filter.useFilter(data.getDataSet(), remove);
        this.sampledIndicesObj = indicesToKeep.toArray();
        return data.reintegrateModifiedDataSet(transformed);
    } catch (Exception ex) {
        // do nothing
        //Logger.getLogger(BinaryRelevanceTransformation.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
From source file:NaiveBayes.NaiveBayes.java
@Override
public double classifyInstance(Instance last) {
    double prob[] = new double[last.classAttribute().numValues()];
    for (int classIndex = 0; classIndex < last.attribute(last.classIndex()).numValues(); classIndex++) { // classification
        double temp = 1;
        int i = 0;
        for (Atribut attr : getList()) {
            if (i == last.classIndex())
                i++;
            //System.out.println(attr.getName() + "=" + last.attribute(i).name());
            temp *= attr.getFrekuensiNilai(last.attribute(last.classIndex()).value(classIndex), last.toString(i),
                    last.value(i), last.attribute(i).isNumeric()) / numEachClass[classIndex];
            i++;
        }
        double res;
        res = numEachClass[classIndex] / last.numAttributes() * temp;
        prob[classIndex] = res;
    }
    return maxIndex(prob);
}
From source file:regulyasocjacyjne.RegulyAsocjacyjne.java
public static void infoObj() throws Exception {
    Instances data = loadData("./src/date/irysy.arff");
    for (int i = 0; i < data.numInstances(); i++) // iterate over the objects (rows)
    {
        System.out.println("Wiersz numer " + i + ":");
        Instance instance = data.instance(i); // get the object (data row) with the given index
        for (int j = 0; j < instance.numAttributes(); j++) // iterate over the attributes of the object
        {
            String textValue = instance.toString(j); // get the textual representation of the attribute value at index j
            System.out.print(textValue + ", ");
        }
        System.out.println();
    }
}
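When dumping rows like this, missing values do not throw; toString(j) renders them as the ARFF placeholder "?", so the printed row mirrors what the .arff file itself would contain. A tiny hedged check, reusing the data variable from the example above:

Instance instance = data.instance(0);
instance.setMissing(0);                   // mark attribute 0 as missing
System.out.println(instance.toString(0)); // prints "?"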
From source file:sirius.trainer.step4.RunClassifierWithNoLocationIndex.java
License:Open Source License
public static Object jackKnifeClassifierOneWithNoLocationIndex(JInternalFrame parent,
        ApplicationData applicationData, JTextArea classifierOneDisplayTextArea,
        GenericObjectEditor m_ClassifierEditor, double ratio, GraphPane myGraph,
        ClassifierResults classifierResults, int range, double threshold, boolean outputClassifier,
        String classifierName, String[] classifierOptions, boolean returnClassifier,
        int randomNumberForClassifier) {
    try {
        StatusPane statusPane = applicationData.getStatusPane();
        long totalTimeStart = System.currentTimeMillis(), totalTimeElapsed;
        Classifier tempClassifier;
        if (m_ClassifierEditor != null)
            tempClassifier = (Classifier) m_ClassifierEditor.getValue();
        else
            tempClassifier = Classifier.forName(classifierName, classifierOptions);
        // Assume that class attribute is the last attribute - This should be the case for all Sirius produced Arff files
        // split the instances into positive and negative
        Instances posInst = new Instances(applicationData.getDataset1Instances());
        posInst.setClassIndex(posInst.numAttributes() - 1);
        for (int x = 0; x < posInst.numInstances();)
            if (posInst.instance(x).stringValue(posInst.numAttributes() - 1).equalsIgnoreCase("pos"))
                x++;
            else
                posInst.delete(x);
        posInst.deleteAttributeType(Attribute.STRING);
        Instances negInst = new Instances(applicationData.getDataset1Instances());
        negInst.setClassIndex(negInst.numAttributes() - 1);
        for (int x = 0; x < negInst.numInstances();)
            if (negInst.instance(x).stringValue(negInst.numAttributes() - 1).equalsIgnoreCase("neg"))
                x++;
            else
                negInst.delete(x);
        negInst.deleteAttributeType(Attribute.STRING);
        // Train classifier one with the full dataset first then do cross-validation to gauge its accuracy
        long trainTimeStart = 0, trainTimeElapsed = 0;
        if (statusPane != null)
            statusPane.setText("Training Classifier One... May take a while... Please wait...");
        // Record Start Time
        trainTimeStart = System.currentTimeMillis();
        Instances fullInst = new Instances(applicationData.getDataset1Instances());
        fullInst.setClassIndex(fullInst.numAttributes() - 1);
        Classifier classifierOne;
        if (m_ClassifierEditor != null)
            classifierOne = (Classifier) m_ClassifierEditor.getValue();
        else
            classifierOne = Classifier.forName(classifierName, classifierOptions);
        if (outputClassifier)
            classifierOne.buildClassifier(fullInst);
        // Record Total Time used to build classifier one
        trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
        // Training Done
        String tclassifierName;
        if (m_ClassifierEditor != null)
            tclassifierName = m_ClassifierEditor.getValue().getClass().getName();
        else
            tclassifierName = classifierName;
        if (classifierResults != null) {
            classifierResults.updateList(classifierResults.getClassifierList(), "Classifier: ", tclassifierName);
            classifierResults.updateList(classifierResults.getClassifierList(), "Training Data: ",
                    " Jack Knife Validation");
            classifierResults.updateList(classifierResults.getClassifierList(), "Time Used: ",
                    Utils.doubleToString(trainTimeElapsed / 1000.0, 2) + " seconds");
        }
        String classifierOneFilename = applicationData.getWorkingDirectory() + File.separator + "ClassifierOne_"
                + randomNumberForClassifier + ".scores";
        BufferedWriter outputCrossValidation = new BufferedWriter(new FileWriter(classifierOneFilename));
        //Instances foldTrainingInstance;
        //Instances foldTestingInstance;
        int positiveDataset1FromInt = applicationData.getPositiveDataset1FromField();
        int positiveDataset1ToInt = applicationData.getPositiveDataset1ToField();
        int negativeDataset1FromInt = applicationData.getNegativeDataset1FromField();
        int negativeDataset1ToInt = applicationData.getNegativeDataset1ToField();
        Step1TableModel positiveStep1TableModel = applicationData.getPositiveStep1TableModel();
        Step1TableModel negativeStep1TableModel = applicationData.getNegativeStep1TableModel();
        FastaFileManipulation fastaFile = new FastaFileManipulation(positiveStep1TableModel,
                negativeStep1TableModel, positiveDataset1FromInt, positiveDataset1ToInt, negativeDataset1FromInt,
                negativeDataset1ToInt, applicationData.getWorkingDirectory());
        FastaFormat fastaFormat;
        String header[] = new String[fullInst.numInstances()];
        String data[] = new String[fullInst.numInstances()];
        int counter = 0;
        while ((fastaFormat = fastaFile.nextSequence("pos")) != null) {
            header[counter] = fastaFormat.getHeader();
            data[counter] = fastaFormat.getSequence();
            counter++;
        }
        while ((fastaFormat = fastaFile.nextSequence("neg")) != null) {
            header[counter] = fastaFormat.getHeader();
            data[counter] = fastaFormat.getSequence();
            counter++;
        }
        // run jack knife validation
        for (int x = 0; x < fullInst.numInstances(); x++) {
            if (applicationData.terminateThread == true) {
                if (statusPane != null)
                    statusPane.setText("Interrupted - Classifier One Training Completed");
                outputCrossValidation.close();
                return classifierOne;
            }
            if (statusPane != null)
                statusPane.setText("Running " + (x + 1) + " / " + fullInst.numInstances());
            Instances trainPosInst = new Instances(posInst);
            Instances trainNegInst = new Instances(negInst);
            Instance testInst;
            // split data into training and testing
            if (x < trainPosInst.numInstances()) {
                testInst = posInst.instance(x);
                trainPosInst.delete(x);
            } else {
                testInst = negInst.instance(x - posInst.numInstances());
                trainNegInst.delete(x - posInst.numInstances());
            }
            Instances trainInstances;
            if (trainPosInst.numInstances() < trainNegInst.numInstances()) {
                trainInstances = new Instances(trainPosInst);
                int max = (int) (ratio * trainPosInst.numInstances());
                if (ratio == -1)
                    max = trainNegInst.numInstances();
                Random rand = new Random(1);
                for (int y = 0; y < trainNegInst.numInstances() && y < max; y++) {
                    int index = rand.nextInt(trainNegInst.numInstances());
                    trainInstances.add(trainNegInst.instance(index));
                    trainNegInst.delete(index);
                }
            } else {
                trainInstances = new Instances(trainNegInst);
                int max = (int) (ratio * trainNegInst.numInstances());
                if (ratio == -1)
                    max = trainPosInst.numInstances();
                Random rand = new Random(1);
                for (int y = 0; y < trainPosInst.numInstances() && y < max; y++) {
                    int index = rand.nextInt(trainPosInst.numInstances());
                    trainInstances.add(trainPosInst.instance(index));
                    trainPosInst.delete(index);
                }
            }
            Classifier foldClassifier = tempClassifier;
            foldClassifier.buildClassifier(trainInstances);
            double[] results = foldClassifier.distributionForInstance(testInst);
            int classIndex = testInst.classIndex();
            String classValue = testInst.toString(classIndex);
            outputCrossValidation.write(header[x]);
            outputCrossValidation.newLine();
            outputCrossValidation.write(data[x]);
            outputCrossValidation.newLine();
            if (classValue.equals("pos"))
                outputCrossValidation.write("pos,0=" + results[0]);
            else if (classValue.equals("neg"))
                outputCrossValidation.write("neg,0=" + results[0]);
            else {
                outputCrossValidation.close();
                throw new Error("Invalid Class Type!");
            }
            outputCrossValidation.newLine();
            outputCrossValidation.flush();
        }
        outputCrossValidation.close();
        PredictionStats classifierOneStatsOnJackKnife = new PredictionStats(classifierOneFilename, range,
                threshold);
        totalTimeElapsed = System.currentTimeMillis() - totalTimeStart;
        if (classifierResults != null)
            classifierResults.updateList(classifierResults.getResultsList(), "Total Time Used: ",
                    Utils.doubleToString(totalTimeElapsed / 60000, 2) + " minutes "
                            + Utils.doubleToString((totalTimeElapsed / 1000.0) % 60.0, 2) + " seconds");
        //if(classifierOneDisplayTextArea != null)
        classifierOneStatsOnJackKnife.updateDisplay(classifierResults, classifierOneDisplayTextArea, true);
        applicationData.setClassifierOneStats(classifierOneStatsOnJackKnife);
        if (myGraph != null)
            myGraph.setMyStats(classifierOneStatsOnJackKnife);
        if (statusPane != null)
            statusPane.setText("Done!");
        if (returnClassifier)
            return classifierOne;
        else
            return classifierOneStatsOnJackKnife;
    } catch (Exception e) {
        e.printStackTrace();
        JOptionPane.showMessageDialog(parent, e.getMessage(), "ERROR", JOptionPane.ERROR_MESSAGE);
        return null;
    }
}