List of usage examples for weka.classifiers.AbstractClassifier.classifyInstance
@Override public double classifyInstance(Instance instance) throws Exception
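For reference, here is a minimal end-to-end sketch of the call pattern the examples below build on. The file names rf.model and test.arff are hypothetical; everything else uses only standard Weka API calls (SerializationHelper, ConverterUtils.DataSource, classifyInstance).

import weka.classifiers.AbstractClassifier;
import weka.core.Instances;
import weka.core.SerializationHelper;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifyExample {
    public static void main(String[] args) throws Exception {
        // Load a previously trained classifier (file name is hypothetical)
        AbstractClassifier classifier =
                (AbstractClassifier) SerializationHelper.read("rf.model");

        // Load the data to classify and declare the class attribute (last column here)
        Instances data = DataSource.read("test.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // classifyInstance returns the index of the predicted class value
        for (int i = 0; i < data.numInstances(); i++) {
            double predicted = classifier.classifyInstance(data.instance(i));
            System.out.println(i + " -> " + data.classAttribute().value((int) predicted));
        }
    }
}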
From source file: trainableSegmentation.Trainable_Segmentation.java
License: GNU General Public License
/**
 * Classify instances concurrently
 *
 * @param data set of instances to classify
 * @param classifier current classifier
 * @param counter auxiliary counter used to update the progress bar
 * @return classification result
 */
private static Callable<double[]> classifyIntances(final Instances data, final AbstractClassifier classifier,
        final AtomicInteger counter) {
    return new Callable<double[]>() {
        public double[] call() {
            final int numInstances = data.numInstances();
            final double[] classificationResult = new double[numInstances];
            for (int i = 0; i < numInstances; i++) {
                try {
                    if (0 == i % 4000)
                        counter.addAndGet(4000);
                    classificationResult[i] = classifier.classifyInstance(data.instance(i));
                } catch (Exception e) {
                    IJ.showMessage("Could not apply Classifier!");
                    e.printStackTrace();
                    return null;
                }
            }
            return classificationResult;
        }
    };
}
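One possible way to run the returned Callable, shown as a hedged sketch rather than code from the original project; the executor setup and the variable names exec, data, classifier and counter are assumptions.

// Required imports (not shown in the snippet above): java.util.concurrent.*
ExecutorService exec = Executors.newFixedThreadPool(
        Runtime.getRuntime().availableProcessors());
try {
    // 'data', 'classifier' and 'counter' are assumed to exist already
    Future<double[]> future = exec.submit(classifyIntances(data, classifier, counter));
    double[] labels = future.get(); // blocks until the whole set is classified
    if (null == labels)
        IJ.log("Classification failed; see the stack trace in the log.");
} finally {
    exec.shutdown();
}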
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Classify a slice in a concurrent way
 *
 * @param slice image to classify
 * @param dataInfo empty set of instances containing the data structure (attributes and classes)
 * @param classifier classifier to use
 * @param counter counter used to display the progress in the tool bar
 * @param probabilityMaps flag to calculate probabilities or binary results
 * @return classification result
 */
public Callable<ImagePlus> classifySlice(final ImagePlus slice, final Instances dataInfo,
        final AbstractClassifier classifier, final AtomicInteger counter, final boolean probabilityMaps) {
    if (Thread.currentThread().isInterrupted())
        return null;
    return new Callable<ImagePlus>() {
        public ImagePlus call() {
            // Create feature stack for slice
            IJ.showStatus("Creating features...");
            IJ.log("Creating features of slice " + slice.getTitle() + "...");
            final FeatureStack sliceFeatures = new FeatureStack(slice);
            // Use the same features as the current classifier
            sliceFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
            sliceFeatures.setMaximumSigma(maximumSigma);
            sliceFeatures.setMinimumSigma(minimumSigma);
            sliceFeatures.setMembranePatchSize(membranePatchSize);
            sliceFeatures.setMembraneSize(membraneThickness);
            if (false == sliceFeatures.updateFeaturesST()) {
                IJ.log("Classifier execution was interrupted.");
                return null;
            }
            filterFeatureStackByList(featureNames, sliceFeatures);

            final int width = slice.getWidth();
            final int height = slice.getHeight();
            final int numClasses = dataInfo.numClasses();

            ImageStack classificationResult = new ImageStack(width, height);

            final int numInstances = width * height;
            final double[][] probArray;
            if (probabilityMaps)
                probArray = new double[numClasses][numInstances];
            else
                probArray = new double[1][numInstances];

            IJ.log("Classifying slice " + slice.getTitle() + "...");

            for (int x = 0; x < width; x++)
                for (int y = 0; y < height; y++) {
                    try {
                        if (0 == (x + y * width) % 4000) {
                            if (Thread.currentThread().isInterrupted())
                                return null;
                            counter.addAndGet(4000);
                        }

                        final DenseInstance ins = sliceFeatures.createInstance(x, y, 0);
                        ins.setDataset(dataInfo);

                        if (probabilityMaps) {
                            double[] prob = classifier.distributionForInstance(ins);
                            for (int k = 0; k < numClasses; k++) {
                                probArray[k][x + y * width] = prob[k];
                            }
                        } else {
                            probArray[0][x + y * width] = classifier.classifyInstance(ins);
                        }
                    } catch (Exception e) {
                        IJ.showMessage("Could not apply Classifier!");
                        e.printStackTrace();
                        return null;
                    }
                }

            if (probabilityMaps) {
                for (int k = 0; k < numClasses; k++)
                    classificationResult.addSlice("class-" + (k + 1),
                            new FloatProcessor(width, height, probArray[k]));
            } else
                classificationResult.addSlice("result", new FloatProcessor(width, height, probArray[0]));

            return new ImagePlus("classified-slice", classificationResult);
        }
    };
}
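The returned Callable classifies a single slice, so a caller typically submits one task per slice of a stack. The sketch below illustrates that pattern under the assumption that segmentation (a WekaSegmentation), image, dataInfo and classifier already exist; it is not taken from the original source.

// Sketch: 'segmentation' (WekaSegmentation), 'image' (an ImagePlus stack),
// 'dataInfo' and 'classifier' are assumed to exist already.
ExecutorService exec = Executors.newFixedThreadPool(
        Runtime.getRuntime().availableProcessors());
AtomicInteger counter = new AtomicInteger(0);
List<Future<ImagePlus>> futures = new ArrayList<Future<ImagePlus>>();
try {
    for (int i = 1; i <= image.getImageStackSize(); i++) {
        // One task per slice; classifySlice builds its own FeatureStack
        ImagePlus slice = new ImagePlus("slice-" + i, image.getImageStack().getProcessor(i));
        futures.add(exec.submit(
                segmentation.classifySlice(slice, dataInfo, classifier, counter, false)));
    }
    for (Future<ImagePlus> f : futures) {
        ImagePlus classified = f.get(); // blocks until the slice is done
        if (null != classified)
            classified.show();
    }
} finally {
    exec.shutdown();
}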
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Classify a list of images in a concurrent way
 *
 * @param images list of images to classify
 * @param dataInfo empty set of instances containing the data structure (attributes and classes)
 * @param classifier classifier to use
 * @param counter counter used to display the progress in the tool bar
 * @param probabilityMaps flag to calculate probabilities or binary results
 * @return classification result
 */
public Callable<ArrayList<ImagePlus>> classifyListOfImages(final ArrayList<ImagePlus> images,
        final Instances dataInfo, final AbstractClassifier classifier, final AtomicInteger counter,
        final boolean probabilityMaps) {
    if (Thread.currentThread().isInterrupted())
        return null;
    return new Callable<ArrayList<ImagePlus>>() {
        public ArrayList<ImagePlus> call() {
            ArrayList<ImagePlus> result = new ArrayList<ImagePlus>();

            for (ImagePlus image : images) {
                // Create feature stack for the image
                IJ.showStatus("Creating features...");
                IJ.log("Creating features of slice " + image.getTitle() + ", size = " + image.getWidth()
                        + "x" + image.getHeight() + "...");
                final FeatureStack sliceFeatures = new FeatureStack(image);
                // Use the same features as the current classifier
                sliceFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
                sliceFeatures.setMaximumSigma(maximumSigma);
                sliceFeatures.setMinimumSigma(minimumSigma);
                sliceFeatures.setMembranePatchSize(membranePatchSize);
                sliceFeatures.setMembraneSize(membraneThickness);
                if (false == sliceFeatures.updateFeaturesST()) {
                    IJ.log("Classifier execution was interrupted.");
                    return null;
                }
                filterFeatureStackByList(featureNames, sliceFeatures);

                final int width = image.getWidth();
                final int height = image.getHeight();
                final int numClasses = dataInfo.numClasses();

                ImageStack classificationResult = new ImageStack(width, height);

                final int numInstances = width * height;
                final double[][] probArray;
                if (probabilityMaps)
                    probArray = new double[numClasses][numInstances];
                else
                    probArray = new double[1][numInstances];

                IJ.log("Classifying slice " + image.getTitle() + "...");

                for (int x = 0; x < width; x++)
                    for (int y = 0; y < height; y++) {
                        try {
                            if (0 == (x + y * width) % 4000) {
                                if (Thread.currentThread().isInterrupted())
                                    return null;
                                counter.addAndGet(4000);
                            }

                            final DenseInstance ins = sliceFeatures.createInstance(x, y, 0);
                            ins.setDataset(dataInfo);

                            if (probabilityMaps) {
                                double[] prob = classifier.distributionForInstance(ins);
                                for (int k = 0; k < numClasses; k++) {
                                    probArray[k][x + y * width] = prob[k];
                                }
                            } else {
                                probArray[0][x + y * width] = classifier.classifyInstance(ins);
                            }
                        } catch (Exception e) {
                            IJ.showMessage("Could not apply Classifier!");
                            e.printStackTrace();
                            return null;
                        }
                    }

                if (probabilityMaps) {
                    for (int k = 0; k < numClasses; k++)
                        classificationResult.addSlice("class-" + (k + 1),
                                new FloatProcessor(width, height, probArray[k]));
                } else
                    classificationResult.addSlice("result",
                            new FloatProcessor(width, height, probArray[0]));

                result.add(new ImagePlus("classified-image-" + image.getTitle(), classificationResult));
            }
            return result;
        }
    };
}
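Since classifyListOfImages processes a whole list inside one task, a caller can parallelize by splitting the list into one sub-list per thread. A hedged sketch, assuming segmentation, allImages, dataInfo and classifier are already available:

// Sketch: 'segmentation', 'allImages' (ArrayList<ImagePlus>), 'dataInfo'
// and 'classifier' are assumed to exist already.
int numThreads = Runtime.getRuntime().availableProcessors();
ExecutorService exec = Executors.newFixedThreadPool(numThreads);
AtomicInteger counter = new AtomicInteger(0);
List<Future<ArrayList<ImagePlus>>> futures = new ArrayList<Future<ArrayList<ImagePlus>>>();
try {
    // One sub-list (and therefore one task) per thread
    int chunk = (allImages.size() + numThreads - 1) / numThreads;
    for (int start = 0; start < allImages.size(); start += chunk) {
        int end = Math.min(start + chunk, allImages.size());
        ArrayList<ImagePlus> subList = new ArrayList<ImagePlus>(allImages.subList(start, end));
        futures.add(exec.submit(segmentation.classifyListOfImages(
                subList, dataInfo, classifier, counter, true))); // true -> probability maps
    }
    ArrayList<ImagePlus> classified = new ArrayList<ImagePlus>();
    for (Future<ArrayList<ImagePlus>> f : futures) {
        ArrayList<ImagePlus> part = f.get();
        if (null != part)
            classified.addAll(part);
    }
} finally {
    exec.shutdown();
}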
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Classify instances concurrently
 *
 * @param fsa feature stack array with the feature vectors
 * @param dataInfo empty set of instances containing the data structure (attributes and classes)
 * @param first index of the first instance to classify (considering the feature stack array as a 1D array)
 * @param numInstances number of instances to classify in this thread
 * @param classifier current classifier
 * @param counter auxiliary counter to be able to update the progress bar
 * @param probabilityMaps if true return a probability map for each class instead of a classified image
 * @return classification result
 */
private static Callable<double[][]> classifyInstances(final FeatureStackArray fsa, final Instances dataInfo,
        final int first, final int numInstances, final AbstractClassifier classifier,
        final AtomicInteger counter, final boolean probabilityMaps) {
    if (Thread.currentThread().isInterrupted())
        return null;
    return new Callable<double[][]>() {
        public double[][] call() {
            final double[][] classificationResult;

            final int width = fsa.getWidth();
            final int height = fsa.getHeight();
            final int sliceSize = width * height;
            final int numClasses = dataInfo.numClasses();

            if (probabilityMaps)
                classificationResult = new double[numClasses][numInstances];
            else
                classificationResult = new double[1][numInstances];

            for (int i = 0; i < numInstances; i++) {
                try {
                    if (0 == i % 4000) {
                        if (Thread.currentThread().isInterrupted())
                            return null;
                        counter.addAndGet(4000);
                    }

                    // Translate the 1D index into slice/x/y coordinates
                    final int absolutePos = first + i;
                    final int slice = absolutePos / sliceSize;
                    final int localPos = absolutePos - slice * sliceSize;
                    final int x = localPos % width;
                    final int y = localPos / width;

                    DenseInstance ins = fsa.get(slice).createInstance(x, y, 0);
                    ins.setDataset(dataInfo);

                    if (probabilityMaps) {
                        double[] prob = classifier.distributionForInstance(ins);
                        for (int k = 0; k < numClasses; k++)
                            classificationResult[k][i] = prob[k];
                    } else {
                        classificationResult[0][i] = classifier.classifyInstance(ins);
                    }
                } catch (Exception e) {
                    IJ.showMessage("Could not apply Classifier!");
                    e.printStackTrace();
                    return null;
                }
            }
            return classificationResult;
        }
    };
}
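This variant treats the whole FeatureStackArray as a flat 1D array of pixels, so the natural calling pattern is to split the index range evenly across threads. The sketch below illustrates that partitioning only; it assumes access to the private classifyInstances helper (e.g. from inside WekaSegmentation) and leaves the stitching of the per-thread results back into images as a placeholder.

// Sketch: 'fsa', 'dataInfo' and 'classifier' are assumed to exist; copying
// the per-thread results into output images is left as a placeholder.
int numThreads = Runtime.getRuntime().availableProcessors();
int totalInstances = fsa.getSize() * fsa.getWidth() * fsa.getHeight();
int perThread = totalInstances / numThreads;
ExecutorService exec = Executors.newFixedThreadPool(numThreads);
AtomicInteger counter = new AtomicInteger(0);
List<Future<double[][]>> futures = new ArrayList<Future<double[][]>>();
try {
    for (int t = 0; t < numThreads; t++) {
        int first = t * perThread;
        // The last thread picks up the remainder of the division
        int n = (t == numThreads - 1) ? totalInstances - first : perThread;
        futures.add(exec.submit(
                classifyInstances(fsa, dataInfo, first, n, classifier, counter, true)));
    }
    for (Future<double[][]> f : futures) {
        double[][] part = f.get(); // one row per class when probabilityMaps is true
        // ... copy 'part' back into the probability map images ...
    }
} finally {
    exec.shutdown();
}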
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License
/**
 * Classify instances concurrently
 *
 * @param data set of instances to classify
 * @param classifier current classifier
 * @param counter auxiliary counter to be able to update the progress bar
 * @param probabilityMaps return a probability map for each class instead of a classified image
 * @return classification result
 */
private static Callable<double[][]> classifyInstances(final Instances data, final AbstractClassifier classifier,
        final AtomicInteger counter, final boolean probabilityMaps) {
    if (Thread.currentThread().isInterrupted())
        return null;
    return new Callable<double[][]>() {
        public double[][] call() {
            final int numInstances = data.numInstances();
            final int numClasses = data.numClasses();

            final double[][] classificationResult;
            if (probabilityMaps)
                classificationResult = new double[numClasses][numInstances];
            else
                classificationResult = new double[1][numInstances];

            for (int i = 0; i < numInstances; i++) {
                try {
                    if (0 == i % 4000) {
                        if (Thread.currentThread().isInterrupted())
                            return null;
                        counter.addAndGet(4000);
                    }

                    if (probabilityMaps) {
                        double[] prob = classifier.distributionForInstance(data.get(i));
                        for (int k = 0; k < numClasses; k++)
                            classificationResult[k][i] = prob[k];
                    } else {
                        classificationResult[0][i] = classifier.classifyInstance(data.get(i));
                    }
                } catch (Exception e) {
                    IJ.showMessage("Could not apply Classifier!");
                    e.printStackTrace();
                    return null;
                }
            }
            return classificationResult;
        }
    };
}
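Because this version takes a plain Instances object, the input can be split with the Instances(Instances, int, int) copy constructor before submitting one task per chunk. A sketch under those assumptions (data, classifier and access to the helper above are presumed):

// Sketch: 'data' (weka.core.Instances) and 'classifier' are assumed to exist,
// as is access to the classifyInstances helper shown above.
int numThreads = Runtime.getRuntime().availableProcessors();
ExecutorService exec = Executors.newFixedThreadPool(numThreads);
AtomicInteger counter = new AtomicInteger(0);
List<Future<double[][]>> futures = new ArrayList<Future<double[][]>>();
try {
    int chunk = (data.numInstances() + numThreads - 1) / numThreads;
    for (int first = 0; first < data.numInstances(); first += chunk) {
        int n = Math.min(chunk, data.numInstances() - first);
        // Instances(Instances, int, int) copies a subsequence of the data set
        Instances part = new Instances(data, first, n);
        futures.add(exec.submit(classifyInstances(part, classifier, counter, false)));
    }
    for (Future<double[][]> f : futures) {
        double[][] res = f.get();
        // res[0] holds one predicted class index per instance
        // (probabilityMaps was false)
    }
} finally {
    exec.shutdown();
}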