List of usage examples for weka.core Instances get
@Override
public Instance get(int index)
From source file:tr.gov.ulakbim.jDenetX.experiments.wrappers.EvalActiveBoostingID.java
License:Open Source License
public Instances clusteredInstances(Instances data) { if (data == null) { throw new NullPointerException("Data is null at clusteredInstances method"); }/*w ww . j a va 2s.c om*/ Instances sampled_data = data; for (int i = 0; i < sampled_data.numInstances(); i++) { sampled_data.remove(i); } SimpleKMeans sKmeans = new SimpleKMeans(); data.setClassIndex(data.numAttributes() - 1); Remove filter = new Remove(); filter.setAttributeIndices("" + (data.classIndex() + 1)); List assignments = new ArrayList(); try { filter.setInputFormat(data); Instances dataClusterer = Filter.useFilter(data, filter); String[] options = new String[3]; options[0] = "-I"; // max. iterations options[1] = "500"; options[2] = "-O"; sKmeans.setNumClusters(data.numClasses()); sKmeans.setOptions(options); sKmeans.buildClusterer(dataClusterer); System.out.println("Kmeans\n:" + sKmeans); System.out.println(Arrays.toString(sKmeans.getAssignments())); assignments = Arrays.asList(sKmeans.getAssignments()); } catch (Exception e) { e.printStackTrace(); } System.out.println("Assignments\n: " + assignments); ClusterEvaluation eval = new ClusterEvaluation(); eval.setClusterer(sKmeans); try { eval.evaluateClusterer(data); } catch (Exception e) { e.printStackTrace(); } int classesToClustersMap[] = eval.getClassesToClusters(); for (int i = 0; i < classesToClustersMap.length; i++) { if (assignments.get(i).equals(((Integer) classesToClustersMap[(int) data.get(i).classValue()]))) { ((Instances) sampled_data).add(data.get(i)); } } return ((Instances) sampled_data); }
From source file:tr.gov.ulakbim.jDenetX.experiments.wrappers.EvalActiveBoostingID.java
License:Open Source License
public static Instances clusterInstances(Instances data) { XMeans xmeans = new XMeans(); Remove filter = new Remove(); Instances dataClusterer = null; if (data == null) { throw new NullPointerException("Data is null at clusteredInstances method"); }/*from w w w. j a v a 2s. c o m*/ //Get the attributes from the data for creating the sampled_data object ArrayList<Attribute> attrList = new ArrayList<Attribute>(); Enumeration attributes = data.enumerateAttributes(); while (attributes.hasMoreElements()) { attrList.add((Attribute) attributes.nextElement()); } Instances sampled_data = new Instances(data.relationName(), attrList, 0); data.setClassIndex(data.numAttributes() - 1); sampled_data.setClassIndex(data.numAttributes() - 1); filter.setAttributeIndices("" + (data.classIndex() + 1)); data.remove(0);//In Wavelet Stream of MOA always the first element comes without class try { filter.setInputFormat(data); dataClusterer = Filter.useFilter(data, filter); String[] options = new String[4]; options[0] = "-L"; // max. 
iterations options[1] = Integer.toString(noOfClassesInPool - 1); if (noOfClassesInPool > 2) { options[1] = Integer.toString(noOfClassesInPool - 1); xmeans.setMinNumClusters(noOfClassesInPool - 1); } else { options[1] = Integer.toString(noOfClassesInPool); xmeans.setMinNumClusters(noOfClassesInPool); } xmeans.setMaxNumClusters(data.numClasses() + 1); System.out.println("No of classes in the pool: " + noOfClassesInPool); xmeans.setUseKDTree(true); //xmeans.setOptions(options); xmeans.buildClusterer(dataClusterer); System.out.println("Xmeans\n:" + xmeans); } catch (Exception e) { e.printStackTrace(); } //System.out.println("Assignments\n: " + assignments); ClusterEvaluation eval = new ClusterEvaluation(); eval.setClusterer(xmeans); try { eval.evaluateClusterer(data); int classesToClustersMap[] = eval.getClassesToClusters(); //check the classes to cluster map int clusterNo = 0; for (int i = 0; i < data.size(); i++) { clusterNo = xmeans.clusterInstance(dataClusterer.get(i)); //Check if the class value of instance and class value of cluster matches if ((int) data.get(i).classValue() == classesToClustersMap[clusterNo]) { sampled_data.add(data.get(i)); } } } catch (Exception e) { e.printStackTrace(); } return ((Instances) sampled_data); }
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/**
 * Get test error of current classifier on a specific image and its binary labels.
 *
 * @param image input image
 * @param labels binary labels (non-zero pixel = white class, zero = black class)
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error rate, or -1 if the evaluation failed
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels; the labels stack is assumed to have the same dimensions as the image.
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();
    // Accumulates the labeled instances of every slice.
    Instances testData = null;
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        // Assign the class of every pixel from the label slice:
        // non-zero pixel -> white class, zero pixel -> black class.
        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        // Merge this slice's instances into the accumulated test set.
        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    // -1 signals that the evaluation could not be completed.
    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return error;
}
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/**
 * Get test error of current classifier on a specific image and its binary labels,
 * creating the features by applying the given filters.
 *
 * @param image input image
 * @param labels binary labels (non-zero pixel = white class, zero = black class)
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error rate, or -1 if the evaluation failed
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels; the labels stack is assumed to have the same dimensions as the image.
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();
    // Accumulates the labeled instances of every slice.
    Instances testData = null;
    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        // Assign the class of every pixel from the label slice:
        // non-zero pixel -> white class, zero pixel -> black class.
        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        // Merge this slice's instances into the accumulated test set.
        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    // -1 signals that the evaluation could not be completed.
    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return error;
}
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/** * Update the class attribute of "data" from * the input binary labels. The number of instances of "data" * must match the size of the input labels image (or stack) * * @param data input instances//from w ww . j a v a 2 s .c o m * @param labels binary labels * @param classIndex1 index of the white (different from 0) class * @param classIndex2 index of the black (0) class */ public static void updateDataClassification(Instances data, ImagePlus labels, int classIndex1, int classIndex2) { // Check sizes final int size = labels.getWidth() * labels.getHeight() * labels.getStackSize(); if (size != data.numInstances()) { IJ.log("Error: labels size does not match loaded training data set size."); return; } final int width = labels.getWidth(); final int height = labels.getHeight(); final int depth = labels.getStackSize(); // Update class with new labels for (int n = 0, z = 1; z <= depth; z++) { final ImageProcessor slice = labels.getImageStack().getProcessor(z); for (int y = 0; y < height; y++) for (int x = 0; x < width; x++, n++) data.get(n).setClassValue(slice.getPixel(x, y) > 0 ? classIndex1 : classIndex2); } }
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/** * Update the class attribute of "data" from * the input binary labels. The number of instances of "data" * must match the size of the input labels image (or stack) * * @param data input instances//from w ww . ja v a 2s . c om * @param labels binary labels * @param classIndex1 index of the white (different from 0) class * @param classIndex2 index of the black (0) class */ public static void updateDataClassification(Instances data, ImagePlus labels, int classIndex1, int classIndex2, ArrayList<Point3f>[] mismatches) { // Check sizes final int size = labels.getWidth() * labels.getHeight() * labels.getStackSize(); if (size != data.numInstances()) { IJ.log("Error: labels size does not match loaded training data set size."); return; } final int width = labels.getWidth(); final int height = labels.getHeight(); final int depth = labels.getStackSize(); // Update class with new labels for (int n = 0, z = 1; z <= depth; z++) { final ImageProcessor slice = labels.getImageStack().getProcessor(z); for (int y = 0; y < height; y++) for (int x = 0; x < width; x++, n++) { final double newValue = slice.getPixel(x, y) > 0 ? classIndex1 : classIndex2; /* // reward matching with previous value... if(data.get(n).classValue() == newValue) { double weight = data.get(n).weight(); data.get(n).setWeight(++weight); } */ data.get(n).setClassValue(newValue); } } /* if(null != mismatches) for(int i=0; i<depth; i++) { IJ.log("slice " + i + ": " + mismatches[i].size() + " mismatches"); for(Point3f p : mismatches[i]) { //IJ.log("point = " + p); final int n = (int) p.x + ((int) p.y -1) * width + i * (width*height); double weight = data.get(n).weight(); data.get(n).setWeight(++weight); } } */ }
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/**
 * Write current instances into an ARFF file.
 *
 * @param data set of instances to save
 * @param filename ARFF file name
 * @return true when the file was written, false on any error
 */
public boolean writeDataToARFF(Instances data, String filename) {
    // BUG FIX: the old version called out.close() in a finally block without a null
    // check, so a failure in the stream constructor caused a NullPointerException.
    // try-with-resources closes the writer safely in every case.
    try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename)))) {
        // The header (relation + attributes, no instances) comes first.
        final Instances header = new Instances(data, 0);
        out.write(header.toString());
        for (int i = 0; i < data.numInstances(); i++) {
            out.write(data.get(i).toString() + "\n");
        }
        return true;
    } catch (Exception e) {
        IJ.log("Error: couldn't write instances into .ARFF file.");
        IJ.showMessage("Exception while saving data as ARFF file");
        e.printStackTrace();
        return false;
    }
}
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/** * Classify instances concurrently/*from ww w .j ava 2s. c o m*/ * * @param data set of instances to classify * @param classifier current classifier * @param counter auxiliary counter to be able to update the progress bar * @param probabilityMaps return a probability map for each class instead of a * classified image * @return classification result */ private static Callable<double[][]> classifyInstances(final Instances data, final AbstractClassifier classifier, final AtomicInteger counter, final boolean probabilityMaps) { if (Thread.currentThread().isInterrupted()) return null; return new Callable<double[][]>() { public double[][] call() { final int numInstances = data.numInstances(); final int numClasses = data.numClasses(); final double[][] classificationResult; if (probabilityMaps) classificationResult = new double[numClasses][numInstances]; else classificationResult = new double[1][numInstances]; for (int i = 0; i < numInstances; i++) { try { if (0 == i % 4000) { if (Thread.currentThread().isInterrupted()) return null; counter.addAndGet(4000); } if (probabilityMaps) { double[] prob = classifier.distributionForInstance(data.get(i)); for (int k = 0; k < numClasses; k++) classificationResult[k][i] = prob[k]; } else { classificationResult[0][i] = classifier.classifyInstance(data.get(i)); } } catch (Exception e) { IJ.showMessage("Could not apply Classifier!"); e.printStackTrace(); return null; } } return classificationResult; } }; }
From source file:trainableSegmentation.WekaSegmentation.java
License:GNU General Public License
/**
 * Merge two datasets of Weka instances in place: every instance of the
 * second dataset is appended to the first one.
 *
 * @param first first (and destination) dataset
 * @param second second dataset
 */
public void mergeDataInPlace(Instances first, Instances second) {
    final int total = second.numInstances();
    for (int index = 0; index < total; index++) {
        first.add(second.get(index));
    }
}
From source file:tubes2ai.AIJKNaiveBayes.java
@Override public void buildClassifier(Instances i) throws Exception { // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. getCapabilities().testWithFail(i);/*from w w w .java 2 s . co m*/ nAttribute = i.numAttributes(); classIndex = i.classIndex(); Instance inst; Attribute att; int n_instance = i.numInstances(); //inisialisasi matrix 3x3; //pertama cari ada berapa value di kelas nClassValue = i.attribute(classIndex).numValues(); freq = new int[nAttribute][][]; prob = new double[nAttribute][][]; int a = 0; while (a < nAttribute) { int nValue = i.attribute(a).numValues(); if (a != classIndex) { freq[a] = new int[nValue][nClassValue]; prob[a] = new double[nValue][nClassValue]; } else { freq[a] = new int[1][nClassValue]; prob[a] = new double[1][nClassValue]; } a++; } //System.out.println("beres buat matriks"); //inisialisasi matriks sama nilai 0 a = 0; int b; int c; while (a < nAttribute) { //outlook dkk b = 0; int nValue = i.attribute(a).numValues(); //System.out.println("row "+a); while (b < nValue) { c = 0; //System.out.println("row1 "+b); if (a == classIndex) { //System.out.println("row2 "+c); freq[a][0][b] = 0; } else { while (c < nClassValue) { //System.out.println("row2 "+c); freq[a][b][c] = 0; c++; } } b++; } a++; } //System.out.println("beres inisialisasi 0"); a = 0; int val; int classValue; while (a < n_instance) { inst = i.get(a); b = 0; classValue = (int) inst.value(classIndex); while (b < nAttribute) { val = (int) inst.value(b); if (b == classIndex) { freq[b][0][classValue]++; } else { freq[b][val][classValue]++; } b++; } a++; } //System.out.println("beres frekuensi!!!!"); a = 0; while (a < nAttribute) { b = 0; int nValue = i.attribute(a).numValues(); //System.out.println("row "+a); while (b < nValue) { //System.out.println("row1 "+b); if (a != classIndex) { c = 0; while (c < nClassValue) { //System.out.println("freq "+freq[a][b][c]); //System.out.println("freq_index 
"+freq[classIndex][0][c]); prob[a][b][c] = (double) (freq[a][b][c]) / (double) (freq[classIndex][0][c]); //System.out.println("prob ["+a+"]["+b+"]["+c+"] "+ prob[a][b][c]); c++; } } else { prob[a][0][b] = (double) freq[a][0][b] / i.numInstances(); //System.out.println("prob ["+a+"][0]["+b+"] "+ prob[a][0][b]); } b++; } a++; } //System.out.println("beres prob!!!!"); }