List of usage examples for weka.core Instances add
@Override public boolean add(Instance instance)
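Before the per-project examples below, here is a minimal, self-contained sketch of a typical call to Instances.add(Instance). The attribute and class names are made up for illustration, and it assumes Weka 3.7+ where Instances implements java.util.List<Instance> and add() stores a (shallow) copy of the given instance.

    import java.util.ArrayList;

    import weka.core.Attribute;
    import weka.core.DenseInstance;
    import weka.core.Instance;
    import weka.core.Instances;

    public class InstancesAddSketch {
        public static void main(String[] args) {
            // Two numeric attributes and a nominal class attribute (names are illustrative)
            ArrayList<Attribute> attributes = new ArrayList<Attribute>();
            attributes.add(new Attribute("feature1"));
            attributes.add(new Attribute("feature2"));
            ArrayList<String> classValues = new ArrayList<String>();
            classValues.add("yes");
            classValues.add("no");
            attributes.add(new Attribute("class", classValues));

            // Empty dataset with an initial capacity of 10 instances
            Instances data = new Instances("example", attributes, 10);
            data.setClassIndex(data.numAttributes() - 1);

            // Build one instance and append it; the class value is the index of "yes"
            double[] values = new double[] { 1.5, 2.5, classValues.indexOf("yes") };
            Instance inst = new DenseInstance(1.0, values);
            boolean changed = data.add(inst);

            System.out.println("Added: " + changed + ", numInstances = " + data.numInstances());
        }
    }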
From source file: trainableSegmentation.Trainable_Segmentation.java
License: GNU General Public License

    /**
     * Train classifier with the current instances
     */
    public void trainClassifier() {
        // Two list of examples need to be non empty
        int nonEmpty = 0;
        for (int i = 0; i < numOfClasses; i++)
            if (examples[i].size() > 0)
                nonEmpty++;
        if (nonEmpty < 2 && loadedTrainingData == null) {
            IJ.showMessage("Cannot train without at least 2 sets of examples!");
            return;
        }

        // Disable buttons until the training has finished
        setButtonsEnabled(false);

        // Create feature stack if it was not created yet
        if (featureStack.isEmpty()) {
            IJ.showStatus("Creating feature stack...");
            featureStack.updateFeaturesMT();
        }

        IJ.showStatus("Training classifier...");
        Instances data = null;
        if (nonEmpty < 2)
            IJ.log("Training from loaded data only...");
        else {
            final long start = System.currentTimeMillis();
            data = createTrainingInstances();
            final long end = System.currentTimeMillis();
            IJ.log("Creating training data took: " + (end - start) + "ms");
            data.setClassIndex(data.numAttributes() - 1);
        }

        if (loadedTrainingData != null && data != null) {
            IJ.log("Merging data...");
            for (int i = 0; i < loadedTrainingData.numInstances(); i++)
                data.add(loadedTrainingData.instance(i));
            IJ.log("Finished");
        } else if (data == null) {
            data = loadedTrainingData;
            IJ.log("Taking loaded data as only data...");
        }

        IJ.showStatus("Training classifier...");
        IJ.log("Training classifier...");

        if (null == data) {
            IJ.log("WTF");
        }

        // Train the classifier on the current data
        final long start = System.currentTimeMillis();
        try {
            classifier.buildClassifier(data);
        } catch (Exception e) {
            IJ.showMessage(e.getMessage());
            e.printStackTrace();
            return;
        }
        final long end = System.currentTimeMillis();
        final DecimalFormat df = new DecimalFormat("0.0000");
        final String outOfBagError = (rf != null)
                ? ", out of bag error: " + df.format(rf.measureOutOfBagError())
                : "";
        IJ.log("Finished training in " + (end - start) + "ms" + outOfBagError);

        if (updateWholeData) {
            updateTestSet();
            IJ.log("Test dataset updated (" + wholeImageData.numInstances() + " instances, "
                    + wholeImageData.numAttributes() + " attributes).");
        }

        IJ.log("Classifying whole image...");
        classifiedImage = applyClassifier(wholeImageData, trainingImage.getWidth(), trainingImage.getHeight(),
                Runtime.getRuntime().availableProcessors());
        IJ.log("Finished segmentation of whole image.");

        if (useGUI) {
            overlayButton.setEnabled(true);
            resultButton.setEnabled(true);
            applyButton.setEnabled(true);
            probimgButton.setEnabled(true);
            showColorOverlay = false;
            toggleOverlay();
            setButtonsEnabled(true);
        }
        //featureStack.show();
    }
From source file: trainableSegmentation.Trainable_Segmentation.java
License: GNU General Public License

    /**
     * Save training model into a file
     */
    public void saveTrainingData() {
        boolean examplesEmpty = true;
        for (int i = 0; i < numOfClasses; i++)
            if (examples[i].size() > 0) {
                examplesEmpty = false;
                break;
            }
        if (examplesEmpty && loadedTrainingData == null) {
            IJ.showMessage("There is no data to save");
            return;
        }

        if (featureStack.getSize() < 2) {
            setButtonsEnabled(false);
            featureStack.updateFeaturesMT();
            setButtonsEnabled(true);
        }

        Instances data = createTrainingInstances();
        data.setClassIndex(data.numAttributes() - 1);

        if (null != loadedTrainingData && null != data) {
            IJ.log("merging data");
            for (int i = 0; i < loadedTrainingData.numInstances(); i++) {
                // IJ.log("" + i)
                data.add(loadedTrainingData.instance(i));
            }
            IJ.log("Finished");
        } else if (null == data)
            data = loadedTrainingData;

        SaveDialog sd = new SaveDialog("Choose save file", "data", ".arff");
        if (sd.getFileName() == null)
            return;

        IJ.log("Writing training data: " + data.numInstances() + " instances...");
        writeDataToARFF(data, sd.getDirectory() + sd.getFileName());
        IJ.log("Wrote training data: " + sd.getDirectory() + sd.getFileName());
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Save training data into a file (.arff)
     * @param pathname complete path name
     * @return false if error
     */
    public boolean saveData(final String pathname) {
        boolean examplesEmpty = true;
        for (int i = 0; i < numOfClasses; i++) {
            for (int n = 0; n < trainingImage.getImageStackSize(); n++)
                if (examples[n].get(i).size() > 0) {
                    examplesEmpty = false;
                    break;
                }
        }
        if (examplesEmpty && loadedTrainingData == null) {
            IJ.log("There is no data to save");
            return false;
        }

        if (featureStackArray.isEmpty() || updateFeatures) {
            IJ.log("Creating feature stack...");
            if (false == featureStackArray.updateFeaturesMT(featureStackToUpdateTrain))
                return false;
            Arrays.fill(featureStackToUpdateTrain, false);
            filterFeatureStackByList();
            updateFeatures = false;
            IJ.log("Feature stack is now updated.");
        }

        Instances data = null;

        if (examplesEmpty == false) {
            data = createTrainingInstances();
            data.setClassIndex(data.numAttributes() - 1);
        }
        if (null != loadedTrainingData && null != data) {
            IJ.log("Merging data...");
            for (int i = 0; i < loadedTrainingData.numInstances(); i++) {
                // IJ.log("" + i)
                data.add(loadedTrainingData.instance(i));
            }
            IJ.log("Finished: total number of instances = " + data.numInstances());
        } else if (null == data)
            data = loadedTrainingData;

        IJ.log("Writing training data: " + data.numInstances() + " instances...");
        //IJ.log("Data: " + data.numAttributes() + " attributes, " + data.numClasses() + " classes");
        writeDataToARFF(data, pathname);
        IJ.log("Saved training data: " + pathname);

        return true;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Load a new image to segment (no GUI)
     *
     * @param newImage new image to segment
     * @return false if error
     */
    public boolean loadNewImage(ImagePlus newImage) {
        // Accumulate current data in "loadedTrainingData"
        IJ.log("Storing previous image instances...");

        if (featureStackArray.isEmpty() || updateFeatures) {
            IJ.log("Creating feature stack...");
            if (false == featureStackArray.updateFeaturesMT(featureStackToUpdateTrain))
                return false;
            Arrays.fill(featureStackToUpdateTrain, false);
            filterFeatureStackByList();
            updateFeatures = false;
            IJ.log("Feature stack is now updated.");
        }

        // Create instances
        Instances data = createTrainingInstances();
        if (null != loadedTrainingData && null != data) {
            data.setClassIndex(data.numAttributes() - 1);
            IJ.log("Merging data...");
            for (int i = 0; i < loadedTrainingData.numInstances(); i++) {
                // IJ.log("" + i)
                data.add(loadedTrainingData.instance(i));
            }
            IJ.log("Finished");
        } else if (null == data)
            data = loadedTrainingData;

        // Store merged data as loaded data
        loadedTrainingData = data;

        if (null != loadedTrainingData) {
            Attribute classAttribute = loadedTrainingData.classAttribute();
            Enumeration<String> classValues = classAttribute.enumerateValues();

            // Update list of names of loaded classes
            loadedClassNames = new ArrayList<String>();
            while (classValues.hasMoreElements()) {
                final String className = classValues.nextElement().trim();
                loadedClassNames.add(className);
            }
            IJ.log("Number of accumulated examples: " + loadedTrainingData.numInstances());
        } else
            IJ.log("Number of accumulated examples: 0");

        // Updating image
        IJ.log("Updating image...");

        // Set new image as training image
        trainingImage = new ImagePlus("Advanced Weka Segmentation", newImage.getImageStack());

        // Initialize feature stack array (no features yet)
        featureStackArray = new FeatureStackArray(trainingImage.getImageStackSize(), minimumSigma, maximumSigma,
                useNeighbors, membraneThickness, membranePatchSize, enabledFeatures);

        // Remove traces from the lists and ROI overlays and initialize each feature stack
        IJ.log("Removing previous markings...");
        examples = new Vector[trainingImage.getImageStackSize()];
        for (int i = 0; i < trainingImage.getImageStackSize(); i++) {
            examples[i] = new Vector<ArrayList<Roi>>(MAX_NUM_CLASSES);
            for (int j = 0; j < MAX_NUM_CLASSES; j++)
                examples[i].add(new ArrayList<Roi>());
            // Initialize each feature stack (one per slice)
            featureStackArray.set(new FeatureStack(trainingImage.getImageStack().getProcessor(i + 1)), i);
        }

        featureStackToUpdateTrain = new boolean[trainingImage.getImageStackSize()];
        featureStackToUpdateTest = new boolean[trainingImage.getImageStackSize()];
        Arrays.fill(featureStackToUpdateTest, true);
        updateFeatures = true;
        updateWholeData = true;

        // Remove current classification result image
        classifiedImage = null;

        IJ.log("New image: " + newImage.getTitle() + " (" + trainingImage.getImageStackSize() + " slice(s))");
        IJ.log("Done");

        return true;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Get test error of current classifier on a specific image and its binary labels
     *
     * @param image input image
     * @param labels binary labels
     * @param whiteClassIndex index of the white class
     * @param blackClassIndex index of the black class
     * @param verbose option to display evaluation information in the log window
     * @return pixel classification error
     */
    public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
            boolean verbose) {
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + image.getTitle() + "...");

        // Set proper class names (skip empty list ones)
        ArrayList<String> classNames = new ArrayList<String>();
        if (null == loadedClassNames) {
            for (int i = 0; i < numOfClasses; i++)
                if (examples[0].get(i).size() > 0)
                    classNames.add(getClassLabels()[i]);
        } else
            classNames = loadedClassNames;

        // Apply labels
        final int height = image.getHeight();
        final int width = image.getWidth();
        final int depth = image.getStackSize();

        Instances testData = null;

        for (int z = 1; z <= depth; z++) {
            final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                    image.getImageStack().getProcessor(z));
            // Create feature stack for test image
            IJ.showStatus("Creating features for test image (slice " + z + ")...");
            if (verbose)
                IJ.log("Creating features for test image (slice " + z + ")...");
            final FeatureStack testImageFeatures = new FeatureStack(testSlice);
            // Use the same features as the current classifier
            testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
            testImageFeatures.setMaximumSigma(maximumSigma);
            testImageFeatures.setMinimumSigma(minimumSigma);
            testImageFeatures.setMembranePatchSize(membranePatchSize);
            testImageFeatures.setMembraneSize(membraneThickness);
            testImageFeatures.updateFeaturesMT();
            testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
            filterFeatureStackByList(this.featureNames, testImageFeatures);

            final Instances data = testImageFeatures.createInstances(classNames);
            data.setClassIndex(data.numAttributes() - 1);
            if (verbose)
                IJ.log("Assigning classes based on the labels...");

            final ImageProcessor slice = labels.getImageStack().getProcessor(z);
            for (int n = 0, y = 0; y < height; y++)
                for (int x = 0; x < width; x++, n++) {
                    final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                    data.get(n).setClassValue(newValue);
                }

            if (null == testData)
                testData = data;
            else {
                for (int i = 0; i < data.numInstances(); i++)
                    testData.add(data.get(i));
            }
        }
        if (verbose)
            IJ.log("Evaluating test data...");

        double error = -1;
        try {
            final Evaluation evaluation = new Evaluation(testData);
            evaluation.evaluateModel(classifier, testData);
            if (verbose) {
                IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
                IJ.log(evaluation.toClassDetailsString() + "\n");
                IJ.log(evaluation.toMatrixString());
            }
            error = evaluation.errorRate();
        } catch (Exception e) {
            e.printStackTrace();
        }

        return error;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Get test error of current classifier on a specific image and its binary labels
     *
     * @param image input image
     * @param labels binary labels
     * @param filters list of filters to create features
     * @param whiteClassIndex index of the white class
     * @param blackClassIndex index of the black class
     * @param verbose option to display evaluation information in the log window
     * @return pixel classification error
     */
    public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
            int blackClassIndex, boolean verbose) {
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + image.getTitle() + "...");

        // Set proper class names (skip empty list ones)
        ArrayList<String> classNames = new ArrayList<String>();
        if (null == loadedClassNames) {
            for (int i = 0; i < numOfClasses; i++)
                if (examples[0].get(i).size() > 0)
                    classNames.add(getClassLabels()[i]);
        } else
            classNames = loadedClassNames;

        // Apply labels
        final int height = image.getHeight();
        final int width = image.getWidth();
        final int depth = image.getStackSize();

        Instances testData = null;

        for (int z = 1; z <= depth; z++) {
            final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                    image.getImageStack().getProcessor(z));
            // Create feature stack for test image
            IJ.showStatus("Creating features for test image...");
            if (verbose)
                IJ.log("Creating features for test image " + z + "...");
            final FeatureStack testImageFeatures = new FeatureStack(testSlice);
            // Create features by applying the filters
            testImageFeatures.addFeaturesMT(filters);

            final Instances data = testImageFeatures.createInstances(classNames);
            data.setClassIndex(data.numAttributes() - 1);
            if (verbose)
                IJ.log("Assigning classes based on the labels...");

            final ImageProcessor slice = labels.getImageStack().getProcessor(z);
            for (int n = 0, y = 0; y < height; y++)
                for (int x = 0; x < width; x++, n++) {
                    final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                    data.get(n).setClassValue(newValue);
                }

            if (null == testData)
                testData = data;
            else {
                for (int i = 0; i < data.numInstances(); i++)
                    testData.add(data.get(i));
            }
        }
        if (verbose)
            IJ.log("Evaluating test data...");

        double error = -1;
        try {
            final Evaluation evaluation = new Evaluation(testData);
            evaluation.evaluateModel(classifier, testData);
            if (verbose) {
                IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
                IJ.log(evaluation.toClassDetailsString() + "\n");
                IJ.log(evaluation.toMatrixString());
            }
            error = evaluation.errorRate();
        } catch (Exception e) {
            e.printStackTrace();
        }

        return error;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Create training instances out of the user markings
     * @return set of instances (feature vectors in Weka format)
     */
    public Instances createTrainingInstances() {
        //IJ.log("create training instances: num of features = " + featureStackArray.getNumOfFeatures());

        ArrayList<Attribute> attributes = new ArrayList<Attribute>();
        for (int i = 1; i <= featureStackArray.getNumOfFeatures(); i++) {
            String attString = featureStackArray.getLabel(i);
            attributes.add(new Attribute(attString));
            //IJ.log("Add attribute " + attString);
        }

        final ArrayList<String> classes;

        int numOfInstances = 0;
        int numOfUsedClasses = 0;
        if (null == this.loadedTrainingData) {
            classes = new ArrayList<String>();
            for (int i = 0; i < numOfClasses; i++) {
                // Do not add empty lists
                for (int n = 0; n < trainingImage.getImageStackSize(); n++) {
                    if (examples[n].get(i).size() > 0) {
                        if (classes.contains(getClassLabels()[i]) == false)
                            classes.add(getClassLabels()[i]);
                        numOfUsedClasses++;
                    }
                    numOfInstances += examples[n].get(i).size();
                }
            }
        } else {
            classes = this.loadedClassNames;
        }

        attributes.add(new Attribute("class", classes));
        /*
        IJ.log("added class attribute with values:");
        for(int i=0; i<classes.size(); i++)
            IJ.log(" " + classes.get(i));
        */
        final Instances trainingData = new Instances("segment", attributes, numOfInstances);

        IJ.log("Training input:");

        final boolean colorFeatures = this.trainingImage.getType() == ImagePlus.COLOR_RGB;

        // For all classes
        for (int l = 0; l < numOfClasses; l++) {
            int nl = 0;
            // Read all lists of examples
            for (int sliceNum = 1; sliceNum <= trainingImage.getImageStackSize(); sliceNum++)
                for (int j = 0; j < examples[sliceNum - 1].get(l).size(); j++) {
                    Roi r = examples[sliceNum - 1].get(l).get(j);

                    // For polygon rois we get the list of points
                    if (r instanceof PolygonRoi && r.getType() == Roi.FREELINE) {
                        if (r.getStrokeWidth() == 1) {
                            int[] x = r.getPolygon().xpoints;
                            int[] y = r.getPolygon().ypoints;
                            final int n = r.getPolygon().npoints;

                            for (int i = 0; i < n; i++) {
                                double[] values = new double[featureStackArray.getNumOfFeatures() + 1];

                                if (colorFeatures)
                                    for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                        values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                .getPixel(x[i], y[i]);
                                else
                                    for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                        values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                .getPixelValue(x[i], y[i]);

                                values[featureStackArray.getNumOfFeatures()] = (double) l;
                                trainingData.add(new DenseInstance(1.0, values));
                                // increase number of instances for this class
                                nl++;
                            }
                        } else // For thicker lines, include also neighbors
                        {
                            final int width = (int) Math.round(r.getStrokeWidth());
                            FloatPolygon p = r.getFloatPolygon();
                            int n = p.npoints;

                            double x1, y1;
                            double x2 = p.xpoints[0] - (p.xpoints[1] - p.xpoints[0]);
                            double y2 = p.ypoints[0] - (p.ypoints[1] - p.ypoints[0]);
                            for (int i = 0; i < n; i++) {
                                x1 = x2;
                                y1 = y2;
                                x2 = p.xpoints[i];
                                y2 = p.ypoints[i];

                                double dx = x2 - x1;
                                double dy = y1 - y2;
                                double length = (float) Math.sqrt(dx * dx + dy * dy);
                                dx /= length;
                                dy /= length;
                                double x = x2 - dy * width / 2.0;
                                double y = y2 - dx * width / 2.0;

                                int n2 = width;
                                do {
                                    if (x >= 0 && x < featureStackArray.get(sliceNum - 1).getWidth() && y >= 0
                                            && y < featureStackArray.get(sliceNum - 1).getHeight()) {
                                        double[] values = new double[featureStackArray.getNumOfFeatures() + 1];

                                        if (colorFeatures)
                                            for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                                values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                        .getPixelInterpolated(x, y);
                                        else
                                            for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                                values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                        .getInterpolatedValue(x, y);

                                        values[featureStackArray.getNumOfFeatures()] = (double) l;
                                        trainingData.add(new DenseInstance(1.0, values));
                                        // increase number of instances for this class
                                        nl++;
                                    }
                                    x += dy;
                                    y += dx;
                                } while (--n2 > 0);
                            }
                        }
                    } else // for the rest of rois we get ALL points inside the roi
                    {
                        final ShapeRoi shapeRoi = new ShapeRoi(r);
                        final Rectangle rect = shapeRoi.getBounds();

                        final int lastX = rect.x + rect.width;
                        final int lastY = rect.y + rect.height;

                        for (int x = rect.x; x < lastX; x++)
                            for (int y = rect.y; y < lastY; y++)
                                if (shapeRoi.contains(x, y)) {
                                    double[] values = new double[featureStackArray.getNumOfFeatures() + 1];

                                    if (colorFeatures)
                                        for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                            values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                    .getPixel(x, y);
                                    else
                                        for (int z = 1; z <= featureStackArray.getNumOfFeatures(); z++)
                                            values[z - 1] = featureStackArray.get(sliceNum - 1).getProcessor(z)
                                                    .getPixelValue(x, y);

                                    values[featureStackArray.getNumOfFeatures()] = (double) l;
                                    trainingData.add(new DenseInstance(1.0, values));
                                    // increase number of instances for this class
                                    nl++;
                                }
                    }
                }

            IJ.log("# of pixels selected as " + getClassLabels()[l] + ": " + nl);
        }

        if (trainingData.numInstances() == 0)
            return null;

        // Set the index of the class attribute
        trainingData.setClassIndex(featureStackArray.getNumOfFeatures());

        return trainingData;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Train classifier with the current instances
     */
    public boolean trainClassifier() {
        if (Thread.currentThread().isInterrupted()) {
            IJ.log("Classifier training was interrupted.");
            return false;
        }

        // At least two lists of different classes of examples need to be non empty
        int nonEmpty = 0;
        for (int i = 0; i < numOfClasses; i++)
            for (int j = 0; j < trainingImage.getImageStackSize(); j++)
                if (examples[j].get(i).size() > 0) {
                    nonEmpty++;
                    break;
                }

        if (nonEmpty < 2 && null == loadedTrainingData) {
            IJ.showMessage("Cannot train without at least 2 sets of examples!");
            return false;
        }

        // Create feature stack if necessary (training from traces
        // and the features stack is empty or the settings changed)
        if (nonEmpty > 1 && featureStackArray.isEmpty() || updateFeatures) {
            IJ.showStatus("Creating feature stack...");
            IJ.log("Creating feature stack...");
            long start = System.currentTimeMillis();
            if (false == featureStackArray.updateFeaturesMT(featureStackToUpdateTrain)) {
                IJ.log("Feature stack was not updated.");
                IJ.showStatus("Feature stack was not updated.");
                return false;
            }
            Arrays.fill(featureStackToUpdateTrain, false);
            filterFeatureStackByList();
            updateFeatures = false;
            updateWholeData = true;
            long end = System.currentTimeMillis();
            IJ.log("Feature stack array is now updated (" + featureStackArray.getSize() + " slice(s) with "
                    + featureStackArray.getNumOfFeatures() + " features, took " + (end - start) + "ms).");
        }

        IJ.showStatus("Creating training instances...");
        Instances data = null;
        if (nonEmpty < 1)
            IJ.log("Training from loaded data only...");
        else {
            final long start = System.currentTimeMillis();
            traceTrainingData = data = createTrainingInstances();
            final long end = System.currentTimeMillis();
            IJ.log("Creating training data took: " + (end - start) + "ms");
        }

        if (loadedTrainingData != null && data != null) {
            IJ.log("Merging data...");
            for (int i = 0; i < loadedTrainingData.numInstances(); i++)
                data.add(loadedTrainingData.instance(i));
            IJ.log("Finished: total number of instances = " + data.numInstances());
        } else if (data == null) {
            data = loadedTrainingData;
            IJ.log("Taking loaded data as only data...");
        }

        if (null == data) {
            IJ.log("WTF");
        }

        // Update train header
        this.trainHeader = new Instances(data, 0);

        // Resample data if necessary
        if (homogenizeClasses) {
            final long start = System.currentTimeMillis();
            IJ.showStatus("Homogenizing classes distribution...");
            IJ.log("Homogenizing classes distribution...");
            data = homogenizeTrainingData(data);
            final long end = System.currentTimeMillis();
            IJ.log("Done. Homogenizing classes distribution took: " + (end - start) + "ms");
        }

        IJ.showStatus("Training classifier...");
        IJ.log("Training classifier...");

        if (Thread.currentThread().isInterrupted()) {
            IJ.log("Classifier training was interrupted.");
            return false;
        }

        // Train the classifier on the current data
        final long start = System.currentTimeMillis();
        try {
            classifier.buildClassifier(data);
        } catch (InterruptedException ie) {
            IJ.log("Classifier construction was interrupted.");
            return false;
        } catch (Exception e) {
            IJ.showMessage(e.getMessage());
            e.printStackTrace();
            return false;
        }

        // Print classifier information
        IJ.log(this.classifier.toString());

        final long end = System.currentTimeMillis();

        IJ.log("Finished training in " + (end - start) + "ms");
        return true;
    }
From source file: trainableSegmentation.WekaSegmentation.java
License: GNU General Public License

    /**
     * Merge two datasets of Weka instances in place
     * @param first first (and destination) dataset
     * @param second second dataset
     */
    public void mergeDataInPlace(Instances first, Instances second) {
        for (int i = 0; i < second.numInstances(); i++)
            first.add(second.get(i));
    }
From source file: transformation.mimlTOml.ArithmeticTransformation.java
License: Open Source License

    @Override
    public MultiLabelInstances transformDataset() throws Exception {

        Instances newData = new Instances(template);
        int labelIndices[] = dataset.getLabelIndices();
        Instance newInst = new DenseInstance(newData.numAttributes());
        newInst.setDataset(newData); // Sets the reference to the dataset

        // For all bags in the dataset
        double nBags = dataset.getNumBags();
        for (int i = 0; i < nBags; i++) {
            // retrieves a bag
            Bag bag = dataset.getBag(i);
            // sets the bagLabel
            newInst.setValue(0, bag.value(0));

            // retrieves instances (relational value) for each bag
            Instances instances = bag.getBagAsInstances();
            // for all attributes in bag
            for (int j = 0, attIdx = 1; j < instances.numAttributes(); j++, attIdx++) {
                double value = instances.meanOrMode(j);
                newInst.setValue(attIdx, value);
            }

            // inserts label information into the instance
            for (int j = 0; j < labelIndices.length; j++) {
                newInst.setValue(updatedLabelIndices[j], dataset.getBag(i).value(labelIndices[j]));
            }

            newData.add(newInst);
        }
        return new MultiLabelInstances(newData, dataset.getLabelsMetaData());
    }