List of usage examples for weka.core.Instance.numAttributes()
public int numAttributes();
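Before the library examples below, here is a minimal stand-alone sketch of what numAttributes() reports. It assumes Weka 3.7+ (where DenseInstance exists); the attribute names and values are made up for illustration.

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class NumAttributesDemo {
    public static void main(String[] args) {
        // Build a tiny two-attribute dataset header (hypothetical attribute names).
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("length"));
        attrs.add(new Attribute("width"));
        Instances data = new Instances("demo", attrs, 0);

        // A detached instance reports the number of values it was created with ...
        Instance inst = new DenseInstance(1.0, new double[] { 5.1, 3.5 });
        System.out.println(inst.numAttributes()); // 2

        // ... and after attaching it to a dataset, numAttributes() still matches the header width.
        inst.setDataset(data);
        System.out.println(inst.numAttributes()); // 2
    }
}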
From source file:mulan.classifier.meta.thresholding.ThresholdPrediction.java
License:Open Source License
@Override
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    boolean[] predictedLabels = new boolean[numLabels];
    Instance modifiedIns = modifiedInstanceX(instance, metaDatasetChoice);
    modifiedIns.insertAttributeAt(modifiedIns.numAttributes());
    // set dataset to instance
    modifiedIns.setDataset(classifierInstances);
    double bipartition_key = classifier.classifyInstance(modifiedIns);
    MultiLabelOutput mlo = baseLearner.makePrediction(instance);
    double[] arrayOfScores = mlo.getConfidences();
    for (int i = 0; i < numLabels; i++) {
        if (arrayOfScores[i] >= bipartition_key) {
            predictedLabels[i] = true;
        } else {
            predictedLabels[i] = false;
        }
    }
    MultiLabelOutput final_mlo = new MultiLabelOutput(predictedLabels, mlo.getConfidences());
    return final_mlo;
}
From source file:mulan.classifier.neural.BPMLL.java
License:Open Source License
public MultiLabelOutput makePredictionInternal(Instance instance) throws InvalidDataException {
    Instance inputInstance = null;
    if (nominalToBinaryFilter != null) {
        try {
            nominalToBinaryFilter.input(instance);
            inputInstance = nominalToBinaryFilter.output();
            inputInstance.setDataset(null);
        } catch (Exception ex) {
            throw new InvalidDataException("The input instance for prediction is invalid. "
                    + "Instance is not consistent with the data the model was built for.");
        }
    } else {
        inputInstance = DataUtils.createInstance(instance, instance.weight(), instance.toDoubleArray());
    }

    int numAttributes = inputInstance.numAttributes();
    if (numAttributes < model.getNetInputSize()) {
        throw new InvalidDataException("Input instance does not have enough attributes "
                + "to be processed by the model. Instance is not consistent with the data the model was built for.");
    }

    // if the instance has more attributes than the model input, we assume that the true outputs
    // are there, so we remove them
    List<Integer> someLabelIndices = new ArrayList<Integer>();
    boolean labelsAreThere = false;
    if (numAttributes > model.getNetInputSize()) {
        for (int index : this.labelIndices) {
            someLabelIndices.add(index);
        }
        labelsAreThere = true;
    }

    if (normalizeAttributes) {
        normalizer.normalize(inputInstance);
    }

    int inputDim = model.getNetInputSize();
    double[] inputPattern = new double[inputDim];
    int indexCounter = 0;
    for (int attrIndex = 0; attrIndex < numAttributes; attrIndex++) {
        if (labelsAreThere && someLabelIndices.contains(attrIndex)) {
            continue;
        }
        inputPattern[indexCounter] = inputInstance.value(attrIndex);
        indexCounter++;
    }

    double[] labelConfidences = model.feedForward(inputPattern);
    double threshold = thresholdF.computeThreshold(labelConfidences);
    boolean[] labelPredictions = new boolean[numLabels];
    Arrays.fill(labelPredictions, false);
    for (int labelIndex = 0; labelIndex < numLabels; labelIndex++) {
        if (labelConfidences[labelIndex] > threshold) {
            labelPredictions[labelIndex] = true;
        }
        // translate from bipolar output to binary
        labelConfidences[labelIndex] = (labelConfidences[labelIndex] + 1) / 2;
    }
    MultiLabelOutput mlo = new MultiLabelOutput(labelPredictions, labelConfidences);
    return mlo;
}
From source file:mulan.classifier.neural.MMPLearner.java
License:Open Source License
private double[] getFeatureVector(Instance inputInstance) {
    if (convertNomToBin && nomToBinFilter != null) {
        try {
            nomToBinFilter.input(inputInstance);
            inputInstance = nomToBinFilter.output();
            inputInstance.setDataset(null);
        } catch (Exception ex) {
            throw new InvalidDataException("The input instance for prediction is invalid. "
                    + "Instance is not consistent with the data the model was built for.");
        }
    }
    // check that the number of attributes is at least equal to the model input size
    int numAttributes = inputInstance.numAttributes();
    int modelInputDim = perceptrons.get(0).getWeights().length - 1;
    if (numAttributes < modelInputDim) {
        throw new InvalidDataException("Input instance does not have enough attributes "
                + "to be processed by the model. Instance is not consistent with the data the model was built for.");
    }
    // if the instance has more attributes than the model input, we assume that the true outputs
    // are there, so we remove them
    List<Integer> labelIndices = new ArrayList<Integer>();
    boolean labelsAreThere = false;
    if (numAttributes > modelInputDim) {
        for (int index : this.labelIndices) {
            labelIndices.add(index);
        }
        labelsAreThere = true;
    }
    double[] inputPattern = new double[modelInputDim];
    int indexCounter = 0;
    for (int attrIndex = 0; attrIndex < numAttributes; attrIndex++) {
        if (labelsAreThere && labelIndices.contains(attrIndex)) {
            continue;
        }
        inputPattern[indexCounter] = inputInstance.value(attrIndex);
        indexCounter++;
    }
    return inputPattern;
}
From source file:mulan.classifier.transformation.CalibratedLabelRanking.java
License:Open Source License
/**
 * This method makes a prediction for an instance whose label values are missing.
 *
 * @param instance the instance to predict
 * @return prediction
 * @throws java.lang.Exception
 */
public MultiLabelOutput makePredictionStandard(Instance instance) throws Exception {
    boolean[] bipartition = new boolean[numLabels];
    double[] confidences = new double[numLabels];
    int[] voteLabel = new int[numLabels + 1];
    //System.out.println("Instance:" + instance.toString());

    // delete all labels and add a new attribute at the end
    Instance newInstance = RemoveAllLabels.transformInstance(instance, labelIndices);
    newInstance.insertAttributeAt(newInstance.numAttributes());

    // initialize the array voteLabel
    Arrays.fill(voteLabel, 0);

    int counter = 0;
    for (int label1 = 0; label1 < numLabels - 1; label1++) {
        for (int label2 = label1 + 1; label2 < numLabels; label2++) {
            if (!nodata[counter]) {
                double distribution[] = new double[2];
                try {
                    newInstance.setDataset(metaDataTest[counter]);
                    distribution = oneVsOneModels[counter].distributionForInstance(newInstance);
                } catch (Exception e) {
                    System.out.println(e);
                    return null;
                }
                int maxIndex = (distribution[0] > distribution[1]) ? 0 : 1;
                // Ensure correct predictions both for class values {0,1} and {1,0}
                Attribute classAttribute = metaDataTest[counter].classAttribute();
                if (classAttribute.value(maxIndex).equals("1")) {
                    voteLabel[label1]++;
                } else {
                    voteLabel[label2]++;
                }
            }
            counter++;
        }
    }

    int voteVirtual = 0;
    MultiLabelOutput virtualMLO = virtualLabelModels.makePrediction(instance);
    boolean[] virtualBipartition = virtualMLO.getBipartition();
    for (int i = 0; i < numLabels; i++) {
        if (virtualBipartition[i]) {
            voteLabel[i]++;
        } else {
            voteVirtual++;
        }
    }

    for (int i = 0; i < numLabels; i++) {
        if (voteLabel[i] >= voteVirtual) {
            bipartition[i] = true;
        } else {
            bipartition[i] = false;
        }
        confidences[i] = 1.0 * voteLabel[i] / numLabels;
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, confidences);
    return mlo;
}
From source file:mulan.classifier.transformation.CalibratedLabelRanking.java
License:Open Source License
/**
 * This method makes a prediction for an instance whose label values are missing,
 * according to the QWeighted algorithm for Multilabel Classification (QCMLPP2), which is
 * described in:
 * Loza Mencia, E., Park, S.-H., and Fuernkranz, J. (2009)
 * Efficient voting prediction for pairwise multilabel classification.
 * In Proceedings of the 17th European Symposium on Artificial
 * Neural Networks (ESANN 2009), Bruges (Belgium), April 2009
 *
 * This method reduces the number of classifier evaluations and guarantees the same
 * multilabel output as ordinary voting. However, the estimated confidences are only
 * approximated, so ranking-based performance is worse than with ordinary voting.
 *
 * @param instance the instance to predict
 * @return prediction
 * @throws java.lang.Exception
 */
public MultiLabelOutput makePredictionQW(Instance instance) throws Exception {
    int[] voteLabel = new int[numLabels];
    int[] played = new int[numLabels + 1];
    int[][] playedMatrix = new int[numLabels + 1][numLabels + 1];
    int[] sortarr = new int[numLabels + 1];
    double[] limits = new double[numLabels];
    boolean[] bipartition = new boolean[numLabels];
    double[] confidences = new double[numLabels];
    int voteVirtual = 0;
    double limitVirtual = 0.0;
    boolean allEqualClassesFound = false;

    // delete all labels and add a new attribute at the end
    Instance newInstance = RemoveAllLabels.transformInstance(instance, labelIndices);
    newInstance.insertAttributeAt(newInstance.numAttributes());

    // initialize the array voteLabel
    Arrays.fill(voteLabel, 0);

    // evaluate all classifiers of the calibrated label beforehand, #numLabels 1 vs. A evaluations
    MultiLabelOutput virtualMLO = virtualLabelModels.makePrediction(instance);
    boolean[] virtualBipartition = virtualMLO.getBipartition();
    for (int i = 0; i < numLabels; i++) {
        if (virtualBipartition[i]) {
            voteLabel[i]++;
        } else {
            voteVirtual++;
        }
        played[i]++;
        playedMatrix[i][numLabels] = 1;
        playedMatrix[numLabels][i] = 1;
        limits[i] = played[i] - voteLabel[i];
    }
    limitVirtual = numLabels - voteVirtual;
    played[numLabels] = numLabels;

    // apply QWeighted iteratively to estimate all relevant labels until the
    // calibrated label is found
    boolean found = false;
    int pos = 0;
    int player1 = -1;
    int player2 = -1;

    while (!allEqualClassesFound && pos < numLabels) {
        while (!found) {
            // opponent selection process: pair best against second best w.r.t. the number of "lost games"
            // player1 = the player with min(limits[player]) that isn't ranked yet
            sortarr = Utils.sort(limits);
            player1 = sortarr[0];
            player2 = -1;
            int i = 1;
            // are there unplayed matches left for player1?
            if (played[player1] < numLabels) {
                // search for the best opponent
                while (player2 == -1 && i < sortarr.length) {
                    // already played?
                    if (playedMatrix[player1][sortarr[i]] == 0) {
                        player2 = sortarr[i];
                    }
                    i++;
                }
                // play the selected pairing and update the stats
                int modelIndex = getRRClassifierIndex(player1, player2);
                newInstance.setDataset(metaDataTest[modelIndex]);
                double[] distribution = oneVsOneModels[modelIndex].distributionForInstance(newInstance);
                int maxIndex = (distribution[0] > distribution[1]) ? 0 : 1;
                // Ensure correct predictions both for class values {0,1} and {1,0}
                Attribute classAttribute = metaDataTest[modelIndex].classAttribute();
                if (classAttribute.value(maxIndex).equals("1")) {
                    voteLabel[player1 > player2 ? player2 : player1]++;
                } else {
                    voteLabel[player1 > player2 ? player1 : player2]++;
                }
                // update stats
                played[player1]++;
                played[player2]++;
                playedMatrix[player1][player2] = 1;
                playedMatrix[player2][player1] = 1;
                limits[player1] = played[player1] - voteLabel[player1];
                limits[player2] = played[player2] - voteLabel[player2];
            } else {
                // fully played, there are no opponents left
                found = true;
            }
        }
        // move the already validated relevant label to the end of the possible opponents
        limits[player1] = Double.MAX_VALUE;
        // check for labels that can still gain at least as many votes as the calibrated label
        allEqualClassesFound = true;
        for (int i = 0; i < numLabels; i++) {
            if (limits[i] <= limitVirtual) {
                allEqualClassesFound = false;
            }
        }
        // search for the next relevant label
        found = false;
        pos++;
    }

    // generate the multilabel output
    for (int i = 0; i < numLabels; i++) {
        if (voteLabel[i] >= voteVirtual) {
            bipartition[i] = true;
        } else {
            bipartition[i] = false;
        }
        confidences[i] = 1.0 * voteLabel[i] / numLabels;
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, confidences);
    return mlo;
}
From source file:mulan.classifier.transformation.IncludeLabelsClassifier.java
License:Open Source License
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    double[] confidences = new double[numLabels];
    boolean[] bipartition = new boolean[numLabels];
    Instance newInstance = pt6Trans.transformInstance(instance);
    // calculate confidences
    //debug(instance.toString());
    for (int i = 0; i < numLabels; i++) {
        newInstance.setDataset(transformed);
        newInstance.setValue(newInstance.numAttributes() - 2,
                instance.dataset().attribute(labelIndices[i]).name());
        //debug(newInstance.toString());
        double[] temp = baseClassifier.distributionForInstance(newInstance);
        //debug(temp.toString());
        confidences[i] = temp[transformed.classAttribute().indexOfValue("1")];
        //debug("" + confidences[i]);
        bipartition[i] = temp[transformed.classAttribute().indexOfValue("1")]
                >= temp[transformed.classAttribute().indexOfValue("0")] ? true : false;
        //debug("" + bipartition[i]);
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, confidences);
    return mlo;
}
From source file:mulan.classifier.transformation.MultiClassLearner.java
License:Open Source License
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    // delete labels
    instance = RemoveAllLabels.transformInstance(instance, labelIndices);
    instance.setDataset(null);
    instance.insertAttributeAt(instance.numAttributes());
    instance.setDataset(header);
    double[] distribution = baseClassifier.distributionForInstance(instance);
    MultiLabelOutput mlo = new MultiLabelOutput(MultiLabelOutput.ranksFromValues(distribution));
    return mlo;
}
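Several of the examples above repeat the same idiom: call insertAttributeAt(numAttributes()) on a detached instance to append a placeholder class slot, then attach the transformed header with setDataset(). The following is a minimal sketch of that pattern in plain Weka, assuming Weka 3.7+; the header, attribute names, and values are made up for illustration and do not come from Mulan.

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class AppendClassSlotSketch {
    public static void main(String[] args) {
        // Hypothetical transformed header: two features plus a nominal class at the end.
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("f1"));
        attrs.add(new Attribute("f2"));
        ArrayList<String> classValues = new ArrayList<String>();
        classValues.add("0");
        classValues.add("1");
        attrs.add(new Attribute("class", classValues));
        Instances header = new Instances("meta", attrs, 0);
        header.setClassIndex(header.numAttributes() - 1);

        // Feature-only instance, not yet attached to any dataset.
        Instance inst = new DenseInstance(1.0, new double[] { 0.3, 0.7 });

        // insertAttributeAt requires a detached instance, so the dataset must be null here.
        inst.insertAttributeAt(inst.numAttributes()); // appends a missing value as the class slot
        inst.setDataset(header);                      // widths now match: 3 attributes
        System.out.println(inst.numAttributes());     // 3
    }
}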
From source file:mulan.classifier.transformation.MultiLabelStacking.java
License:Open Source License
@Override
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    boolean[] bipartition = new boolean[numLabels];
    // the confidences given as final output
    double[] metaconfidences = new double[numLabels];
    // the confidences produced by the first-level ensemble of classifiers
    double[] confidences = new double[numLabels];

    if (!(baseClassifier instanceof IBk)) {
        // getting the confidences for each label
        for (int labelIndex = 0; labelIndex < numLabels; labelIndex++) {
            Instance newInstance = BinaryRelevanceTransformation.transformInstance(instance, labelIndices,
                    labelIndices[labelIndex]);
            newInstance.setDataset(baseLevelData[labelIndex]);
            double distribution[] = new double[2];
            distribution = baseLevelEnsemble[labelIndex].distributionForInstance(newInstance);
            // Ensure correct predictions both for class values {0,1} and {1,0}
            Attribute classAttribute = baseLevelData[labelIndex].classAttribute();
            // The confidence of the label being equal to 1
            confidences[labelIndex] = distribution[classAttribute.indexOfValue("1")];
        }
    } else {
        // old way using brknn
        // MultiLabelOutput prediction = brknn.makePrediction(instance);
        // confidences = prediction.getConfidences();
        // new way
        int numOfNeighbors = ((IBk) baseClassifier).getKNN();
        Instances knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));
        /*
         * Get the label confidence vector.
         */
        for (int i = 0; i < numLabels; i++) {
            // compute the sum of counts for each label in the KNN set
            double count_for_label_i = 0;
            for (int k = 0; k < numOfNeighbors; k++) {
                double value = Double.parseDouble(
                        train.attribute(labelIndices[i]).value((int) knn.instance(k).value(labelIndices[i])));
                if (Utils.eq(value, 1.0)) {
                    count_for_label_i++;
                }
            }
            confidences[i] = count_for_label_i / numOfNeighbors;
        }
    }
    // System.out.println(Utils.arrayToString(confidences));

    /* creation of the meta-instance with the appropriate values */
    double[] values = new double[numLabels + 1];
    if (includeAttrs) {
        values = new double[instance.numAttributes() + 1];
        // Copy the original features
        for (int m = 0; m < featureIndices.length; m++) {
            values[m] = instance.value(featureIndices[m]);
        }
        // Copy the label confidences as additional features
        for (int m = 0; m < confidences.length; m++) {
            values[instance.numAttributes() - numLabels + m] = confidences[m];
        }
    } else {
        for (int m = 0; m < confidences.length; m++) {
            values[m] = confidences[m];
        }
    }

    /* application of the meta-level ensemble to the meta-instance */
    for (int labelIndex = 0; labelIndex < numLabels; labelIndex++) {
        // values[values.length - 1] =
        // instance.value(instance.numAttributes() - numLabels + labelIndex);
        values[values.length - 1] = 0;
        Instance newmetaInstance = DataUtils.createInstance(instance, 1, values);

        double distribution[] = new double[2];
        try {
            distribution = metaLevelFilteredEnsemble[labelIndex].distributionForInstance(newmetaInstance);
        } catch (Exception e) {
            System.out.println(e);
            return null;
        }
        int maxIndex = (distribution[0] > distribution[1]) ? 0 : 1;
        // Ensure correct predictions both for class values {0,1} and {1,0}
        Attribute classAttribute = metaLevelData[labelIndex].classAttribute();
        bipartition[labelIndex] = (classAttribute.value(maxIndex).equals("1")) ? true : false;
        // The confidence of the label being equal to 1
        metaconfidences[labelIndex] = distribution[classAttribute.indexOfValue("1")];
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, metaconfidences);
    return mlo;
}
From source file:mulan.classifier.transformation.TwoStageClassifierChainArchitecture.java
License:Open Source License
/**
 * This method makes a prediction for an instance whose label values are
 * missing, according to the Two Stage Pruned Classifier Chain (TSPCCA) architecture, which is
 * described in: Madjarov, Gj., Gjorgjevikj, D. and Dzeroski, S. Two stage
 * architecture for multi-label learning. Pattern Recognition, vol. 45, pp.
 * 1019-1034, 2012
 *
 * @param instance the instance used
 * @return prediction the prediction made
 * @throws java.lang.Exception Potential exception thrown. To be handled in an upper level.
 */
private MultiLabelOutput makePredictionTSCCV(Instance instance) throws Exception {
    boolean[] bipartition = new boolean[numLabels];
    double[] confidences = new double[numLabels];
    int[] voteLabel = new int[numLabels + 1];
    int[] noVoteLabel = new int[numLabels + 1];
    int[] voteFromVirtualModels = new int[numLabels];
    double[] confidenceFromVirtualModels = new double[numLabels];

    // initialize the vote arrays
    Arrays.fill(voteLabel, 0);
    Arrays.fill(noVoteLabel, 0);
    Arrays.fill(voteFromVirtualModels, 0);
    Arrays.fill(confidenceFromVirtualModels, 0.0);

    int voteVirtual = 0;
    MultiLabelOutput virtualMLO = virtualLabelModels.makePrediction(instance);
    boolean[] virtualBipartition = virtualMLO.getBipartition();

    // number of classifiers of the first layer that forward the instance to the second layer
    int forwards = 0;

    for (int i = 0; i < numLabels; i++) {
        if (virtualMLO.hasConfidences()) {
            confidenceFromVirtualModels[i] = virtualMLO.getConfidences()[i];
            //System.out.print(confidenceFromVirtualModels[i]);
            //System.out.print("\t");
        }
        if (virtualBipartition[i]) {
            voteLabel[i]++;
            voteFromVirtualModels[i]++;
        } else {
            voteVirtual++;
        }
        if (confidenceFromVirtualModels[i] > threshold) {
            forwards++;
        }
    }

    Instance newInstanceFirstStage;
    // add the predictions from the virtual models
    if (instance instanceof SparseInstance) {
        newInstanceFirstStage = modifySparseInstance(instance, virtualMLO.getConfidences());
    } else {
        newInstanceFirstStage = modifyDenseInstance(instance, virtualMLO.getConfidences());
    }

    // delete all labels and add a new attribute at the end
    Instance newInstance = RemoveAllLabels.transformInstance(newInstanceFirstStage, labelIndices);
    newInstance.insertAttributeAt(newInstance.numAttributes());

    int counter = 0;
    for (int label1 = 0; label1 < numLabels - 1; label1++) {
        for (int label2 = label1 + 1; label2 < numLabels; label2++) {
            if (!nodata[counter]) {
                if (confidenceFromVirtualModels[label1] > threshold
                        && confidenceFromVirtualModels[label2] > threshold) {
                    double distribution[];
                    try {
                        newInstance.setDataset(metaDataTest[counter]);
                        distribution = oneVsOneModels[counter].distributionForInstance(newInstance);
                    } catch (Exception e) {
                        System.out.println(e);
                        return null;
                    }
                    int maxIndex = (distribution[0] > distribution[1]) ? 0 : 1;
                    // Ensure correct predictions both for class values {0,1} and {1,0}
                    Attribute classAttribute = metaDataTest[counter].classAttribute();
                    if (classAttribute.value(maxIndex).equals("1")) {
                        voteLabel[label1]++;
                    } else {
                        voteLabel[label2]++;
                    }
                } else if (confidenceFromVirtualModels[label1] > threshold) {
                    voteLabel[label1]++;
                } else if (confidenceFromVirtualModels[label2] > threshold) {
                    voteLabel[label2]++;
                } else {
                    noVoteLabel[label1]++;
                    noVoteLabel[label2]++;
                }
            }
            counter++;
        }
    }
    avgForwards += forwards;

    for (int i = 0; i < numLabels; i++) {
        if (voteLabel[i] >= voteVirtual) {
            bipartition[i] = true;
            confidences[i] = (1.0 * voteLabel[i]) / (numLabels - noVoteLabel[i]);
        } else {
            bipartition[i] = false;
            confidences[i] = 1.0 * confidenceFromVirtualModels[i] / numLabels;
            //confidences[i] = confidenceFromVirtualModels[i];
        }
        //System.out.println(bipartition[i]);
        //System.out.println(confidences[i]);
        //confidences[i] *= confidenceFromVirtualModels[i];
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, confidences);
    return mlo;
}
From source file:mulan.classifier.transformation.TwoStagePrunedClassifierChainArchitecture.java
License:Open Source License
/**
 * This method makes a prediction for an instance whose label values are
 * missing, according to the Two Stage Voting Method (TSVM), which is described
 * in: Madjarov, Gj., Gjorgjevikj, D. and Dzeroski, S. Efficient two stage
 * voting architecture for pairwise multi-label classification. In AI 2010:
 * Advances in Artificial Intelligence (J. Li, ed.), vol. 6464 of Lecture
 * Notes in Computer Science, pp. 164-173, 2011
 *
 * @param instance the instance used
 * @return prediction
 * @throws java.lang.Exception Potential exception thrown. To be handled in an upper level.
 */
private MultiLabelOutput makePredictionTSCCA(Instance instance) throws Exception {
    boolean[] bipartition = new boolean[numLabels];
    double[] confidences = new double[numLabels];
    int[] voteLabel = new int[numLabels + 1];
    int[] noVoteLabel = new int[numLabels + 1];
    int[] voteFromVirtualModels = new int[numLabels];
    double[] confidenceFromVirtualModels = new double[numLabels];
    //System.out.println("Instance:" + instance.toString());

    // initialize the vote arrays
    Arrays.fill(voteLabel, 0);
    Arrays.fill(noVoteLabel, 0);
    Arrays.fill(voteFromVirtualModels, 0);
    Arrays.fill(confidenceFromVirtualModels, 0.0);

    int voteVirtual = 0;
    MultiLabelOutput virtualMLO = virtualLabelModels.makePrediction(instance);
    boolean[] virtualBipartition = virtualMLO.getBipartition();

    // number of classifiers of the first layer that forward the instance to the second layer
    int forwards = 0;

    for (int i = 0; i < numLabels; i++) {
        if (virtualMLO.hasConfidences()) {
            confidenceFromVirtualModels[i] = virtualMLO.getConfidences()[i];
            //System.out.print(confidenceFromVirtualModels[i]);
            //System.out.print("\t");
        }
        if (virtualBipartition[i]) {
            voteLabel[i]++;
            voteFromVirtualModels[i]++;
        } else {
            voteVirtual++;
        }
        if (confidenceFromVirtualModels[i] > threshold) {
            forwards++;
        }
    }

    int counter = 0;
    for (int label1 = 0; label1 < numLabels - 1; label1++) {
        for (int label2 = label1 + 1; label2 < numLabels; label2++) {
            Instance newInstanceFirstStage;
            // add the predictions from the virtual models
            if (instance instanceof SparseInstance) {
                newInstanceFirstStage = modifySparseInstance(instance, virtualMLO.getConfidences()[label1],
                        virtualMLO.getConfidences()[label2]);
            } else {
                newInstanceFirstStage = modifyDenseInstance(instance, virtualMLO.getConfidences()[label1],
                        virtualMLO.getConfidences()[label2]);
            }
            // delete all labels and add a new attribute at the end
            Instance newInstance = RemoveAllLabels.transformInstance(newInstanceFirstStage, labelIndices);
            newInstance.insertAttributeAt(newInstance.numAttributes());

            if (!nodata[counter]) {
                if (confidenceFromVirtualModels[label1] > threshold
                        && confidenceFromVirtualModels[label2] > threshold) {
                    double distribution[];
                    try {
                        newInstance.setDataset(metaDataTest[counter]);
                        distribution = oneVsOneModels[counter].distributionForInstance(newInstance);
                    } catch (Exception e) {
                        System.out.println(e);
                        return null;
                    }
                    int maxIndex = (distribution[0] > distribution[1]) ? 0 : 1;
                    // Ensure correct predictions both for class values {0,1} and {1,0}
                    Attribute classAttribute = metaDataTest[counter].classAttribute();
                    if (classAttribute.value(maxIndex).equals("1")) {
                        voteLabel[label1]++;
                    } else {
                        voteLabel[label2]++;
                    }
                } else if (confidenceFromVirtualModels[label1] > threshold) {
                    voteLabel[label1]++;
                } else if (confidenceFromVirtualModels[label2] > threshold) {
                    voteLabel[label2]++;
                } else {
                    noVoteLabel[label1]++;
                    noVoteLabel[label2]++;
                }
            }
            counter++;
        }
    }
    avgForwards += forwards;

    for (int i = 0; i < numLabels; i++) {
        if (voteLabel[i] >= voteVirtual) {
            bipartition[i] = true;
            confidences[i] = (1.0 * voteLabel[i]) / (numLabels - noVoteLabel[i]);
        } else {
            bipartition[i] = false;
            confidences[i] = 1.0 * confidenceFromVirtualModels[i] / numLabels;
            //confidences[i] = confidenceFromVirtualModels[i];
        }
        //System.out.println(bipartition[i]);
        //System.out.println(confidences[i]);
        //confidences[i] *= confidenceFromVirtualModels[i];
    }
    MultiLabelOutput mlo = new MultiLabelOutput(bipartition, confidences);
    return mlo;
}