List of usage examples for weka.core Instance numClasses
public int numClasses();
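The method reports how many class labels the instance's dataset defines (1 if the class attribute is numeric), which is why the examples below use it to size per-class probability or vote arrays. A minimal standalone sketch (not from the listed sources) of that pattern, assuming a hypothetical dataset file iris.arff whose class attribute is the last one:

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumClassesDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical dataset path; the class attribute is assumed to be the last attribute.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        Instance inst = data.instance(0);
        // numClasses() gives the number of class labels (1 for a numeric class),
        // the usual length for a per-class probability distribution.
        double[] dist = new double[inst.numClasses()];
        System.out.println("Number of classes: " + inst.numClasses());
        System.out.println("Distribution array length: " + dist.length);
    }
}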
From source file:Bilbo.java
License:Open Source License
/**
 * Calculates the class membership probabilities for the given test instance.
 *
 * @param instance the instance to be classified
 * @return predicted class probability distribution
 * @throws Exception if distribution can't be computed successfully
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    double[] sums = new double[instance.numClasses()], newProbs;
    double numPreds = 0;
    for (int i = 0; i < m_NumIterations; i++) {
        if (instance.classAttribute().isNumeric()) {
            double pred = ((NewTree) m_Classifiers[i]).classifyInstance(instance);
            if (!Utils.isMissingValue(pred)) {
                sums[0] += pred;
                numPreds++;
            }
        } else {
            newProbs = ((NewTree) m_Classifiers[i]).distributionForInstance(instance);
            for (int j = 0; j < newProbs.length; j++) {
                sums[j] += newProbs[j];
            }
        }
    }
    if (instance.classAttribute().isNumeric()) {
        if (numPreds == 0) {
            sums[0] = Utils.missingValue();
        } else {
            sums[0] /= numPreds;
        }
        return sums;
    } else if (Utils.eq(Utils.sum(sums), 0)) {
        return sums;
    } else {
        Utils.normalize(sums);
        return sums;
    }
}
From source file:BaggingImprove.java
/**
 * Calculates the class membership probabilities for the given test instance.
 *
 * @param instance the instance to be classified
 * @return predicted class probability distribution
 * @throws Exception if distribution can't be computed successfully
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    double[] sums = new double[instance.numClasses()], newProbs;
    for (int i = 0; i < m_NumIterations; i++) {
        if (instance.classAttribute().isNumeric()) {
            sums[0] += m_Classifiers[i].classifyInstance(instance);
        } else {
            newProbs = m_Classifiers[i].distributionForInstance(instance);
            // Accumulate the per-class probabilities of each base classifier.
            for (int j = 0; j < newProbs.length; j++) {
                sums[j] += newProbs[j];
            }
        }
    }
    if (instance.classAttribute().isNumeric()) {
        sums[0] /= m_NumIterations;
        return sums;
    } else if (Utils.eq(Utils.sum(sums), 0)) {
        return sums;
    } else {
        Utils.normalize(sums);
        return sums;
    }
}
From source file:MultiClassClassifier.java
License:Open Source License
/**
 * Returns the distribution for an instance.
 *
 * @param inst the instance to get the distribution for
 * @return the distribution
 * @throws Exception if the distribution can't be computed successfully
 */
public double[] distributionForInstance(Instance inst) throws Exception {

    if (m_Classifiers.length == 1) {
        return m_Classifiers[0].distributionForInstance(inst);
    }

    double[] probs = new double[inst.numClasses()];

    if (m_Method == METHOD_1_AGAINST_1) {
        double[][] r = new double[inst.numClasses()][inst.numClasses()];
        double[][] n = new double[inst.numClasses()][inst.numClasses()];

        for (int i = 0; i < m_ClassFilters.length; i++) {
            if (m_Classifiers[i] != null) {
                Instance tempInst = (Instance) inst.copy();
                tempInst.setDataset(m_TwoClassDataset);
                double[] current = m_Classifiers[i].distributionForInstance(tempInst);
                Range range = new Range(((RemoveWithValues) m_ClassFilters[i]).getNominalIndices());
                range.setUpper(m_ClassAttribute.numValues());
                int[] pair = range.getSelection();
                if (m_pairwiseCoupling && inst.numClasses() > 2) {
                    r[pair[0]][pair[1]] = current[0];
                    n[pair[0]][pair[1]] = m_SumOfWeights[i];
                } else {
                    if (current[0] > current[1]) {
                        probs[pair[0]] += 1.0;
                    } else {
                        probs[pair[1]] += 1.0;
                    }
                }
            }
        }
        if (m_pairwiseCoupling && inst.numClasses() > 2) {
            return pairwiseCoupling(n, r);
        }
    } else {
        // error-correcting style methods
        for (int i = 0; i < m_ClassFilters.length; i++) {
            m_ClassFilters[i].input(inst);
            m_ClassFilters[i].batchFinished();
            double[] current = m_Classifiers[i].distributionForInstance(m_ClassFilters[i].output());
            // Calibrate the binary classifier scores
            for (int j = 0; j < m_ClassAttribute.numValues(); j++) {
                if (((MakeIndicator) m_ClassFilters[i]).getValueRange().isInRange(j)) {
                    probs[j] += current[1];
                } else {
                    probs[j] += current[0];
                }
            }
        }
    }

    if (Utils.gr(Utils.sum(probs), 0)) {
        Utils.normalize(probs);
        return probs;
    } else {
        return m_ZeroR.distributionForInstance(inst);
    }
}
From source file:SMO.java
License:Open Source License
/**
 * Estimates class probabilities for given instance.
 *
 * @param inst the instance to compute the probabilities for
 * @throws Exception in case of an error
 */
public double[] distributionForInstance(Instance inst) throws Exception {

    // Filter instance
    if (!m_checksTurnedOff) {
        m_Missing.input(inst);
        m_Missing.batchFinished();
        inst = m_Missing.output();
    }

    if (m_NominalToBinary != null) {
        m_NominalToBinary.input(inst);
        m_NominalToBinary.batchFinished();
        inst = m_NominalToBinary.output();
    }

    if (m_Filter != null) {
        m_Filter.input(inst);
        m_Filter.batchFinished();
        inst = m_Filter.output();
    }

    if (!m_fitLogisticModels) {
        double[] result = new double[inst.numClasses()];
        for (int i = 0; i < inst.numClasses(); i++) {
            for (int j = i + 1; j < inst.numClasses(); j++) {
                if ((m_classifiers[i][j].m_alpha != null) || (m_classifiers[i][j].m_sparseWeights != null)) {
                    double output = m_classifiers[i][j].SVMOutput(-1, inst);
                    if (output > 0) {
                        result[j] += 1;
                    } else {
                        result[i] += 1;
                    }
                }
            }
        }
        Utils.normalize(result);
        return result;
    } else {

        // We only need to do pairwise coupling if there are more
        // than two classes.
        if (inst.numClasses() == 2) {
            double[] newInst = new double[2];
            newInst[0] = m_classifiers[0][1].SVMOutput(-1, inst);
            newInst[1] = Instance.missingValue();
            return m_classifiers[0][1].m_logistic.distributionForInstance(new Instance(1, newInst));
        }
        double[][] r = new double[inst.numClasses()][inst.numClasses()];
        double[][] n = new double[inst.numClasses()][inst.numClasses()];
        for (int i = 0; i < inst.numClasses(); i++) {
            for (int j = i + 1; j < inst.numClasses(); j++) {
                if ((m_classifiers[i][j].m_alpha != null) || (m_classifiers[i][j].m_sparseWeights != null)) {
                    double[] newInst = new double[2];
                    newInst[0] = m_classifiers[i][j].SVMOutput(-1, inst);
                    newInst[1] = Instance.missingValue();
                    r[i][j] = m_classifiers[i][j].m_logistic
                            .distributionForInstance(new Instance(1, newInst))[0];
                    n[i][j] = m_classifiers[i][j].m_sumOfWeights;
                }
            }
        }
        return weka.classifiers.meta.MultiClassClassifier.pairwiseCoupling(n, r);
    }
}
From source file:SMO.java
License:Open Source License
/**
 * Returns an array of votes for the given instance.
 *
 * @param inst the instance
 * @return array of votes
 * @throws Exception if something goes wrong
 */
public int[] obtainVotes(Instance inst) throws Exception {

    // Filter instance
    if (!m_checksTurnedOff) {
        m_Missing.input(inst);
        m_Missing.batchFinished();
        inst = m_Missing.output();
    }

    if (m_NominalToBinary != null) {
        m_NominalToBinary.input(inst);
        m_NominalToBinary.batchFinished();
        inst = m_NominalToBinary.output();
    }

    if (m_Filter != null) {
        m_Filter.input(inst);
        m_Filter.batchFinished();
        inst = m_Filter.output();
    }

    int[] votes = new int[inst.numClasses()];
    for (int i = 0; i < inst.numClasses(); i++) {
        for (int j = i + 1; j < inst.numClasses(); j++) {
            double output = m_classifiers[i][j].SVMOutput(-1, inst);
            if (output > 0) {
                votes[j] += 1;
            } else {
                votes[i] += 1;
            }
        }
    }
    return votes;
}
From source file:ID3Chi.java
License:Open Source License
private double[] classifyInstanceWithToken(Instance instance, double token) {

    int numClasses = instance.numClasses();
    double[] tokenDistribution = new double[numClasses];
    if (m_Attribute == null) {
        for (int j = 0; j < numClasses; j++) {
            tokenDistribution[j] = token * m_Distribution[j];
        }
    } else {
        // for attribute values get token distribution
        if (instance.isMissing(m_Attribute)) {
            for (int j = 0; j < m_Attribute.numValues(); j++) {
                double[] dist = m_Successors[j].classifyInstanceWithToken(instance,
                        token * m_Successors[j].m_Ratio);
                for (int i = 0; i < numClasses; i++) {
                    tokenDistribution[i] += dist[i];
                }
            }
        } else {
            int idx = (int) instance.value(m_Attribute);
            tokenDistribution = m_Successors[idx].classifyInstanceWithToken(instance,
                    token * m_Successors[idx].m_Ratio);
        }
    }
    return tokenDistribution;
}
From source file:ai.BalancedRandomForest.java
License:GNU General Public License
/**
 * Calculates the class membership probabilities for the given test instance.
 *
 * @param instance the instance to be classified
 * @return predicted class probability distribution
 */
public double[] distributionForInstance(Instance instance) {

    double[] sums = new double[instance.numClasses()], newProbs;
    for (int i = 0; i < numTrees; i++) {
        newProbs = tree[i].evaluate(instance);
        for (int j = 0; j < newProbs.length; j++) {
            sums[j] += newProbs[j];
        }
    }
    // Divide by the number of trees
    for (int j = 0; j < sums.length; j++) {
        sums[j] /= (double) numTrees;
    }
    return sums;
}
From source file:bme.mace.logicdomain.Evaluation.java
License:Open Source License
/**
 * Evaluates the classifier on a single instance and records the prediction
 * (if the class is nominal).
 *
 * @param classifier machine learning classifier
 * @param instance the test instance to be classified
 * @return the prediction made by the classifier
 * @throws Exception if model could not be evaluated successfully or the data
 *           contains string attributes
 */
public double evaluateModelOnceAndRecordPrediction(List<LibSVM> classifier, List<Double> classifierWeight,
        Instance instance) throws Exception {

    Instance classMissing = (Instance) instance.copy();
    double pred = 0;
    classMissing.setDataset(instance.dataset());
    classMissing.setClassMissing();
    if (m_ClassIsNominal) {
        if (m_Predictions == null) {
            m_Predictions = new FastVector();
        }
        List<double[]> prob = new ArrayList<double[]>();
        double[] finalProb = new double[instance.numClasses()];
        for (int i = 0; i < classifier.size(); i++) {
            double[] dist = classifier.get(i).distributionForInstance(classMissing);
            prob.add(dist);
        }
        // Weight each classifier's distribution and sum per class.
        for (int i = 0; i < finalProb.length; i++) {
            for (int j = 0; j < classifier.size(); j++) {
                finalProb[i] += prob.get(j)[i] * classifierWeight.get(j);
            }
        }
        double sum = 0;
        for (int i = 0; i < finalProb.length; i++) {
            sum += finalProb[i];
        }
        for (int i = 0; i < finalProb.length; i++) {
            finalProb[i] = finalProb[i] / sum;
        }
        pred = Utils.maxIndex(finalProb);
        if (finalProb[(int) pred] <= 0) {
            pred = Instance.missingValue();
        }
        updateStatsForClassifier(finalProb, instance);
        m_Predictions.addElement(new NominalPrediction(instance.classValue(), finalProb, instance.weight()));
    } else {
        pred = classifier.get(0).classifyInstance(classMissing);
        updateStatsForPredictor(pred, instance);
    }
    return pred;
}
From source file:com.spread.experiment.tempuntilofficialrelease.ClassificationViaClustering108.java
License:Open Source License
/**
 * Returns class probability distribution for the given instance.
 *
 * @param instance the instance to be classified
 * @return the class probabilities
 * @throws Exception if an error occurred during the prediction
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    if (m_ZeroR != null) {
        return m_ZeroR.distributionForInstance(instance);
    } else {
        double[] result = new double[instance.numClasses()];

        if (m_ActualClusterer != null) {
            // build new instance
            Instances tempData = m_ClusteringHeader.stringFreeStructure();
            double[] values = new double[tempData.numAttributes()];
            int n = 0;
            for (int i = 0; i < instance.numAttributes(); i++) {
                if (i == instance.classIndex()) {
                    continue;
                }
                if (instance.attribute(i).isString()) {
                    values[n] = tempData.attribute(n).addStringValue(instance.stringValue(i));
                } else if (instance.attribute(i).isRelationValued()) {
                    values[n] = tempData.attribute(n).addRelation(instance.relationalValue(i));
                } else {
                    values[n] = instance.value(i);
                }
                n++;
            }
            Instance newInst = new DenseInstance(instance.weight(), values);
            newInst.setDataset(tempData);

            if (!getLabelAllClusters()) {
                // determine cluster/class
                double r = m_ClustersToClasses[m_ActualClusterer.clusterInstance(newInst)];
                if (r == -1) {
                    return result; // Unclassified
                } else {
                    result[(int) r] = 1.0;
                    return result;
                }
            } else {
                double[] classProbs = new double[instance.numClasses()];
                double[] dist = m_ActualClusterer.distributionForInstance(newInst);
                for (int i = 0; i < dist.length; i++) {
                    for (int j = 0; j < instance.numClasses(); j++) {
                        classProbs[j] += dist[i] * m_ClusterClassProbs[i][j];
                    }
                }
                Utils.normalize(classProbs);
                return classProbs;
            }
        } else {
            return result; // Unclassified
        }
    }
}
From source file:com.tum.classifiertest.FastRfBagging.java
License:Open Source License
/**
 * Calculates the class membership probabilities for the given test instance.
 *
 * @param instance the instance to be classified
 * @return predicted class probability distribution
 * @throws Exception if distribution can't be computed successfully
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    double[] sums = new double[instance.numClasses()], newProbs;

    for (int i = 0; i < m_NumIterations; i++) {
        if (instance.classAttribute().isNumeric()) {
            sums[0] += m_Classifiers[i].classifyInstance(instance);
        } else {
            newProbs = m_Classifiers[i].distributionForInstance(instance);
            for (int j = 0; j < newProbs.length; j++) {
                sums[j] += newProbs[j];
            }
        }
    }
    if (instance.classAttribute().isNumeric()) {
        sums[0] /= (double) m_NumIterations;
        return sums;
    } else if (Utils.eq(Utils.sum(sums), 0)) {
        return sums;
    } else {
        Utils.normalize(sums);
        return sums;
    }
}