List of usage examples for the weka.core.Instance method value(Attribute att)
public double value(Attribute att);
From source file:assign00.KNNClassifier.java
/**
 * Computes the (squared) Euclidean distance between two instances over their
 * shared attributes, skipping the last attribute of each (the class attribute).
 * Pairs of numeric attributes contribute their squared difference; any other
 * pair contributes a 0/1 mismatch penalty based on string-value equality.
 *
 * NOTE(review): no square root is taken, so this is the squared distance;
 * for nearest-neighbour ranking the ordering is unaffected.
 */
double EuclideanDistance(Instance instanceLHS, Instance instanceRHS) {
    double total = 0;
    for (int att = 0; att < instanceLHS.numAttributes() - 1
            && att < instanceRHS.numAttributes() - 1; att++) {
        boolean bothNumeric = instanceLHS.attribute(att).isNumeric()
                && instanceRHS.attribute(att).isNumeric();
        if (bothNumeric) {
            total += pow(instanceLHS.value(att) - instanceRHS.value(att), 2);
        } else if (!instanceLHS.stringValue(att).equals(instanceRHS.stringValue(att))) {
            // Mismatching non-numeric values add a unit penalty; matches add nothing.
            total += 1;
        }
    }
    return total;
}
From source file:assign00.KNNClassifier.java
double ManhattenDistance(Instance instanceLHS, Instance instanceRHS) { double distance = 0; for (int i = 0; i < instanceLHS.numAttributes() - 1 && i < instanceRHS.numAttributes() - 1; i++) { if (instanceLHS.attribute(i).isNumeric() && instanceRHS.attribute(i).isNumeric()) { distance += abs(instanceLHS.value(i) - instanceRHS.value(i)); } else {//from ww w . ja v a2s . co m if (instanceLHS.stringValue(i).equals(instanceRHS.stringValue(i))) { distance += 0; } else { distance += 1; } } } return distance; }
From source file:boosting.classifiers.DecisionStumpWritable.java
License:Open Source License
/** * Finds best split for nominal attribute and nominal class * and returns value.// w ww .j a va 2s . c o m * * @param index attribute index * @return value of criterion for the best split * @throws Exception if something goes wrong */ private double findSplitNominalNominal(int index) throws Exception { double bestVal = Double.MAX_VALUE, currVal; double[][] counts = new double[m_Instances.attribute(index).numValues() + 1][m_Instances.numClasses()]; double[] sumCounts = new double[m_Instances.numClasses()]; double[][] bestDist = new double[3][m_Instances.numClasses()]; int numMissing = 0; // Compute counts for all the values for (int i = 0; i < m_Instances.numInstances(); i++) { Instance inst = m_Instances.instance(i); if (inst.isMissing(index)) { numMissing++; counts[m_Instances.attribute(index).numValues()][(int) inst.classValue()] += inst.weight(); } else { counts[(int) inst.value(index)][(int) inst.classValue()] += inst.weight(); } } // Compute sum of counts for (int i = 0; i < m_Instances.attribute(index).numValues(); i++) { for (int j = 0; j < m_Instances.numClasses(); j++) { sumCounts[j] += counts[i][j]; } } // Make split counts for each possible split and evaluate System.arraycopy(counts[m_Instances.attribute(index).numValues()], 0, m_Distribution[2], 0, m_Instances.numClasses()); for (int i = 0; i < m_Instances.attribute(index).numValues(); i++) { for (int j = 0; j < m_Instances.numClasses(); j++) { m_Distribution[0][j] = counts[i][j]; m_Distribution[1][j] = sumCounts[j] - counts[i][j]; } currVal = ContingencyTables.entropyConditionedOnRows(m_Distribution); if (currVal < bestVal) { bestVal = currVal; m_SplitPoint = (double) i; for (int j = 0; j < 3; j++) { System.arraycopy(m_Distribution[j], 0, bestDist[j], 0, m_Instances.numClasses()); } } } // No missing values in training data. if (numMissing == 0) { System.arraycopy(sumCounts, 0, bestDist[2], 0, m_Instances.numClasses()); } m_Distribution = bestDist; return bestVal; }
From source file:boosting.classifiers.DecisionStumpWritable.java
License:Open Source License
/** * Finds best split for nominal attribute and numeric class * and returns value./*w ww . ja v a 2 s . c om*/ * * @param index attribute index * @return value of criterion for the best split * @throws Exception if something goes wrong */ private double findSplitNominalNumeric(int index) throws Exception { double bestVal = Double.MAX_VALUE, currVal; double[] sumsSquaresPerValue = new double[m_Instances.attribute(index).numValues()], sumsPerValue = new double[m_Instances.attribute(index).numValues()], weightsPerValue = new double[m_Instances.attribute(index).numValues()]; double totalSumSquaresW = 0, totalSumW = 0, totalSumOfWeightsW = 0, totalSumOfWeights = 0, totalSum = 0; double[] sumsSquares = new double[3], sumOfWeights = new double[3]; double[][] bestDist = new double[3][1]; // Compute counts for all the values for (int i = 0; i < m_Instances.numInstances(); i++) { Instance inst = m_Instances.instance(i); if (inst.isMissing(index)) { m_Distribution[2][0] += inst.classValue() * inst.weight(); sumsSquares[2] += inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[2] += inst.weight(); } else { weightsPerValue[(int) inst.value(index)] += inst.weight(); sumsPerValue[(int) inst.value(index)] += inst.classValue() * inst.weight(); sumsSquaresPerValue[(int) inst.value(index)] += inst.classValue() * inst.classValue() * inst.weight(); } totalSumOfWeights += inst.weight(); totalSum += inst.classValue() * inst.weight(); } // Check if the total weight is zero if (totalSumOfWeights <= 0) { return bestVal; } // Compute sum of counts without missing ones for (int i = 0; i < m_Instances.attribute(index).numValues(); i++) { totalSumOfWeightsW += weightsPerValue[i]; totalSumSquaresW += sumsSquaresPerValue[i]; totalSumW += sumsPerValue[i]; } // Make split counts for each possible split and evaluate for (int i = 0; i < m_Instances.attribute(index).numValues(); i++) { m_Distribution[0][0] = sumsPerValue[i]; sumsSquares[0] = sumsSquaresPerValue[i]; sumOfWeights[0] = 
weightsPerValue[i]; m_Distribution[1][0] = totalSumW - sumsPerValue[i]; sumsSquares[1] = totalSumSquaresW - sumsSquaresPerValue[i]; sumOfWeights[1] = totalSumOfWeightsW - weightsPerValue[i]; currVal = variance(m_Distribution, sumsSquares, sumOfWeights); if (currVal < bestVal) { bestVal = currVal; m_SplitPoint = (double) i; for (int j = 0; j < 3; j++) { if (sumOfWeights[j] > 0) { bestDist[j][0] = m_Distribution[j][0] / sumOfWeights[j]; } else { bestDist[j][0] = totalSum / totalSumOfWeights; } } } } m_Distribution = bestDist; return bestVal; }
From source file:boosting.classifiers.DecisionStumpWritable.java
License:Open Source License
/**
 * Finds the best binary split point for a numeric attribute with a nominal
 * class, using the entropy conditioned on rows as the split criterion.
 * Side effects: sorts m_Instances on the attribute, sets m_SplitPoint to the
 * chosen cut point and replaces m_Distribution with the class distribution of
 * the best split (rows: below cut / above cut / missing).
 *
 * @param index attribute index
 * @return value of the entropy criterion for the best split
 * @throws Exception if something goes wrong
 */
private double findSplitNumericNominal(int index) throws Exception {
    double bestVal = Double.MAX_VALUE;
    int numClasses = m_Instances.numClasses();
    int numMissing = 0;
    double[] sum = new double[numClasses];
    double[][] bestDist = new double[3][numClasses];

    // Start with every non-missing instance in the "above the cut" row;
    // missing values are tallied in the third row.
    for (int i = 0; i < m_Instances.numInstances(); i++) {
        Instance inst = m_Instances.instance(i);
        if (!inst.isMissing(index)) {
            m_Distribution[1][(int) inst.classValue()] += inst.weight();
        } else {
            m_Distribution[2][(int) inst.classValue()] += inst.weight();
            numMissing++;
        }
    }
    System.arraycopy(m_Distribution[1], 0, sum, 0, numClasses);

    // The no-split distribution is the initial best.
    for (int j = 0; j < 3; j++) {
        System.arraycopy(m_Distribution[j], 0, bestDist[j], 0, numClasses);
    }

    // Sort by the attribute so candidate cut points can be swept in order
    // (the last numMissing instances then have missing values).
    m_Instances.sort(index);

    // Sweep each boundary between consecutive distinct values, moving one
    // instance at a time from the "above" row to the "below" row.
    for (int i = 0; i < m_Instances.numInstances() - (numMissing + 1); i++) {
        Instance inst = m_Instances.instance(i);
        Instance next = m_Instances.instance(i + 1);
        m_Distribution[0][(int) inst.classValue()] += inst.weight();
        m_Distribution[1][(int) inst.classValue()] -= inst.weight();
        if (inst.value(index) < next.value(index)) {
            double currCutPoint = (inst.value(index) + next.value(index)) / 2.0;
            double currVal = ContingencyTables.entropyConditionedOnRows(m_Distribution);
            if (currVal < bestVal) {
                m_SplitPoint = currCutPoint;
                bestVal = currVal;
                for (int j = 0; j < 3; j++) {
                    System.arraycopy(m_Distribution[j], 0, bestDist[j], 0, numClasses);
                }
            }
        }
    }

    // With no missing values, the third row is just the overall totals.
    if (numMissing == 0) {
        System.arraycopy(sum, 0, bestDist[2], 0, numClasses);
    }
    m_Distribution = bestDist;
    return bestVal;
}
From source file:boosting.classifiers.DecisionStumpWritable.java
License:Open Source License
/** * Finds best split for numeric attribute and numeric class * and returns value.//from w w w .ja va2 s.c o m * * @param index attribute index * @return value of criterion for the best split * @throws Exception if something goes wrong */ private double findSplitNumericNumeric(int index) throws Exception { double bestVal = Double.MAX_VALUE, currVal, currCutPoint; int numMissing = 0; double[] sumsSquares = new double[3], sumOfWeights = new double[3]; double[][] bestDist = new double[3][1]; double totalSum = 0, totalSumOfWeights = 0; // Compute counts for all the values for (int i = 0; i < m_Instances.numInstances(); i++) { Instance inst = m_Instances.instance(i); if (!inst.isMissing(index)) { m_Distribution[1][0] += inst.classValue() * inst.weight(); sumsSquares[1] += inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[1] += inst.weight(); } else { m_Distribution[2][0] += inst.classValue() * inst.weight(); sumsSquares[2] += inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[2] += inst.weight(); numMissing++; } totalSumOfWeights += inst.weight(); totalSum += inst.classValue() * inst.weight(); } // Check if the total weight is zero if (totalSumOfWeights <= 0) { return bestVal; } // Sort instances m_Instances.sort(index); // Make split counts for each possible split and evaluate for (int i = 0; i < m_Instances.numInstances() - (numMissing + 1); i++) { Instance inst = m_Instances.instance(i); Instance instPlusOne = m_Instances.instance(i + 1); m_Distribution[0][0] += inst.classValue() * inst.weight(); sumsSquares[0] += inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[0] += inst.weight(); m_Distribution[1][0] -= inst.classValue() * inst.weight(); sumsSquares[1] -= inst.classValue() * inst.classValue() * inst.weight(); sumOfWeights[1] -= inst.weight(); if (inst.value(index) < instPlusOne.value(index)) { currCutPoint = (inst.value(index) + instPlusOne.value(index)) / 2.0; currVal = variance(m_Distribution, sumsSquares, 
sumOfWeights); if (currVal < bestVal) { m_SplitPoint = currCutPoint; bestVal = currVal; for (int j = 0; j < 3; j++) { if (sumOfWeights[j] > 0) { bestDist[j][0] = m_Distribution[j][0] / sumOfWeights[j]; } else { bestDist[j][0] = totalSum / totalSumOfWeights; } } } } } m_Distribution = bestDist; return bestVal; }
From source file:boosting.classifiers.DecisionStumpWritable.java
License:Open Source License
/** * Returns the subset an instance falls into. * /*from www. ja va2 s . c o m*/ * @param instance the instance to check * @return the subset the instance falls into * @throws Exception if something goes wrong */ private int whichSubset(Instance instance) throws Exception { if (instance.isMissing(m_AttIndex)) { return 2; } else if (instance.attribute(m_AttIndex).isNominal()) { if ((int) instance.value(m_AttIndex) == m_SplitPoint) { return 0; } else { return 1; } } else { if (instance.value(m_AttIndex) <= m_SplitPoint) { return 0; } else { return 1; } } }
From source file:br.com.ufu.lsi.rebfnetwork.RBFClassifier.java
License:Open Source License
/** * Calculates error for single instance. */// w ww . ja v a 2s . c o m protected double calculateError(double[] outputs, Instance inst) { // Want to calculate squared error double SE = 0; // For all class values for (int i = 0; i < m_numClasses; i++) { // Get target (make them slightly different from 0/1 for better convergence) final double target = ((int) inst.value(m_classIndex) == i) ? 0.99 : 0.01; // Add to squared error final double err = getOutput(i, outputs, null) - target; SE += err * err; } return SE; }
From source file:br.com.ufu.lsi.rebfnetwork.RBFClassifier.java
License:Open Source License
/** * Update the gradient for the weights in the output layer. *///from www.j av a 2s.c o m protected void updateGradient(double[] grad, Instance inst, double[] outputs, double[] sigmoidDerivativeOutput, double[] deltaHidden) { // Initialise deltaHidden Arrays.fill(deltaHidden, 0.0); // For all output units for (int j = 0; j < m_numClasses; j++) { // Get output from output unit j double pred = getOutput(j, outputs, sigmoidDerivativeOutput); // Get target (make them slightly different from 0/1 for better convergence) double target = ((int) inst.value(m_classIndex) == j) ? 0.99 : 0.01; // Calculate delta from output unit double deltaOut = (pred - target) * sigmoidDerivativeOutput[0]; // Go to next output unit if update too small if (deltaOut <= m_tolerance && deltaOut >= -m_tolerance) { continue; } // Establish offset int offsetOW = OFFSET_WEIGHTS + (j * (m_numUnits + 1)); // Update deltaHidden for (int i = 0; i < m_numUnits; i++) { deltaHidden[i] += deltaOut * m_RBFParameters[offsetOW + i]; } // Update gradient for output weights for (int i = 0; i < m_numUnits; i++) { grad[offsetOW + i] += deltaOut * outputs[i]; } // Update gradient for bias grad[offsetOW + m_numUnits] += deltaOut; } }
From source file:br.com.ufu.lsi.rebfnetwork.RBFModel.java
License:Open Source License
/**
 * Calculates partial derivatives for one hidden unit in the case of a
 * different sigma per attribute and unit, accumulating into the gradient for
 * the unit's centers and scales (and the global attribute weights when
 * m_useAttributeWeights is set). The class attribute (m_classIndex) is
 * skipped.
 *
 * Refactored: the original duplicated an identical loop body for the
 * attribute ranges below and above the class index; both now share one
 * private helper.
 *
 * @param grad gradient vector to accumulate into
 * @param deltaHidden per-unit deltas propagated from the output layer
 * @param inst the training instance
 * @param unitIndex index of the hidden unit
 */
protected void derivativeScalePerAttribute(double[] grad, double[] deltaHidden, Instance inst,
        int unitIndex) {
    double constant = deltaHidden[unitIndex];
    int offsetC = OFFSET_CENTERS + (unitIndex * m_numAttributes);
    int offsetS = OFFSET_SCALES + (unitIndex * m_numAttributes);
    // All attributes except the class attribute itself.
    accumulateScaleGradient(grad, inst, constant, offsetC, offsetS, 0, m_classIndex);
    accumulateScaleGradient(grad, inst, constant, offsetC, offsetS, m_classIndex + 1,
            m_numAttributes);
}

/**
 * Accumulates center, scale and (optionally) attribute-weight gradient
 * contributions for attributes in the half-open range [from, to).
 */
private void accumulateScaleGradient(double[] grad, Instance inst, double constant, int offsetC,
        int offsetS, int from, int to) {
    for (int j = from; j < to; j++) {
        double attWeight = 1.0;
        double diff = inst.value(j) - m_RBFParameters[offsetC + j];
        // Squared per-attribute scale (sigma^2) for this unit.
        double scalePart = m_RBFParameters[offsetS + j] * m_RBFParameters[offsetS + j];
        if (m_useAttributeWeights) {
            attWeight = m_RBFParameters[OFFSET_ATTRIBUTE_WEIGHTS + j]
                    * m_RBFParameters[OFFSET_ATTRIBUTE_WEIGHTS + j];
            grad[OFFSET_ATTRIBUTE_WEIGHTS + j] -= m_RBFParameters[OFFSET_ATTRIBUTE_WEIGHTS + j]
                    * constant * diff * diff / scalePart;
        }
        grad[offsetS + j] += constant * attWeight * diff * diff
                / (scalePart * m_RBFParameters[offsetS + j]);
        grad[offsetC + j] += constant * attWeight * diff / scalePart;
    }
}