Example usage for weka.core Instance classIndex

Introduction

This page lists usage examples for the weka.core Instance method classIndex().

Prototype

public int classIndex();

Documentation

Returns the class attribute's index.
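
For orientation, here is a minimal, self-contained sketch of how classIndex() is typically used: the class index is set on the dataset, and per-instance loops then skip the class attribute. The tiny dataset built here is purely illustrative and not taken from any of the projects below.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class ClassIndexDemo {
    public static void main(String[] args) {
        // Build a tiny dataset: two numeric features plus a numeric class.
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("x1"));
        attrs.add(new Attribute("x2"));
        attrs.add(new Attribute("target"));
        Instances data = new Instances("demo", attrs, 0);
        data.setClassIndex(data.numAttributes() - 1); // last attribute is the class

        Instance inst = new DenseInstance(1.0, new double[] { 0.5, 1.5, 42.0 });
        inst.setDataset(data); // classIndex() is delegated to the dataset

        System.out.println("class index = " + inst.classIndex()); // prints 2

        // The pattern used throughout the examples below: iterate over the
        // attributes while skipping the class attribute.
        for (int i = 0; i < inst.numAttributes(); i++) {
            if (i != inst.classIndex()) {
                System.out.println("feature " + i + " = " + inst.value(i));
            }
        }
    }
}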

Usage

From source file: moa.classifiers.functions.SGDMultiClass.java

License: Open Source License

/**
 * Calculates the class membership probabilities for the given test
 * instance.
 *
 * @param inst the instance to be classified
 * @return predicted class probability distribution
 */
@Override
public double[] getVotesForInstance(Instance inst) {

    if (m_weights == null) {
        return new double[inst.numClasses()];
    }
    double[] result = (inst.classAttribute().isNominal()) ? new double[inst.numClasses()] : new double[1];

    if (inst.classAttribute().isNumeric()) {
        double wx = dotProd(inst, m_weights[0], inst.classIndex());// * m_wScale;
        double z = (wx + m_bias[0]);
        result[0] = z;
        return result;
    }

    for (int i = 0; i < m_weights.length; i++) {
        double wx = dotProd(inst, m_weights[i], inst.classIndex());// * m_wScale;
        double z = (wx + m_bias[i]);
        if (z <= 0) {
            //  z = 0;
            if (m_loss == LOGLOSS) {
                //result[0] = 1.0 / (1.0 + Math.exp(z));
                //result[1] = 1.0 - result[0];
                result[i] = 1.0 - 1.0 / (1.0 + Math.exp(z));
            } else {
                //result[0] = 1;
                result[i] = 0;
            }
        } else {
            if (m_loss == LOGLOSS) {
                //result[1] = 1.0 / (1.0 + Math.exp(-z));
                //result[0] = 1.0 - result[1];
                result[i] = 1.0 / (1.0 + Math.exp(-z));
            } else {
                //result[1] = 1;
                result[i] = 1;
            }
        }
    }
    return result;
}
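
Each of the SGD and SPegasos snippets on this page passes inst.classIndex() into a private dotProd helper so that the class attribute is excluded from the inner product. The helper itself does not appear in the excerpts; the following sketch, modeled on the pattern used in Weka's SGD code, is included only to show what the classIndex argument does (the actual implementation may differ in detail).

// Inner product of a (possibly sparse) instance with a weight vector,
// skipping the class attribute and missing values.
protected static double dotProd(Instance inst, double[] weights, int classIndex) {
    double result = 0;
    int n1 = inst.numValues();     // number of stored values in the instance
    int n2 = weights.length;
    for (int p1 = 0, p2 = 0; p1 < n1 && p2 < n2;) {
        int ind1 = inst.index(p1); // attribute index of the p1-th stored value
        int ind2 = p2;
        if (ind1 == ind2) {
            if (ind1 != classIndex && !inst.isMissingSparse(p1)) {
                result += inst.valueSparse(p1) * weights[p2];
            }
            p1++;
            p2++;
        } else if (ind1 > ind2) {
            p2++;
        } else {
            p1++;
        }
    }
    return result;
}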

From source file: moa.classifiers.functions.SGDOld.java

License: Open Source License

/**
 * Trains the classifier with the given instance.
 *
 * @param instance    the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance instance) {

    if (m_weights == null) {
        m_weights = new double[instance.numAttributes() + 1];
    }

    if (!instance.classIsMissing()) {

        double wx = dotProd(instance, m_weights, instance.classIndex());

        double y;
        double z;
        if (instance.classAttribute().isNominal()) {
            y = (instance.classValue() == 0) ? -1 : 1;
            z = y * (wx + m_weights[m_weights.length - 1]);
        } else {
            y = instance.classValue();
            z = y - (wx + m_weights[m_weights.length - 1]);
            y = 1;
        }

        // Compute multiplier for weight decay
        double multiplier = 1.0;
        if (m_numInstances == 0) {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
        } else {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
        }
        for (int i = 0; i < m_weights.length - 1; i++) {
            m_weights[i] *= multiplier;
        }

        // Only need to do the following if the loss is non-zero
        if (m_loss != HINGE || (z < 1)) {

            // Compute Factor for updates
            double factor = m_learningRate * y * dloss(z);

            // Update coefficients for attributes
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    m_weights[indS] += factor * instance.valueSparse(p1);
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += factor;
        }
        m_t++;
    }
}

From source file: moa.classifiers.functions.SGDOld.java

License: Open Source License

/**
 * Calculates the class membership probabilities for the given test
 * instance.
 *
 * @param inst the instance to be classified
 * @return predicted class probability distribution
 */
@Override
public double[] getVotesForInstance(Instance inst) {

    if (m_weights == null) {
        return new double[inst.numAttributes() + 1];
    }
    double[] result = (inst.classAttribute().isNominal()) ? new double[2] : new double[1];

    double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
    double z = (wx + m_weights[m_weights.length - 1]);

    if (inst.classAttribute().isNumeric()) {
        result[0] = z;
        return result;
    }

    if (z <= 0) {
        //  z = 0;
        if (m_loss == LOGLOSS) {
            result[0] = 1.0 / (1.0 + Math.exp(z));
            result[1] = 1.0 - result[0];
        } else {
            result[0] = 1;
        }
    } else {
        if (m_loss == LOGLOSS) {
            result[1] = 1.0 / (1.0 + Math.exp(-z));
            result[0] = 1.0 - result[1];
        } else {
            result[1] = 1;
        }
    }
    return result;
}

From source file: moa.classifiers.functions.SPegasos.java

License: Open Source License

/**
 * Trains the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance instance) {

    if (m_weights == null) {
        m_weights = new double[instance.numAttributes() + 1];
    }
    if (!instance.classIsMissing()) {

        double learningRate = 1.0 / (m_lambda * m_t);
        //double scale = 1.0 - learningRate * m_lambda;
        double scale = 1.0 - 1.0 / m_t;
        double y = (instance.classValue() == 0) ? -1 : 1;
        double wx = dotProd(instance, m_weights, instance.classIndex());
        double z = y * (wx + m_weights[m_weights.length - 1]);

        for (int j = 0; j < m_weights.length - 1; j++) {
            if (j != instance.classIndex()) {
                m_weights[j] *= scale;
            }
        }

        if (m_loss == LOGLOSS || (z < 1)) {
            double loss = dloss(z);
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    double m = learningRate * loss * (instance.valueSparse(p1) * y);
                    m_weights[indS] += m;
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += learningRate * loss * y;
        }

        double norm = 0;
        for (int k = 0; k < m_weights.length - 1; k++) {
            if (k != instance.classIndex()) {
                norm += (m_weights[k] * m_weights[k]);
            }
        }

        double scale2 = Math.min(1.0, (1.0 / (m_lambda * norm)));
        if (scale2 < 1.0) {
            scale2 = Math.sqrt(scale2);
            for (int j = 0; j < m_weights.length - 1; j++) {
                if (j != instance.classIndex()) {
                    m_weights[j] *= scale2;
                }
            }
        }
        m_t++;
    }
}
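
The closing block above is the Pegasos projection step: after the gradient update, the weights are rescaled so that the norm of the weight vector stays within a ball of radius 1/sqrt(lambda). The code computes min(1, 1/(lambda * ||w||^2)) and then takes its square root, which is algebraically the same as multiplying w by min(1, (1/sqrt(lambda)) / ||w||). A minimal sketch of that step in isolation (names are illustrative, not from the source):

// Pegasos projection: keep ||w|| <= 1 / sqrt(lambda).
static void project(double[] w, double lambda) {
    double normSq = 0;
    for (double v : w) {
        normSq += v * v;
    }
    double scale = Math.min(1.0, 1.0 / (lambda * normSq));
    if (scale < 1.0) {
        // sqrt because normSq is the squared norm
        scale = Math.sqrt(scale);
        for (int i = 0; i < w.length; i++) {
            w[i] *= scale;
        }
    }
}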

From source file: moa.classifiers.functions.SPegasos.java

License: Open Source License

/**
 * Calculates the class membership probabilities for the given test
 * instance.
 *
 * @param inst the instance to be classified
 * @return predicted class probability distribution
 */
@Override
public double[] getVotesForInstance(Instance inst) {

    if (m_weights == null) {
        return new double[inst.numAttributes() + 1];
    }

    double[] result = new double[2];

    double wx = dotProd(inst, m_weights, inst.classIndex());// * m_wScale;
    double z = (wx + m_weights[m_weights.length - 1]);
    //System.out.print("" + z + ": ");
    // System.out.println(1.0 / (1.0 + Math.exp(-z)));
    if (z <= 0) {
        //  z = 0;
        if (m_loss == LOGLOSS) {
            result[0] = 1.0 / (1.0 + Math.exp(z));
            result[1] = 1.0 - result[0];
        } else {
            result[0] = 1;
        }
    } else {
        if (m_loss == LOGLOSS) {
            result[1] = 1.0 / (1.0 + Math.exp(-z));
            result[0] = 1.0 - result[1];
        } else {
            result[1] = 1;
        }
    }
    return result;
}

From source file: moa.classifiers.imbalanced.SamplingClassifier.java

License: Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    if (inst.classIndex() == 0) {
        this.rareCount += 1.0;
    }
    this.count += 1.0;
    double w;

    if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
        w = 1.0 / (this.rareCount / this.count);
        if (this.logTransformOption.isSet()) {
            w = Math.log(w);
        }
    } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
        w = 1.0 - this.rareCount / this.count;
    } else {
        w = 1.0;
    }
    int k = MiscUtils.poisson(w, this.classifierRandom);
    Instance weightedInst = (Instance) inst.copy();
    weightedInst.setWeight(inst.weight() * k);
    this.classifier.trainOnInstance(weightedInst);
}
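
MiscUtils.poisson draws an integer k from a Poisson distribution with mean w, so an instance whose weight was boosted for over-sampling is, on average, replayed to the base learner more often. The sampler below is a standard Knuth-style sketch of that idea, shown for illustration; it is not MOA's actual moa.core.MiscUtils implementation, which handles large means differently.

import java.util.Random;

public final class PoissonSketch {
    // Knuth's Poisson sampler: returns k with P(k) = e^(-lambda) * lambda^k / k!.
    public static int poisson(double lambda, Random r) {
        double limit = Math.exp(-lambda);
        double prod = r.nextDouble();
        int k = 0;
        while (prod > limit) {
            prod *= r.nextDouble();
            k++;
        }
        return k;
    }

    public static void main(String[] args) {
        Random rng = new Random(42);
        // With mean 2.0, an instance is replayed about twice on average.
        System.out.println("k = " + poisson(2.0, rng));
    }
}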

From source file: moa.classifiers.meta.PAME.java

License: Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {

    // get the prediction vector back
    double[] ht = this.getPredictions(inst);
    double yt = inst.classValue();
    if (inst.classIndex() == 0) {
        this.rareCount += 1.0;
    }
    this.count += 1.0;

    // convert to a positive / negative classification scenario
    if (yt == 0) {
        yt = 1.0;
    } else {
        yt = -1.0;
    }

    /*
     * update expert weights
     */
    if (this.updateMethodOption.getChosenIndex() == PAME1) {
        pame1_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
        pame2_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
        pame3_weights(ht, yt);
    }

    /*
     * we are going to use an online bagging / boosting strategy to update the 
     * experts. In the end our objective with the weight formulation is a bit
     * more of a decision theoretic approach. 
     */
    for (int i = 0; i < this.ensemble.length; i++) {
        // sample from a Poisson probability distribution as implemented in 
        // online bagging and boosting
        double w;
        if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
            w = 1.0 / (this.rareCount / this.count);
            if (this.logTransformOption.isSet()) {
                w = Math.log(w);
            }
        } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
            w = 1.0 - this.rareCount / this.count;
        } else {
            w = 1.0;
        }

        int k = MiscUtils.poisson(w, this.classifierRandom);

        // update the expert accordingly 
        if (k > 0) {
            // this works by updating the expert k times with the same example:
            // if k = 4, the expert is trained on the same example 4 times in
            // a row.
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k); // set the # of training times
            this.ensemble[i].trainOnInstance(weightedInst); // update expert
        }
    }

    this.n_negativeWeights = 0;
    for (int i = 0; i < this.weights.length; i++) {
        if (this.weights[i] < 0.0)
            this.n_negativeWeights++;
    }
}

From source file: moa.classifiers.meta.PAMEAdwin.java

License: Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {

    // get the prediction vector back
    double[] ht = this.getPredictions(inst);
    double yt = inst.classValue();
    if (inst.classIndex() == 0) {
        this.rareCount += 1.0;
    }
    this.count += 1.0;

    // convert to a positive / negative classification scenario
    if (yt == 0) {
        //System.out.println("Y is positive" + yt);
        yt = 1.0;
    } else {
        //System.out.println("Y is negative" + yt);
        yt = -1.0;
    }

    /*
     * update expert weights
     */
    if (this.updateMethodOption.getChosenIndex() == PAME1) {
        pame1_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
        pame2_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
        pame3_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME23) {
        pame23_weights(ht, yt);
    }

    /*
     * we are going to use an online bagging / boosting strategy to update the 
     * experts. In the end our objective with the weight formulation is a bit
     * more of a decision theoretic approach. 
     */
    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        // sample from a Poisson probability distribution as implemented in 
        // online bagging and boosting
        double w;
        if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
            w = 1.0 / (this.rareCount / this.count);
            if (this.logTransformOption.isSet()) {
                w = Math.log(w);
            }
        } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
            w = 1.0 - this.rareCount / this.count;
        } else {
            w = 1.0;
        }

        int k = MiscUtils.poisson(w, this.classifierRandom);

        // update the expert accordingly 
        if (k > 0) {
            // this works by updating the expert k times with the same example:
            // if k = 4, the expert is trained on the same example 4 times in
            // a row.
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k); // set the # of training times
            this.ensemble[i].trainOnInstance(weightedInst); // update expert
        }
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(inst);
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }

    /*
     * if change was detected, remove the worst expert from the ensemble of 
     * classifiers. 
     */
    if (Change) {
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }

    this.n_negativeWeights = 0;
    for (int i = 0; i < this.weights.length; i++) {
        if (this.weights[i] < 0.0)
            this.n_negativeWeights++;
    }
}

From source file: moa.classifiers.multilabel.MajorityLabelset.java

License: Open Source License

@Override
public double[] getVotesForInstance(Instance x) {
    int L = x.classIndex() + 1;
    if (m_L != L) {
        System.err.println("set L = " + L);
        m_L = L;
        prediction = new double[m_L];
    }
    return prediction;
    //System.out.println("getVotesForInstance(): "+x.classIndex());
}

From source file: moa.classifiers.multilabel.meta.MLOzaBag.java

License: Open Source License

@Override
public double[] getVotesForInstance(Instance x) {

    int L = x.classIndex() + 1;
    if (m_L != L) {
        m_L = L;
    }

    double y[] = new double[m_L];

    for (int i = 0; i < this.ensemble.length; i++) {
        double w[] = this.ensemble[i].getVotesForInstance(x);
        for (int j = 0; j < w.length; j++) {
            y[j] += w[j];
        }
    }

    return y;
}
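
In both multi-label examples above, x.classIndex() + 1 is read as the number of labels L rather than as a single class position: the code assumes a dataset layout in which the label attributes come first and the class index points at the last of them. A minimal illustration of recovering L under that assumption (attribute names are hypothetical):

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.Instances;

public class MultiLabelCountDemo {
    public static void main(String[] args) {
        // Hypothetical multi-label layout: three binary label attributes
        // first, then two numeric features.
        ArrayList<String> binary = new ArrayList<>();
        binary.add("0");
        binary.add("1");
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("label1", binary));
        attrs.add(new Attribute("label2", binary));
        attrs.add(new Attribute("label3", binary));
        attrs.add(new Attribute("feat1"));
        attrs.add(new Attribute("feat2"));
        Instances data = new Instances("mlDemo", attrs, 0);

        // Point the class index at the last label attribute.
        data.setClassIndex(2);

        // Recover the label count the way MajorityLabelset and MLOzaBag do.
        int L = data.classIndex() + 1;
        System.out.println("number of labels L = " + L); // prints 3
    }
}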