Example usage for weka.core Instance isMissingSparse

Introduction

This page lists usage examples for the weka.core Instance method isMissingSparse.

Prototype

public boolean isMissingSparse(int indexOfIndex);

Document

Tests if a specific value is "missing" in the sparse representation.
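Note that the argument is a position in the instance's internal sparse arrays (the index of the index, as returned by index(i)), not the attribute index itself. Below is a minimal, self-contained sketch of iterating a SparseInstance and skipping missing values; the attribute names and data are made up for illustration:

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SparseInstance;

public class IsMissingSparseDemo {

    public static void main(String[] args) {
        // A tiny dataset with three numeric attributes (illustrative only).
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("a0"));
        attrs.add(new Attribute("a1"));
        attrs.add(new Attribute("a2"));
        Instances data = new Instances("demo", attrs, 1);

        // Sparse instance: a0 = 1.5, a1 missing (NaN), a2 = 0 (zeros are not stored).
        Instance inst = new SparseInstance(1.0, new double[] { 1.5, Double.NaN, 0.0 });
        inst.setDataset(data);

        // Iterate only over the stored values of the sparse representation.
        for (int p = 0; p < inst.numValues(); p++) {
            int attIndex = inst.index(p); // attribute index of the p-th stored value
            if (inst.isMissingSparse(p)) { // p is a position in the sparse arrays, not an attribute index
                System.out.println("attribute " + attIndex + " is missing");
            } else {
                System.out.println("attribute " + attIndex + " = " + inst.valueSparse(p));
            }
        }
    }
}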

Usage

From source file: moa.classifiers.functions.SGDOld.java

License: Open Source License

/**
 * Trains the classifier with the given instance.
 *
 * @param instance    the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance instance) {

    if (m_weights == null) {
        m_weights = new double[instance.numAttributes() + 1];
    }

    if (!instance.classIsMissing()) {

        double wx = dotProd(instance, m_weights, instance.classIndex());

        double y;
        double z;
        if (instance.classAttribute().isNominal()) {
            y = (instance.classValue() == 0) ? -1 : 1;
            z = y * (wx + m_weights[m_weights.length - 1]);
        } else {
            y = instance.classValue();
            z = y - (wx + m_weights[m_weights.length - 1]);
            y = 1;
        }

        // Compute multiplier for weight decay
        double multiplier = 1.0;
        if (m_numInstances == 0) {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
        } else {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
        }
        for (int i = 0; i < m_weights.length - 1; i++) {
            m_weights[i] *= multiplier;
        }

        // Only need to do the following if the loss is non-zero
        if (m_loss != HINGE || (z < 1)) {

            // Compute Factor for updates
            double factor = m_learningRate * y * dloss(z);

            // Update coefficients for attributes
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    m_weights[indS] += factor * instance.valueSparse(p1);
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += factor;
        }
        m_t++;
    }
}
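The dotProd helper referenced in these snippets is not shown on this page. A plausible sketch of such a helper, which also uses isMissingSparse to skip missing stored values and ignores the class attribute and the bias slot, might look as follows (an illustrative version, not necessarily the exact implementation used by these classifiers):

/**
 * Illustrative dot product between a (possibly sparse) instance and a dense
 * weight vector whose last entry holds the bias. Missing sparse values and
 * the class attribute are skipped, mirroring the update loops above.
 */
protected static double dotProd(Instance inst, double[] weights, int classIndex) {
    double result = 0;
    int n = inst.numValues(); // number of stored values in the sparse representation
    for (int p = 0; p < n; p++) {
        int att = inst.index(p); // attribute index of the p-th stored value
        if (att != classIndex && att < weights.length - 1 && !inst.isMissingSparse(p)) {
            result += inst.valueSparse(p) * weights[att];
        }
    }
    return result;
}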

From source file: moa.classifiers.functions.SPegasos.java

License: Open Source License

/**
 * Trains the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance instance) {

    if (m_weights == null) {
        m_weights = new double[instance.numAttributes() + 1];
    }
    if (!instance.classIsMissing()) {

        double learningRate = 1.0 / (m_lambda * m_t);
        //double scale = 1.0 - learningRate * m_lambda;
        double scale = 1.0 - 1.0 / m_t;
        double y = (instance.classValue() == 0) ? -1 : 1;
        double wx = dotProd(instance, m_weights, instance.classIndex());
        double z = y * (wx + m_weights[m_weights.length - 1]);

        for (int j = 0; j < m_weights.length - 1; j++) {
            if (j != instance.classIndex()) {
                m_weights[j] *= scale;
            }
        }

        if (m_loss == LOGLOSS || (z < 1)) {
            double loss = dloss(z);
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    double m = learningRate * loss * (instance.valueSparse(p1) * y);
                    m_weights[indS] += m;
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += learningRate * loss * y;
        }

        double norm = 0;
        for (int k = 0; k < m_weights.length - 1; k++) {
            if (k != instance.classIndex()) {
                norm += (m_weights[k] * m_weights[k]);
            }
        }

        double scale2 = Math.min(1.0, (1.0 / (m_lambda * norm)));
        if (scale2 < 1.0) {
            scale2 = Math.sqrt(scale2);
            for (int j = 0; j < m_weights.length - 1; j++) {
                if (j != instance.classIndex()) {
                    m_weights[j] *= scale2;
                }
            }
        }
        m_t++;
    }
}

From source file: org.esa.nest.gpf.SGD.java

/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 * @exception Exception if the instance could not be incorporated in the
 * model.
 */
@Override
public void updateClassifier(Instance instance) throws Exception {

    if (!instance.classIsMissing()) {

        double wx = dotProd(instance, m_weights, instance.classIndex());

        double y;
        double z;
        if (instance.classAttribute().isNominal()) {
            y = (instance.classValue() == 0) ? -1 : 1;
            z = y * (wx + m_weights[m_weights.length - 1]);
        } else {
            y = instance.classValue();
            z = y - (wx + m_weights[m_weights.length - 1]);
            y = 1;
        }

        // Compute multiplier for weight decay
        double multiplier = 1.0;
        if (m_numInstances == 0) {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_t;
        } else {
            multiplier = 1.0 - (m_learningRate * m_lambda) / m_numInstances;
        }
        for (int i = 0; i < m_weights.length - 1; i++) {
            m_weights[i] *= multiplier;
        }

        // Only need to do the following if the loss is non-zero
        if (m_loss != HINGE || (z < 1)) {

            // Compute Factor for updates
            double factor = m_learningRate * y * dloss(z);

            // Update coefficients for attributes
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    m_weights[indS] += factor * instance.valueSparse(p1);
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += factor;
        }
        m_t++;
    }
}

From source file: xlong.urlclassify.others.SPegasos.java

License: Open Source License

/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model 
 * @exception Exception if the instance could not be incorporated in
 * the model.
 */
public void updateClassifier(Instance instance) throws Exception {
    if (!instance.classIsMissing()) {

        double learningRate = 1.0 / (m_lambda * m_t);
        //double scale = 1.0 - learningRate * m_lambda;
        double scale = 1.0 - 1.0 / m_t;
        double y = (instance.classValue() == 0) ? -1 : 1;
        double wx = dotProd(instance, m_weights, instance.classIndex());
        double z = y * (wx + m_weights[m_weights.length - 1]);

        for (int j = 0; j < m_weights.length - 1; j++) {
            if (j != instance.classIndex()) {
                m_weights[j] *= scale;
            }
        }

        if (m_loss == LOGLOSS || (z < 1)) {
            double loss = dloss(z);
            int n1 = instance.numValues();
            for (int p1 = 0; p1 < n1; p1++) {
                int indS = instance.index(p1);
                if (indS != instance.classIndex() && !instance.isMissingSparse(p1)) {
                    double m = learningRate * loss * (instance.valueSparse(p1) * y);
                    m_weights[indS] += m;
                }
            }

            // update the bias
            m_weights[m_weights.length - 1] += learningRate * loss * y;
        }

        double norm = 0;
        for (int k = 0; k < m_weights.length - 1; k++) {
            if (k != instance.classIndex()) {
                norm += (m_weights[k] * m_weights[k]);
            }
        }

        double scale2 = Math.min(1.0, (1.0 / (m_lambda * norm)));
        if (scale2 < 1.0) {
            scale2 = Math.sqrt(scale2);
            for (int j = 0; j < m_weights.length - 1; j++) {
                if (j != instance.classIndex()) {
                    m_weights[j] *= scale2;
                }
            }
        }
        m_t++;
    }
}