Example usage for org.apache.mahout.math Vector get

List of usage examples for org.apache.mahout.math Vector get

Introduction

On this page you can find example usage for org.apache.mahout.math Vector get.

Prototype

double get(int index);

Document

Return the value at the given index
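
A minimal sketch of the call itself, using a small DenseVector with illustrative values:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class VectorGetExample {
    public static void main(String[] args) {
        // build a small dense vector; the values are illustrative
        Vector v = new DenseVector(new double[] { 1.0, 2.5, 0.0, 4.0 });

        // get(index) returns the value stored at that position
        double second = v.get(1); // 2.5

        // indices outside [0, size()) are invalid and throw IndexException
        System.out.println("v.get(1) = " + second + ", size = " + v.size());
    }
}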

Usage

From source file: org.trustedanalytics.atk.giraph.io.DoubleArrayWritable.java

License: Apache License

private double[] vectorToArray(Vector vector) {
    double[] arr = new double[vector.size()];
    for (int i = 0; i < vector.size(); i++) {
        arr[i] = vector.get(i);
    }
    return arr;
}
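
A hypothetical caller of the helper above (the vector contents are made up); it also shows that get(i) returns 0.0 for positions a sparse vector never set:

import java.util.Arrays;

import org.apache.mahout.math.RandomAccessSparseVector;
import org.apache.mahout.math.Vector;

public class VectorToArrayDemo {
    // same logic as the helper above, repeated here so the class is self-contained
    private static double[] vectorToArray(Vector vector) {
        double[] arr = new double[vector.size()];
        for (int i = 0; i < vector.size(); i++) {
            arr[i] = vector.get(i);
        }
        return arr;
    }

    public static void main(String[] args) {
        Vector v = new RandomAccessSparseVector(5);
        v.setQuick(2, 3.0);
        System.out.println(Arrays.toString(vectorToArray(v))); // [0.0, 0.0, 3.0, 0.0, 0.0]
    }
}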

From source file: tv.floe.metronome.classification.logisticregression.ParallelOnlineLogisticRegression.java

License: Apache License

/**
 * Custom training for POLR based around accumulating gradient to send to the
 * master process.
 */
@Override
public void train(long trackingKey, String groupKey, int actual, Vector instance) {
    unseal();
    double learningRate = currentLearningRate();

    // push coefficients back to zero based on the prior
    regularize(instance);

    // compute the gradient for each category, then update the corresponding
    // row of coefficients
    Vector gradient = this.default_gradient.apply(groupKey, actual, instance, this);
    for (int i = 0; i < numCategories - 1; i++) {

        double gradientBase = gradient.get(i);

        // we're only going to look at the non-zero elements of the vector
        // then we apply the gradientBase to the resulting element.
        Iterator<Vector.Element> nonZeros = instance.iterateNonZero();

        while (nonZeros.hasNext()) {
            Vector.Element updateLocation = nonZeros.next();
            int j = updateLocation.index();

            // gradient contribution for this coefficient
            double gradient_to_add = gradientBase * learningRate * perTermLearningRate(j) * instance.get(j);

            double newValue = beta.getQuick(i, j) + gradient_to_add;
            beta.setQuick(i, j, newValue);

            // gamma (the gradient accumulated since the last sync with the master)
            // would be updated here with the same increment; that code is disabled
            // in this version.
        }
    }

    // remember that these elements got updated
    Iterator<Vector.Element> i = instance.iterateNonZero();
    while (i.hasNext()) {
        Vector.Element element = i.next();
        int j = element.index();
        updateSteps.setQuick(j, getStep());
        updateCounts.setQuick(j, updateCounts.getQuick(j) + 1);
    }
    nextStep();

}
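
A stripped-down sketch of the inner update above, isolating where get() appears; the beta array, learning rate and gradient value are hypothetical stand-ins rather than the fields of the class, and the per-term learning rate is omitted for brevity:

import java.util.Arrays;
import java.util.Iterator;

import org.apache.mahout.math.RandomAccessSparseVector;
import org.apache.mahout.math.Vector;

public class SgdUpdateSketch {
    public static void main(String[] args) {
        // stand-ins for beta, the learning rate and one category's gradient
        double[][] beta = new double[1][4];
        double learningRate = 0.1;
        double gradientBase = 0.5;

        // sparse feature vector with two non-zero entries
        Vector instance = new RandomAccessSparseVector(4);
        instance.setQuick(1, 2.0);
        instance.setQuick(3, 1.0);

        // walk only the non-zero features and read each value with get(j)
        Iterator<Vector.Element> nonZeros = instance.iterateNonZero();
        while (nonZeros.hasNext()) {
            int j = nonZeros.next().index();
            beta[0][j] += gradientBase * learningRate * instance.get(j);
        }

        System.out.println(Arrays.deepToString(beta)); // [[0.0, 0.1, 0.0, 0.05]]
    }
}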