List of usage examples for the `org.apache.mahout.math.Vector.dot(Vector)` method:
double dot(Vector x);
From source file:ca.uwaterloo.cpami.mahout.matrix.utils.GramSchmidt.java
License:Apache License
public static void orthonormalizeColumns(Matrix mx) { //int n = mx.numCols(); int n = mx.numRows(); for (int c = 0; c < n; c++) { System.out.println("col: " + c); Vector col = mx.viewRow(c); for (int c1 = 0; c1 < c; c1++) { Vector viewC1 = mx.viewRow(c1); col.assign(col.minus(viewC1.times(viewC1.dot(col)))); }/* w w w . j av a2 s . c o m*/ final double norm2 = col.norm(2); if (norm2 == 0) { System.out.println("zero"); } col.assign(new DoubleFunction() { @Override public double apply(double x) { return x / norm2; } }); } }
From source file:com.cloudera.science.ml.parallel.covariance.MahalanobisDistance.java
License:Open Source License
/**
 * Computes the Mahalanobis quadratic form for {@code v}:
 * (v - m)^T * ci * (v - m), where {@code m} is the mean vector and
 * {@code ci} the inverse covariance matrix held by this instance.
 *
 * @param v the point to measure
 * @return the squared Mahalanobis distance of {@code v} from the mean
 */
public double distance(Vector v) {
    Vector deviation = v.minus(m);
    Vector scaledDeviation = ci.times(deviation);
    return deviation.dot(scaledDeviation);
}
From source file:com.twitter.algebra.AlgebraCommon.java
License:Apache License
/** * Multiply a vector with a matrix/*from w ww . ja v a 2 s.co m*/ * @param vector V * @param matrix M * @param resVector will be filled with V * M * @return V * M */ public static Vector vectorTimesMatrix(Vector vector, Matrix matrix, DenseVector resVector) { int nCols = matrix.numCols(); for (int c = 0; c < nCols; c++) { Double resDouble = vector.dot(matrix.viewColumn(c)); resVector.set(c, resDouble); } return resVector; }
From source file:com.twitter.algebra.AlgebraCommon.java
License:Apache License
/**
 * Multiplies a row vector by the transpose of a matrix: res = V * M^T,
 * where the transpose is supplied directly as {@code matrixTranspose}
 * (its rows are the columns of M).
 *
 * @param vector          the row vector V
 * @param matrixTranspose the matrix M, already transposed
 * @param resVector       output vector filled with the result
 * @return {@code resVector}, for call-chaining convenience
 */
public static Vector vectorTimesMatrixTranspose(Vector vector, Matrix matrixTranspose, Vector resVector) {
    int resultSize = matrixTranspose.numRows();
    for (int i = 0; i < resultSize; i++) {
        Vector row = matrixTranspose.viewRow(i);
        // Skip the dot product entirely for all-default (all-zero) rows —
        // a cheap sparsity shortcut.
        double value = row.getNumNondefaultElements() == 0 ? 0d : vector.dot(row);
        resVector.set(i, value);
    }
    return resVector;
}
From source file:com.ydy.cf.solver.impl.AlternatingLeastSquaresImplicitSolver.java
License:Apache License
/**
 * Builds the top-K recommendations for a user from the factorized model:
 * each unrated item is scored as the dot product of the user's and the
 * item's latent feature vectors; items the user already rated are skipped.
 *
 * @param userRatings  the user's known ratings (keys mark already-rated items)
 * @param userFeatures the user's latent feature vector
 * @param topK         how many recommendations to keep
 * @return a TopK heap of the highest-scoring unrated items
 */
public TopK<RecommendedItem> buildRecommends(Vector userRatings, Vector userFeatures, int topK) {
    final Map<Integer, Boolean> alreadyRated = VectorUtils.keys(userRatings);
    final TopK<RecommendedItem> topKItems =
            new TopK<RecommendedItem>(topK, VectorUtils.BY_PREFERENCE_VALUE);
    for (Iterator<MatrixSlice> rows = Y.iterator(); rows.hasNext();) {
        MatrixSlice row = rows.next();
        int itemId = row.index();
        if (alreadyRated.containsKey(itemId)) {
            continue; // never re-recommend an item the user already rated
        }
        double predictedRating = userFeatures.dot(row.vector());
        topKItems.offer(new GenericRecommendedItem(itemId, (float) predictedRating));
    }
    return topKItems;
}
From source file:edu.snu.dolphin.bsp.examples.ml.algorithms.clustering.em.EMMainCmpTask.java
License:Apache License
/**
 * One E-step pass of EM clustering: for every point, computes its posterior
 * responsibility under each cluster's Gaussian and accumulates the weighted
 * partial statistics (outer product, weighted point, weight) per cluster
 * into {@code clusterToStats}.
 *
 * <p>NOTE(review): the Gaussian density omits the (2*pi)^(d/2) constant;
 * presumably this is deliberate since it cancels when numerators are
 * normalized by {@code denominator} — confirm downstream consumers do not
 * expect true densities.
 *
 * @param iteration the current EM iteration index (unused here)
 */
@Override
public void run(final int iteration) {
    clusterToStats = new HashMap<>();
    final int numClusters = clusterSummaries.size();
    // Compute the partial statistics of each cluster
    for (final Vector vector : points) {
        final int dimension = vector.size();
        // Outer product x * x^T of the point with itself; when covariances
        // are modeled as diagonal, only the diagonal x_j^2 entries are kept
        // in a sparse matrix to save memory.
        Matrix outProd = null;
        if (isCovarianceDiagonal) {
            outProd = new SparseMatrix(dimension, dimension);
            for (int j = 0; j < dimension; j++) {
                outProd.set(j, j, vector.get(j) * vector.get(j));
            }
        } else {
            outProd = vector.cross(vector);
        }
        double denominator = 0;
        final double[] numerators = new double[numClusters];
        for (int i = 0; i < numClusters; i++) {
            final ClusterSummary clusterSummary = clusterSummaries.get(i);
            final Vector centroid = clusterSummary.getCentroid();
            final Matrix covariance = clusterSummary.getCovariance();
            final Double prior = clusterSummary.getPrior();
            final Vector differ = vector.minus(centroid);
            // Unnormalized Gaussian likelihood times the cluster prior:
            // prior / sqrt(|Sigma|) * exp(-1/2 * d^T * Sigma^-1 * d)
            numerators[i] = prior / Math.sqrt(covariance.determinant())
                    * Math.exp(differ.dot(inverse(covariance).times(differ)) / (-2));
            denominator += numerators[i];
        }
        for (int i = 0; i < numClusters; i++) {
            // Posterior responsibility of cluster i for this point; when all
            // numerators underflow to 0, fall back to a uniform assignment.
            final double posterior =
                    denominator == 0 ? 1.0 / numerators.length : numerators[i] / denominator;
            if (!clusterToStats.containsKey(i)) {
                clusterToStats.put(i, new ClusterStats(times(outProd, posterior),
                        vector.times(posterior), posterior, false));
            } else {
                clusterToStats.get(i).add(new ClusterStats(times(outProd, posterior),
                        vector.times(posterior), posterior, false));
            }
        }
    }
}
From source file:io.ssc.relationdiscovery.SVD.java
License:Open Source License
public Matrix projectRowsOntoFeatureSpace() { SparseRowMatrix projection = new SparseRowMatrix(A.numRows(), rank); for (int patternIndex = 0; patternIndex < A.numRows(); patternIndex++) { Vector patternOccurrences = A.viewRow(patternIndex); for (int r = 0; r < rank; r++) { WeightedVector singularVector = singularVectors.get(r); double weight = singularVector.getWeight() * patternOccurrences.dot(singularVector); projection.setQuick(patternIndex, r, weight); }//from w ww . j a v a 2 s . co m } return projection; }
From source file:org.gpfvic.mahout.cf.taste.hadoop.als.PredictionMapper.java
License:Apache License
/**
 * Emits the top-N item recommendations for one user. For every item the user
 * has not yet rated, predicts a rating as the dot product of the user's and
 * item's feature vectors, keeps the best {@code recommendationsPerUser} in a
 * bounded queue, caps predictions at {@code maxRating}, and optionally remaps
 * internal indices back to the original long IDs before writing.
 *
 * @param userIndexWritable internal index of the user being processed
 * @param ratingsWritable   the user's known ratings vector
 * @param ctx               Hadoop context to write (userID, recommendations) to
 */
@Override
protected void map(IntWritable userIndexWritable, VectorWritable ratingsWritable, Context ctx)
        throws IOException, InterruptedException {
    // U (user features) and M (item features) are loaded once and shared
    // across map() calls.
    Pair<OpenIntObjectHashMap<Vector>, OpenIntObjectHashMap<Vector>> uAndM = getSharedInstance();
    OpenIntObjectHashMap<Vector> U = uAndM.getFirst();
    OpenIntObjectHashMap<Vector> M = uAndM.getSecond();
    Vector ratings = ratingsWritable.get();
    int userIndex = userIndexWritable.get();
    // Collect the items this user has already rated so they are never
    // recommended back.
    final OpenIntHashSet alreadyRatedItems = new OpenIntHashSet(ratings.getNumNondefaultElements());
    for (Vector.Element e : ratings.nonZeroes()) {
        alreadyRatedItems.add(e.index());
    }
    // Bounded min-queue keeping only the top recommendationsPerUser items.
    final TopItemsQueue topItemsQueue = new TopItemsQueue(recommendationsPerUser);
    final Vector userFeatures = U.get(userIndex);
    M.forEachPair(new IntObjectProcedure<Vector>() {
        @Override
        public boolean apply(int itemID, Vector itemFeatures) {
            if (!alreadyRatedItems.contains(itemID)) {
                double predictedRating = userFeatures.dot(itemFeatures);
                // Replace the queue's current minimum only if this item
                // scores higher.
                MutableRecommendedItem top = topItemsQueue.top();
                if (predictedRating > top.getValue()) {
                    top.set(itemID, (float) predictedRating);
                    topItemsQueue.updateTop();
                }
            }
            return true; // keep iterating over all items
        }
    });
    List<RecommendedItem> recommendedItems = topItemsQueue.getTopItems();
    if (!recommendedItems.isEmpty()) {
        // cap predictions to maxRating
        for (RecommendedItem topItem : recommendedItems) {
            ((MutableRecommendedItem) topItem).capToMaxValue(maxRating);
        }
        if (usesLongIDs) {
            // Translate internal indices back to the caller-visible long IDs.
            long userID = userIDIndex.get(userIndex);
            userIDWritable.set(userID);
            for (RecommendedItem topItem : recommendedItems) {
                // remap item IDs
                long itemID = itemIDIndex.get((int) topItem.getItemID());
                ((MutableRecommendedItem) topItem).setItemID(itemID);
            }
        } else {
            userIDWritable.set(userIndex);
        }
        recommendations.set(recommendedItems);
        ctx.write(userIDWritable, recommendations);
    }
}
From source file:org.plista.kornakapi.core.optimizer.ErrorALSWRFactorizer.java
License:Apache License
/**
 * Computes an ALS-WR factorization, alternating between solving for user
 * features with item features fixed and vice versa, for
 * {@code numIterations} rounds. Each half-step is parallelized through an
 * executor; after every iteration the mean absolute error against
 * {@code testModel} is recorded so the caller gets per-iteration errors
 * alongside the final U and M factors.
 *
 * <p>NOTE(review): {@code awaitTermination} uses getNumUsers()/getNumItems()
 * seconds as the timeout and only logs on interrupt — a half-finished
 * iteration is silently used. Confirm this best-effort behavior is intended.
 *
 * @return the factorization together with the per-iteration test errors
 * @throws TasteException on data-model access failure
 */
@Override
public ErrorFactorization factorize() throws TasteException {
    log.info("starting to compute the factorization...");
    final Features features = new Features(this);
    /* feature maps necessary for solving for implicit feedback */
    OpenIntObjectHashMap<Vector> userY = null;
    OpenIntObjectHashMap<Vector> itemY = null;
    if (usesImplicitFeedback) {
        userY = userFeaturesMapping(dataModel.getUserIDs(), dataModel.getNumUsers(), features.getU());
        itemY = itemFeaturesMapping(dataModel.getItemIDs(), dataModel.getNumItems(), features.getM());
    }
    Double[] errors = new Double[numIterations];
    for (int iteration = 0; iteration < numIterations; iteration++) {
        LongPrimitiveIterator userIDsIterator = dataModel.getUserIDs();
        LongPrimitiveIterator itemIDsIterator = dataModel.getItemIDs();
        log.info("iteration {}", iteration);
        /* fix M - compute U */
        ExecutorService queue = createQueue();
        try {
            // One shared implicit-feedback solver per half-step (built over
            // the item features); null in the explicit-feedback path.
            final ImplicitFeedbackAlternatingLeastSquaresSolver implicitFeedbackSolver = usesImplicitFeedback
                    ? new ImplicitFeedbackAlternatingLeastSquaresSolver(numFeatures, lambda, alpha, itemY)
                    : null;
            while (userIDsIterator.hasNext()) {
                final long userID = userIDsIterator.nextLong();
                if (usesImplicitFeedback) {
                    final PreferenceArray userPrefs = dataModel.getPreferencesFromUser(userID);
                    queue.execute(new Runnable() {
                        @Override
                        public void run() {
                            Vector userFeatures =
                                    implicitFeedbackSolver.solve(sparseUserRatingVector(userPrefs));
                            //userFeatures = userFeatures.divide(Math.sqrt(userFeatures.getLengthSquared()));
                            features.setFeatureColumnInU(userIndex(userID), userFeatures);
                        }
                    });
                } else {
                    final LongPrimitiveIterator itemIDsFromUser =
                            dataModel.getItemIDsFromUser(userID).iterator();
                    final PreferenceArray userPrefs = dataModel.getPreferencesFromUser(userID);
                    queue.execute(new Runnable() {
                        @Override
                        public void run() {
                            // Gather the feature vectors of every item this
                            // user rated, then solve the regularized
                            // least-squares system for the user's features.
                            List<Vector> featureVectors = Lists.newArrayList();
                            while (itemIDsFromUser.hasNext()) {
                                long itemID = itemIDsFromUser.nextLong();
                                featureVectors.add(features.getItemFeatureColumn(itemIndex(itemID)));
                            }
                            Vector userFeatures = AlternatingLeastSquaresSolver.solve(featureVectors,
                                    ratingVector(userPrefs), lambda, numFeatures);
                            features.setFeatureColumnInU(userIndex(userID), userFeatures);
                        }
                    });
                }
            }
        } finally {
            queue.shutdown();
            try {
                queue.awaitTermination(dataModel.getNumUsers(), TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                log.warn("Error when computing user features", e);
            }
        }
        /* fix U - compute M */
        queue = createQueue();
        try {
            // Mirror half-step: solver now built over the user features.
            final ImplicitFeedbackAlternatingLeastSquaresSolver implicitFeedbackSolver = usesImplicitFeedback
                    ? new ImplicitFeedbackAlternatingLeastSquaresSolver(numFeatures, lambda, alpha, userY)
                    : null;
            if (usesImplicitFeedback) {
                while (itemIDsIterator.hasNext()) {
                    final long itemID = itemIDsIterator.nextLong();
                    final PreferenceArray itemPrefs = dataModel.getPreferencesForItem(itemID);
                    queue.execute(new Runnable() {
                        @Override
                        public void run() {
                            Vector itemFeatures =
                                    implicitFeedbackSolver.solve(sparseItemRatingVector(itemPrefs));
                            //itemFeatures = itemFeatures.divide(Math.sqrt(itemFeatures.getLengthSquared()));
                            features.setFeatureColumnInM(itemIndex(itemID), itemFeatures);
                        }
                    });
                }
            } else {
                while (itemIDsIterator.hasNext()) {
                    final long itemID = itemIDsIterator.nextLong();
                    final PreferenceArray itemPrefs = dataModel.getPreferencesForItem(itemID);
                    queue.execute(new Runnable() {
                        @Override
                        public void run() {
                            // Gather the feature vectors of every user who
                            // rated this item, then solve for item features.
                            List<Vector> featureVectors = Lists.newArrayList();
                            for (Preference pref : itemPrefs) {
                                long userID = pref.getUserID();
                                featureVectors.add(features.getUserFeatureColumn(userIndex(userID)));
                            }
                            Vector itemFeatures = AlternatingLeastSquaresSolver.solve(featureVectors,
                                    ratingVector(itemPrefs), lambda, numFeatures);
                            features.setFeatureColumnInM(itemIndex(itemID), itemFeatures);
                        }
                    });
                }
            }
        } finally {
            queue.shutdown();
            try {
                queue.awaitTermination(dataModel.getNumItems(), TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                log.warn("Error when computing item features", e);
            }
        }
        // Evaluate this iteration: sum of absolute errors between predicted
        // (itemFeatures . userFeatures) and actual test-set preferences.
        userIDsIterator = testModel.getUserIDs();
        double error = 0;
        while (userIDsIterator.hasNext()) {
            Long userID = userIDsIterator.next();
            PreferenceArray userPrefs = testModel.getPreferencesFromUser(userID);
            Vector userf = features.getUserFeatureColumn(userIndex(userID));
            long[] itemIDs = userPrefs.getIDs();
            int idx = 0;
            for (long itemID : itemIDs) {
                Vector itemf = features.getItemFeatureColumn(itemIndex(itemID));
                double pref = itemf.dot(userf);
                double realpref = userPrefs.getValue(idx);
                idx++;
                error = error + Math.abs(pref - realpref);
            }
        }
        errors[iteration] = error;
    }
    ErrorFactorization factorization =
            createErrorFactorization(features.getU(), features.getM(), errors);
    log.info("finished computation of the factorization...");
    return factorization;
}
From source file:org.qcri.pca.PCACommon.java
static Vector sparseVectorTimesMatrix(Vector vector, Matrix matrix, DenseVector resVector) { int nCols = matrix.numCols(); for (int c = 0; c < nCols; c++) { Double resDouble = vector.dot(matrix.viewColumn(c)); resVector.set(c, resDouble);// ww w. j av a 2s .co m } return resVector; }