// Java tutorial
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package lirmm.inria.fr.peersim.dpmf;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import lirmm.inria.fr.data.DataMatrix;
import lirmm.inria.fr.main.Functions;
import lirmm.inria.fr.math.BigSparseRealMatrix;
import org.apache.commons.math3.util.FastMath;
import peersim.config.Configuration;
import peersim.core.GeneralNode;

/**
 * Main site of the distributed probabilistic matrix factorization (DPMF)
 * computation. It holds the user-item rating matrix {@code R}, the latent
 * factor matrices {@code U} and {@code V}, aggregates the gradient
 * contributions sent by the remote user/item sources, and drives a
 * gradient-descent update with an adaptive learning rate.
 *
 * @author rbouadjenek
 */
public class MainSite extends GeneralNode {

    // PeerSim configuration keys.
    private final String parameter_r = "dpmf.lambda.r";
    private final String parameter_u = "dpmf.lambda.u";
    private final String parameter_v = "dpmf.lambda.v";
    private final String parameter_latentDimension = "dpmf.latent.dimension";
    private final String parameter_datasource = "data.r";
    private final String parameter_testsetrate = "data.testset.rate";

    private final DataMatrix R; // The User-Item rating matrix
    private BigSparseRealMatrix U; // The user latent feature matrix
    private BigSparseRealMatrix gradU; // The current gradient of U
    private BigSparseRealMatrix V; // The item latent feature matrix
    private BigSparseRealMatrix gradV; // The current gradient of V

    // Regularization weights of the objective function.
    final double lambdaU;
    final double lambdaV;
    final double lambdaR;
    final int latentDimension; // Number of latent dimensions

    private double learningRate = 0.1; // The current step size.
    // Factor applied to the learning rate when the cost increases (+10%).
    private final double increasedCostPercentage = 0.10;
    // Factor applied to the learning rate when the cost decreases (x0.7).
    private final double decreasedCostPercentage = 0.7;
    private double J; // Current value of the cost function J.

    // Per-source contributions of the users' sources to the gradient of U,
    // keyed by source node ID. Filled remotely, consumed by computeGradient().
    public Map<Long, BigSparseRealMatrix> userSourceValue = new HashMap<>();
    // Per-source contributions of the items' sources to the gradient of V.
    public Map<Long, BigSparseRealMatrix> itemSourceValue = new HashMap<>();

    private Map<Long, Set<Mapping>> userMappings; // Mapping of users to the sources.
    private Map<Long, Set<Mapping>> itemMappings; // Mapping of items to the sources.

    /**
     * Builds the main site: reads the hyper-parameters from the PeerSim
     * configuration, loads the rating matrix from the configured data file,
     * randomly initializes U and V, and holds out a test set.
     *
     * @param ID the PeerSim node identifier.
     * @throws IOException if the rating data file cannot be read.
     */
    public MainSite(long ID) throws IOException {
        super(ID);
        latentDimension = Configuration.getInt(parameter_latentDimension);
        lambdaU = Configuration.getDouble(parameter_u);
        lambdaV = Configuration.getDouble(parameter_v);
        lambdaR = Configuration.getDouble(parameter_r);
        J = Double.MAX_VALUE; // So the first computed cost always counts as a decrease.
        String file = Configuration.getString(parameter_datasource);
        R = DataMatrix.createDataMatrix(file);
        // U and V are (latentDimension x numUsers) and (latentDimension x numItems).
        U = BigSparseRealMatrix.randomGenerateMatrix(latentDimension, R.getRowDimension());
        V = BigSparseRealMatrix.randomGenerateMatrix(latentDimension, R.getColumnDimension());
        R.cutDataSet(Configuration.getDouble(parameter_testsetrate), 0);
        System.err.println("R=[" + R.getRowDimension() + "," + R.getColumnDimension() + "]= "
                + R.getDataSize() + " (Rate = " + Configuration.getDouble(parameter_testsetrate) + "%)");
    }

    /**
     * Computes the main site's part of the derivative of the global objective
     * function, then folds in the gradient contributions received from the
     * user and item sources. The source buffers are cleared afterwards, ready
     * for the next iteration.
     */
    public void computeGradient() {
        // temp = -(specialOperation(U', V)); presumably the (sparse) residual
        // between U'*V and R on observed entries — TODO confirm sign convention
        // against BigSparseRealMatrix.specialOperation.
        BigSparseRealMatrix temp = R.specialOperation(U.transpose(), V).scalarMultiply(-1);
        //--------------------------------------------------------------
        //------------ Compute the gradient of U------------------------
        //--------------------------------------------------------------
        // lambdaR-weighted rating-error term of dJ/dU.
        gradU = temp.multiply(V.transpose()).transpose().scalarMultiply(lambdaR);
        for (Map.Entry<Long, BigSparseRealMatrix> e : userSourceValue.entrySet()) {
            long ID = e.getKey();
            BigSparseRealMatrix result = e.getValue();
            Set<Mapping> mappings = userMappings.get(ID);
            gradU.addToColumn(result, mappings);
        }
        gradU = gradU.add(U.scalarMultiply(lambdaU)); // add the regularization term
        userSourceValue.clear(); // consume the buffered contributions
        //--------------------------------------------------------------
        //------------ Compute the gradient of V -----------------------
        //--------------------------------------------------------------
        // lambdaR-weighted rating-error term of dJ/dV.
        gradV = temp.transpose().multiply(U.transpose()).transpose().scalarMultiply(lambdaR);
        for (Map.Entry<Long, BigSparseRealMatrix> e : itemSourceValue.entrySet()) {
            long ID = e.getKey();
            BigSparseRealMatrix result = e.getValue();
            Set<Mapping> mappings = itemMappings.get(ID);
            gradV.addToColumn(result, mappings);
        }
        gradV = gradV.add(V.scalarMultiply(lambdaV)); // add the regularization term
        itemSourceValue.clear(); // consume the buffered contributions
    }

    /**
     * This function is a part of the global objective function. This part is
     * computed by the main site.
     *
     * @param U the candidate user latent feature matrix.
     * @param V the candidate item latent feature matrix.
     * @return a double value which represents the contribution of the main
     * site to the global cost function.
     */
    double computeLocalCostFunction(BigSparseRealMatrix U, BigSparseRealMatrix V) {
        // (lambdaR/2) * ||specialOperation(U', V)||_F^2 — the rating-error term.
        double currentJ = (lambdaR / 2)
                * FastMath.pow(R.specialOperation(U.transpose(), V).getFrobeniusNorm(), 2);
        currentJ += (lambdaU / 2) * FastMath.pow(U.getFrobeniusNorm(), 2); // regularization term for U
        currentJ += (lambdaV / 2) * FastMath.pow(V.getFrobeniusNorm(), 2); // regularization term for V
        return currentJ;
    }

    /**
     * Get the current user latent feature matrix U.
     *
     * @return the current user latent feature matrix U.
     */
    public BigSparseRealMatrix getU() {
        return U;
    }

    /**
     * Get the current item latent feature matrix V.
     *
     * @return the current item latent feature matrix V.
     */
    public BigSparseRealMatrix getV() {
        return V;
    }

    /**
     * Get the current user-item rating matrix R.
     *
     * @return the current user-item rating matrix R.
     */
    public BigSparseRealMatrix getR() {
        return R;
    }

    /**
     * Get the temporary user latent feature matrix U. This temporary matrix is
     * computed using the current gradient of U, and the current learning rate
     * (one gradient-descent step: U - learningRate * gradU).
     *
     * @return the temporary user latent feature matrix U.
     */
    public BigSparseRealMatrix getTempU() {
        return U.subtract(gradU.scalarMultiply(learningRate));
    }

    /**
     * Get the temporary item latent feature matrix V. This temporary matrix is
     * computed using the current gradient of V, and the current learning rate
     * (one gradient-descent step: V - learningRate * gradV).
     *
     * @return the temporary item latent feature matrix V.
     */
    public BigSparseRealMatrix getTempV() {
        return V.subtract(gradV.scalarMultiply(learningRate));
    }

    /**
     * @return the current learning rate.
     */
    public double getLearningRate() {
        return learningRate;
    }

    /**
     * Increases the current learning rate by 10% (the
     * {@code increasedCostPercentage} factor).
     */
    public void increaseLearningRate() {
        learningRate += learningRate * increasedCostPercentage;
    }

    /**
     * Decreases the current learning rate by multiplying it by 0.7 (the
     * {@code decreasedCostPercentage} factor).
     */
    public void decreaseLearningRate() {
        learningRate *= decreasedCostPercentage;
    }

    /**
     * @return the current value of the cost function.
     */
    public double getJ() {
        return J;
    }

    /**
     * This method will update the current cost value.
     *
     * @param cost the new cost value.
     */
    public void updateJ(double cost) {
        this.J = cost;
    }

    /**
     * This method will update the user latent feature matrix U.
     *
     * @param U the new user latent feature matrix.
     */
    public void updateU(BigSparseRealMatrix U) {
        this.U = U;
    }

    /**
     * This method will update the item latent feature matrix V.
     *
     * @param V the new item latent feature matrix.
     */
    public void updateV(BigSparseRealMatrix V) {
        this.V = V;
    }

    /**
     * @return the mapping from user identifiers to row indices of R.
     */
    public Map<String, Integer> getUserMapping() {
        return R.getRowsMapping();
    }

    /**
     * @return the mapping from item identifiers to column indices of R.
     */
    public Map<String, Integer> getItemMapping() {
        return R.getColumnsMapping();
    }

    /**
     * @param userid the user identifier.
     * @return the row index of the given user in R.
     * NOTE(review): throws NullPointerException (unboxing null) if the user
     * is unknown — callers appear to rely on the id being present; confirm.
     */
    public int getUserMapping(String userid) {
        return R.getRowsMapping().get(userid);
    }

    /**
     * @param itemid the item identifier.
     * @return the column index of the given item in R.
     * NOTE(review): throws NullPointerException (unboxing null) if the item
     * is unknown — callers appear to rely on the id being present; confirm.
     */
    public int getItemMapping(String itemid) {
        return R.getColumnsMapping().get(itemid);
    }

    public void setUserMappings(Map<Long, Set<Mapping>> userMappings) {
        this.userMappings = userMappings;
    }

    public void setItemMappings(Map<Long, Set<Mapping>> itemMappings) {
        this.itemMappings = itemMappings;
    }
}