List of usage examples for java.lang.Math.log
@HotSpotIntrinsicCandidate public static double log(double a)
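Math.log(a) returns the natural logarithm (base e) of a: NaN for NaN or negative arguments, negative infinity for zero, and positive infinity for positive infinity. A minimal sketch of those edge cases:

// Basic behaviour of java.lang.Math.log (natural logarithm, base e).
System.out.println(Math.log(Math.E));                   // ~1.0
System.out.println(Math.log(1.0));                      // 0.0
System.out.println(Math.log(0.0));                      // -Infinity
System.out.println(Math.log(-1.0));                     // NaN
System.out.println(Math.log(Double.POSITIVE_INFINITY)); // Infinity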
From source file: hivemall.utils.math.StatsUtils.java

public static double logLoss(final double actual, final double predicted, final double sigma) {
    // Negative log of the density at the observed value; returns 0 if the density underflows to zero.
    double p = pdf(actual, predicted, sigma);
    if (p == 0.d) {
        return 0.d;
    }
    return -Math.log(p);
}
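Here Math.log turns a density into a log loss (negative log-likelihood); the zero check guards against -Math.log(0.d) producing positive infinity. A self-contained sketch of the same idea, assuming pdf(actual, predicted, sigma) is a Gaussian density at actual with mean predicted and standard deviation sigma (an assumption; the snippet itself does not show pdf's definition):

// gaussianPdf is a local helper written for this sketch, not hivemall's pdf().
static double gaussianPdf(double x, double mean, double sigma) {
    double z = (x - mean) / sigma;
    return Math.exp(-0.5 * z * z) / (sigma * Math.sqrt(2.0 * Math.PI));
}

static double logLoss(double actual, double predicted, double sigma) {
    double p = gaussianPdf(actual, predicted, sigma);
    if (p == 0.0) {
        return 0.0;  // mirrors the listing: give up rather than return +Infinity
    }
    return -Math.log(p);
}

// logLoss(1.0, 1.0, 1.0) ~= 0.9189, the -log of the standard normal peak density 1/sqrt(2*pi).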
From source file: com.itemanalysis.psychometrics.polycor.PolychoricMaximumLikelihood.java

private void computeChiSquare() {
    df = nrow * ncol - nrow - ncol;
    if (df <= 0.0) {
        probChiSquare = 0.0;
    } else {
        ChiSquaredDistribution cs = new ChiSquaredDistribution(df);
        double sum = 0.0;
        for (int i = 0; i < nrow; i++) {
            for (int j = 0; j < ncol; j++) {
                sum += Math.log((data[i][j] + 1e-6) / N) * data[i][j];
            }
        }
        chiSquare = 2.0 * (fmin + sum);
        probChiSquare = 1.0 - cs.cumulativeProbability(chiSquare);
    }
}
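The Math.log call contributes the observed-count part of a likelihood-ratio-style fit statistic, with 1e-6 added so empty cells do not produce log(0); the p-value then comes from the upper tail of a chi-squared distribution. A short sketch of just those two pieces, using Apache Commons Math with made-up counts:

// Toy 2x2 contingency table; values are invented for illustration only.
double[][] data = { {30, 10}, {20, 40} };
double N = 100.0;

double sum = 0.0;
for (double[] row : data) {
    for (double count : row) {
        // 1e-6 keeps Math.log finite when a cell count is zero
        sum += Math.log((count + 1e-6) / N) * count;
    }
}

// Upper-tail p-value of a statistic under a chi-squared distribution.
double df = 1.0;
double statistic = 3.84;  // arbitrary example value
org.apache.commons.math3.distribution.ChiSquaredDistribution cs =
        new org.apache.commons.math3.distribution.ChiSquaredDistribution(df);
double p = 1.0 - cs.cumulativeProbability(statistic);  // ~0.05 for 3.84 with df = 1
System.out.println("sum = " + sum + ", p = " + p);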
From source file: eu.crisis_economics.abm.model.configuration.LogNormalDistributionModelParameterConfiguration.java

/**
 * Create a {@link LogNormalDistributionModelParameterConfiguration} object with
 * custom parameters. This static method differs from the class constructor with
 * the same arguments in that the arguments to this method are the {@code mean}
 * and the {@code standard deviation} of the actual lognormal distribution, not the
 * {@code mean} and {@code standard deviation} of the log of the lognormal distribution
 * (namely, the underlying normal distribution).
 *
 * @param actualMean
 *        The mean of the lognormal distribution. This argument must be
 *        strictly positive.
 * @param actualSigma
 *        The standard deviation of the lognormal distribution. This argument
 *        must be strictly positive.
 */
public static LogNormalDistributionModelParameterConfiguration create(final double actualMean,
        final double actualSigma) {
    final double actualMean2 = actualMean * actualMean,
            actualSigma2 = actualSigma * actualSigma,
            muLogDist = .5 * Math.log(actualMean2 / (actualSigma2 / actualMean2 + 1.)),
            sigmaLogDist = Math.sqrt(Math.log(actualSigma2 / actualMean2 + 1.));
    return new LogNormalDistributionModelParameterConfiguration(muLogDist, sigmaLogDist);
}
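The conversion follows from the standard lognormal moment relations: if the underlying normal has parameters mu and sigma, the lognormal mean is exp(mu + sigma^2/2) and its variance is (exp(sigma^2) - 1) * exp(2*mu + sigma^2). A minimal round-trip sketch (the variable names and sample values are illustrative, only the algebra is taken from the listing):

// Convert (mean, sigma) of the lognormal to (mu, sigma) of the underlying normal, then recover them.
final double actualMean = 3.0, actualSigma = 1.5;
final double m2 = actualMean * actualMean, s2 = actualSigma * actualSigma;

final double mu = 0.5 * Math.log(m2 / (s2 / m2 + 1.0));      // same algebra as the listing
final double sigma = Math.sqrt(Math.log(s2 / m2 + 1.0));

final double recoveredMean = Math.exp(mu + 0.5 * sigma * sigma);
final double recoveredVar = (Math.exp(sigma * sigma) - 1.0) * Math.exp(2.0 * mu + sigma * sigma);

System.out.printf("mean:  %.6f vs %.6f%n", actualMean, recoveredMean);
System.out.printf("sigma: %.6f vs %.6f%n", actualSigma, Math.sqrt(recoveredVar));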
From source file: com.itemanalysis.psychometrics.irt.equating.RobustZEquatingTest.java

private void testA() throws IllegalArgumentException {
    double[] aDiff = new double[nA];
    za = new RobustZ[nA];
    for (int i = 0; i < nA; i++) {
        aDiff[i] = Math.log(aX[i]) - Math.log(aY[i]);
    }
    double median = percentile.evaluate(aDiff, 50);
    double q3 = percentile.evaluate(aDiff, 75);
    double q1 = percentile.evaluate(aDiff, 25);
    double iqr = q3 - q1;
    Mean mean = new Mean();
    for (int i = 0; i < nA; i++) {
        za[i] = new RobustZ(aDiff[i], median, iqr);
        if (!za[i].significant(significanceLevel)) {
            mean.increment(aDiff[i]);
        }
    }
    slope = Math.exp(mean.getResult());
}
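Math.log(aX[i]) - Math.log(aY[i]) is the log of the ratio aX[i] / aY[i], so the final Math.exp of the mean log-difference is a geometric mean of those ratios. A small sketch of just that idea, with the robust-Z outlier screening from the listing omitted and the data invented:

// Mean of log-ratios, exponentiated back to a ratio scale (geometric mean of aX[i] / aY[i]).
double[] aX = {0.9, 1.1, 1.3, 1.0};
double[] aY = {0.8, 1.0, 1.2, 0.9};

double sum = 0.0;
for (int i = 0; i < aX.length; i++) {
    sum += Math.log(aX[i]) - Math.log(aY[i]);  // log(aX[i] / aY[i])
}
double slope = Math.exp(sum / aX.length);
System.out.println("slope = " + slope);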
From source file: com.hmsinc.epicenter.spatial.analysis.BayesianSpatialScanStatistic.java

/**
 * Takes a series of log-likelihood values and adds the underlying likelihoods
 * in log space, i.e. given L1..Ln it returns log(exp(L1) + ... + exp(Ln)),
 * factoring out L1 to avoid overflow.
 */
private double addLogs(double... L) {
    double L1 = L[0];
    // Sum exp(L[i] - L1) over all values; the i == 0 term contributes exp(0) = 1.
    double residualSum = 0.0;
    for (int i = 0; i < L.length; i++) {
        if (!Double.isNaN(Math.exp(L[i] - L1))) {
            residualSum += Math.exp(L[i] - L1);
        }
    }
    // Calculate the log sum
    double logSum = L1 + Math.log(residualSum);
    logger.trace("logSum = {} + log(1 + {}) = {}", new Object[] { L1, residualSum, logSum });
    return logSum;
}
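The same pattern, usually called log-sum-exp, is more robust when the largest value is factored out rather than whichever value happens to be first. A minimal standalone sketch:

// Numerically stable log(exp(a[0]) + ... + exp(a[n-1])), factoring out the maximum.
static double logSumExp(double... a) {
    double max = Double.NEGATIVE_INFINITY;
    for (double v : a) {
        max = Math.max(max, v);
    }
    if (Double.isInfinite(max)) {
        return max;  // all inputs are -Infinity, or one is +Infinity
    }
    double sum = 0.0;
    for (double v : a) {
        sum += Math.exp(v - max);  // every term is <= 1, so nothing overflows
    }
    return max + Math.log(sum);
}

// logSumExp(-1000.0, -1001.0) ~= -999.69, where summing the raw exponentials would underflow to 0.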
From source file: com.analog.lyric.dimple.factorfunctions.Poisson.java

@Override
public final double evalEnergy(Value[] arguments) {
    int index = 0;

    // First argument of the factor: lambda
    if (!_lambdaParameterConstant) {
        _lambda = arguments[index++].getDouble();
        if (_lambda < 0)
            return Double.POSITIVE_INFINITY;
        _logLambda = Math.log(_lambda);
    }

    // Second argument of the factor: k
    final int k = arguments[index++].getInt();
    final double negativeLogFactorialK = -org.apache.commons.math3.special.Gamma.logGamma(k + 1);

    if (_lambda > 0)
        return -(-_lambda + k * _logLambda + negativeLogFactorialK);
    else if (_lambda == 0 && k != 0)
        return Double.POSITIVE_INFINITY;
    else if (_lambda == 0 && k == 0)
        return 0;
    return Double.POSITIVE_INFINITY;
}
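The returned energy is the negative Poisson log-probability, -(k*ln(lambda) - lambda - ln(k!)), with ln(k!) obtained as logGamma(k + 1). A small self-contained sketch of the same log-pmf without the factor-graph plumbing (helper name and handling chosen for this sketch):

// Poisson log-probability: log P(k; lambda) = k*log(lambda) - lambda - log(k!).
// log(k!) is accumulated with Math.log so the sketch needs no external library.
static double poissonLogPmf(int k, double lambda) {
    if (lambda < 0 || k < 0) {
        return Double.NaN;
    }
    if (lambda == 0.0) {
        return k == 0 ? 0.0 : Double.NEGATIVE_INFINITY;  // P(0; 0) = 1
    }
    double logFactorialK = 0.0;
    for (int i = 2; i <= k; i++) {
        logFactorialK += Math.log(i);
    }
    return k * Math.log(lambda) - lambda - logFactorialK;
}

// Example: poissonLogPmf(3, 2.0) == 3*log(2) - 2 - log(6) ~= -1.7123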
From source file: com.aelitis.azureus.core.metasearch.Result.java

/**
 * @return a value between 0 and 1 representing the rank of the result
 */
public float getRank() {
    int seeds = getNbSeeds();
    int peers = getNbPeers();
    if (seeds < 0) {
        seeds = 0;
    }
    if (peers < 0) {
        peers = 0;
    }
    int totalVirtualPeers = 3 * seeds + peers + 2;
    int superSeeds = getNbSuperSeeds();
    if (superSeeds > 0) {
        totalVirtualPeers += 50 * superSeeds;
    }
    int votes = getVotes();
    if (votes > 0) {
        if (votes > 50) {
            votes = 50;
        }
        totalVirtualPeers += 5 * votes;
    }
    int votesDown = getVotesDown();
    if (votesDown > 0) {
        totalVirtualPeers -= 200 * votesDown;
    }
    if (totalVirtualPeers < 2)
        totalVirtualPeers = 2;
    // Math.log(x) / Math.log(10) is the base-10 logarithm of x
    float rank = (float) (Math.log(totalVirtualPeers) / Math.log(10)) / 5f;
    if (rank > 2f)
        rank = 2f;
    if (isPrivate()) {
        rank /= 2;
    }
    String queryString = getSearchQuery();
    String name = getName();
    if (queryString != null && name != null) {
        name = name.toLowerCase(Locale.ENGLISH);
        String token = "";
        List<String> tokens = new ArrayList<String>();
        char[] chars = queryString.toCharArray();
        for (char c : chars) {
            if (Character.isLetterOrDigit(c)) {
                token += String.valueOf(c).toLowerCase(Locale.ENGLISH);
            } else {
                if (token.length() > 0) {
                    tokens.add(token);
                    token = "";
                }
            }
        }
        if (token.length() > 0) {
            tokens.add(token);
        }
        for (String s : tokens) {
            if (!name.contains(s)) {
                rank /= 2;
            }
        }
    }
    rank = applyRankBias(rank);
    return rank;
}
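Math.log returns the natural logarithm, so the division by Math.log(10) above is a change of base; java.lang.Math also offers Math.log10 directly. A quick check with an arbitrary value:

// Change of base: log10(x) == ln(x) / ln(10).
double totalVirtualPeers = 1000.0;
double viaChangeOfBase = Math.log(totalVirtualPeers) / Math.log(10);  // 3.0 (within rounding)
double viaLog10 = Math.log10(totalVirtualPeers);                      // 3.0
System.out.println(viaChangeOfBase + " " + viaLog10);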
From source file: dkpro.similarity.algorithms.wikipedia.measures.LeacockChodorowComparator.java

/**
 * Implements the distance measure by Leacock and Chodorow (1998):
 * lch(c1,c2) = - log(minPathLength(c1,c2) / (2 * depth of the hierarchy))
 *            = log(2 * depth / minPathLength(c1,c2))
 *
 * minPathLength is measured in nodes, i.e. the distance of a node to itself is 0,
 * which would cause a logarithm error (or a division by zero). We therefore changed
 * the behaviour to return a distance of 1 if the nodes are equal or neighbors.
 *
 * @param page1 The first page.
 * @param page2 The second page.
 * @return A list with the Leacock-Chodorow relatedness values between all categories of the given pages.
 */
@Override
protected List<Double> computeRelatedness(Page page1, Page page2) throws WikiApiException {
    List<Double> relatednessValues = new ArrayList<Double>();
    Set<Category> categories1 = relatednessUtilities.getCategories(page1);
    Set<Category> categories2 = relatednessUtilities.getCategories(page2);
    if (categories1 == null || categories2 == null) {
        return null;
    }
    double depthOfHierarchy = catGraph.getDepth();
    // if the depth of the hierarchy is 0, we cannot compute a relatedness value
    if (depthOfHierarchy == 0) {
        logger.info("The depth of the hierarchy is 0. Cannot compute LeacockChodorow relatedness.");
        return null;
    }
    for (Category cat1 : categories1) {
        for (Category cat2 : categories2) {
            int pathLength = catGraph.getTaxonomicallyBoundPathLengthInEdges(cat1, cat2);
            // a negative path length shows that there is no path
            if (pathLength < 0) {
                continue;
            }
            // add one to the path length, as a value of zero would cause a division by zero
            double relatedness = Math.log((2 * depthOfHierarchy) / (pathLength + 1));
            // if (strategy.equals(CombinationStrategy.SelectivityLinear) || strategy.equals(CombinationStrategy.SelectivityLog)) {
            //     relatedness = applySelectivity(relatedness, getSelectivity(catID1, strategy), getSelectivity(catID2, strategy));
            // }
            relatednessValues.add(relatedness);
        }
    }
    return relatednessValues;
}
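Plugging numbers in: with a hierarchy depth of 16 and a path length of 1 edge, the formula gives log(2*16 / (1+1)) = log(16) ~= 2.77. A tiny sketch of just the scoring step, with invented values and none of the JWPL category-graph types:

// Leacock-Chodorow score as used above: log(2 * depth / (pathLength + 1)).
double depthOfHierarchy = 16.0;       // example value, not from any real category graph
int[] samplePathLengths = {0, 1, 4, 16};
for (int pathLength : samplePathLengths) {
    double relatedness = Math.log((2 * depthOfHierarchy) / (pathLength + 1));
    System.out.printf("pathLength=%d -> lch=%.4f%n", pathLength, relatedness);
}
// Shorter paths give larger scores; pathLength 0 (same category) gives log(32) ~= 3.47.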
From source file: beast.math.distributions.GammaDistribution.java

/**
 * the natural log of the probability density function of the distribution
 *
 * @param x     argument
 * @param shape shape parameter
 * @param scale scale parameter
 * @return log pdf value
 */
public static double logPdf(double x, double shape, double scale) {
    // double a = Math.pow(scale,-shape) * Math.pow(x, shape-1.0);
    // double b = x/scale + GammaFunction.lnGamma(shape);
    // return Math.log(a) - b;

    // AR - changed this to return -ve inf instead of throwing an exception...
    // This makes things much easier when using this to calculate log likelihoods.
    // if (x < 0) throw new IllegalArgumentException();
    if (x < 0)
        return Double.NEGATIVE_INFINITY;

    if (x == 0) {
        if (shape == 1.0)
            return Math.log(1.0 / scale);
        else
            return Double.NEGATIVE_INFINITY;
    }
    if (shape == 1.0) {
        return (-x / scale) - Math.log(scale);
    }
    if (shape == 0.0) // uninformative
        return -Math.log(x);

    /*return ((shape - 1.0) * Math.log(x/scale) - x / scale - GammaFunction
            .lnGamma(shape)) - Math.log(scale);*/

    return ((shape - 1.0) * (Math.log(x) - Math.log(scale)) - x / scale - GammaFunction.lnGamma(shape))
            - Math.log(scale);
}
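For shape = 1 the gamma density reduces to the exponential density (1/scale) * exp(-x/scale), so the shortcut branch above is just its logarithm. A small check of that special case, with illustrative values:

// Exponential special case of the gamma log-density: log f(x) = -x/scale - log(scale).
double x = 2.0, scale = 0.5;
double logPdf = (-x / scale) - Math.log(scale);       // -4 - log(0.5) ~= -3.3069
double pdf = (1.0 / scale) * Math.exp(-x / scale);    // density evaluated directly
System.out.println(logPdf + " vs " + Math.log(pdf));  // the two agree up to rounding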
From source file: edu.umd.cfar.lamp.chronicle.ChronicleRuler.java

/**
 * Gets how far from the top of the timeline the line starts drawing.
 * This is more useful than the ruler line length, somehow.
 *
 * @param atMost
 * @param myWidth
 * @return
 */
private int getRuleOffset(int atMost, int myWidth) {
    int off = rulerHeight - minRuleHeight;
    double maxFractionHeight = 1;
    double heightRange = rulerHeight * maxFractionHeight; // max offset
    heightRange = Math.max(0, heightRange);
    // Math.log(a) / Math.log(b) is the base-b logarithm of a
    double fraction = Math.log(widths[myWidth]) / Math.log(widths[atMost]);
    off -= (int) (fraction * heightRange);
    return Math.max(0, off);
}