List of usage examples for java.lang.Math.exp
@HotSpotIntrinsicCandidate public static double exp(double a)
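Math.exp(a) returns Euler's number e raised to the power of a; it is the inverse of Math.log, and the special cases follow the usual IEEE rules (exp(NaN) is NaN, exp(+Infinity) is +Infinity, exp(-Infinity) is 0.0). Before the real-world examples below, a minimal sketch of the call, a few lines one could drop into a main method (the input values are arbitrary):

double x = 2.0;
double ex = Math.exp(x);                 // e^2, about 7.389056
double roundTrip = Math.log(ex);         // about 2.0, since log inverts exp
double decayed = 100.0 * Math.exp(-0.5); // exponential decay, about 60.653
System.out.println(ex + " " + roundTrip + " " + decayed);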
From source file:edu.uci.ics.jung.algorithms.layout.ISOMLayout.java
private synchronized void updateParameters() {
    epoch++;
    double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch));
    adaption = Math.max(minAdaption, factor * initialAdaption);
    //jumpRadius = (int) factor * jumpRadius;
    //temperature = factor * temperature;
    if ((radius > minRadius) && (epoch % radiusConstantTime == 0)) {
        radius--;
    }
}
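Here the adaptation strength decays exponentially with training progress: factor = exp(-coolingFactor * epoch / maxEpoch) starts near 1 and shrinks toward exp(-coolingFactor). A tiny illustration of that schedule with made-up constants (the values below are not JUNG defaults, just numbers chosen for the printout):

double coolingFactor = 2.0; // illustrative only
int maxEpoch = 2000;
for (int epoch : new int[] { 0, 500, 1000, 1500, 2000 }) {
    double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch));
    System.out.printf("epoch=%4d  factor=%.3f%n", epoch, factor);
    // prints 1.000, 0.607, 0.368, 0.223, 0.135
}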
From source file:geogebra.util.MyMath.java
/**
 * Factorial function of x. If x is an integer value x! is returned,
 * otherwise gamma(x + 1) will be returned. For x < 0 Double.NaN is
 * returned.
 *
 * @param x
 * @return factorial
 */
final public static double factorial(double x) {

    if (x < 0)
        return Double.NaN; // bugfix Michael Borcherds 2008-05-04

    // big x or floating point x is computed using gamma function
    if (x < 0 || x > 32 || x - Math.floor(x) > 1E-10)
        // exp of log(gamma(x+1))
        return Math.exp(Gamma.logGamma(x + 1.0));

    int n = (int) x;
    int j;
    while (factorialTop < n) {
        j = factorialTop++;
        factorialTable[factorialTop] = factorialTable[j] * factorialTop;
    }
    return factorialTable[n];
}
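The key identity is x! = Gamma(x + 1), evaluated in log space to avoid overflow and mapped back with Math.exp. A minimal standalone sketch of the same idea, assuming the Gamma helper is Apache Commons Math's org.apache.commons.math3.special.Gamma (an assumption; the GeoGebra source may use a different Gamma implementation):

import org.apache.commons.math3.special.Gamma;

public class GammaFactorialSketch {
    // Illustrative only: x! = exp(logGamma(x + 1)) for x >= 0
    static double factorialViaGamma(double x) {
        return Math.exp(Gamma.logGamma(x + 1.0));
    }

    public static void main(String[] args) {
        System.out.println(factorialViaGamma(5));   // ~120.0
        System.out.println(factorialViaGamma(0.5)); // ~0.8862, i.e. Gamma(1.5) = sqrt(pi)/2
    }
}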
From source file:br.prof.salesfilho.oci.util.OCIUtils.java
/**
 * @param signal input signal
 * @param kernel kernel size
 * @precond signal != null and kernel size greater than 0 and less than 10
 * @return AutoCorrentropy array
 */
public static double[] computeAutoCorrentropy(double[] signal, double kernel) {
    double twokSizeSquare = 2 * Math.pow(kernel, 2d);
    int signal_length = signal.length;
    double[] Y = new double[signal_length];
    double b = 1 / kernel * Math.sqrt(2 * Math.PI);
    int N = signal_length;
    for (int m = 0; m < signal_length; m++) {
        for (int n = m + 1; n < signal_length; n++) {
            double pow = Math.pow((signal[n] - signal[n - m - 1]), 2);
            double exp = Math.exp(-pow / twokSizeSquare);
            double equation = (1d / (N - m + 1d)) * b * exp;
            Y[m] = Y[m] + equation;
        }
    }
    return Y;
}
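Each term of the inner sum is a Gaussian kernel evaluated on the difference of two samples m + 1 positions apart, Math.exp(-(signal[n] - signal[n - m - 1])^2 / (2 * kernel^2)), so large entries in the result indicate that the signal closely matches a shifted copy of itself at that lag. A minimal usage sketch with a synthetic sine wave (the signal and kernel width are illustrative, not values used by the OCI project; the lines can sit in any main method alongside the class above):

double[] signal = new double[256];
for (int i = 0; i < signal.length; i++) {
    signal[i] = Math.sin(2 * Math.PI * i / 32.0); // period of 32 samples
}
double kernelSize = 0.5; // Gaussian kernel width, chosen arbitrarily for the example
double[] correntropy = OCIUtils.computeAutoCorrentropy(signal, kernelSize);
System.out.println("lag 0: " + correntropy[0] + ", lag 31: " + correntropy[31]);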
From source file:br.prof.salesfilho.oci.service.ImageDescriptorService.java
/**
 * @param image input image (signal)
 * @param channel RGB or grayscale channel (1 = RED, 2 = GREEN, 3 = BLUE, 4 = GRAYSCALE;
 *                any other value falls back to GRAYSCALE)
 * @param kernel kernel size
 * @return AutoCorrentropy array
 */
public double[] autoCorrentropy(BufferedImage image, int channel, double kernel) {
    // Vectorization and normalization
    double[] signal = OCIUtils.vetorizeWithSpatialEntropySequence(this.getColorMatrix(image, channel));
    double twokSizeSquare = 2 * Math.pow(kernel, 2d);
    int signal_length = signal.length;
    double[] autoCorrentropy = new double[signal_length];
    double b = 1 / kernel * Math.sqrt(2 * Math.PI);
    int N = signal_length;
    for (int m = 0; m < signal_length; m++) {
        for (int n = m + 1; n < signal_length; n++) {
            double pow = Math.pow((signal[n] - signal[n - m - 1]), 2);
            double exp = Math.exp(-pow / twokSizeSquare);
            double equation = (1d / (N - m + 1d)) * b * exp;
            autoCorrentropy[m] = autoCorrentropy[m] + equation;
        }
    }
    return autoCorrentropy;
}
From source file:com.mapr.synth.samplers.VectorSamplerTest.java
@Test
public void testVector() throws IOException {
    SchemaSampler s = new SchemaSampler(
            Resources.asCharSource(Resources.getResource("schema029.json"), Charsets.UTF_8).read());
    for (int i = 0; i < 10; i++) {
        JsonNode data = s.sample();
        /*
        {
            "class": "vector",
            "name": "prices",
            "mean": 4.65,
            "sd": 0.01,
            "length": 10000,
            "transform": "exp",
            "seed": 1,
        },
        */
        JsonNode v = data.get("prices");
        assertTrue(v.isArray());
        assertEquals(10000, v.size());
        double[] v1 = new double[10000];
        double[] v2 = new double[10000];
        for (int j = 0; j < 10000; j++) {
            v1[j] = v.get(j).asDouble();
            v2[j] = Math.log(v1[j]);
        }
        assertEquals(100, median(v1), 0.03);
        assertEquals(100, mean(v1), 0.05);
        assertEquals(Math.log(100), mean(v2), 0.001);
        assertEquals(0.01, sd(v2), 0.0003);
        assertTrue(isNormal(v2, Math.log(100), 0.01));

        /*
        {
            "class": "vector",
            "name": "zero",
            "mean": 0,
            "sd": 10,
            "length": 10000,
            "seed": 2
        },
        */
        v = data.get("zero");
        assertTrue(v.isArray());
        for (int j = 0; j < 10000; j++) {
            v1[j] = v.get(j).asDouble();
        }
        assertEquals(0, mean(v1), 0.3);
        assertEquals(10, sd(v1), 0.2);
        assertTrue(isNormal(v1, 0, 10));

        /*
        {
            "class": "vector",
            "name": "clipped",
            "mean": 0,
            "sd": 10,
            "length": 10000,
            "max": 0,
            "seed": 3
        },
        */
        v = data.get("clipped");
        assertTrue(v.isArray());
        Random rand = new Random();
        for (int j = 0; j < 10000; j++) {
            v1[j] = v.get(j).asDouble();
            assertTrue(v1[j] <= 0);
            v1[j] = v1[j] * (rand.nextBoolean() ? 1 : -1);
        }
        assertEquals(0, mean(v1), 0.3);
        assertEquals(10, sd(v1), 0.3);
        assertTrue(isNormal(v1, 0, 10));

        /*
        {
            "class": "vector",
            "name": "ten",
            "min": 1,
            "max": 10,
            "length": 20000,
            "transform": "log",
            "seed": 4
        }
        ]
        */
        v = data.get("ten");
        assertTrue(v.isArray());
        for (int j = 0; j < 10000; j++) {
            v1[j] = v.get(j).asDouble();
            v2[j] = Math.exp(v1[j]);
            assertTrue(v1[j] >= 1);
            assertTrue(v1[j] <= 10);
        }
        assertTrue(isUniform(v2, Math.exp(1), Math.exp(10)));

        v = data.get("coarse");
        assertTrue(v.isArray());
        for (int j = 0; j < 10000; j++) {
            double x = v.get(j).asDouble();
            assertTrue(x >= 1);
            assertTrue(x <= 10);
            assertEquals(Math.rint(x / 0.1) * 0.1, x, 1e-10);
        }
    }
}
From source file:com.graphhopper.jsprit.core.algorithm.acceptor.ExperimentalSchrimpfAcceptance.java
private double getThreshold(int iteration) {
    double scheduleVariable = (double) iteration / (double) nOfTotalIterations;
    // logger.debug("iter={} totalIter={} scheduling={}", iteration, nOfTotalIterations, scheduleVariable);
    double currentThreshold = initialThreshold * Math.exp(-Math.log(2) * scheduleVariable / alpha);
    return currentThreshold;
}
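Because exp(-ln 2 * s / alpha) equals 2^(-s / alpha), this acceptance threshold halves every time the normalized progress s = iteration / nOfTotalIterations grows by alpha. A minimal sketch of the same schedule with illustrative constants (initialThreshold, alpha, and the iteration count below are arbitrary, not values from the jsprit source):

double initialThreshold = 100.0;
double alpha = 0.25;
int nOfTotalIterations = 1000;
for (int iteration = 0; iteration <= nOfTotalIterations; iteration += 250) {
    double s = (double) iteration / (double) nOfTotalIterations;
    double threshold = initialThreshold * Math.exp(-Math.log(2) * s / alpha);
    System.out.printf("iter=%4d  threshold=%.3f%n", iteration, threshold);
    // prints 100.000, 50.000, 25.000, 12.500, 6.250
}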
From source file:com.hmsinc.epicenter.spatial.analysis.BayesianSpatialScanStatistic.java
/**
 * Adds a series of log-likelihood values in log space,
 * e.g. given L1 = log(p1) and L2 = log(p2) it returns log(p1 + p2).
 */
private double addLogs(double... L) {
    double L1 = L[0];
    // calculate residual sum: sum of exp(L[i] - L1) over all values (the i = 0 term contributes 1)
    double residualSum = 0.0;
    for (int i = 0; i < L.length; i++) {
        if (!Double.isNaN(Math.exp(L[i] - L1))) {
            residualSum += Math.exp(L[i] - L1);
        }
    }
    // Calculate the log sum
    double logSum = L1 + Math.log(residualSum);
    logger.trace("logSum = {} + log({}) = {}", new Object[] { L1, residualSum, logSum });
    return logSum;
}
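This is the standard log-sum-exp trick: factoring one term out of the sum keeps the exponentials near 1, so very negative log likelihoods do not underflow to zero before being added. A hypothetical standalone variant of the same idea, which factors out the maximum rather than the first element (a slightly more robust choice, not what the EpiCenter source does):

public class LogSumExpSketch {
    // Illustrative log-space addition: returns log(sum of exp(logs[i]))
    static double addLogs(double... logs) {
        double max = Double.NEGATIVE_INFINITY;
        for (double l : logs) {
            max = Math.max(max, l); // factor out the largest term
        }
        double sum = 0.0;
        for (double l : logs) {
            sum += Math.exp(l - max);
        }
        return max + Math.log(sum);
    }

    public static void main(String[] args) {
        // log(0.001 + 0.002) recovered from the individual logs
        double result = addLogs(Math.log(0.001), Math.log(0.002));
        System.out.println(Math.exp(result)); // ~0.003
    }
}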
From source file:com.opengamma.analytics.financial.model.option.pricing.analytic.twoasset.TwoAssetCorrelationOptionModel.java
/**
 * Gets the pricing function for a European-style two-asset correlation option
 *
 * @param definition The option definition
 * @return The pricing function
 * @throws IllegalArgumentException If the definition is null
 */
@Override
public Function1D<StandardTwoAssetOptionDataBundle, Double> getPricingFunction(
        final TwoAssetCorrelationOptionDefinition definition) {
    Validate.notNull(definition, "definition");
    return new Function1D<StandardTwoAssetOptionDataBundle, Double>() {

        @SuppressWarnings("synthetic-access")
        @Override
        public Double evaluate(final StandardTwoAssetOptionDataBundle data) {
            Validate.notNull(data, "data");
            final double s1 = data.getFirstSpot();
            final double s2 = data.getSecondSpot();
            final double k = definition.getStrike();
            final double payout = definition.getPayoutLevel();
            final double b1 = data.getFirstCostOfCarry();
            final double b2 = data.getSecondCostOfCarry();
            final double t = definition.getTimeToExpiry(data.getDate());
            final double r = data.getInterestRate(t);
            final double sigma1 = data.getFirstVolatility(t, k);
            final double sigma2 = data.getSecondVolatility(t, k);
            final double rho = data.getCorrelation();
            final double tSqrt = Math.sqrt(t);
            final double sigmaT1 = sigma1 * tSqrt;
            final double sigmaT2 = sigma2 * tSqrt;
            final double d1 = (Math.log(s1 / k) + t * (b1 - sigma1 * sigma1 / 2)) / sigmaT1;
            final double d2 = (Math.log(s2 / payout) + t * (b2 - sigma2 * sigma2 / 2)) / sigmaT2;
            final double df1 = Math.exp(t * (b2 - r));
            final double df2 = Math.exp(-r * t);
            final int sign = definition.isCall() ? 1 : -1;
            return sign * (s2 * df1
                    * BIVARIATE.getCDF(new double[] { sign * (d2 + sigmaT2), sign * (d1 + rho * sigmaT2), rho })
                    - payout * df2 * BIVARIATE.getCDF(new double[] { sign * d2, sign * d1, rho }));
        }
    };
}
From source file:com.opengamma.analytics.math.interpolation.ExponentialExtrapolator1D.java
private double[] getLeftSensitivities(final Interpolator1DDataBundle data, final double value) {
    Validate.notNull(data, "data");
    Validate.notNull(value, "value");
    final double x = data.firstKey();
    final double y = data.firstValue();
    final double m = Math.log(y) / x;
    final double ex = Math.exp(m * value);
    final double[] result = new double[data.size()];
    result[0] = ex * value / (x * y);
    return result;
}
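The left extrapolation here is y(v) = exp(m * v) with m = log(y0) / x0, i.e. y(v) = y0^(v / x0), which passes through (0, 1) and the first node (x0, y0); differentiating with respect to y0 gives exp(m * v) * v / (x0 * y0), exactly the value stored in result[0], so the array holds the sensitivity of the extrapolated value to the first node. A small sketch checking that formula against a central finite difference (the node and query values are arbitrary, a few lines to drop into a main method):

double x0 = 2.0, y0 = 5.0, v = 1.0; // illustrative first node and query point
double m = Math.log(y0) / x0;
double analytic = Math.exp(m * v) * v / (x0 * y0);

double eps = 1e-6;
double up = Math.exp(Math.log(y0 + eps) / x0 * v);
double down = Math.exp(Math.log(y0 - eps) / x0 * v);
double numeric = (up - down) / (2 * eps);

System.out.println(analytic + " ~ " + numeric); // both ~0.2236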
From source file:bachelorthesis.methods.detection.bayesian.BayesianDetection.java
private double[][] offlineCpd(Value[] data) {
    int n = data.length;
    double[] Q = new double[n];
    double[] g = new double[n];
    double[] G = new double[n];
    double[][] P = new double[n][n];

    Arrays.fill(g, Math.log(1.d / (data.length + 1)));
    G[0] = g[0];
    for (int i = 1; i < G.length; i++) {
        G[i] = Math.log((Math.exp(G[i - 1]) + Math.exp(g[i])));
    }

    for (double[] array : P) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }
    P[n - 1][n - 1] = gaussianObsLogLikelihood(data, n - 1, n);
    Q[n - 1] = P[n - 1][n - 1];

    for (int t = n - 2; t >= 0; t--) {
        double p_next_cp = Double.NEGATIVE_INFINITY;
        for (int s = t; s < n - 1; s++) {
            P[t][s] = gaussianObsLogLikelihood(data, t, s + 1);
            double summand = P[t][s] + Q[s + 1] + g[s + 1 - t];
            p_next_cp = Math.log((Math.exp(p_next_cp) + Math.exp(summand)));
            if (summand - p_next_cp < BAYESIAN_TRUNCATE) {
                break;
            }
        }
        P[t][n - 1] = gaussianObsLogLikelihood(data, t, n);
        double antiG;
        if (G[n - 1 - t] < -1e-15) {
            antiG = Math.log(1.d - Math.exp(G[n - 1 - t]));
        } else {
            antiG = Math.log(-G[n - 1 - t]);
        }
        Q[t] = Math.log((Math.exp(p_next_cp) + Math.exp(P[t][n - 1] + antiG)));
    }

    double[][] Pcp = new double[n - 1][n - 1];
    for (double[] array : Pcp) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }
    for (int t = 0; t < n - 1; t++) {
        Pcp[0][t] = P[0][t] + Q[t + 1] + g[t] - Q[0];
        if (Double.isNaN(Pcp[0][t])) {
            Pcp[0][t] = Double.NEGATIVE_INFINITY;
        }
    }
    for (int j = 1; j < n - 1; j++) {
        for (int t = j; t < n - 1; t++) {
            double[] tmp_cond = copyOfRange(Pcp[j - 1], j - 1, t);
            tmp_cond = add(tmp_cond, getSameEntryOfAllArrays(copyOfRange(P, j, t + 1), t));
            double summand = Q[t + 1];
            tmp_cond = forEach(tmp_cond, value -> value + summand);
            tmp_cond = add(tmp_cond, copyOfRange(g, 0, t - j + 1));
            double[] negativePart = forEach(copyOfRange(Q, j, t + 1), value -> -value);
            tmp_cond = add(tmp_cond, negativePart);
            double[] tempArray = forEach(tmp_cond, value -> Math.exp(value));
            Pcp[j][t] = Math.log(sum(tempArray));
            if (Double.isNaN(Pcp[j][t])) {
                Pcp[j][t] = Double.NEGATIVE_INFINITY;
            }
        }
    }
    return Pcp;
}