List of usage examples for java.lang.Math.log
@HotSpotIntrinsicCandidate public static double log(double a)
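Math.log(a) returns the natural logarithm (base e) of a double value: NaN for negative or NaN arguments, negative infinity for zero, and positive infinity for positive infinity. A minimal stand-alone sketch of the basic behavior and of the change-of-base idiom that appears in several examples below (class and variable names are illustrative only):

public class MathLogBasics {
    public static void main(String[] args) {
        System.out.println(Math.log(Math.E));  // 1.0
        System.out.println(Math.log(1.0));     // 0.0
        System.out.println(Math.log(0.0));     // -Infinity
        System.out.println(Math.log(-1.0));    // NaN

        // Change of base: log2(x) = ln(x) / ln(2)
        double log2Of8 = Math.log(8.0) / Math.log(2.0);
        System.out.println(log2Of8);           // 3.0 (up to floating-point rounding)
    }
}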
From source file:com.opengamma.analytics.math.statistics.distribution.NonCentralChiSquaredDistributionTest.java
/**
 * Numbers computed from R
 */
@Test
public void debugTest() {
    final double dof = 3.666;
    final double nonCentrality = 75;
    final double x = 13.89;
    final NonCentralChiSquaredDistribution chiSq1 = new NonCentralChiSquaredDistribution(dof, nonCentrality);
    final double y1 = Math.log(chiSq1.getCDF(x));
    assertEquals(-15.92129, y1, 1e-5);
}
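For reference, the expected value -15.92129 is the natural log of the non-central chi-squared CDF at x = 13.89 with 3.666 degrees of freedom and non-centrality 75; in R this would typically be obtained with something like pchisq(13.89, df = 3.666, ncp = 75, log.p = TRUE) (the Javadoc only states that the numbers come from R, not which call was used).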
From source file:com.skratchdot.electribe.model.esx.impl.SampleTuneImpl.java
/**
 * <!-- begin-user-doc -->
 * Calculates the SampleTune value from the given sampleRate (using a
 * baseSamplingRate of 44100).
 * <p>Formula used: SampleTune = 12 * log(sampleRate / 44100) / log(2)</p>
 * <!-- end-user-doc -->
 * @generated NOT
 */
public float calculateSampleTuneFromSampleRate(int sampleRate) {
    float x = ((float) sampleRate) / 44100;
    float y = (float) Math.log(x);
    float z = (float) (y / Math.log(2));
    BigDecimal bd = new BigDecimal(12 * z).setScale(2, RoundingMode.HALF_EVEN);
    return bd.floatValue();
}
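As a quick sanity check of the formula above (hypothetical inputs, not taken from the source): a sampleRate of 22050 gives 12 * log(0.5) / log(2) = -12.00 (one octave down from 44100), a sampleRate of 88200 gives +12.00 (one octave up), and 44100 itself gives 0.00.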
From source file:aliview.primer.OligoCalc.java
public static double getBaseStackingTM(String sequence, double conc_primer, double conc_salt, double conc_mg) {
    sequence = sequence.toUpperCase();
    // TODO: check only valid bases
    // TODO: check len > 0

    // Effect on entropy by salt correction; von Ahsen et al 1999
    // Increase of stability due to presence of Mg
    double salt_effect = conc_salt / 1000 + conc_mg / 1000 * 140;
    double h = 0;
    double s = 0;

    // Effect on entropy
    s = 0.368 * (sequence.length() - 1) * Math.log(salt_effect);

    // Terminal corrections, SantaLucia 1998
    char firstnucleotide = sequence.charAt(0);
    if (firstnucleotide == 'G' || firstnucleotide == 'C') {
        h += 0.1;
        s += -2.8;
    }
    if (firstnucleotide == 'A' || firstnucleotide == 'T') {
        h += 2.3;
        s += 4.1;
    }
    char lastnucleotide = sequence.charAt(sequence.length() - 1);
    if (lastnucleotide == 'G' || lastnucleotide == 'C') {
        h += 0.1;
        s += -2.8;
    }
    if (lastnucleotide == 'A' || lastnucleotide == 'T') {
        h += 2.3;
        s += 4.1;
    }

    // Compute new h and s based on sequence, SantaLucia 1998
    for (int i = 0; i < sequence.length() - 1; i++) {
        String subSeq = sequence.substring(i, i + 2);
        h += getEnthalpy(subSeq);
        s += getEntropy(subSeq);
    }

    double tm = ((1000 * h) / (s + (1.987 * Math.log(conc_primer / 2000000000)))) - 273.15;
    //print "Tm: <font color=880000><b>".round($tm,1)." °C</b></font>";
    //print "\n<font color=008800> Enthalpy: ".round($h,2)."\n Entropy: ".round($s,2)."</font>";
    return tm;
}
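For reference, the final expression implements the standard nearest-neighbor melting-temperature formula Tm = 1000*dH / (dS + R*ln(C_primer / 2e9)) - 273.15 with R = 1.987 cal/(K*mol), with the entropy salt correction 0.368*(N-1)*ln(salt_effect) applied earlier; Math.log supplies the natural logarithms in both terms.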
From source file:com.itemanalysis.psychometrics.polycor.PolyserialLogLikelihoodTwoStep.java
public double value(double x) {
    double z = 0.0;
    double prbZ = 0.0;
    double loglik = 0.0;
    double tauStar = 0.0;
    double tauStarM1 = 0.0;
    double dif = 0.0;
    for (int i = 0; i < dataX.length; i++) {
        // Standardize the observed continuous value (z-score)
        z = (dataX[i] - meanX.getResult()) / sdX.getResult();
        prbZ = normal.density(z);
        // Upper threshold of the observed category, adjusted for the correlation x
        tauStar = (alpha[dataY[i] - 1] - x * z) / Math.sqrt(1 - x * x);
        if (dataY[i] > 1) {
            // Lower threshold is the previous category boundary
            tauStarM1 = (alpha[dataY[i] - 2] - x * z) / Math.sqrt(1 - x * x);
        } else {
            tauStarM1 = -10; // some large number greater than negative infinity
        }
        dif = normal.cumulativeProbability(tauStar) - normal.cumulativeProbability(tauStarM1);
        loglik += Math.log(prbZ * dif);
    }
    return -loglik;
}
From source file:com.analog.lyric.dimple.factorfunctions.Binomial.java
@Override
public final double evalEnergy(Value[] arguments) {
    int index = 0;
    if (!_NParameterConstant) {
        _N = arguments[index++].getInt(); // First argument is N parameter
        if (_N < 0)
            return Double.POSITIVE_INFINITY;
        _negativeLogFactorialN = -org.apache.commons.math3.special.Gamma.logGamma(_N + 1);
    }

    final double p = arguments[index++].getDouble(); // Next argument is the probability parameter
    if (p < 0 || p > 1)
        return Double.POSITIVE_INFINITY;

    final int numOnes = arguments[index++].getInt(); // Next argument is the one-count
    if (numOnes < 0 || numOnes > _N)
        return Double.POSITIVE_INFINITY;
    int numZeros = _N - numOnes;

    if (p == 0)
        if (numOnes > 0)
            return Double.POSITIVE_INFINITY;
        else
            return 0;
    else if (p == 1)
        if (numZeros > 0)
            return Double.POSITIVE_INFINITY;
        else
            return 0;
    else
        return -(numOnes * Math.log(p) + numZeros * Math.log(1 - p)) + _negativeLogFactorialN
                + org.apache.commons.math3.special.Gamma.logGamma(numOnes + 1)
                + org.apache.commons.math3.special.Gamma.logGamma(numZeros + 1);
}
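The value returned in the last branch is the negative log of the binomial probability mass function, -(log C(N, k) + k*log(p) + (N - k)*log(1 - p)) with k = numOnes, where the binomial coefficient is expressed through the logGamma terms logGamma(N + 1) - logGamma(k + 1) - logGamma(N - k + 1).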
From source file:com.joptimizer.optimizers.NewtonLEConstrainedFSPTest.java
/**
 * Minimize x - Log[-x^2 + 1],
 * dom f = {x | x^2 < 1}
 * N.B.: this simulates a centering step of the barrier method
 * applied to the problem:
 *   Minimize x
 *   s.t. x^2 < 1
 * when t = 1.
 */
public void testOptimize2() throws Exception {
    log.debug("testOptimize2");
    // START SNIPPET: NewtonLEConstrainedFSP-1

    // Objective function
    ConvexMultivariateRealFunction objectiveFunction = new ConvexMultivariateRealFunction() {
        public double value(double[] X) {
            double x = X[0];
            return x - Math.log(1 - x * x);
        }

        public double[] gradient(double[] X) {
            double x = X[0];
            return new double[] { 1 + 2 * x / (1 - x * x) };
        }

        public double[][] hessian(double[] X) {
            double x = X[0];
            return new double[][] {
                    { 4 * Math.pow(x, 2) / Math.pow(1 - x * x, 2) + 2 / (1 - x * x) } };
        }

        public int getDim() {
            return 1;
        }
    };

    OptimizationRequest or = new OptimizationRequest();
    or.setCheckKKTSolutionAccuracy(true);
    or.setF0(objectiveFunction);
    or.setInitialPoint(new double[] { 0 }); // must be feasible

    // optimization
    NewtonLEConstrainedFSP opt = new NewtonLEConstrainedFSP();
    opt.setOptimizationRequest(or);
    int returnCode = opt.optimize();
    // END SNIPPET: NewtonLEConstrainedFSP-1
    if (returnCode == OptimizationResponse.FAILED) {
        fail();
    }
    OptimizationResponse response = opt.getOptimizationResponse();
    double[] sol = response.getSolution();
    double value = objectiveFunction.value(sol);
    log.debug("sol   : " + ArrayUtils.toString(sol));
    log.debug("value : " + value);
    assertEquals(-0.41421356, sol[0], 0.0000001); // = 1 - Math.sqrt(2)
    assertEquals(-0.22598716, value, 0.0000001);
}
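For reference, the asserted solution follows from the first-order condition: setting the gradient to zero gives 1 + 2x/(1 - x^2) = 0, i.e. x^2 - 2x - 1 = 0, whose root inside the domain |x| < 1 is x = 1 - sqrt(2) ≈ -0.41421356, matching the assertion on sol[0].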
From source file:com.mapr.synth.samplers.ArrivalSampler.java
@Override
public JsonNode sample() {
    synchronized (this) {
        TextNode r = new TextNode(df.format(new Date((long) start)));
        start += minInterval - meanInterval * Math.log(1 - base.nextDouble());
        return r;
    }
}
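The increment above, -meanInterval * Math.log(1 - base.nextDouble()), is the standard inverse-transform draw of an exponentially distributed inter-arrival time. A stand-alone sketch of the same idea (class name, seed, and mean are illustrative, not from the sampler):

import java.util.Random;

public class ExponentialDraw {
    public static void main(String[] args) {
        Random rng = new Random(42);
        double mean = 5.0; // mean inter-arrival time

        // Inverse-transform sampling: if U ~ Uniform(0,1),
        // then -mean * ln(1 - U) is exponentially distributed with the given mean.
        double interArrival = -mean * Math.log(1.0 - rng.nextDouble());
        System.out.println(interArrival);
    }
}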
From source file:edu.ucuenca.authorsrelatedness.Distance.java
public double NWD(String uri1, String end1, String uri2, String end2, String quy) throws Exception {
    List<String> prms = new ArrayList();
    prms.add(uri1 + "+" + end1);
    prms.add(uri2 + "+" + end2);
    prms.add(quy);
    Collections.sort(prms);
    Double rspc = GetCacheDistance(prms.toString());
    if (rspc == null) {
        Map<String, List<String>> map = new HashMap<>();
        List<String> Authors = new ArrayList();
        Authors.add(uri1);
        Authors.add(uri2);
        List<String> Endpoints = new ArrayList();
        Endpoints.add(end1);
        Endpoints.add(end2);
        Map<String, Double> Result = new HashMap<>();
        for (int i = 0; i < Authors.size(); i++) {
            for (int j = i + 1; j < Authors.size(); j++) {
                String a1 = Authors.get(i);
                String a2 = Authors.get(j);
                List<String> ka1 = null;
                List<String> ka2 = null;
                if (map.containsKey(a1)) {
                    ka1 = map.get(a1);
                } else {
                    ka1 = consultado2(a1, Endpoints.get(i));
                    //String t1_ = traductor(Joiner.on(" | ").join(ka1)).toLowerCase();
                    ka1 = traductor(ka1); //new LinkedList<String>(java.util.Arrays.asList(t1_.split("\\s\\|\\s")));
                    ka1 = clean(ka1);
                    System.out.println(uri1 + "|E:" + Joiner.on(",").join(ka1));
                    ka1 = TopT(ka1, (int) (2.0 * Math.log(ka1.size())));
                    System.out.println(uri1 + "|F:" + Joiner.on(",").join(ka1));
                    map.put(a1, ka1);
                }
                if (map.containsKey(a2)) {
                    ka2 = map.get(a2);
                } else {
                    ka2 = consultado2(a2, Endpoints.get(j));
                    //String t2_ = traductor(Joiner.on(" | ").join(ka2)).toLowerCase();
                    ka2 = traductor(ka2); //new LinkedList<String>(java.util.Arrays.asList(t2_.split("\\s\\|\\s")));
                    ka2 = clean(ka2);
                    System.out.println(uri2 + "|E:" + Joiner.on(",").join(ka2));
                    ka2 = TopT(ka2, (int) (2.0 * Math.log(ka2.size())));
                    System.out.println(uri2 + "|F:" + Joiner.on(",").join(ka2));
                    map.put(a2, ka2);
                }
                //System.out.println(ka1.size() + "," + ka2.size());
                double sum = 0;
                double num = 0;
                for (String t1 : ka1) {
                    for (String t2 : ka2) {
                        num++;
                        String tt1 = t1;
                        String tt2 = t2;
                        double v = NGD(tt1, tt2);
                        sum += v;
                    }
                }
                double prom = sum / num;
                if (num == 0 && sum == 0) {
                    prom = 2;
                }
                Result.put(i + "," + j, prom);
            }
        }
        double r = 0;
        for (Map.Entry<String, Double> cc : Result.entrySet()) {
            r = cc.getValue();
        }
        rspc = r;
        PutCacheDistance(prms.toString(), rspc);
    }
    return rspc;
}
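In this method Math.log is used only for sizing: TopT(ka, (int) (2.0 * Math.log(ka.size()))) caps each author's keyword list at roughly 2*ln(n) of its top terms, so the nested NGD comparison loop runs over a small number of keyword pairs.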
From source file:ml.shifu.shifu.udf.PSICalculatorUDF.java
@Override
public Tuple exec(Tuple input) throws IOException {
    if (input == null || input.size() < 2) {
        return null;
    }

    Integer columnId = (Integer) input.get(0);
    DataBag databag = (DataBag) input.get(1);

    ColumnConfig columnConfig = this.columnConfigList.get(columnId);
    List<Integer> negativeBin = columnConfig.getBinCountNeg();
    List<Integer> positiveBin = columnConfig.getBinCountPos();
    List<Double> expected = new ArrayList<Double>(negativeBin.size());

    // Expected proportion of the overall population in each bin
    for (int i = 0; i < columnConfig.getBinCountNeg().size(); i++) {
        if (columnConfig.getTotalCount() == 0) {
            expected.add(0D);
        } else {
            expected.add(((double) negativeBin.get(i) + (double) positiveBin.get(i))
                    / columnConfig.getTotalCount());
        }
    }

    Iterator<Tuple> iter = databag.iterator();
    Double psi = 0D;
    List<String> unitStats = new ArrayList<String>();
    while (iter.hasNext()) {
        Tuple tuple = iter.next();
        if (tuple != null && tuple.size() != 0) {
            String subBinStr = (String) tuple.get(1);
            String[] subBinArr = StringUtils.split(subBinStr, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR);

            List<Double> subCounter = new ArrayList<Double>();
            Double total = 0D;
            for (String binningElement : subBinArr) {
                Double dVal = Double.valueOf(binningElement);
                subCounter.add(dVal);
                total += dVal;
            }

            // Accumulate the PSI contribution of each bin; the bin index stays
            // aligned with the expected distribution even when a bin is skipped.
            for (int i = 0; i < subCounter.size(); i++) {
                double sub = subCounter.get(i);
                if (total == 0 || expected.get(i) == 0) {
                    continue;
                }
                double logNum = (sub / total) / expected.get(i);
                if (logNum > 0) {
                    psi = psi + ((sub / total - expected.get(i)) * Math.log(logNum));
                }
            }

            unitStats.add((String) tuple.get(2));
        }
    }

    // sort by unit
    Collections.sort(unitStats);

    Tuple output = TupleFactory.getInstance().newTuple(3);
    output.set(0, columnId);
    output.set(1, psi);
    output.set(2, StringUtils.join(unitStats, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR));

    return output;
}
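For reference, the inner loop accumulates the standard Population Stability Index, PSI = sum_i (actual_i - expected_i) * ln(actual_i / expected_i), where actual_i = subCounter.get(i) / total is the bin proportion in the current unit and expected_i is the pre-computed overall bin proportion.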
From source file:bide.math.NormalDistribution.java
public static double logPdfBT(double x, double m, double sd, double lLim, double uLim) {
    double dnorm = 0;
    try {
        double limit = Math.log(NormalDistribution.cdf(uLim, m, sd) - NormalDistribution.cdf(lLim, m, sd));
        if (x > lLim && x < uLim) {
            dnorm = NormalDistribution.logPdf(x, m, sd) - limit;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return dnorm;
}
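This is the log-density of a normal distribution truncated to the interval (lLim, uLim): the untruncated log-pdf at x minus the log of the probability mass the untruncated normal places between the two limits, log(cdf(uLim) - cdf(lLim)).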