List of usage examples for java.lang.Math.pow
@HotSpotIntrinsicCandidate public static double pow(double a, double b)
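Math.pow(a, b) returns a raised to the power of b as a double, so fractional and negative exponents also cover roots and reciprocals. A minimal standalone sketch (illustrative only, not taken from any of the source files below):

public class PowDemo {
    public static void main(String[] args) {
        System.out.println(Math.pow(2.0, 10.0));        // 1024.0
        System.out.println(Math.pow(2.0, 0.5));         // square root of 2, ~1.4142
        System.out.println(Math.pow(27.0, 1.0 / 3.0));  // cube root, ~3.0
        System.out.println(Math.pow(10.0, -2.0));       // ~0.01
    }
}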
From source file:org.wallerlab.yoink.density.service.densityProperties.DensityOverlapRegionsIndicatorComputer.java
/**
 * Calculate the DORI value of a grid point.
 *
 * @param densityPoint
 *            a {@link org.wallerlab.yoink.api.model.density.DensityPoint}
 * @param doriValue
 *            the pre-calculated DORI value
 * @return the final DORI value
 */
protected double getSilvaValue(DensityPoint densityPoint, double doriValue) {
    double gradient = densityPoint.getGradient();
    doriValue *= (4.0 / Math.pow(gradient, 3));
    doriValue /= (1.0 + doriValue);
    return doriValue;
}
From source file:ldbc.snb.datagen.generator.distribution.utils.Bucket.java
public static ArrayList<Bucket> bucketizeHistogram(ArrayList<Pair<Integer, Integer>> histogram, int num_buckets) {
    ArrayList<Bucket> buckets = new ArrayList<Bucket>();
    int population = 0;
    int num_edges = 0;
    for (Pair<Integer, Integer> i : histogram) {
        population += i.getValue();
        num_edges += i.getValue() * i.getKey();
    }
    num_edges /= 2;

    int avgDegreeAt1B = 200;
    int avgDegree = num_edges / population;
    double aCoeff = Math.log(avgDegreeAt1B) / Math.log(1000000000);
    double bCoeff = (aCoeff - (Math.log(avgDegree) / Math.log(population))) / Math.log10(population);

    int target_mean = (int) Math.round(
            Math.pow(DatagenParams.numPersons, (aCoeff - bCoeff * Math.log10(DatagenParams.numPersons))));
    System.out.println("Distribution mean degree: " + avgDegree + " Distribution target mean " + target_mean);

    int bucket_size = (int) (Math.ceil(population / (double) (num_buckets)));
    int current_histogram_index = 0;
    int current_histogram_left = histogram.get(current_histogram_index).getValue();
    for (int i = 0; i < num_buckets && (current_histogram_index < histogram.size()); ++i) {
        int current_bucket_count = 0;
        int min = population;
        int max = 0;
        while (current_bucket_count < bucket_size && current_histogram_index < histogram.size()) {
            int degree = histogram.get(current_histogram_index).getKey();
            min = degree < min ? degree : min;
            max = degree > max ? degree : max;
            if ((bucket_size - current_bucket_count) > current_histogram_left) {
                current_bucket_count += current_histogram_left;
                current_histogram_index++;
                if (current_histogram_index < histogram.size()) {
                    current_histogram_left = histogram.get(current_histogram_index).getValue();
                }
            } else {
                current_histogram_left -= (bucket_size - current_bucket_count);
                current_bucket_count = bucket_size;
            }
        }
        min = (int) (min * target_mean / (double) avgDegree);
        max = (int) (max * target_mean / (double) avgDegree);
        buckets.add(new Bucket(min, max));
    }
    return buckets;
}
From source file:com.xerox.amazonws.ec2.EC2Utils.java
/**
 * This method makes a best effort to fetch all instance metadata.
 *
 * @return map of metadata
 */
public static Map<String, String> getInstanceMetadata() {
    HashMap<String, String> result = new HashMap<String, String>();
    int retries = 0;
    while (true) {
        try {
            URL url = new URL("http://169.254.169.254/latest/meta-data/");
            BufferedReader rdr = new BufferedReader(new InputStreamReader(url.openStream()));
            String line = rdr.readLine();
            while (line != null) {
                try {
                    String val = getInstanceMetadata(line);
                    result.put(line, val);
                } catch (IOException ex) {
                    logger.error("Problem fetching piece of instance metadata!", ex);
                }
                line = rdr.readLine();
            }
            return result;
        } catch (IOException ex) {
            if (retries == 5) {
                logger.debug("Problem getting instance data, retries exhausted...");
                return result;
            } else {
                logger.debug("Problem getting instance data, retrying...");
                try {
                    // Back off exponentially: 1s, 2s, 4s, ...
                    Thread.sleep((int) Math.pow(2.0, retries) * 1000);
                } catch (InterruptedException e) {
                }
                retries++; // advance the retry counter so the backoff grows and the loop terminates
            }
        }
    }
}
From source file:change_point_detection.BetaDistributionChangePoint.java
public int /* estimated change point */ detectChange() throws Exception {
    int estimatedChangePoint = -1;
    int N = this.dynamicWindow.size();
    this.cushion = Math.max(100, (int) Math.floor(Math.pow(N, gamma)));
    // mean conf. should not fall below 0.3
    if (N > (2 * this.cushion) && calculateMean(0, N - 1) <= 0.3)
        return N - 1;

    double threshold = -Math.log(this.sensitivity);
    double w = 0;
    int kAtMaxW = -1;
    for (int k = this.cushion; k <= N - this.cushion; k++) {
        if (calculateMean(k, N - 1) <= 0.95 * calculateMean(0, k - 1)) {
            double skn = 0;
            /* estimate pre and post change parameters */
            double alphaPreChange = calcBetaDistAlpha(0, k - 1);
            double betaPreChange = calculateBetaDistBeta(alphaPreChange, 0, k - 1);
            double alphaPostChange = calcBetaDistAlpha(k, N - 1);
            double betaPostChange = calculateBetaDistBeta(alphaPostChange, k, N - 1);

            BetaDistributionImpl preBetaDist = new BetaDistributionImpl(alphaPreChange, betaPreChange);
            BetaDistributionImpl postBetaDist = new BetaDistributionImpl(alphaPostChange, betaPostChange);

            for (int i = k; i < N; i++) {
                try {
                    skn += Math.log(postBetaDist.density(this.dynamicWindow.get(i).doubleValue())
                            / preBetaDist.density(this.dynamicWindow.get(i).doubleValue()));
                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("continuing...");
                    skn = 0;
                    break;
                }
            }
            if (skn > w) {
                w = skn;
                kAtMaxW = k;
            }
        }
    }
    if (w >= threshold && kAtMaxW != -1) {
        System.out.println("\nChangePoint Found!");
        estimatedChangePoint = kAtMaxW;
        System.out.println("Estimated change point is " + estimatedChangePoint);
    }
    // force change point if confidence falls down terribly
    if (estimatedChangePoint == -1 && N >= 100 && calculateMean(0, N - 1) < 0.3)
        estimatedChangePoint = N - 1;
    return estimatedChangePoint;
}
From source file:de.termininistic.serein.examples.benchmarks.functions.unimodal.SumOfDifferentPowersFunction.java
@Override
public double map(RealVector v) {
    double[] x = v.toArray();
    int n = x.length;
    double sum = 0.0;
    for (int i = 0; i < n; i++) {
        sum += Math.pow(Math.abs(x[i]), i + 2);
    }
    return sum;
}
From source file:com.opengamma.analytics.math.interpolation.ExponentialInterpolator1D.java
@Override
public double firstDerivative(final Interpolator1DDataBundle data, final Double value) {
    Validate.notNull(value, "value");
    Validate.notNull(data, "data bundle");
    final Double x1 = data.getLowerBoundKey(value);
    final Double y1 = data.get(x1);
    if (data.getLowerBoundIndex(value) == data.size() - 1) {
        return 0.;
    }
    final Double x2 = data.higherKey(x1);
    final Double y2 = data.get(x2);
    final double xDiff = x2 - x1;
    return Math.pow(y1, value * (x2 - value) / xDiff / x1)
            * Math.pow(y2, value * (value - x1) / xDiff / x2)
            * (Math.log(y1) * (x2 - 2. * value) / x1 + Math.log(y2) * (2. * value - x1) / x2) / xDiff;
}
From source file:com.itemanalysis.psychometrics.kernel.ScottsBandwidth.java
public double value() {
    StandardDeviation sd = new StandardDeviation();
    double q3 = pcntl.evaluate(x, 75.0);
    double q1 = pcntl.evaluate(x, 25.0);
    double IQR = (q3 - q1) / 1.34;
    double s = sd.evaluate(x);
    double N = (double) x.length;
    double m = Math.min(s, IQR);
    return 1.06 * m * Math.pow(N, -1.0 / 5.0) * adjustmentFactor;
}
From source file:com.openkm.util.FormatUtil.java
/**
 * Format the document size for human readers
 */
public static String formatSize(long bytes) {
    for (int i = 6; i > 0; i--) {
        double step = Math.pow(1024, i);
        if (bytes > step)
            return String.format(Locale.ROOT, "%3.1f %s", bytes / step, UNITS[i]);
    }
    return Long.toString(bytes) + " " + UNITS[0];
}
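A hypothetical call site for the method above (illustrative only; the UNITS array is defined elsewhere in FormatUtil and is assumed here to hold values such as {"B", "KB", "MB", "GB", ...}):

// Illustrative only; assumes UNITS = {"B", "KB", "MB", "GB", ...}.
System.out.println(FormatUtil.formatSize(512L));            // "512 B"  (no 1024^i step exceeded)
System.out.println(FormatUtil.formatSize(10_485_760L));     // "10.0 MB"
System.out.println(FormatUtil.formatSize(3_221_225_472L));  // "3.0 GB"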
From source file:com.opengamma.analytics.financial.interestrate.PeriodicInterestRate.java
@Override
public InterestRate fromPeriodic(final PeriodicInterestRate periodic) {
    Validate.notNull(periodic, "periodic");
    final int n = periodic.getCompoundingPeriodsPerYear();
    final double oneYearValue = Math.pow(1 + periodic.getRate() / n, n);
    final double r = getCompoundingPeriodsPerYear()
            * (Math.pow(oneYearValue, 1. / getCompoundingPeriodsPerYear()) - 1);
    return new PeriodicInterestRate(r, getCompoundingPeriodsPerYear());
}
From source file:org.wallerlab.yoink.adaptive.smooth.smoothfunction.SCMPSmoothFunction.java
/**
 * This smooth function is used in the SCMP method. For details see:
 * "Size-Consistent Multipartitioning QM/MM: A Stable and Efficient Adaptive
 * QM/MM Method"
 *
 * @param currentValue
 *            the current value (variable) in the smooth function
 * @param min
 *            minimum value in the smooth function
 * @param max
 *            maximum value in the smooth function
 * @return smooth factor
 */
public double evaluate(double currentValue, double min, double max) {
    double smoothFactor;
    if (currentValue > max) {
        smoothFactor = 0;
    } else if (currentValue < min) {
        smoothFactor = 1;
    } else {
        smoothFactor = Math.pow((currentValue - max), 2);
        smoothFactor = smoothFactor * (-3 * min + max + 2 * currentValue);
        smoothFactor = smoothFactor / Math.pow((max - min), 3);
    }
    return smoothFactor;
}
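A hypothetical usage sketch for this smoothing function: the factor is 1 inside the inner cutoff, 0 beyond the outer cutoff, and interpolates smoothly in between. The cutoff values and the no-argument constructor below are assumptions for illustration; they are not taken from the SCMP paper or the Yoink sources.

// Sweep currentValue across an assumed buffer region [min, max].
SCMPSmoothFunction smoother = new SCMPSmoothFunction(); // assumes a no-arg constructor
double min = 3.0;  // assumed inner cutoff (illustrative only)
double max = 4.5;  // assumed outer cutoff (illustrative only)
for (double r = 2.5; r <= 5.0; r += 0.5) {
    System.out.printf("r = %.1f -> smoothing factor = %.3f%n", r, smoother.evaluate(r, min, max));
}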