Example usage for java.lang Math E

List of usage examples for java.lang Math E

Introduction

On this page you can find example usages of java.lang Math E.

Prototype

double E

Document

The double value that is closer than any other to e, the base of the natural logarithms.
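
As a quick orientation, here is a minimal, dependency-free sketch (not taken from any of the projects below) showing how Math.E relates to Math.exp and Math.log:

public class MathEDemo {
    public static void main(String[] args) {
        System.out.println(Math.E);           // 2.718281828459045
        System.out.println(Math.exp(1.0));    // e^1, equal to Math.E up to rounding
        System.out.println(Math.log(Math.E)); // 1.0 up to rounding; Math.log is the natural logarithm
    }
}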

Usage

From source file:org.gephi.statistics.plugin.DegreeDistribution.java

/**
 *
 * @return The directed version of the report.
 */
private String getDirectedReport() {
    double inMax = 0;
    XYSeries inSeries2 = new XYSeries("Series 2");
    for (int i = 1; i < inDistribution[1].length; i++) {
        if (inDistribution[1][i] > 0) {
            inSeries2.add((Math.log(inDistribution[0][i]) / Math.log(Math.E)),
                    (Math.log(inDistribution[1][i]) / Math.log(Math.E)));
            inMax = (float) Math.max((Math.log(inDistribution[0][i]) / Math.log(Math.E)), inMax);
        }
    }
    double inA = inAlpha;
    double inB = inBeta;

    String inImageFile = "";
    String outImageFile = "";
    try {

        XYSeries inSeries1 = new XYSeries(inAlpha + " ");
        inSeries1.add(0, inA);
        inSeries1.add(inMax, inA + inB * inMax);

        XYSeriesCollection inDataset = new XYSeriesCollection();
        inDataset.addSeries(inSeries1);
        inDataset.addSeries(inSeries2);

        JFreeChart inChart = ChartFactory.createXYLineChart("In-Degree Distribution", "In-Degree", "Occurrence",
                inDataset, PlotOrientation.VERTICAL, true, false, false);
        XYPlot inPlot = (XYPlot) inChart.getPlot();
        XYLineAndShapeRenderer inRenderer = new XYLineAndShapeRenderer();
        inRenderer.setSeriesLinesVisible(0, true);
        inRenderer.setSeriesShapesVisible(0, false);
        inRenderer.setSeriesLinesVisible(1, false);
        inRenderer.setSeriesShapesVisible(1, true);
        inRenderer.setSeriesShape(1, new java.awt.geom.Ellipse2D.Double(0, 0, 1, 1));
        inPlot.setBackgroundPaint(java.awt.Color.WHITE);
        inPlot.setDomainGridlinePaint(java.awt.Color.GRAY);
        inPlot.setRangeGridlinePaint(java.awt.Color.GRAY);

        inPlot.setRenderer(inRenderer);

        final ChartRenderingInfo info = new ChartRenderingInfo(new StandardEntityCollection());

        TempDir tempDir = TempDirUtils.createTempDir();
        final String fileName = "inDistribution.png";
        final File file1 = tempDir.createFile(fileName);
        inImageFile = "<IMG SRC=\"file:" + file1.getAbsolutePath() + "\" "
                + "WIDTH=\"600\" HEIGHT=\"400\" BORDER=\"0\" USEMAP=\"#chart\"></IMG>";
        ChartUtilities.saveChartAsPNG(file1, inChart, 600, 400, info);

        double outMax = 0;
        XYSeries outSeries2 = new XYSeries("Series 2");
        for (int i = 1; i < outDistribution[1].length; i++) {
            if (outDistribution[1][i] > 0) {
                outSeries2.add((Math.log(outDistribution[0][i]) / Math.log(Math.E)),
                        (Math.log(outDistribution[1][i]) / Math.log(Math.E)));
                outMax = (float) Math.max((Math.log(outDistribution[0][i]) / Math.log(Math.E)), outMax);
            }
        }
        double outA = outAlpha;
        double outB = outBeta;

        XYSeries outSeries1 = new XYSeries(outAlpha + " ");
        outSeries1.add(0, outA);
        outSeries1.add(outMax, outA + outB * outMax);

        XYSeriesCollection outDataset = new XYSeriesCollection();
        outDataset.addSeries(outSeries1);
        outDataset.addSeries(outSeries2);

        JFreeChart outchart = ChartFactory.createXYLineChart("Out-Degree Distribution", "Out-Degree",
                "Occurrence", outDataset, PlotOrientation.VERTICAL, true, false, false);
        XYPlot outPlot = (XYPlot) outchart.getPlot();
        XYLineAndShapeRenderer outRenderer = new XYLineAndShapeRenderer();
        outRenderer.setSeriesLinesVisible(0, true);
        outRenderer.setSeriesShapesVisible(0, false);
        outRenderer.setSeriesLinesVisible(1, false);
        outRenderer.setSeriesShapesVisible(1, true);
        outRenderer.setSeriesShape(1, new java.awt.geom.Ellipse2D.Double(0, 0, 1, 1));
        outPlot.setBackgroundPaint(java.awt.Color.WHITE);
        outPlot.setDomainGridlinePaint(java.awt.Color.GRAY);
        outPlot.setRangeGridlinePaint(java.awt.Color.GRAY);

        outPlot.setRenderer(outRenderer);

        final ChartRenderingInfo info2 = new ChartRenderingInfo(new StandardEntityCollection());
        final String fileName2 = "outDistribution.png";
        final File file2 = tempDir.createFile(fileName2);
        outImageFile = "<IMG SRC=\"file:" + file2.getAbsolutePath() + "\" "
                + "WIDTH=\"600\" HEIGHT=\"400\" BORDER=\"0\" USEMAP=\"#chart\"></IMG>";
        ChartUtilities.saveChartAsPNG(file2, outchart, 600, 400, info2);
    } catch (IOException e) {
        Exceptions.printStackTrace(e);
    }

    String report = "<HTML> <BODY> <h1>Degree Distribution Metric Report </h1> " + "<hr>" + "<br>"
            + "<h2> Parameters: </h2>" + "Network Interpretation:  " + (isDirected ? "directed" : "undirected")
            + "<br>" + "<br> <h2> Results: </h2>" + "In-Degree Power Law: -" + inAlpha + "\n <BR>" + inImageFile
            + "<br>Out-Degree Power Law: -" + outAlpha + "\n <BR>" + outImageFile + "</BODY> </HTML>";

    return report;
}
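
Note that Math.log already returns the natural logarithm, so the change-of-base division Math.log(x) / Math.log(Math.E) used above is mathematically a no-op; the idiom is only needed for other bases. A minimal sketch (not part of the Gephi code):

    // same value as Math.log(x) / Math.log(Math.E), without the redundant division
    static double ln(double x) {
        return Math.log(x);
    }

    // the change-of-base idiom matters only for a base other than e, for example base 10
    static double log10(double x) {
        return Math.log(x) / Math.log(10.0);
    }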

From source file:it.unimi.dsi.sux4j.mph.VLPaCoTrieDistributorMonotoneMinimalPerfectHashFunction.java

/** Creates a new PaCo-trie-based monotone minimal perfect hash function using the given
 * elements and transformation strategy. 
 *
 * @param elements the elements among which the trie must be able to rank.
 * @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
 * distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
 */
public VLPaCoTrieDistributorMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> elements,
        final TransformationStrategy<? super T> transform) throws IOException {

    this.transform = transform;
    defRetValue = -1; // For the very few cases in which we can decide

    long maxLength = 0;
    long totalLength = 0;
    BitVector bv;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    pl.itemsName = "keys";

    pl.start("Creating chunked hash store...");
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity());
    chunkedHashStore.reset(random.nextLong());
    for (T s : elements) {
        bv = transform.toBitVector(s);
        chunkedHashStore.add(bv);
        maxLength = Math.max(maxLength, bv.length());
        totalLength += bv.length();
        pl.lightUpdate();
    }

    pl.done();

    size = chunkedHashStore.size();

    if (size == 0) {
        bucketSize = log2BucketSize = 0;
        distributor = null;
        offset = null;
        chunkedHashStore.close();
        return;
    }

    final long averageLength = (totalLength + size - 1) / size;

    int t = Fast.mostSignificantBit(
            (int) Math.floor(averageLength - Math.log(size) - Math.log(averageLength - Math.log(size)) - 1));
    final int firstbucketSize = 1 << t;
    LOGGER.debug("First bucket size estimate: " + firstbucketSize);

    final Iterable<BitVector> bitVectors = TransformationStrategies.wrap(elements, transform);

    VLPaCoTrieDistributor<BitVector> firstDistributor = new VLPaCoTrieDistributor<BitVector>(bitVectors, size,
            firstbucketSize, TransformationStrategies.identity());

    if (firstDistributor.numBits() == 0 || firstbucketSize >= size)
        log2BucketSize = t;
    else {
        // Reassign bucket size based on empirical estimation
        log2BucketSize = t
                - Fast.mostSignificantBit((int) Math.ceil(size / (firstDistributor.numBits() * Math.log(2))));
    }

    bucketSize = 1 << log2BucketSize;
    LOGGER.debug("Second bucket size estimate: " + bucketSize);

    if (firstbucketSize == bucketSize)
        distributor = firstDistributor;
    else {
        firstDistributor = null;
        distributor = new VLPaCoTrieDistributor<BitVector>(bitVectors, size, bucketSize,
                TransformationStrategies.identity());
    }

    LOGGER.info("Bucket size: " + bucketSize);

    final SparseRank sparseRank;
    if (size > 2 * bucketSize) {
        sparseRank = new SparseRank(distributor.offset.getLong(distributor.offset.size64() - 1) + 1,
                distributor.offset.size64(), distributor.offset.iterator());
        if (ASSERTS) {
            long i = 0;
            for (BitVector b : bitVectors) {
                final long d = distributor.getLong(b);
                assert sparseRank.rank(i) == d : "At " + i + ": " + sparseRank.rank(i) + " != " + d;
                i++;
            }
        }

        select = sparseRank.getSelect();
    } else {
        sparseRank = null;
        select = null;
    }

    if (size > 0) {
        offset = new GOV3Function.Builder<BitVector>().keys(bitVectors)
                .transform(TransformationStrategies.identity()).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        final long rank = sparseRank == null ? 0 : sparseRank.rank(index);
                        if (ASSERTS) {
                            assert rank == 0
                                    || distributor.offset.getLong(rank - 1) <= index : distributor.offset
                                            .getLong(rank - 1) + " >= " + index + "(rank=" + rank + ")";
                            assert rank == 0 && index < bucketSize * 2 || rank > 0
                                    && index - distributor.offset.getLong(rank - 1) < bucketSize * 2;
                        }
                        return rank == 0 ? index : index - distributor.offset.getLong(rank - 1);
                    }

                    public long size64() {
                        return size;
                    }
                }, log2BucketSize + 1).indirect().build();

    } else
        offset = null;

    chunkedHashStore.close();

    LOGGER.debug("Forecast distributor bit cost: "
            + (size / bucketSize) * (maxLength + log2BucketSize - Math.log(size)));
    LOGGER.debug("Actual distributor bit cost: " + distributor.numBits());
    LOGGER.debug("Forecast bit cost per element: " + (GOV3Function.C + Fast.log2(Math.E)
            - Fast.log2(Fast.log2(Math.E)) + Fast.log2(maxLength - Fast.log2(size))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / size);
}
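
The Fast.log2(Math.E) terms in the forecast above rely on the identity log2(e) = 1 / ln 2. Assuming Fast.log2 is an ordinary base-2 logarithm, the constant part of that expression can be reproduced with plain java.lang.Math (a sketch, not part of sux4j):

    static double log2(double x) {
        return Math.log(x) / Math.log(2.0);
    }

    // log2(Math.E) is about 1.4427 and log2(log2(Math.E)) is about 0.5288, so the term
    // Fast.log2(Math.E) - Fast.log2(Fast.log2(Math.E)) contributes roughly 0.91 bits per
    // element on top of GOV3Function.C in the forecast.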

From source file:org.libreplan.business.planner.entities.SigmoidFunction.java

private BigDecimal calculateNumberOfAccumulatedHoursAtDay(BigDecimal valueAtOneDay, int totalHours) {
    BigDecimal epow = BigDecimal.valueOf(Math.pow(Math.E, valueAtOneDay.negate().doubleValue()));
    BigDecimal denominator = BigDecimal.valueOf(1).add(epow);
    return BigDecimal.valueOf(totalHours).divide(denominator, PRECISSION, ROUND_MODE);
}
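
The expression Math.pow(Math.E, x) is equivalent to Math.exp(x), and Math.exp is the more direct call; the method above is a logistic (sigmoid) curve scaled to totalHours. An equivalent sketch, assuming the same PRECISSION and ROUND_MODE constants of the surrounding class:

private BigDecimal calculateNumberOfAccumulatedHoursAtDay(BigDecimal valueAtOneDay, int totalHours) {
    // e^(-valueAtOneDay), written with Math.exp instead of Math.pow(Math.E, ...)
    BigDecimal epow = BigDecimal.valueOf(Math.exp(valueAtOneDay.negate().doubleValue()));
    BigDecimal denominator = BigDecimal.ONE.add(epow);
    return BigDecimal.valueOf(totalHours).divide(denominator, PRECISSION, ROUND_MODE);
}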

From source file:com.comcast.cns.util.Util.java

/**
 * @param i the retry number; must start at 1
 * @param maxBackOffRetries The total number of retries allowed
 * @param minDelayTarget the minimum retry delay in sec
 * @param maxDelayTarget the max retry delay in sec
 * @param backOffFunction which backoff function to return
 * @return the delay in seconds
 */
public static int getNextRetryDelay(int i, int maxBackOffRetries, int minDelayTarget, int maxDelayTarget,
        CnsBackoffFunction backOffFunction) {
    if (maxBackOffRetries == 0) {
        throw new IllegalArgumentException("maxBackOffRetries cannot be 0");
    }
    double x;
    double a;
    switch (backOffFunction) {
    case linear:
        //equation f(i) = slope*(i-1) + minDelayTarget
        //calculate slope given f(maxBackOffRetries) = maxDelayTarget = slope(maxBackOffRetries - 1) + minDelayTarget
        //=> slope = (maxDelayTarget - minDelayTarget)/ (maxBackOffRetries - 1)
        double slope = (double) (maxDelayTarget - minDelayTarget) / (double) (maxBackOffRetries - 1);
        return (int) (slope * (i - 1) + minDelayTarget);

    case geometric:
        //figure out x using equation: x^(maxBackOffRetries - 1) + minDelayTarget - 1 = maxDelayTarget
        //=> x^(maxBackOffRetries - 1) = maxDelayTarget - minDelayTarget + 1
        //=> x = pow(maxDelayTarget - minDelayTarget + 1, 1/(maxBackOffRetries - 1))
        // and f(i) = x^(i-1) + minDelayTarget - 1

        x = Math.pow(maxDelayTarget - minDelayTarget + 1, 1d / (double) (maxBackOffRetries - 1));
        return (int) Math.pow(x, (i - 1)) + minDelayTarget - 1;

    case exponential:
        //equation to use ae^(i-1) + b - a = y. where b = minDelayTarget
        //=> ae^(maxBackOffRetries -1) + minDelayTarget - a = maxDelayTarget
        //=>a(e^(maxBackOffRetries -1) -1) = maxDelayTarget - minDelayTarget
        //=> a = (maxDelayTarget - minDelayTarget) / (e^(maxBackOffRetries -1) -1)
        a = (maxDelayTarget - minDelayTarget) / (Math.pow(Math.E, maxBackOffRetries - 1) - 1);
        return (int) ((a * Math.pow(Math.E, i - 1)) + minDelayTarget - a);

    case arithmetic:
        //arithmetic is pretty much quadratic for us given equation: ax^2 + b = y
        //f(i) = a(i-1)^2 + b
        //figure out a using b = minDelayTarget & a(maxBackOffRetries-1)^2 + minDelayTarget = maxDelayTarget
        //=> a = (maxDelayTarget-minDelayTarget)/ (maxBackOffRetries-1)^2
        a = (maxDelayTarget - minDelayTarget) / Math.pow(maxBackOffRetries - 1, 2);
        return (int) (a * Math.pow((i - 1), 2) + minDelayTarget);

    default:
        throw new IllegalArgumentException("Unknown backoff" + backOffFunction);
    }
}
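
For the exponential case the constants are chosen so that f(1) = minDelayTarget and f(maxBackOffRetries) = maxDelayTarget. A hypothetical usage sketch (the parameter values here are illustrative only):

    // print a 5-step exponential backoff schedule between 2s and 60s
    for (int retry = 1; retry <= 5; retry++) {
        int delay = Util.getNextRetryDelay(retry, 5, 2, 60, CnsBackoffFunction.exponential);
        System.out.println("retry " + retry + " -> " + delay + "s");
    }
    // the first delay is 2s, the last is 60s, and the intermediate delays grow as e^(i-1)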

From source file:it.unibo.alchemist.utils.MathUtils.java

/**
 * This method calculates the Gamma function Γ(x) using the Stirling
 * approximation.
 * 
 * @param x
 *            the variable for Γ(x)
 * @return the Gamma function value with Stirling approximation
 */
public static double stirlingGamma(final double x) {
    return sqrt(2d * Math.PI / x) * pow((x / Math.E), x);
}
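
As a sanity check, Γ(5) = 4! = 24, while the Stirling form above yields roughly 23.6; the relative error of the approximation shrinks as x grows. Hypothetical usage:

    double approx = MathUtils.stirlingGamma(5.0); // about 23.6, versus the exact value 24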

From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkLikelihoodFromGTT.java

protected double findOptimalBranchLength(final Network<Object> speciesNetwork,
        final Map<String, List<String>> species2alleles, final List distinctTrees, final List gtCorrespondence,
        final Set<String> singleAlleleSpecies) {
    boolean continueRounds = true; // keep trying to improve network
    for (NetNode<Object> node : speciesNetwork.dfs()) {
        for (NetNode<Object> parent : node.getParents()) {
            node.setParentDistance(parent, 1.0);
            if (node.isNetworkNode()) {
                node.setParentProbability(parent, 0.5);
            }
        }
    }

    Set<NetNode> node2ignoreForBL = findEdgeHavingNoBL(speciesNetwork, singleAlleleSpecies);
    double initalProb = computeProbabilityForCached(speciesNetwork, distinctTrees, species2alleles,
            gtCorrespondence);
    if (_printDetails)
        System.out.println(speciesNetwork.toString() + " : " + initalProb);

    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initalProb); // records the GTProb of the network at all times

    int roundIndex = 0;
    for (; roundIndex < _maxRounds && continueRounds; roundIndex++) {
        /*
        * Prepare a random ordering of network edge examinations each of which attempts to change a branch length or hybrid prob to improve the GTProb score.
        */
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here.  Will execute them one by one later.

        for (final NetNode<Object> parent : edu.rice.cs.bioinfo.programs.phylonet.structs.network.util.Networks
                .postTraversal(speciesNetwork)) {

            for (final NetNode<Object> child : parent.getChildren()) {
                if (node2ignoreForBL.contains(child)) {
                    continue;
                }

                assigmentActions.add(new Proc() {
                    public void execute() {

                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedBranchLength) {
                                double incumbentBranchLength = child.getParentDistance(parent);

                                child.setParentDistance(parent, suggestedBranchLength);

                                double lnProb = updateProbabilityForCached(speciesNetwork, distinctTrees,
                                        gtCorrespondence, child, parent);
                                //System.out.println(speciesNetwork + ": " + lnProb);
                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);

                                } else // didn't improve, roll back change
                                {
                                    child.setParentDistance(parent, incumbentBranchLength);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE,
                                    Double.MIN_VALUE, _maxBranchLength);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }

                        updateProbabilityForCached(speciesNetwork, distinctTrees, gtCorrespondence, child,
                                parent);
                        if (_printDetails)
                            System.out.println(
                                    speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());

                    }
                });
            }
        }

        for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node
        {

            Iterator<NetNode<Object>> hybridParents = child.getParents().iterator();
            final NetNode hybridParent1 = hybridParents.next();
            final NetNode hybridParent2 = hybridParents.next();

            assigmentActions.add(new Proc() {
                public void execute() {
                    UnivariateFunction functionToOptimize = new UnivariateFunction() {
                        public double value(double suggestedProb) {
                            double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);

                            child.setParentProbability(hybridParent1, suggestedProb);
                            child.setParentProbability(hybridParent2, 1.0 - suggestedProb);

                            double lnProb = updateProbabilityForCached(speciesNetwork, distinctTrees,
                                    gtCorrespondence, child, null);
                            //System.out.println(speciesNetwork + ": " + lnProb);
                            if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                            {

                                lnGtProbOfSpeciesNetwork.setContents(lnProb);
                            } else // change did not improve, roll back
                            {

                                child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                            }
                            return lnProb;
                        }
                    };
                    BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                    try {
                        if (child.getName().equals("Y"))
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0.6,
                                    0.8);
                        else
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0);
                    } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                    {
                    }
                    updateProbabilityForCached(speciesNetwork, distinctTrees, gtCorrespondence, child, null);
                    if (_printDetails)
                        System.out.println(
                                speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());
                }
            });

        }

        // randomize the order in which branch length and hybrid probability adjustments are attempted
        Collections.shuffle(assigmentActions);

        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }
        if (_printDetails) {
            System.out.println("Round end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
            System.out
                    .println(speciesNetwork.toString() + "\n" + lnGtProbOfSpeciesNetwork.getContents() + "\n");
        }
        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made w.r.t. the last round, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made; ensure it is large enough w.r.t. the improvement threshold to continue searching
        {

            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }
    //System.out.println("\n" + lnGtProbOfSpeciesNetwork.getContents() + ": " + speciesNetwork);
    return lnGtProbOfSpeciesNetwork.getContents();
}
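
The stopping test above converts a difference of log-likelihoods into a relative improvement: exp(lnNew - lnOld) - 1 equals Pnew / Pold - 1, and Math.pow(Math.E, delta) is interchangeable with Math.exp(delta). A sketch with illustrative values:

    double lnGtProbLastRound = -1234.7;  // illustrative values only
    double lnGtProbThisRound = -1234.2;
    double improvementPercentage = Math.exp(lnGtProbThisRound - lnGtProbLastRound) - 1.0; // ~0.65, i.e. about 65% higher likelihood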

From source file:edu.ucla.stat.SOCR.applications.demo.BlackScholesApplication.java

public void calculate() {
    //System.out.println("calculate start");
    double S0 = input[0];
    double E = input[1];
    double sigma = input[2];
    double r = input[3];
    double day = input[4];
    double t = day / 365;
    //   int n = (int)input[5];

    //System.out.println("calculate start: S0="+S0+" E="+E+" sigma="+sigma+" r="+r+" day="+day+" n="+n);
    NormalDistribution nd = new NormalDistribution(0, 1);

    //r=Math.log(1+r);
    double d1 = (Math.log(S0 / E) + (r + .5 * sigma * sigma) * t) / (sigma * Math.sqrt(t));
    double d2 = d1 - sigma * Math.sqrt(t);
    double c = S0 * nd.getCDF(d1) - (E / Math.pow(Math.E, r * t)) * nd.getCDF(d2);

    p_serie = new XYSeries(info.outputName[0], false);
    p_serie.add(0, c);
    p_serie.add(Range, c);

    double u, d, rp, p1, p2, k, CC, dt;
    BinomialDistribution b1, b2;

    s_serie = new XYSeries(info.outputName[1], false);

    //working
    /*   for (int i=step; i<=Range; i+=step){
            
          u = Math.pow(Math.E,sigma*Math.sqrt(t/i));
          d = 1/u;
          rp = Math.pow(1+r, t/i)-1;
       //   rp = Math.log(1+r)/(i/t);
          p1 = (1+rp-d)/(u-d) ;
          p2 = (p1*u)/(1+rp);
          k = Math.round(0.5+(Math.log(E/(S0*Math.pow(d, i))))/(Math.log(u/d)));
          //System.out.println("i="+i+" u="+u+" d="+d+" rp="+rp+" p1="+p1+" p2="+p2+" k ="+k);
            
          b1 = new BinomialDistribution(i,p1);
          b2 = new BinomialDistribution(i,p2);
          //System.out.println(b1.getCDF(k-1));
          //System.out.println(b2.getCDF(k-1));
          CC = S0*(1-b2.getCDF(k-1))-(E/Math.pow(1+rp,i))*(1-b1.getCDF(k-1));
          s_serie.add(i,CC);
          //System.out.println(CC);
       }*/

    // better
    for (int i = step; i <= Range; i += step) {
        dt = t / i;
        u = Math.pow(Math.E, sigma * Math.sqrt(t / i));
        d = 1 / u;
        rp = Math.pow(Math.E, r * dt);

        //   rp = Math.log(1+r)/(i/t);
        p1 = (rp - d) / (u - d);
        p2 = (p1 * u) / (rp);
        k = Math.round(0.5 + (Math.log(E / (S0 * Math.pow(d, i)))) / (Math.log(u / d)));
        //System.out.println("i="+i+" u="+u+" d="+d+" rp="+rp+" p1="+p1+" p2="+p2+" k ="+k);

        b1 = new BinomialDistribution(i, p1);
        b2 = new BinomialDistribution(i, p2);
        //System.out.println(b1.getCDF(k-1));
        //System.out.println(b2.getCDF(k-1));
        CC = S0 * (1 - b2.getCDF(k - 1)) - (E * Math.pow(Math.E, -r * t)) * (1 - b1.getCDF(k - 1));
        s_serie.add(i, CC);
        //System.out.println(CC);
    }

    /*     numSs= (int)(1/N);
         S = new double[numSs];
         S[0]=S0;
            
         //System.out.println("numSs="+numSs);
         double sq = Math.sqrt(Delta);
         double e;
         double ds;
         for (int i=0; i<numSs; i++){
            e = nd.simulate();
            ds = Mu*S0*Dt+Delta*S0*sq*e;
            if (i!=0)
     S[i]= S[i-1]+ds;
         }   */
}
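
In the closed-form price above, E / Math.pow(Math.E, r * t) is the strike discounted at the risk-free rate, and the binomial loop's u and rp are exponentials as well; all of them can be written with Math.exp. A sketch using the snippet's variable names:

    // inside calculate(), these are equivalent ways to write the same exponentials:
    double discountFactor = Math.exp(-r * t);      // == 1 / Math.pow(Math.E, r * t)
    u = Math.exp(sigma * Math.sqrt(dt));           // == Math.pow(Math.E, sigma * Math.sqrt(t / i)), since dt = t / i
    rp = Math.exp(r * dt);                         // == Math.pow(Math.E, r * dt)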

From source file:it.unimi.dsi.sux4j.mph.VLLcpMonotoneMinimalPerfectHashFunction.java

@SuppressWarnings("unused")
public VLLcpMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> iterable, final int numElements,
        final TransformationStrategy<? super T> transform) throws IOException {

    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    this.transform = transform;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();

    if (numElements == -1) {
        if (iterable instanceof Size64)
            n = ((Size64) iterable).size64();
        else if (iterable instanceof Collection)
            n = ((Collection<?>) iterable).size();
        else {
            long c = 0;
            for (T dummy : iterable)
                c++;
            n = c;
        }
    } else
        n = numElements;

    if (n == 0) {
        bucketSize = bucketSizeMask = log2BucketSize = 0;
        lcp2Bucket = null;
        offsets = null;
        lcpLengths = null;
        mph = null;
        return;
    }

    defRetValue = -1; // For the very few cases in which we can decide

    int theoreticalBucketSize = (int) Math
            .ceil(1 + GOV3Function.C * Math.log(2) + Math.log(n) - Math.log(1 + Math.log(n)));
    log2BucketSize = Fast.ceilLog2(theoreticalBucketSize);
    bucketSize = 1 << log2BucketSize;
    bucketSizeMask = bucketSize - 1;

    final long numBuckets = (n + bucketSize - 1) / bucketSize;

    LongArrayBitVector prev = LongArrayBitVector.getInstance();
    LongArrayBitVector curr = LongArrayBitVector.getInstance();
    int currLcp = 0;
    int maxLcp = 0, minLcp = Integer.MAX_VALUE;
    long maxLength = 0, totalLength = 0;

    @SuppressWarnings("resource")
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity(), pl);
    chunkedHashStore.reset(r.nextLong());
    @SuppressWarnings("resource")
    OfflineIterable<BitVector, LongArrayBitVector> lcps = new OfflineIterable<BitVector, LongArrayBitVector>(
            BitVectors.OFFLINE_SERIALIZER, LongArrayBitVector.getInstance());
    pl.expectedUpdates = n;
    pl.start("Scanning collection...");

    Iterator<? extends T> iterator = iterable.iterator();
    for (long b = 0; b < numBuckets; b++) {
        prev.replace(transform.toBitVector(iterator.next()));
        chunkedHashStore.add(prev);
        pl.lightUpdate();
        maxLength = Math.max(maxLength, prev.length());
        totalLength += Fast.length(1 + prev.length());
        currLcp = (int) prev.length();
        final int currBucketSize = (int) Math.min(bucketSize, n - b * bucketSize);

        for (int i = 0; i < currBucketSize - 1; i++) {
            curr.replace(transform.toBitVector(iterator.next()));
            chunkedHashStore.add(curr);
            pl.lightUpdate();
            final int prefix = (int) curr.longestCommonPrefixLength(prev);
            if (prefix == prev.length() && prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not distinct");
            if (prefix == prev.length() || prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not prefix-free");
            if (prev.getBoolean(prefix))
                throw new IllegalArgumentException("The input bit vectors are not lexicographically sorted");

            currLcp = Math.min(prefix, currLcp);
            prev.replace(curr);

            maxLength = Math.max(maxLength, prev.length());
            totalLength += Fast.length(1 + prev.length());
        }

        lcps.add(prev.subVector(0, currLcp));
        maxLcp = Math.max(maxLcp, currLcp);
        minLcp = Math.min(minLcp, currLcp);
    }

    pl.done();

    // Build function assigning each lcp to its bucket.
    lcp2Bucket = new GOV3Function.Builder<BitVector>().keys(lcps).transform(TransformationStrategies.identity())
            .build();
    final int[][] lcpLength = IntBigArrays.newBigArray(lcps.size64());
    long p = 0;
    for (LongArrayBitVector bv : lcps)
        IntBigArrays.set(lcpLength, p++, (int) bv.length());

    if (DEBUG) {
        for (BitVector v : lcps)
            System.err.println(v + " " + v.length());
        p = 0; // reset the counter used above to fill lcpLength before checking bucket positions
        for (BitVector v : lcps) {
            final long value = lcp2Bucket.getLong(v);
            if (p++ != value) {
                System.err.println("p: " + (p - 1) + "  value: " + value + " key:" + v);
                throw new AssertionError();
            }
        }
    }

    lcps.close();

    final Iterable<BitVector> bitVectors = TransformationStrategies.wrap(iterable, transform);
    // Build mph on elements.
    mph = new GOVMinimalPerfectHashFunction.Builder<BitVector>().keys(bitVectors)
            .transform(TransformationStrategies.identity()).store(chunkedHashStore).build();
    this.seed = chunkedHashStore.seed();

    // Build function assigning the lcp length and the bucketing data to each element.
    (offsets = LongArrayBitVector.getInstance().asLongBigList(log2BucketSize)).size(n);
    LongBigList lcpLengthsTemp = LongArrayBitVector.getInstance().asLongBigList(Fast.length(maxLcp));
    lcpLengthsTemp.size(n);

    LOGGER.info("Generating data tables...");

    for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
        for (long[] quadruple : chunk) {
            final long index = mph.getLongByTriple(quadruple);
            offsets.set(index, quadruple[3] & bucketSizeMask);
            lcpLengthsTemp.set(index, IntBigArrays.get(lcpLength, (int) (quadruple[3] >> log2BucketSize)));
        }
    }

    chunkedHashStore.close();

    lcpLengths = new EliasFanoLongBigList(lcpLengthsTemp.iterator(), minLcp, true);

    if (DEBUG) {
        p = 0;
        for (T key : iterable) {
            BitVector bv = transform.toBitVector(key);
            long index = mph.getLong(bv);
            if (p++ != lcp2Bucket.getLong(bv.subVector(0, lcpLengths.getLong(index))) * bucketSize
                    + offsets.getLong(index)) {
                System.err.println("p: " + (p - 1) + "  Key: " + key + " bucket size: " + bucketSize + " lcp "
                        + transform.toBitVector(key).subVector(0, lcpLengths.getLong(index)) + " lcp length: "
                        + lcpLengths.getLong(index) + " bucket "
                        + lcp2Bucket.getLong(transform.toBitVector(key).subVector(0, lcpLengths.getLong(index)))
                        + " offset: " + offsets.getLong(index));
                throw new AssertionError();
            }
        }
    }

    LOGGER.debug("Bucket size: " + bucketSize);
    final double avgLength = (double) totalLength / n;
    LOGGER.debug("Forecast bit cost per element: " + (2 * GOV3Function.C + 2 + avgLength + Fast.log2(avgLength)
            + Fast.log2(Math.E) - Fast.log2(Fast.log2(Math.E)) + Fast.log2(1 + Fast.log2(n))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / n);
}

From source file:com.facebook.presto.operator.scalar.MathFunctions.java

@Description("Euler's number")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double e() {
    return Math.E;
}

From source file:geogebra.common.kernel.cas.AlgoSurdText.java

/**
 * Goal: modifies a StringBuilder object sb to be a radical up to quartic
 * roots. The precision is adapted according to the setting.
 *
 * @param sb
 * @param num
 * @param tpl
 */
protected void PSLQappendGeneral(StringBuilder sb, double num, StringTemplate tpl) {

    // Zero Test: Is num 0?
    if (Kernel.isZero(num)) {
        sb.append(kernel.format(0, tpl));
        return;
    }

    // Rational Number Test. num is not 0. Is num rational (with small
    // denominator <= 1000) ?
    AlgebraicFit fitter = new AlgebraicFit(null, null, AlgebraicFittingType.RATIONAL_NUMBER, tpl);
    fitter.setCoeffBound(1000);
    fitter.compute(num);

    ValidExpression ve = sbToCAS(fitter.formalSolution);

    if (fitter.formalSolution.length() > 0 && Kernel.isEqual(ve.evaluateDouble(), num)) {
        sb.append(kernel.getGeoGebraCAS().evaluateGeoGebraCAS(ve, null, tpl, kernel));
        return;
    }

    double[] testValues;
    String[] testNames;

    if (list != null) {

        ArrayList<Double> values = new ArrayList<Double>();
        ArrayList<String> names = new ArrayList<String>();

        for (int i = 0; i < list.size(); i++) {
            double x = list.get(i).evaluateDouble();

            if (Kernel.isEqual(x, Math.PI)) {
                values.add(Math.PI);
                names.add("pi");
            } else if (Kernel.isEqual(x, 1 / Math.PI)) {
                values.add(1 / Math.PI);
                names.add("1/pi");
            } else if (Kernel.isEqual(x, Math.PI * Math.PI)) {
                values.add(Math.PI * Math.PI);
                names.add("pi^2");
            } else if (Kernel.isEqual(x, Math.sqrt(Math.PI))) {
                values.add(Math.sqrt(Math.PI));
                names.add("sqrt(pi)");
            } else if (Kernel.isEqual(x, Math.E)) {
                values.add(Math.E);
                names.add(Unicode.EULER_STRING);
            } else if (Kernel.isEqual(x, 1 / Math.E)) {
                values.add(1 / Math.E);
                names.add("1/" + Unicode.EULER_STRING);
            } else if (Kernel.isEqual(x, Math.E * Math.E)) {
                values.add(Math.E * Math.E);
                names.add(Unicode.EULER_STRING + "^2");
            } else if (Kernel.isEqual(x, Math.sqrt(Math.E))) {
                values.add(Math.sqrt(Math.E));
                names.add("sqrt(" + Unicode.EULER_STRING + ")");
            } else {
                int j;
                for (j = 2; j < 100; j++) {
                    double sqrt = Math.sqrt(j);
                    if (!Kernel.isInteger(sqrt) && Kernel.isEqual(x, sqrt)) {
                        values.add(sqrt);
                        names.add("sqrt(" + j + ")");
                        break;
                    }

                    double ln = Math.log(j);
                    if (Kernel.isEqual(x, ln)) {
                        values.add(ln);
                        names.add("ln(" + j + ")");
                        break;
                    }
                }
            }
        }

        testValues = new double[values.size()];
        testNames = new String[values.size()];

        for (int i = 0; i < values.size(); i++) {
            testValues[i] = values.get(i);
            testNames[i] = names.get(i);

            // App.debug(testNames[i]);
        }

    } else {

        // default constants if none supplied
        testValues = new double[] { Math.sqrt(2.0), Math.sqrt(3.0), Math.sqrt(5.0), Math.sqrt(6.0),
                Math.sqrt(7.0), Math.sqrt(10.0), Math.PI };
        testNames = new String[] { "sqrt(2)", "sqrt(3)", "sqrt(5)", "sqrt(6)", "sqrt(7)", "sqrt(10)", "pi" };
    }

    boolean success = fitLinearComb(num, testNames, testValues, 100, sb, tpl);

    if (success) {
        return;
    }

    sb.append(kernel.format(num, StringTemplate.maxPrecision));

}
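
The constant-recognition step above amounts to comparing the input against a small table of named values within a tolerance. A standalone sketch of that idea, using a plain epsilon where the GeoGebra code uses Kernel.isEqual:

    static String matchEulerConstant(double x) {
        double[] values = { Math.E, 1 / Math.E, Math.E * Math.E, Math.sqrt(Math.E) };
        String[] names = { "e", "1/e", "e^2", "sqrt(e)" };
        for (int i = 0; i < values.length; i++) {
            if (Math.abs(x - values[i]) < 1e-8) {
                return names[i];
            }
        }
        return null; // no match; the caller falls back to a plain numeric string
    }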