List of usage examples for java.lang.Math.E

public static final double E: the double value that is closer than any other to e, the base of the natural logarithms.
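Before the project-level examples below, a minimal sketch of what the constant provides and how it relates to Math.exp and Math.log (the class name MathEDemo is illustrative, not from any of the source files):

public class MathEDemo {
    public static void main(String[] args) {
        System.out.println(Math.E);           // 2.718281828459045
        System.out.println(Math.exp(1.0));    // e^1; agrees with Math.E to within 1 ulp
        System.out.println(Math.log(Math.E)); // natural log of e: 1.0 (up to rounding)
    }
}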
From source file: org.geogebra.common.kernel.cas.AlgoSurdText.java
/**
 * Goal: modifies a StringBuilder object sb to be a radical up to quartic
 * roots. The precision is adapted according to the setting.
 *
 * @param sb
 * @param num
 * @param tpl
 */
protected void PSLQappendGeneral(StringBuilder sb, double num, StringTemplate tpl) {
    // Zero test: is num 0?
    if (Kernel.isZero(num)) {
        sb.append(kernel.format(0, tpl));
        return;
    }

    // Rational number test. num is not 0. Is num rational (with small denominator <= 1000)?
    AlgebraicFit fitter = new AlgebraicFit(null, null, AlgebraicFittingType.RATIONAL_NUMBER, tpl);
    fitter.setCoeffBound(1000);
    fitter.compute(num);

    ValidExpression ve = sbToCAS(fitter.formalSolution);
    if (fitter.formalSolution.length() > 0 && Kernel.isEqual(ve.evaluateDouble(), num)) {
        sb.append(kernel.getGeoGebraCAS().evaluateGeoGebraCAS(ve, null, tpl, null, kernel));
        return;
    }

    double[] testValues;
    String[] testNames;

    if (list != null) {
        ArrayList<Double> values = new ArrayList<Double>();
        ArrayList<String> names = new ArrayList<String>();

        for (int i = 0; i < list.size(); i++) {
            double x = list.get(i).evaluateDouble();

            if (Kernel.isEqual(x, Math.PI)) {
                values.add(Math.PI);
                names.add("pi");
            } else if (Kernel.isEqual(x, 1 / Math.PI)) {
                values.add(1 / Math.PI);
                names.add("1/pi");
            } else if (Kernel.isEqual(x, Math.PI * Math.PI)) {
                values.add(Math.PI * Math.PI);
                names.add("pi^2");
            } else if (Kernel.isEqual(x, Math.sqrt(Math.PI))) {
                values.add(Math.sqrt(Math.PI));
                names.add("sqrt(pi)");
            } else if (Kernel.isEqual(x, Math.E)) {
                values.add(Math.E);
                names.add(Unicode.EULER_STRING);
            } else if (Kernel.isEqual(x, 1 / Math.E)) {
                values.add(1 / Math.E);
                names.add("1/" + Unicode.EULER_STRING);
            } else if (Kernel.isEqual(x, Math.E * Math.E)) {
                values.add(Math.E * Math.E);
                names.add(Unicode.EULER_STRING + "^2");
            } else if (Kernel.isEqual(x, Math.sqrt(Math.E))) {
                values.add(Math.sqrt(Math.E));
                names.add("sqrt(" + Unicode.EULER_STRING + ")");
            } else {
                int j;
                for (j = 2; j < 100; j++) {
                    double sqrt = Math.sqrt(j);
                    if (!Kernel.isInteger(sqrt) && Kernel.isEqual(x, sqrt)) {
                        values.add(sqrt);
                        names.add("sqrt(" + j + ")");
                        break;
                    }
                    double ln = Math.log(j);
                    if (Kernel.isEqual(x, ln)) {
                        values.add(ln);
                        names.add("ln(" + j + ")");
                        break;
                    }
                }
            }
        }

        testValues = new double[values.size()];
        testNames = new String[values.size()];
        for (int i = 0; i < values.size(); i++) {
            testValues[i] = values.get(i);
            testNames[i] = names.get(i);
            // App.debug(testNames[i]);
        }
    } else {
        // default constants if none supplied
        testValues = new double[] { Math.sqrt(2.0), Math.sqrt(3.0), Math.sqrt(5.0), Math.sqrt(6.0),
                Math.sqrt(7.0), Math.sqrt(10.0), Math.PI };
        testNames = new String[] { "sqrt(2)", "sqrt(3)", "sqrt(5)", "sqrt(6)", "sqrt(7)", "sqrt(10)", "pi" };
    }

    boolean success = fitLinearComb(num, testNames, testValues, 100, sb, tpl);
    if (success) {
        return;
    }

    sb.append(kernel.format(num, StringTemplate.maxPrecision));
}
From source file: org.gephi.statistics.plugin.DegreeDistribution.java
/**
 * @return The undirected version of this report.
 */
private String getUndirectedReport() {
    double max = 0;
    XYSeries series2 = new XYSeries("Series 2");
    for (int i = 1; i < combinedDistribution[1].length; i++) {
        if (combinedDistribution[1][i] > 0) {
            series2.add((Math.log(combinedDistribution[0][i]) / Math.log(Math.E)),
                    (Math.log(combinedDistribution[1][i]) / Math.log(Math.E)));
            max = (float) Math.max((Math.log(combinedDistribution[0][i]) / Math.log(Math.E)), max);
        }
    }
    double a = combinedAlpha;
    double b = combinedBeta;
    XYSeries series1 = new XYSeries(combinedAlpha + " ");
    series1.add(0, a);
    series1.add(max, a + b * max);

    XYSeriesCollection dataset = new XYSeriesCollection();
    dataset.addSeries(series1);
    dataset.addSeries(series2);

    JFreeChart chart = ChartFactory.createXYLineChart("Degree Distribution", "Degree", "Occurrence", dataset,
            PlotOrientation.VERTICAL, true, false, false);
    XYPlot plot = (XYPlot) chart.getPlot();
    XYLineAndShapeRenderer renderer = new XYLineAndShapeRenderer();
    renderer.setSeriesLinesVisible(0, true);
    renderer.setSeriesShapesVisible(0, false);
    renderer.setSeriesLinesVisible(1, false);
    renderer.setSeriesShapesVisible(1, true);
    renderer.setSeriesShape(1, new java.awt.geom.Ellipse2D.Double(0, 0, 1, 1));
    plot.setBackgroundPaint(java.awt.Color.WHITE);
    plot.setDomainGridlinePaint(java.awt.Color.GRAY);
    plot.setRangeGridlinePaint(java.awt.Color.GRAY);
    plot.setRenderer(renderer);

    String imageFile = "";
    try {
        final ChartRenderingInfo info = new ChartRenderingInfo(new StandardEntityCollection());
        TempDir tempDir = TempDirUtils.createTempDir();
        final String fileName = "distribution.png";
        final File file1 = tempDir.createFile(fileName);
        imageFile = "<IMG SRC=\"file:" + file1.getAbsolutePath() + "\" "
                + "WIDTH=\"600\" HEIGHT=\"400\" BORDER=\"0\" USEMAP=\"#chart\"></IMG>";
        ChartUtilities.saveChartAsPNG(file1, chart, 600, 400, info);
    } catch (IOException e) {
        System.out.println(e.toString());
    }

    String report = "<HTML> <BODY> <h1>Degree Distribution Metric Report </h1> " + "<hr>" + "<br>"
            + "<h2> Parameters: </h2>" + "Network Interpretation: " + (isDirected ? "directed" : "undirected")
            + "<br>" + "<br> <h2> Results: </h2>" + "Degree Power Law: -" + combinedAlpha + "\n <BR>" + imageFile
            + "</BODY> </HTML>";
    return report;
}
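Note on the example above: Math.log in Java is already the natural (base-e) logarithm, so dividing by Math.log(Math.E) only divides by 1.0 (up to rounding). A minimal sketch of the equivalence (the class name is illustrative):

public class LogBaseEDemo {
    public static void main(String[] args) {
        double x = 10.0;
        double viaDivision = Math.log(x) / Math.log(Math.E); // base-e log, computed the long way
        double direct = Math.log(x);                         // Math.log is already the natural log
        System.out.println(viaDivision + " ~ " + direct);    // equal up to floating-point rounding
    }
}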
From source file: org.mobicents.media.server.impl.rtcp.RtcpHandlerTest.java
@Test
public void testJoinAndLeaveRtpSession() throws InterruptedException {
    // given
    // senders > members * 0.25 AND we_sent == false, then C = avg_rtcp_size / rtcp_bw
    double c = statistics.getRtcpAvgSize() / RtpStatistics.RTCP_DEFAULT_BW;
    // senders > members * 0.25, then n = members
    int n = statistics.getMembers();
    // initial == true, then Tmin = 2.5 seconds
    double tMin = RtcpIntervalCalculator.INITIAL_MIN_TIME;
    double tD = Math.max(tMin, n * c);
    // T = [Td * 0.5; Td * 1.5]
    // t = T / (e - 3/2)
    double compensation = Math.E - (3.0 / 2.0);
    long lowT = (long) (((tD * 0.5) / compensation) * 1000);
    long highT = (long) (((tD * 1.5) / compensation) * 1000);

    // when
    handler.joinRtpSession();
    long nextReport = handler.getNextScheduledReport();
    // give the handler time to supposedly send the initial report
    Thread.sleep(handler.getNextScheduledReport());

    // then
    Assert.assertTrue(handler.isJoined());
    String msg = String.format(INTERVAL_RANGE, nextReport, lowT, highT);
    Assert.assertTrue(msg, nextReport >= lowT);
    Assert.assertTrue(msg, nextReport <= highT);
    Assert.assertEquals(RtcpPacketType.RTCP_REPORT, statistics.getRtcpPacketType());
    // Notice the handler will still be in the initial stage since it cannot send packets
    // (we have not set a proper data channel)
    Assert.assertTrue(handler.isInitial());

    // when
    /*
     * When the participant decides to leave the system, tp is reset to tc,
     * the current time, members and pmembers are initialized to 1, initial
     * is set to 1, we_sent is set to false, senders is set to 0, and
     * avg_rtcp_size is set to the size of the compound BYE packet.
     *
     * The calculated interval T is computed. The BYE packet is then
     * scheduled for time tn = tc + T.
     */
    handler.leaveRtpSession();

    // then
    Assert.assertFalse(handler.isJoined());
    Assert.assertEquals(RtcpPacketType.RTCP_BYE, statistics.getRtcpPacketType());
}
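For context, a standalone sketch of the RFC 3550 interval computation the test checks, with Math.E supplying the e - 3/2 compensation factor; the method and variable names here are assumptions for illustration, not the handler's API:

import java.util.concurrent.ThreadLocalRandom;

public class RtcpIntervalSketch {
    // deterministic interval Td = max(Tmin, n * C), randomized over [0.5*Td, 1.5*Td),
    // then divided by the compensation factor e - 3/2 (RFC 3550, section 6.3.1)
    static double interval(double avgRtcpSize, double rtcpBw, int members, boolean initial) {
        double c = avgRtcpSize / rtcpBw;
        double tMin = initial ? 2.5 : 5.0; // seconds
        double tD = Math.max(tMin, members * c);
        double t = tD * (0.5 + ThreadLocalRandom.current().nextDouble());
        return t / (Math.E - 1.5);
    }
}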
From source file: com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitorTest.java
@Test
public void testSameValueAdapter() {
    final Double value = Math.PI;
    final InstrumentDerivativeVisitor<Double, Double> visitor = new InstrumentDerivativeVisitorSameValueAdapter<>(
            value);
    for (final InstrumentDerivative derivative : ALL_DERIVATIVES) {
        assertEquals(value, derivative.accept(visitor));
        assertEquals(value, derivative.accept(visitor, Math.E));
    }
}
From source file: com.tc.object.ApplicatorDNAEncodingTest.java
public void testBasic() throws Exception {
    final TCByteBufferOutputStream output = new TCByteBufferOutputStream();

    final List<Object> data = new ArrayList<Object>();
    data.add(new ObjectID(1));
    data.add("one");
    data.add(new Boolean(true));
    data.add("two");
    data.add(new Byte((byte) 42));
    data.add("three");
    data.add(new Character('\t'));
    data.add("four");
    data.add(new Double(Math.PI));
    data.add("five");
    data.add(new Float(Math.E));
    data.add("six");
    data.add(new Integer(Integer.MAX_VALUE));
    data.add("seven");
    data.add(new Long(System.currentTimeMillis() % 17));
    data.add("eight");
    data.add(new Short((short) -1));

    final DNAEncoding encoding = getApplicatorEncoding();
    for (Object d : data) {
        encoding.encode(d, output);
    }

    final TCByteBufferInputStream input = new TCByteBufferInputStream(output.toArray());
    for (Object orig : data) {
        final Object decoded = encoding.decode(input);
        assertEquals(orig, decoded);
    }
    assertEquals(0, input.available());
}
From source file: it.unimi.dsi.sux4j.mph.TwoStepsLcpMonotoneMinimalPerfectHashFunction.java
/**
 * Creates a new two-steps LCP monotone minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash.
 * @param numKeys the number of keys, or -1 if the number of keys is not known (will be computed).
 * @param transform a transformation strategy for the keys.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 */
@SuppressWarnings("unused")
protected TwoStepsLcpMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> keys, final long numKeys,
        final TransformationStrategy<? super T> transform, final int signatureWidth, final File tempDir)
        throws IOException {
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    this.transform = transform;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();

    if (numKeys == -1) {
        if (keys instanceof Size64)
            n = ((Size64) keys).size64();
        else if (keys instanceof Collection)
            n = ((Collection<?>) keys).size();
        else {
            long c = 0;
            for (T dummy : keys)
                c++;
            n = c;
        }
    } else
        n = numKeys;

    defRetValue = -1; // For the very few cases in which we can decide

    if (n == 0) {
        seed = bucketSize = bucketSizeMask = log2BucketSize = 0;
        lcp2Bucket = null;
        offsets = null;
        lcpLengths = null;
        signatureMask = 0;
        signatures = null;
        return;
    }

    int t = (int) Math.ceil(1 + GOV3Function.C * Math.log(2) + Math.log(n) - Math.log(1 + Math.log(n)));
    log2BucketSize = Fast.ceilLog2(t);
    bucketSize = 1 << log2BucketSize;
    bucketSizeMask = bucketSize - 1;
    LOGGER.debug("Bucket size: " + bucketSize);

    final long numBuckets = (n + bucketSize - 1) / bucketSize;

    LongArrayBitVector prev = LongArrayBitVector.getInstance();
    LongArrayBitVector curr = LongArrayBitVector.getInstance();
    int currLcp = 0;
    @SuppressWarnings("resource")
    final OfflineIterable<BitVector, LongArrayBitVector> lcps = new OfflineIterable<BitVector, LongArrayBitVector>(
            BitVectors.OFFLINE_SERIALIZER, LongArrayBitVector.getInstance());
    final int[][] lcpLengths = IntBigArrays.newBigArray((n + bucketSize - 1) / bucketSize);
    int maxLcp = 0;
    long maxLength = 0;

    @SuppressWarnings("resource")
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity(), pl);
    chunkedHashStore.reset(r.nextLong());
    pl.expectedUpdates = n;
    pl.start("Scanning collection...");

    Iterator<? extends T> iterator = keys.iterator();
    for (long b = 0; b < numBuckets; b++) {
        prev.replace(transform.toBitVector(iterator.next()));
        chunkedHashStore.add(prev);
        pl.lightUpdate();
        maxLength = Math.max(maxLength, prev.length());
        currLcp = (int) prev.length();
        final int currBucketSize = (int) Math.min(bucketSize, n - b * bucketSize);

        for (int i = 0; i < currBucketSize - 1; i++) {
            curr.replace(transform.toBitVector(iterator.next()));
            chunkedHashStore.add(curr);
            pl.lightUpdate();
            final int prefix = (int) curr.longestCommonPrefixLength(prev);
            if (prefix == prev.length() && prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not distinct");
            if (prefix == prev.length() || prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not prefix-free");
            if (prev.getBoolean(prefix))
                throw new IllegalArgumentException("The input bit vectors are not lexicographically sorted");
            currLcp = Math.min(prefix, currLcp);
            prev.replace(curr);
            maxLength = Math.max(maxLength, prev.length());
        }

        lcps.add(prev.subVector(0, currLcp));
        IntBigArrays.set(lcpLengths, b, currLcp);
        maxLcp = Math.max(maxLcp, currLcp);
    }
    pl.done();

    // We must be sure that both functions are built on the same store.
    chunkedHashStore.checkAndRetry(TransformationStrategies.wrap(keys, transform));
    this.seed = chunkedHashStore.seed();

    if (ASSERTS) {
        ObjectOpenHashSet<BitVector> s = new ObjectOpenHashSet<BitVector>();
        for (LongArrayBitVector bv : lcps)
            s.add(bv.copy());
        assert s.size() == lcps.size() : s.size() + " != " + lcps.size(); // No duplicates.
    }

    // Build function assigning each lcp to its bucket.
    lcp2Bucket = new GOV3Function.Builder<BitVector>().keys(lcps).transform(TransformationStrategies.identity())
            .build();

    if (DEBUG) {
        int p = 0;
        for (BitVector v : lcps)
            System.err.println(v + " " + v.length());
        for (BitVector v : lcps) {
            final long value = lcp2Bucket.getLong(v);
            if (p++ != value) {
                System.err.println("p: " + (p - 1) + " value: " + value + " key:" + v);
                throw new AssertionError();
            }
        }
    }

    lcps.close();

    // Build function assigning the bucket offset to each element.
    offsets = new GOV3Function.Builder<BitVector>().store(chunkedHashStore).values(new AbstractLongBigList() {
        public long getLong(long index) {
            return index & bucketSizeMask;
        }

        public long size64() {
            return n;
        }
    }, log2BucketSize).indirect().build();

    // Build function assigning the lcp length to each element.
    this.lcpLengths = new TwoStepsGOV3Function.Builder<BitVector>().store(chunkedHashStore)
            .values(new AbstractLongBigList() {
                public long getLong(long index) {
                    return IntBigArrays.get(lcpLengths, index >>> log2BucketSize);
                }

                public long size64() {
                    return n;
                }
            }).build();

    // Build function assigning the lcp length and the bucketing data to each element.
    final double p = 1.0 / (this.lcpLengths.rankMean + 1);
    final double s = s(p, this.lcpLengths.width);

    LOGGER.debug("Forecast best threshold: " + s);

    if (DEBUG) {
        int j = 0;
        for (T key : keys) {
            BitVector bv = transform.toBitVector(key);
            if (j++ != lcp2Bucket.getLong(bv.subVector(0, this.lcpLengths.getLong(bv))) * bucketSize
                    + offsets.getLong(bv)) {
                System.err.println("p: " + (j - 1) + " Key: " + key + " bucket size: " + bucketSize + " lcp "
                        + transform.toBitVector(key).subVector(0, this.lcpLengths.getLong(bv)) + " lcp length: "
                        + this.lcpLengths.getLong(bv) + " bucket "
                        + lcp2Bucket.getLong(transform.toBitVector(key).subVector(0, this.lcpLengths.getLong(bv)))
                        + " offset: " + offsets.getLong(bv));
                throw new AssertionError();
            }
        }
    }

    double secondFunctionForecastBitsPerElement = (s + GOV3Function.C
            + (Math.pow(2, s) - 1) * this.lcpLengths.width / n
            + (this.lcpLengths.width + GOV3Function.C) * (Math.pow(1 - p, Math.pow(2, s) + 1)));

    LOGGER.debug("Forecast bit cost per element: "
            + (log2BucketSize + GOV3Function.C + secondFunctionForecastBitsPerElement + (Fast.log2(Math.E))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        chunkedHashStore.filter(null); // two-steps functions use filtering.
        signatures = chunkedHashStore.signatures(signatureWidth, pl);
    } else {
        signatureMask = 0;
        signatures = null;
    }

    chunkedHashStore.close();
}
From source file: edu.rice.cs.bioinfo.programs.phylonet.commands.SearchBranchLengthsMaxGTProb.java
@Override
protected String produceResult() {
    StringBuffer result = new StringBuffer();

    final List<Tree> geneTrees = new ArrayList<Tree>();
    final List<Integer> counter = new ArrayList<Integer>();
    for (NetworkNonEmpty geneTree : _geneTrees) {
        String phylonetGeneTree = NetworkTransformer.toENewickTree(geneTree);
        NewickReader nr = new NewickReader(new StringReader(phylonetGeneTree));
        STITree<Double> newtr = new STITree<Double>(true);
        try {
            nr.readTree(newtr);
        } catch (Exception e) {
            errorDetected.execute(e.getMessage(), this._motivatingCommand.getLine(),
                    this._motivatingCommand.getColumn());
        }
        boolean found = false;
        int index = 0;
        for (Tree tr : geneTrees) {
            if (Trees.haveSameRootedTopology(tr, newtr)) {
                found = true;
                break;
            }
            index++;
        }
        if (found) {
            counter.set(index, counter.get(index) + 1);
        } else {
            geneTrees.add(newtr);
            counter.add(1);
        }
    }

    NetworkFactoryFromRNNetwork transformer = new NetworkFactoryFromRNNetwork();
    final Network<Double> speciesNetwork = transformer.makeNetwork(_speciesNetwork);

    /*
     * Make the branch length of every edge initially 1 if no initial user value is specified.
     * Make the hybrid prob of every hybrid edge initially .5 if no initial user value is specified.
     */
    for (NetNode<Double> parent : speciesNetwork.bfs()) {
        for (NetNode<Double> child : parent.getChildren()) {
            double initialBL = child.getParentDistance(parent);
            if (initialBL == NetNode.NO_DISTANCE || Double.isNaN(initialBL)) // no specification from user on branch length
                initialBL = 1.0;
            child.setParentDistance(parent, initialBL);

            if (child.getParentCount() == 2) {
                for (NetNode<Double> hybridParent : child.getParents()) {
                    if (child.getParentProbability(hybridParent) == 1.0) // no specification from user on hybrid prob
                    {
                        child.setParentProbability(parent, 0.5);
                    }
                }
            }
        }
    }

    /*
     * Try to assign branch lengths and hybrid probs to increase GTProb from the initial network,
     * except branch lengths of leaf edges; they don't impact GTProb.
     */
    // def: a round is an attempt to tweak each branch length and each hybrid prob.
    boolean continueRounds = true; // keep trying to improve network
    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(
            _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter)); // records the GTProb of the network at all times

    int assigmentRound = 0;
    for (; assigmentRound < _assigmentRounds && continueRounds; assigmentRound++) {
        /*
         * Prepare a random ordering of network edge examinations, each of which attempts to change
         * a branch length or hybrid prob to improve the GTProb score.
         */
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here; will execute them one by one later.

        // add branch length adjustments to the list
        for (final NetNode<Double> parent : speciesNetwork.bfs()) {
            for (final NetNode<Double> child : parent.getChildren()) {
                if (!parent.isRoot()) // jd tmp
                    continue;
                if (child.isLeaf()) // leaf edge, skip
                    continue;

                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedBranchLength) { // brent suggests a new branch length
                                double incumbentBranchLength = child.getParentDistance(parent);
                                // mutate and see if it yields an improved network
                                child.setParentDistance(parent, suggestedBranchLength);
                                double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter);

                                RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>();
                                StringWriter sw = new StringWriter();
                                rnNewickPrinter.print(speciesNetwork, sw);
                                // String inferredNetwork = sw.toString();
                                // System.out.println(inferredNetwork + "\t" + lnProb);

                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                    // System.out.println("(improved)");
                                } else // didn't improve, roll back change
                                {
                                    child.setParentDistance(parent, incumbentBranchLength);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize,
                                    GoalType.MAXIMIZE, Double.MIN_VALUE, _maxBranchLength);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                        // System.out.println("-----------------------------------------------------------------------");
                    }
                });
            }
        }

        // add hybrid probs to hybrid edges
        for (final NetNode<Double> child : speciesNetwork.bfs()) // find every hybrid node
        {
            if (child.isRoot()) // calling getParentNumber on root causes NPE. Bug workaround.
                continue;

            if (child.getParentCount() == 2) // hybrid node
            {
                Iterator<NetNode<Double>> hybridParents = child.getParents().iterator();
                final NetNode<Double> hybridParent1 = hybridParents.next();
                final NetNode<Double> hybridParent2 = hybridParents.next();

                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedProb) {
                                double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);
                                // try new pair of hybrid probs
                                child.setParentProbability(hybridParent1, suggestedProb);
                                child.setParentProbability(hybridParent2, 1.0 - suggestedProb);
                                double lnProb = _computeGTProbStrategy.execute(speciesNetwork, geneTrees, counter);

                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                } else // change did not improve, roll back
                                {
                                    child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                    child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(.000000000001, .0000000000000001); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxAssigmentAttemptsPerBranchParam, functionToOptimize,
                                    GoalType.MAXIMIZE, 0, 1.0);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                    }
                });
            }
        }

        // Collections.shuffle(assigmentActions); // randomize the order we will try to adjust network edge properties

        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }

        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt the last round, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt the improvement threshold to continue searching
        {
            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }

    RnNewickPrinter<Double> rnNewickPrinter = new RnNewickPrinter<Double>();
    StringWriter sw = new StringWriter();
    rnNewickPrinter.print(speciesNetwork, sw);
    String inferredNetwork = sw.toString();

    this.richNewickGenerated(inferredNetwork);

    result.append("\nTotal log probability: " + lnGtProbOfSpeciesNetwork.getContents() + ": " + inferredNetwork);
    return result.toString();
}
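The stopping test above (and in the two examples that follow) converts a log-likelihood gain into a relative improvement with Math.pow(Math.E, delta) - 1. Math.expm1 computes the same quantity and is more accurate when delta is tiny, which is exactly the regime near convergence; a minimal sketch (the class name is illustrative):

public class Expm1Demo {
    public static void main(String[] args) {
        double delta = 1e-12; // a tiny log-probability gain, as near convergence
        double viaPow = Math.pow(Math.E, delta) - 1.0; // subtracting 1 cancels most significant digits
        double viaExpm1 = Math.expm1(delta);           // computes e^delta - 1 accurately for small arguments
        System.out.println(viaPow + " vs " + viaExpm1);
    }
}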
From source file: edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkPseudoLikelihoodFromGTT.java
protected double findOptimalBranchLength(final Network<Object> speciesNetwork,
        final Map<String, List<String>> species2alleles, final List tripleFrequencies,
        final List gtCorrespondence, final Set<String> singleAlleleSpecies) {
    boolean continueRounds = true; // keep trying to improve network

    for (NetNode<Object> node : speciesNetwork.dfs()) {
        for (NetNode<Object> parent : node.getParents()) {
            node.setParentDistance(parent, 1.0);
            if (node.isNetworkNode()) {
                node.setParentProbability(parent, 0.5);
            }
        }
    }

    Set<NetNode> node2ignoreForBL = findEdgeHavingNoBL(speciesNetwork);
    double initalProb = computeProbability(speciesNetwork, tripleFrequencies, species2alleles, gtCorrespondence);
    if (_printDetails)
        System.out.println(speciesNetwork.toString() + " : " + initalProb);

    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initalProb); // records the GTProb of the network at all times

    int roundIndex = 0;
    for (; roundIndex < _maxRounds && continueRounds; roundIndex++) {
        /*
         * Prepare a random ordering of network edge examinations, each of which attempts to change
         * a branch length or hybrid prob to improve the GTProb score.
         */
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here; will execute them one by one later.

        for (final NetNode<Object> parent : edu.rice.cs.bioinfo.programs.phylonet.structs.network.util.Networks
                .postTraversal(speciesNetwork)) {
            for (final NetNode<Object> child : parent.getChildren()) {
                if (node2ignoreForBL.contains(child)) {
                    continue;
                }

                assigmentActions.add(new Proc() {
                    public void execute() {
                        UnivariateFunction functionToOptimize = new UnivariateFunction() {
                            public double value(double suggestedBranchLength) {
                                double incumbentBranchLength = child.getParentDistance(parent);
                                child.setParentDistance(parent, suggestedBranchLength);
                                double lnProb = computeProbability(speciesNetwork, tripleFrequencies,
                                        species2alleles, gtCorrespondence);
                                // System.out.println(speciesNetwork + ": " + lnProb);
                                if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                                {
                                    lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                } else // didn't improve, roll back change
                                {
                                    child.setParentDistance(parent, incumbentBranchLength);
                                }
                                return lnProb;
                            }
                        };
                        BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                        try {
                            optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE,
                                    Double.MIN_VALUE, _maxBranchLength);
                        } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                        {
                        }
                        if (_printDetails)
                            System.out.println(
                                    speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());
                    }
                });
            }
        }

        for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node
        {
            Iterator<NetNode<Object>> hybridParents = child.getParents().iterator();
            final NetNode hybridParent1 = hybridParents.next();
            final NetNode hybridParent2 = hybridParents.next();

            assigmentActions.add(new Proc() {
                public void execute() {
                    UnivariateFunction functionToOptimize = new UnivariateFunction() {
                        public double value(double suggestedProb) {
                            double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);
                            child.setParentProbability(hybridParent1, suggestedProb);
                            child.setParentProbability(hybridParent2, 1.0 - suggestedProb);
                            double lnProb = computeProbability(speciesNetwork, tripleFrequencies,
                                    species2alleles, gtCorrespondence);
                            // System.out.println(speciesNetwork + ": " + lnProb);
                            if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                            {
                                lnGtProbOfSpeciesNetwork.setContents(lnProb);
                            } else // change did not improve, roll back
                            {
                                child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                            }
                            return lnProb;
                        }
                    };
                    BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                    try {
                        optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0);
                    } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                    {
                    }
                    if (_printDetails)
                        System.out.println(
                                speciesNetwork.toString() + " : " + lnGtProbOfSpeciesNetwork.getContents());
                }
            });
        } // add hybrid probs to hybrid edges

        Collections.shuffle(assigmentActions);

        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }

        if (_printDetails) {
            System.out.println("Round end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
            System.out.println(speciesNetwork.toString() + "\n" + lnGtProbOfSpeciesNetwork.getContents() + "\n");
        }

        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt the last round, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt the improvement threshold to continue searching
        {
            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }
    // System.out.println(speciesNetwork + " " + lnGtProbOfSpeciesNetwork.getContents());
    return lnGtProbOfSpeciesNetwork.getContents();
}
From source file: org.fhcrc.cpl.toolbox.statistics.BasicStatistics.java
/**
 * Calculate the density of the normal distribution function with parameters mu and sigma,
 * at point x.
 *
 * @param mu
 * @param sigma
 * @param x
 * @return
 */
public static double calcNormalDistDensity(double mu, double sigma, double x) {
    // sqrt(2 * pi) = 2.50662827463
    return Math.pow(Math.E, -.5 * Math.pow((x - mu) / sigma, 2)) / (sigma * 2.50662827463);
}
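The method above writes e^t as Math.pow(Math.E, t) and hard-codes 2.50662827463 for sqrt(2*pi). An equivalent sketch using Math.exp and Math.sqrt, which avoids the magic constant (an illustration, not the project's code):

public class NormalDensitySketch {
    public static double normalDensity(double mu, double sigma, double x) {
        double z = (x - mu) / sigma;
        // Math.exp(t) is equivalent to Math.pow(Math.E, t) and more direct;
        // Math.sqrt(2 * Math.PI) replaces the hard-coded constant 2.50662827463
        return Math.exp(-0.5 * z * z) / (sigma * Math.sqrt(2.0 * Math.PI));
    }
}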
From source file: edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkLikelihoodFromGTTBL.java
protected double findOptimalBranchLength(final Network<Object> speciesNetwork,
        final Map<String, List<String>> species2alleles, final List gts, final List gtCorrespondence,
        final Set<String> singleAlleleSpecies) {
    boolean continueRounds = true; // keep trying to improve network

    if (_pair2time == null) {
        computePairwiseCoalesceTime(gts, species2alleles);
    }
    // System.out.println("\n" + speciesNetwork);
    final Map<NetNode, Double> node2constraints = new Hashtable<NetNode, Double>();
    computeNodeHeightUpperbound(speciesNetwork, node2constraints);

    final Map<NetNode<Object>, Double> node2height = new Hashtable<NetNode<Object>, Double>();
    initializeNetwork(speciesNetwork, node2constraints, node2height);

    double initialProb = computeProbability(speciesNetwork, gts, species2alleles, gtCorrespondence);

    final Container<Double> lnGtProbOfSpeciesNetwork = new Container<Double>(initialProb); // records the GTProb of the network at all times
    final Container<Map<NetNode<Object>, Double>> node2heightContainer = new Container<Map<NetNode<Object>, Double>>(
            node2height);

    int roundIndex = 0;
    for (; roundIndex < _maxRounds && continueRounds; roundIndex++) {
        double lnGtProbLastRound = lnGtProbOfSpeciesNetwork.getContents();
        List<Proc> assigmentActions = new ArrayList<Proc>(); // store adjustment commands here; will execute them one by one later.

        for (final NetNode<Object> child : speciesNetwork.getNetworkNodes()) // find every hybrid node
        {
            Iterator<NetNode<Object>> hybridParents = child.getParents().iterator();
            final NetNode hybridParent1 = hybridParents.next();
            final NetNode hybridParent2 = hybridParents.next();

            assigmentActions.add(new Proc() {
                public void execute() {
                    UnivariateFunction functionToOptimize = new UnivariateFunction() {
                        public double value(double suggestedProb) {
                            double incumbentHybridProbParent1 = child.getParentProbability(hybridParent1);
                            child.setParentProbability(hybridParent1, suggestedProb);
                            child.setParentProbability(hybridParent2, 1.0 - suggestedProb);
                            double lnProb = computeProbability(speciesNetwork, gts, species2alleles,
                                    gtCorrespondence);
                            if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // change improved GTProb, keep it
                            {
                                lnGtProbOfSpeciesNetwork.setContents(lnProb);
                            } else // change did not improve, roll back
                            {
                                child.setParentProbability(hybridParent1, incumbentHybridProbParent1);
                                child.setParentProbability(hybridParent2, 1.0 - incumbentHybridProbParent1);
                            }
                            return lnProb;
                        }
                    };
                    BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                    try {
                        optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE, 0, 1.0);
                    } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                    {
                    }
                }
            });
        }

        for (final NetNode<Object> node : Networks.postTraversal(speciesNetwork)) {
            if (node.isLeaf()) {
                continue;
            }

            assigmentActions.add(new Proc() {
                public void execute() {
                    final Container<Double> minHeight = new Container<Double>(0.0);
                    final Container<Double> maxHeight = new Container<Double>(Double.MAX_VALUE);

                    for (NetNode<Object> child : node.getChildren()) {
                        double childHeight = node2heightContainer.getContents().get(child);
                        minHeight.setContents(Math.max(minHeight.getContents(), childHeight));
                    }

                    double minParent = Double.MAX_VALUE;
                    if (!node.isRoot()) {
                        for (NetNode<Object> parent : node.getParents()) {
                            double parentHeight = node2heightContainer.getContents().get(parent);
                            minParent = Math.min(minParent, parentHeight);
                        }
                    } else {
                        minParent = minHeight.getContents() + _maxBranchLength;
                    }

                    maxHeight.setContents(Math.min(minParent, node2constraints.get(node)));
                    // System.out.println("\nChanging node " + node.getName() + " from " + minHeight.getContents() + " to " + maxHeight.getContents());

                    UnivariateFunction functionToOptimize = new UnivariateFunction() {
                        public double value(double suggestedHeight) { // brent suggests a new branch length
                            double incumbentHeight = node2heightContainer.getContents().get(node);

                            for (NetNode<Object> child : node.getChildren()) {
                                child.setParentDistance(node,
                                        suggestedHeight - node2heightContainer.getContents().get(child));
                            }
                            if (!node.isRoot()) {
                                for (NetNode<Object> parent : node.getParents()) {
                                    node.setParentDistance(parent,
                                            node2heightContainer.getContents().get(parent) - suggestedHeight);
                                }
                            }

                            double lnProb = computeProbability(speciesNetwork, gts, species2alleles,
                                    gtCorrespondence);
                            // System.out.print("suggest: " + suggestedHeight + " " + lnProb + " vs. " + lnGtProbOfSpeciesNetwork.getContents() + ": ");

                            if (lnProb > lnGtProbOfSpeciesNetwork.getContents()) // did improve, keep change
                            {
                                lnGtProbOfSpeciesNetwork.setContents(lnProb);
                                node2heightContainer.getContents().put(node, suggestedHeight);
                                // System.out.println(" better ");
                            } else // didn't improve, roll back change
                            {
                                for (NetNode<Object> child : node.getChildren()) {
                                    child.setParentDistance(node,
                                            incumbentHeight - node2heightContainer.getContents().get(child));
                                }
                                if (!node.isRoot()) {
                                    for (NetNode<Object> parent : node.getParents()) {
                                        node.setParentDistance(parent,
                                                node2heightContainer.getContents().get(parent) - incumbentHeight);
                                    }
                                }
                                // System.out.println(" worse ");
                            }
                            return lnProb;
                        }
                    };
                    BrentOptimizer optimizer = new BrentOptimizer(_Brent1, _Brent2); // very small numbers so we control when brent stops, not brent.

                    try {
                        optimizer.optimize(_maxTryPerBranch, functionToOptimize, GoalType.MAXIMIZE,
                                minHeight.getContents(), maxHeight.getContents());
                    } catch (TooManyEvaluationsException e) // _maxAssigmentAttemptsPerBranchParam exceeded
                    {
                    }
                    // System.out.println(network2String(speciesNetwork) + " " + lnGtProbOfSpeciesNetwork.getContents());
                }
            });
        }

        Collections.shuffle(assigmentActions);

        for (Proc assigment : assigmentActions) // for each change attempt, perform attempt
        {
            assigment.execute();
        }

        if (((double) lnGtProbOfSpeciesNetwork.getContents()) == lnGtProbLastRound) // if no improvement was made wrt the last round, stop trying to find a better assignment
        {
            continueRounds = false;
        } else if (lnGtProbOfSpeciesNetwork.getContents() > lnGtProbLastRound) // improvement was made, ensure it is large enough wrt the improvement threshold to continue searching
        {
            double improvementPercentage = Math.pow(Math.E,
                    (lnGtProbOfSpeciesNetwork.getContents() - lnGtProbLastRound)) - 1.0; // how much did we improve over last round
            // System.out.println(improvementPercentage + " vs. " + _improvementThreshold);
            if (improvementPercentage < _improvementThreshold) // improved, but not enough to keep searching
            {
                continueRounds = false;
            }
        } else {
            throw new IllegalStateException("Should never have decreased prob.");
        }
    }
    // System.out.print("\n" + lnGtProbOfSpeciesNetwork.getContents() + ": " + speciesNetwork);
    return lnGtProbOfSpeciesNetwork.getContents();
}