List of usage examples for java.lang Double NEGATIVE_INFINITY
public static final double NEGATIVE_INFINITY
A constant holding the negative infinity of type double, equal to Double.longBitsToDouble(0xfff0000000000000L).
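Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of how the constant behaves: it prints as -Infinity, compares less than every finite double, which is why it is the usual seed for a running maximum, and it follows ordinary IEEE 754 arithmetic.

public class NegativeInfinityDemo {
    public static void main(String[] args) {
        // The constant is an ordinary IEEE 754 value, not an error marker.
        System.out.println(Double.NEGATIVE_INFINITY);                    // -Infinity
        System.out.println(Double.isInfinite(Double.NEGATIVE_INFINITY)); // true

        // It is smaller than every finite double, which makes it a safe seed
        // for a running maximum.
        double max = Double.NEGATIVE_INFINITY;
        for (double v : new double[] { -3.5, 0.0, 12.25 }) {
            if (v > max) {
                max = v;
            }
        }
        System.out.println(max); // 12.25

        // Arithmetic follows IEEE 754: adding a finite value keeps the infinity,
        // while subtracting an infinity from itself yields NaN.
        System.out.println(Double.NEGATIVE_INFINITY + 1.0);                      // -Infinity
        System.out.println(Double.NEGATIVE_INFINITY - Double.NEGATIVE_INFINITY); // NaN
    }
}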
From source file: beast.structuredCoalescent.distribution.ExactStructuredCoalescent.java

public double calculateLogP() {
    // Calculate the tree intervals (time between events, which nodes participate at an event, etc.)
    treeIntervalsInput.get().calculateIntervals();
    treeIntervalsInput.get().swap();

    // Set up for lineage state probabilities
    activeLineages = new ArrayList<Integer>();
    lineStateProbs = new ArrayList<Double>();

    // Compute likelihood at each integration time and tree event, starting at the final
    // sampling time and moving backwards
    logP = 0;

    // Initialize the line state probabilities
    // total number of intervals
    final int intervalCount = treeIntervalsInput.get().getIntervalCount();
    // counts in which interval we are in
    int t = 0;
    nr_lineages = 0;
    // Captures the probabilities of lineages being in a state
    double[] p;

    // Initialize the migration rates matrix
    int c = 0;
    for (int k = 0; k < states; k++) {
        for (int l = 0; l < states; l++) {
            if (k != l) {
                migration_rates[c] = migrationRatesInput.get().getArrayValue(c);
                migration_map[k][l] = c;
                c++;
            } else {
                coalescent_rates[k] = coalescentRatesInput.get().getArrayValue(k) / 2;
            }
        }
    }

    boolean first = true;
    // integrate until there are no more tree intervals
    do {
        double nextIntervalTime = treeIntervalsInput.get().getInterval(t);
        // Length of the current interval
        final double duration = nextIntervalTime; // - currTime;
        // if the current interval has a length greater than 0, integrate
        if (duration > 0) {
            p = new double[jointStateProbabilities.size()]; // Captures the probabilities of lineages being in a state
            // convert the array list to double[]
            for (int i = 0; i < jointStateProbabilities.size(); i++)
                p[i] = jointStateProbabilities.get(i);

            double[] p_for_ode = new double[p.length];
            double ts = timeStep;
            if (duration < timeStep)
                ts = duration / 2;

            // initialize integrator
            FirstOrderIntegrator integrator = new ClassicalRungeKuttaIntegrator(ts);
            // set the odes
            FirstOrderDifferentialEquations ode = new ode_integrator(migration_rates, coalescent_rates,
                    nr_lineages, states, connectivity, sums);
            // integrate
            integrator.integrate(ode, 0, p, duration, p_for_ode);

            // if the dimension is equal to the max integer, this means that a calculation
            // of a probability of a configuration resulted in a value below 0 and the
            // run will be stopped
            if (ode.getDimension() == Integer.MAX_VALUE) {
                System.out.println("lalalallal");
                return Double.NEGATIVE_INFINITY;
            }

            // set the probabilities of the system being in a configuration again
            for (int i = 0; i < p_for_ode.length; i++)
                jointStateProbabilities.set(i, p_for_ode[i]);
        }

        /*
         * compute contribution of event to likelihood
         */
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.COALESCENT) {
            nr_lineages--;
            logP += coalesce(t);
        }

        /*
         * add new lineage
         */
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.SAMPLE) {
            nr_lineages++;
            addLineages(t, first);
            first = false;
        }

        t++;
    } while (t < intervalCount);

    // Compute likelihood of remaining tree intervals (coalescent events occurring before origin)
    if (Double.isInfinite(logP))
        logP = Double.NEGATIVE_INFINITY;

    if (max_posterior < logP && logP < 0) {
        max_posterior = logP;
        max_mig = new double[states * (states - 1)];
        max_coal = new double[states];
        for (int i = 0; i < 1; i++)
            max_mig[i] = migrationRatesInput.get().getArrayValue(i);
        for (int i = 0; i < 1; i++)
            max_coal[i] = coalescentRatesInput.get().getArrayValue(i);
    }

    return logP;
}
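Returning Double.NEGATIVE_INFINITY from a log-likelihood calculation, as above, is the usual way to report "probability zero" so that a sampler comparing log-posteriors always rejects the state. Below is a minimal hedged sketch of that convention only; the lowerBound parameter and the density formula are illustrative and not part of the BEAST code above.

// A minimal sketch of the same convention, not taken from the BEAST class above.
static double logDensity(double x, double lowerBound) {
    if (x < lowerBound) {
        // The state has probability zero, so report log(0): a sampler that
        // compares log-posteriors will always reject it.
        return Double.NEGATIVE_INFINITY;
    }
    // Hypothetical unnormalized log-density for x >= lowerBound.
    return -(x - lowerBound);
}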
From source file: com.rapidminer.gui.plotter.charts.Abstract2DChartPlotter.java

private void prepareNumericalData() {
    this.nominal = false;
    dataSet = new DefaultXYZDataset();

    if (axis[X_AXIS] >= 0 && axis[Y_AXIS] >= 0) {
        this.minColor = Double.POSITIVE_INFINITY;
        this.maxColor = Double.NEGATIVE_INFINITY;

        List<double[]> dataList = new LinkedList<double[]>();
        List<String> idList = new LinkedList<String>();

        synchronized (dataTable) {
            Iterator<DataTableRow> i = this.dataTable.iterator();
            while (i.hasNext()) {
                DataTableRow row = i.next();

                double xValue = Double.NaN;
                if (axis[X_AXIS] >= 0) {
                    xValue = row.getValue(axis[X_AXIS]);
                }

                double yValue = Double.NaN;
                if (axis[Y_AXIS] >= 0) {
                    yValue = row.getValue(axis[Y_AXIS]);
                }

                double colorValue = Double.NaN;
                if (colorColumn >= 0) {
                    colorValue = row.getValue(colorColumn);
                }

                if (plotColumnsLogScale) {
                    if (Tools.isLessEqual(colorValue, 0.0d)) {
                        colorValue = 0;
                    } else {
                        colorValue = Math.log10(colorValue);
                    }
                }

                // TM: removed check
                // if (!Double.isNaN(xValue) && !Double.isNaN(yValue)) {
                double[] data = new double[3];
                data[X_AXIS] = xValue;
                data[Y_AXIS] = yValue;
                data[COLOR_AXIS] = colorValue;
                if (!Double.isNaN(colorValue)) {
                    this.minColor = Math.min(this.minColor, colorValue);
                    this.maxColor = Math.max(this.maxColor, colorValue);
                }
                dataList.add(data);
                idList.add(row.getId());
                // }
            }
        }

        double[][] data = new double[3][dataList.size()];
        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        double minY = Double.POSITIVE_INFINITY;
        double maxY = Double.NEGATIVE_INFINITY;

        int index = 0;
        for (double[] d : dataList) {
            data[X_AXIS][index] = d[X_AXIS];
            data[Y_AXIS][index] = d[Y_AXIS];
            data[COLOR_AXIS][index] = d[COLOR_AXIS];
            minX = MathFunctions.robustMin(minX, d[X_AXIS]);
            maxX = MathFunctions.robustMax(maxX, d[X_AXIS]);
            minY = MathFunctions.robustMin(minY, d[Y_AXIS]);
            maxY = MathFunctions.robustMax(maxY, d[Y_AXIS]);
            index++;
        }

        // jittering
        if (this.jitterAmount > 0) {
            Random jitterRandom = new Random(2001);
            double oldXRange = maxX - minX;
            double oldYRange = maxY - minY;
            for (int i = 0; i < dataList.size(); i++) {
                if (Double.isInfinite(oldXRange) || Double.isNaN(oldXRange)) {
                    oldXRange = 0;
                }
                if (Double.isInfinite(oldYRange) || Double.isNaN(oldYRange)) {
                    oldYRange = 0;
                }
                double pertX = oldXRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                double pertY = oldYRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                data[X_AXIS][i] += pertX;
                data[Y_AXIS][i] += pertY;
            }
        }

        // add data
        ((DefaultXYZDataset) dataSet).addSeries("All", data);

        // id handling
        int idCounter = 0;
        for (String id : idList) {
            idMap.put(new SeriesAndItem(0, idCounter++), id);
        }
    }
}
From source file: net.sf.maltcms.chromaui.charts.FastHeatMapPlot.java

/**
 * @param xyz
 * @param sl
 * @param spm
 * @param xybr
 * @param activeGraphics
 * @param dataArea
 * @param info
 * @param crosshairState
 * @return
 */
public BufferedImage prepareData(final XYZDataset xyz, final int sl, final int spm, final XYBlockRenderer xybr,
        Graphics2D activeGraphics, Rectangle2D dataArea, PlotRenderingInfo info, CrosshairState crosshairState) {
    long start = System.currentTimeMillis();
    final PaintScale ps = xybr.getPaintScale();

    // Scan all series for the minimum and maximum z value.
    double minz = Double.POSITIVE_INFINITY, maxz = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < xyz.getSeriesCount(); i++) {
        final int items = xyz.getItemCount(i);
        for (int j = 0; j < items; j++) {
            minz = Math.min(xyz.getZValue(i, j), minz);
            maxz = Math.max(xyz.getZValue(i, j), maxz);
        }
    }
    if (ps instanceof GradientPaintScale) {
        ((GradientPaintScale) ps).setUpperBound(maxz);
        ((GradientPaintScale) ps).setLowerBound(minz);
    }
    Logger.getLogger(getClass().getName()).log(Level.INFO, "Finding min and max data took{0}ms",
            (System.currentTimeMillis() - start));

    BufferedImage bi = createCompatibleImage(sl, spm, BufferedImage.TRANSLUCENT);
    Graphics2D g2 = (Graphics2D) bi.getGraphics();
    g2.setColor((Color) ps.getPaint(ps.getLowerBound()));
    g2.fillRect(0, 0, sl, spm);

    int height = bi.getHeight();
    XYItemRendererState xyrs = xybr.initialise(g2, dataArea, this, xyz, info);
    for (int i = 0; i < xyz.getSeriesCount(); i++) {
        final int items = xyz.getItemCount(i);
        for (int j = 0; j < items; j++) {
            final double tmp = xyz.getZValue(i, j);
            if (tmp > this.threshholdCutOff) {
                final Paint p = ps.getPaint(tmp);
                if (p instanceof Color) {
                    final Color c = (Color) p;
                    g2.setColor(c);
                    g2.fillRect((int) xyz.getXValue(i, j), height - (int) xyz.getYValue(i, j), 1, 1);
                }
            }
        }
    }
    Logger.getLogger(getClass().getName()).log(Level.INFO, "Creating image and drawing items took {0}ms",
            (System.currentTimeMillis() - start));
    return bi;
}
From source file: clus.algo.tdidt.tune.CDTuneSizeConstrPruning.java

public double getRange(ArrayList graph) {
    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < graph.size(); i++) {
        SingleStatList elem = (SingleStatList) graph.get(i);
        if (elem.getY() < min)
            min = elem.getY();
        if (elem.getY() > max)
            max = elem.getY();
    }
    return Math.abs(max - min);
}
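Several examples on this page use the same idiom: seed a running minimum with POSITIVE_INFINITY and a running maximum with NEGATIVE_INFINITY, so the first element always replaces the seeds, even when every value is negative. Below is a generic sketch of that scan with a hypothetical helper that is not part of Clus; note that on an empty input the seeds survive and the method returns positive infinity rather than throwing.

// A generic version of the same scan (hypothetical helper, not part of Clus).
static double range(double[] ys) {
    // Seeding with the infinities guarantees that the first element replaces
    // both bounds, even when every value is negative.
    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    for (double y : ys) {
        if (y < min) {
            min = y;
        }
        if (y > max) {
            max = y;
        }
    }
    // For an empty input the seeds survive, max - min is negative infinity,
    // and the result is positive infinity rather than an exception.
    return Math.abs(max - min);
}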
From source file: net.pms.util.Rational.java

/**
 * Returns an instance that represents the value of {@code value}.
 *
 * @param value the value.
 * @return An instance that represents the value of {@code value}.
 */
@Nonnull
public static Rational valueOf(double value) {
    if (value == Double.POSITIVE_INFINITY) {
        return POSITIVE_INFINITY;
    }
    if (value == Double.NEGATIVE_INFINITY) {
        return NEGATIVE_INFINITY;
    }
    if (Double.isNaN(value)) {
        return NaN;
    }
    return valueOf(BigDecimal.valueOf(value));
}
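The order of the checks above matters: the infinity and NaN tests must run before the conversion, because BigDecimal.valueOf throws NumberFormatException for non-finite doubles. Comparing with == against the infinities is reliable, since each infinity compares equal to itself, whereas NaN must be detected with Double.isNaN. Here is a small sketch of the same dispatch using a hypothetical classify helper that is not part of the net.pms.util API:

static String classify(double value) {
    // == against the infinities is safe: each infinity compares equal to itself.
    if (value == Double.NEGATIVE_INFINITY) {
        return "NEGATIVE_INFINITY";
    }
    if (value == Double.POSITIVE_INFINITY) {
        return "POSITIVE_INFINITY";
    }
    // NaN never compares equal to anything, so it needs Double.isNaN.
    if (Double.isNaN(value)) {
        return "NaN";
    }
    // Only finite values reach the conversion; BigDecimal.valueOf would throw
    // NumberFormatException for the three special values handled above.
    return java.math.BigDecimal.valueOf(value).toPlainString();
}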
From source file: dr.evomodel.arg.coalescent.ARGUniformPrior.java

public double getLogLikelihood() {
    if (likelihoodKnown) {
        return logLikelihood;
    }
    likelihoodKnown = true;
    logLikelihood = calculateLogLikelihood();

    if (arg.getReassortmentNodeCount() > maxReassortments)
        logLikelihood = Double.NEGATIVE_INFINITY;
    else
        logLikelihood = calculateLogLikelihood();

    if (!currentARGValid(true)) {
        logLikelihood = Double.NEGATIVE_INFINITY;
    }

    return logLikelihood;
}
From source file: ffx.numerics.LBFGS.java

/**
 * This method solves the unconstrained minimization problem
 * <pre>
 *     min f(x), x = (x1,x2,...,x_n),
 * </pre>
 * using the limited-memory BFGS method. The routine is especially
 * effective on problems involving a large number of variables. In a typical
 * iteration of this method an approximation <code>Hk</code> to the inverse
 * of the Hessian is obtained by applying <code>m</code> BFGS updates to a
 * diagonal matrix <code>Hk0</code>, using information from the previous
 * <code>m</code> steps.
 *
 * The user specifies the number <code>m</code>, which determines the amount
 * of storage required by the routine.
 *
 * The user is required to calculate the function value <code>f</code> and
 * its gradient <code>g</code>.
 *
 * The steplength is determined at each iteration by means of the line
 * search routine <code>lineSearch</code>, which is a slight modification of
 * the routine <code>CSRCH</code> written by More' and Thuente.
 *
 * @param n The number of variables in the minimization problem.
 *     Restriction: <code>n &gt; 0</code>.
 * @param mSave The number of corrections used in the BFGS update. Values of
 *     <code>mSave</code> less than 3 are not recommended; large values of
 *     <code>mSave</code> will result in excessive computing time.
 *     <code>3 &lt;= mSave &lt;= 7</code> is recommended.
 *     Restriction: <code>mSave &gt; 0</code>.
 * @param x On initial entry this must be set by the user to the values of
 *     the initial estimate of the solution vector. On exit it contains the
 *     values of the variables at the best point found (usually a solution).
 * @param f The value of the function <code>f</code> at the point <code>x</code>.
 * @param g The components of the gradient <code>g</code> at the point <code>x</code>.
 * @param eps Determines the accuracy with which the solution is to be
 *     found. The subroutine terminates when <code>G RMS &lt; EPS</code>.
 * @param maxIterations Maximum number of optimization steps.
 * @param potential Implements the {@link Potential} interface to supply
 *     function values and gradients.
 * @param listener Implements the {@link OptimizationListener} interface and
 *     will be notified after each successful step.
 * @return status code (0 = success, 1 = max iterations reached, -1 = failed)
 * @since 1.0
 */
public static int minimize(final int n, int mSave, final double[] x, double f, double[] g, final double eps,
        final int maxIterations, Potential potential, OptimizationListener listener) {
    assert (n > 0);
    assert (mSave > 0);
    assert (maxIterations > 0);
    assert (x != null && x.length >= n);
    assert (g != null && g.length >= n);

    if (mSave > n) {
        logger.fine(format(" Resetting the number of saved L-BFGS vectors to %d.", n));
        mSave = n;
    }

    int iterations = 0;
    int evaluations = 1;
    int nErrors = 0;
    int maxErrors = 2;

    double rms = sqrt(n);
    double scaling[] = potential.getScaling();
    if (scaling == null) {
        scaling = new double[n];
        fill(scaling, 1.0);
    }

    /**
     * Initial search direction is the steepest descent direction.
     */
    double s[][] = new double[mSave][n];
    double y[][] = new double[mSave][n];
    for (int i = 0; i < n; i++) {
        s[0][i] = -g[i];
    }

    double grms = 0.0;
    double gnorm = 0.0;
    for (int i = 0; i < n; i++) {
        double gi = g[i];
        if (gi == Double.NaN || gi == Double.NEGATIVE_INFINITY || gi == Double.POSITIVE_INFINITY) {
            String message = format("The gradient of variable %d is %8.3f.", i, gi);
            logger.warning(message);
            return 1;
        }
        double gis = gi * scaling[i];
        gnorm += gi * gi;
        grms += gis * gis;
    }
    gnorm = sqrt(gnorm);
    grms = sqrt(grms) / rms;

    /**
     * Notify the listeners of initial conditions.
     */
    if (listener != null) {
        if (!listener.optimizationUpdate(iterations, evaluations, grms, 0.0, f, 0.0, 0.0, null)) {
            /**
             * Terminate the optimization.
             */
            return 1;
        }
    } else {
        log(iterations, evaluations, grms, 0.0, f, 0.0, 0.0, null);
    }

    /**
     * The convergence criteria may already be satisfied.
     */
    if (grms <= eps) {
        return 0;
    }

    final double prevX[] = new double[n];
    final double prevG[] = new double[n];
    final double r[] = new double[n];
    final double p[] = new double[n];
    final double h0[] = new double[n];
    final double q[] = new double[n];
    final double alpha[] = new double[mSave];
    final double rho[] = new double[mSave];
    double gamma = 1.0;

    /**
     * Line search parameters.
     */
    final LineSearch lineSearch = new LineSearch(n);
    final LineSearchResult info[] = { LineSearchResult.Success };
    final int nFunctionEvals[] = { 0 };
    final double angle[] = { 0.0 };
    double df = 0.5 * STEPMAX * gnorm;
    int m = -1;

    while (true) {
        iterations++;
        if (iterations > maxIterations) {
            logger.info(format(" Maximum number of iterations reached: %d.", maxIterations));
            return 1;
        }

        int muse = min(iterations - 1, mSave);
        m++;
        if (m > mSave - 1) {
            m = 0;
        }

        /**
         * Estimate the Hessian diagonal.
         */
        fill(h0, gamma);
        arraycopy(g, 0, q, 0, n);
        int k = m;
        for (int j = 0; j < muse; j++) {
            k--;
            if (k < 0) {
                k = mSave - 1;
            }
            alpha[k] = XdotY(n, s[k], 0, 1, q, 0, 1);
            alpha[k] *= rho[k];
            aXplusY(n, -alpha[k], y[k], 0, 1, q, 0, 1);
        }
        for (int i = 0; i < n; i++) {
            r[i] = h0[i] * q[i];
        }
        for (int j = 0; j < muse; j++) {
            double beta = XdotY(n, r, 0, 1, y[k], 0, 1);
            beta *= rho[k];
            aXplusY(n, alpha[k] - beta, s[k], 0, 1, r, 0, 1);
            k++;
            if (k > mSave - 1) {
                k = 0;
            }
        }

        /**
         * Set the search direction.
         */
        for (int i = 0; i < n; i++) {
            p[i] = -r[i];
        }
        arraycopy(x, 0, prevX, 0, n);
        arraycopy(g, 0, prevG, 0, n);

        /**
         * Perform the line search along the new conjugate direction.
         */
        nFunctionEvals[0] = 0;
        double prevF = f;
        f = lineSearch.search(n, x, f, g, p, angle, df, info, nFunctionEvals, potential);
        evaluations += nFunctionEvals[0];

        /**
         * Update variables based on the results of this iteration.
         */
        for (int i = 0; i < n; i++) {
            s[m][i] = x[i] - prevX[i];
            y[m][i] = g[i] - prevG[i];
        }
        double ys = XdotY(n, y[m], 0, 1, s[m], 0, 1);
        double yy = XdotY(n, y[m], 0, 1, y[m], 0, 1);
        gamma = abs(ys / yy);
        rho[m] = 1.0 / ys;

        /**
         * Get the sizes of the moves made during this iteration.
         */
        df = prevF - f;
        double xrms = 0.0;
        grms = 0.0;
        for (int i = 0; i < n; i++) {
            double dx = (x[i] - prevX[i]) / scaling[i];
            xrms += dx * dx;
            double gx = g[i] * scaling[i];
            grms += gx * gx;
        }
        xrms = sqrt(xrms) / rms;
        grms = sqrt(grms) / rms;

        boolean done = false;
        if (info[0] == LineSearchResult.BadIntpln || info[0] == LineSearchResult.IntplnErr) {
            nErrors++;
            if (nErrors >= maxErrors) {
                logger.log(Level.OFF, " Algorithm failure: bad interpolation.");
                done = true;
            }
        } else {
            nErrors = 0;
        }

        if (listener != null) {
            if (!listener.optimizationUpdate(iterations, evaluations, grms, xrms, f, df, angle[0], info[0])) {
                /**
                 * Terminate the optimization.
                 */
                return 1;
            }
        } else {
            log(iterations, evaluations, grms, xrms, f, df, angle[0], info[0]);
        }

        /**
         * Terminate the optimization if the line search failed or upon
         * satisfying the convergence criteria.
         */
        if (done) {
            return -1;
        } else if (grms <= eps) {
            return 0;
        }
    }
}
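One detail worth noting in the gradient check near the top of this method: gi == Double.NaN is always false, because NaN compares unequal to everything including itself, while the == tests against the two infinities do work, since each infinity compares equal to itself. A corrected sketch of that guard follows; it is an illustration, not a patch to the ffx source.

if (Double.isNaN(gi) || Double.isInfinite(gi)) {
    // On Java 8 and later the two calls collapse into !Double.isFinite(gi).
    String message = format("The gradient of variable %d is %8.3f.", i, gi);
    logger.warning(message);
    return 1;
}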
From source file: net.sourceforge.jasa.report.HistoricalDataReport.java

public double getHighestUnacceptedBidPrice() {
    if (highestUnacceptedBid != null) {
        return highestUnacceptedBid.getPriceAsDouble();
    }
    Iterator<Order> i = bids.iterator();
    double highestUnacceptedBidPrice = Double.NEGATIVE_INFINITY;
    while (i.hasNext()) {
        Order s = i.next();
        if (!accepted(s)) {
            if (s.getPriceAsDouble() > highestUnacceptedBidPrice) {
                highestUnacceptedBidPrice = s.getPriceAsDouble();
                highestUnacceptedBid = s;
            }
        }
    }
    return highestUnacceptedBidPrice;
}
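When every bid has been accepted, the loop above never updates the sentinel and the method returns Double.NEGATIVE_INFINITY itself, so callers have to treat that value as "no such price". A hypothetical caller-side check follows; the report variable is assumed for illustration and is not part of the JASA API shown above.

double price = report.getHighestUnacceptedBidPrice();
if (price == Double.NEGATIVE_INFINITY) {
    // every bid was accepted; there is no unaccepted bid price to report
} else {
    // price holds a real quote
}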
From source file: edu.cmu.tetrad.data.DataUtils.java

public static DataSet replaceMissingWithRandom(DataSet inData) {
    DataSet outData;
    try {
        outData = (DataSet) new MarshalledObject(inData).get();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    for (int j = 0; j < outData.getNumColumns(); j++) {
        Node variable = outData.getVariable(j);

        if (variable instanceof DiscreteVariable) {
            List<Integer> values = new ArrayList<Integer>();
            for (int i = 0; i < outData.getNumRows(); i++) {
                int value = outData.getInt(i, j);
                if (value == -99)
                    continue;
                values.add(value);
            }
            Collections.sort(values);

            for (int i = 0; i < outData.getNumRows(); i++) {
                if (outData.getInt(i, j) == -99) {
                    int value = RandomUtil.getInstance().nextInt(values.size());
                    outData.setInt(i, j, values.get(value));
                }
            }
        } else {
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;

            for (int i = 0; i < outData.getNumRows(); i++) {
                double value = outData.getDouble(i, j);
                if (value < min)
                    min = value;
                if (value > max)
                    max = value;
            }

            for (int i = 0; i < outData.getNumRows(); i++) {
                double random = RandomUtil.getInstance().nextDouble();
                outData.setDouble(i, j, min + random * (max - min));
            }
        }
    }

    return outData;
}
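A corner case of the continuous branch: if the scan never sees a finite value, min and max keep their infinite seeds, and min + random * (max - min) evaluates to NaN for every row, because infinity minus infinity and zero times infinity are both NaN. The guard below is a hedged sketch that could sit before the fill loop; it is not part of the Tetrad code above.

// Skip columns that contained no finite value, so the fill loop never
// computes min + random * (max - min) from the infinite seeds.
if (Double.isInfinite(min) || Double.isInfinite(max)) {
    continue; // nothing usable to sample from in this column
}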
From source file: lfsom.visualization.clustering.LFSKMeans.java

/**
 * Get a new centroid for empty clusters. We therefore take the instance
 * with the largest SSE to the cluster centroid having the largest SSE.
 * Get the idea? Read slowly.
 *
 * @return a new centroid (rather: a clone thereof :))
 */
private double[] getSubstituteCentroid() {
    double maxSSE = Double.NEGATIVE_INFINITY;
    int maxSSEIndex = -1;
    for (int clusterIndex = 0; clusterIndex < k; clusterIndex++) {
        clusters[clusterIndex].calculateCentroid(data);
        double currentSSE = clusters[clusterIndex].SSE(data);
        if (currentSSE > maxSSE) {
            maxSSE = currentSSE;
            maxSSEIndex = clusterIndex;
        }
    }

    // FIXME is this the right way of handling this (if the max SSE exists
    // in a cluster that has no instances assigned)
    if (clusters[maxSSEIndex].getInstanceIndexWithMaxSSE(data) == -1) {
        return null;
    }
    return data[clusters[maxSSEIndex].getInstanceIndexWithMaxSSE(data)].clone();
}