Usage examples for java.lang.Double.isFinite
public static boolean isFinite(double d)
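Double.isFinite returns true only for ordinary floating-point values; it returns false for Double.NaN, Double.POSITIVE_INFINITY, and Double.NEGATIVE_INFINITY. A minimal standalone sketch of the method's behavior and of the guard-and-default idiom that recurs in the examples below (class name is illustrative):

public class IsFiniteBasics {
    public static void main(String[] args) {
        System.out.println(Double.isFinite(42.0));                      // true
        System.out.println(Double.isFinite(Double.NaN));                // false
        System.out.println(Double.isFinite(Double.POSITIVE_INFINITY));  // false
        System.out.println(Double.isFinite(1.0 / 0.0));                 // false (floating-point division by zero yields infinity)

        // Common idiom: substitute a safe default when a value is not finite
        double raw = Double.NaN;
        double safe = Double.isFinite(raw) ? raw : 0.0;
        System.out.println(safe);                                       // 0.0
    }
}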
From source file:de.bund.bfr.knime.nls.chart.ChartCreator.java
private static XYDataset createDataSet(String key, double[][] points, double[][] errors) {
    if (points != null) {
        if (errors != null) {
            YIntervalSeriesCollection functionDataset = new YIntervalSeriesCollection();
            YIntervalSeries series = new YIntervalSeries(key);

            for (int j = 0; j < points[0].length; j++) {
                double error = Double.isFinite(errors[1][j]) ? errors[1][j] : 0.0;

                series.add(points[0][j], points[1][j], points[1][j] - error, points[1][j] + error);
            }

            functionDataset.addSeries(series);

            return functionDataset;
        } else {
            DefaultXYDataset functionDataset = new DefaultXYDataset();

            functionDataset.addSeries(key, points);

            return functionDataset;
        }
    }

    return null;
}
From source file:eu.amidst.core.inference.ImportanceSamplingRobust.java
private double robustSumOfLogarithms(double log_x1, double log_x2) {
    double result;
    if (log_x1 != 0 && log_x2 != 0) {
        double aux_max = Math.max(log_x1, log_x2);
        double aux_min = Math.min(log_x1, log_x2);

        double tail;
        double aux = Math.exp(aux_min - aux_max);
        if (aux < 0.5) {
            tail = Math.log1p(aux);
        } else {
            tail = Math.log(1 + aux);
        }
        // tail = Math.log( 1+aux );
        //double tail = Math.log1p( Math.exp(aux_min-aux_max) );
        result = aux_max + (Double.isFinite(tail) ? tail : 0);
    } else if (log_x1 == 0) {
        result = log_x2;
    } else {
        result = log_x1;
    }
    return result;
}
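The method above is a guarded form of the standard log-sum-exp trick: log(e^a + e^b) = max(a, b) + log(1 + e^(min - max)). A minimal standalone sketch of the same pattern, leaving out the original class's convention that 0 marks an "unset" logarithm (method name is illustrative):

// Numerically stable log(exp(a) + exp(b)).
static double logSumExp(double a, double b) {
    double max = Math.max(a, b);
    double min = Math.min(a, b);
    double tail = Math.log1p(Math.exp(min - max)); // log(1 + e^(min-max)), stable when the exponent is small
    // If the tail is not a usable number (e.g., the terms are extremely far apart),
    // fall back to the dominant term alone.
    return max + (Double.isFinite(tail) ? tail : 0.0);
}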
From source file:gov.noaa.pfel.erddap.dataset.EDDTableFromHttpGet.java
/**
 * This figures out the name of the relevant file (which may or may not exist).
 *
 * @param startDir with trailing slash
 * @param tDirStructureColumnNames For each part, the variable's source column name
 *    or "" if not used for this part.
 *    Any column names here should be in requiredColumnNames.
 * @param tDirStructureNs For each part, the number of YEAR, MONTH, ...
 *    or -1 if not used for this part
 * @param tDirStructureCalendars For each part,
 *    Calendar.YEAR, MONTH, DATE, HOUR_OF_DAY, MINUTE, SECOND, MILLISECOND,
 *    or -1 if not used for this part
 * @param tColumnNames the source names of the columns
 * @param tColumnValues the source values associated with the tColumnNames.
 *    All should have the same size().
 *    Only values on the specified row will be used.
 * @param row the row of tColumnValues to use
 * @param timeEpSec the time value, in epoch seconds.
 *    It is usually a requiredColumn, but not always.
 *    It is an error if it is needed here, but timeEpSec is NaN.
 * @return the full file dir+name, starting with startDir.
 */
public static String whichFile(String startDir, StringArray tDirStructureColumnNames, IntArray tDirStructureNs,
        IntArray tDirStructureCalendars, String tColumnNames[], PrimitiveArray tColumnValues[], int row,
        double timeEpSec) {

    StringBuilder dirSB = new StringBuilder(startDir);
    StringBuilder nameSB = new StringBuilder();

    int nParts = tDirStructureColumnNames.size();
    for (int i = 0; i < nParts; i++) {
        if (i > 0) {
            dirSB.append('/');
            nameSB.append('_');
        }
        int cal = tDirStructureCalendars.get(i);
        if (cal == -1) {
            //Find the var. Add its value.
            int sni = String2.indexOf(tColumnNames, tDirStructureColumnNames.get(i));
            if (sni < 0)
                throw new SimpleException(String2.ERROR + " in directoryStructure part#" + i + ": column="
                        + tDirStructureColumnNames.get(i) + " isn't in columnNames="
                        + String2.toCSSVString(tColumnNames) + ".");
            //data value of "" is a valid value. It will be converted to something.
            String tp = String2.encodeFileNameSafe(tColumnValues[sni].getString(row));
            if (i < nParts - 1)
                dirSB.append(tp);
            nameSB.append(tp);

        } else {
            //Find the time part. Round down to n'th precision.
            //e.g., 17 seconds to 5seconds precision is 15 seconds.
            //(MONTH is 0-based, so that works correctly as is.)
            if (!Double.isFinite(timeEpSec))
                throw new SimpleException(
                        String2.ERROR + " in directoryStructure part#" + i + ": time value is NaN!");
            //need a new gc for each part since gc is modified
            GregorianCalendar gc = Calendar2.epochSecondsToGc(timeEpSec);
            int n = tDirStructureNs.get(i);
            gc.set(cal, (gc.get(cal) / n) * n);
            //Get the ISO 8601 date/time string just to that precision/field.
            String s = Calendar2.formatAsISODateTimeT3(gc); //to millis
            int nChar = s.length();
            if (cal == Calendar.YEAR)
                nChar = 4;
            else if (cal == Calendar.MONTH)
                nChar = 7;
            else if (cal == Calendar.DATE)
                nChar = 10;
            else if (cal == Calendar.HOUR_OF_DAY)
                nChar = 13;
            else if (cal == Calendar.MINUTE)
                nChar = 16;
            else if (cal == Calendar.SECOND)
                nChar = 19;
            //else to millis precision
            String tp = s.substring(0, nChar);
            tp = String2.replaceAll(tp, ':', '-'); //make fileNameSafe
            if (i < nParts - 1)
                dirSB.append(tp);
            nameSB.append(tp);
        }
    }

    return dirSB.toString() + nameSB.toString() + ".nc";
}
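whichFile rejects a non-finite timeEpSec before doing any calendar arithmetic, since a NaN or infinite epoch-seconds value would otherwise produce a corrupt file name. A small hedged sketch of the same precondition style using only standard JDK classes; the method name and message are illustrative, not part of ERDDAP:

import java.time.Instant;

// Illustrative only: validate an epoch-seconds value before converting it to a time object.
static Instant toInstant(double epochSeconds) {
    if (!Double.isFinite(epochSeconds)) {
        throw new IllegalArgumentException("time value is NaN or infinite: " + epochSeconds);
    }
    return Instant.ofEpochMilli(Math.round(epochSeconds * 1000.0));
}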
From source file:org.rhwlab.variationalbayesian.GaussianMixture.java
public double L7() {
    double sum = 0.0;
    for (int k = 0; k < K; ++k) {
        double s1 = 0.5 * lnLambdaTilde[k];
        double s2 = 0.5 * X.getD() * Math.log(beta[k] / (2 * Math.PI));
        double s3 = -X.getD() / 2.0;
        double s4 = -H(detW[k], nu[k], lnLambdaTilde[k]);
        sum = sum + s1 + s2 + s3 + s4;
        if (!Double.isFinite(sum)) {
            int iusahf = 0; // no-op; appears to serve as a breakpoint anchor for catching non-finite sums
        }
    }
    return sum;
}
From source file:org.rhwlab.variationalbayesian.GaussianMixture.java
static public double lnB(double detW, double nu, int D) {
    double p = 0.0;
    for (int i = 0; i <= D; ++i) {
        p = p + Gamma.logGamma((nu + 1 - i) / 2.0);
    }
    double t1 = -0.5 * nu * Math.log(detW);
    double t2 = 0.5 * nu * D * Math.log(2.0);
    double t3 = 0.25 * D * (D - 1.0) * Math.log(Math.PI);
    double lnB = t1 - t2 - t3 - p;
    if (!Double.isFinite(lnB)) {
        int iusahdf = 0; // no-op; appears to serve as a breakpoint anchor for catching non-finite results
    }
    return lnB;
}
From source file:org.rhwlab.variationalbayesian.GaussianMixture.java
public double H(double detW, double nu, double lambdaTildeExp) {
    double s1 = -(lnB(detW, nu, X.getD()));
    double s2 = -lambdaTildeExp * (nu - X.getD() - 1) / 2;
    double s3 = nu * X.getD() / 2;
    double sum = s1 + s2 + s3;
    if (!Double.isFinite(sum)) {
        int iuasfduisd = 0; // no-op; appears to serve as a breakpoint anchor for catching non-finite results
    }
    return sum;
}
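The isFinite checks in these three GaussianMixture methods only assign an unused local, which suggests they exist as breakpoint anchors during debugging rather than as error handling. A sketch of a more explicit alternative, assuming a java.util.logging logger is acceptable; the class and method names are illustrative:

import java.util.logging.Logger;

class FiniteChecks {
    private static final Logger LOG = Logger.getLogger(FiniteChecks.class.getName());

    // Illustrative replacement for the breakpoint-anchor pattern: log (or assert) instead of a no-op.
    static double checkFinite(String label, double value) {
        if (!Double.isFinite(value)) {
            LOG.warning(label + " is not finite: " + value);
            // Alternatively, fail fast while debugging:
            // assert Double.isFinite(value) : label + " is not finite";
        }
        return value;
    }
}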
From source file:qupath.lib.gui.panels.survival.KaplanMeierDisplay.java
@SuppressWarnings("unchecked") private void generatePlot() { KaplanMeierDisplay.ScoreData newScoreData = scoreData; // If we have a hierarchy, update the scores with the most recent data if (hierarchy != null) { List<TMACoreObject> cores = PathObjectTools.getTMACoreObjects(hierarchy, false); double[] survival = new double[cores.size()]; boolean[] censored = new boolean[cores.size()]; double[] scores = new double[cores.size()]; // // Optionally sort by scores... helps a bit when debugging e.g. p-values, Hazard ratios etc. // cores.sort((c1, c2) -> Double.compare(c1.getMeasurementList().getMeasurementValue(scoreColumn), c2.getMeasurementList().getMeasurementValue(scoreColumn))); // scoreColumn = "Positive %"; // scoreColumn = "RoughScore"; for (int i = 0; i < cores.size(); i++) { TMACoreObject core = cores.get(i); MeasurementList ml = core.getMeasurementList(); survival[i] = core.getMeasurementList().getMeasurementValue(survivalColumn); double censoredValue = core.getMeasurementList().getMeasurementValue(censoredColumn); boolean hasCensoredValue = !Double.isNaN(censoredValue) && (censoredValue == 0 || censoredValue == 1); censored[i] = censoredValue != 0; if (!hasCensoredValue) { // If we don't have a censored value, ensure we mask out everything else scores[i] = Double.NaN; survival[i] = Double.NaN; } else if (ml.containsNamedMeasurement(scoreColumn)) // Get the score if we can scores[i] = ml.getMeasurementValue(scoreColumn); else { // // Try to compute score if we need to // Map<String, Number> map = ROIMeaningfulMeasurements.getPathClassSummaryMeasurements(core.getChildObjects(), true); // Number value = map.get(scoreColumn); // if (value == null) scores[i] = Double.NaN; // else // scores[i] = value.doubleValue(); }/*ww w . j a va 2s .c o m*/ } // Mask out any scores that don't have associated survival data for (int i = 0; i < survival.length; i++) { if (Double.isNaN(survival[i])) scores[i] = Double.NaN; } newScoreData = new ScoreData(scores, survival, censored); } if (newScoreData == null || newScoreData.scores.length == 0) return; // KaplanMeier kmHigh = new KaplanMeier("Above threshold"); // KaplanMeier kmLow = new KaplanMeier("Below threshold"); double[] quartiles = StatisticsHelper.getQuartiles(newScoreData.scores); double q1 = quartiles[0]; double median = quartiles[1]; double q3 = quartiles[2]; double[] thresholds; if (params != null) { Object thresholdMethod = params.getChoiceParameterValue("scoreThresholdMethod"); if (thresholdMethod.equals("Median")) { // panelParams.setNumericParameterValue("scoreThreshold", median); // ((DoubleParameter)params.getParameters().get("scoreThreshold")).setValue(median); // TODO: UPDATE DIALOG! thresholds = new double[] { median }; } else if (thresholdMethod.equals("Tertiles")) { // ((DoubleParameter)params.getParameters().get("scoreThreshold")).setValue(median); // TODO: UPDATE DIALOG! thresholds = StatisticsHelper.getTertiles(newScoreData.scores); } else if (thresholdMethod.equals("Quartiles")) { // ((DoubleParameter)params.getParameters().get("scoreThreshold")).setValue(median); // TODO: UPDATE DIALOG! 
thresholds = new double[] { q1, median, q3 }; } else if (thresholdMethod.equals("Manual (1)")) { thresholds = new double[] { params.getDoubleParameterValue("threshold1") }; } else if (thresholdMethod.equals("Manual (2)")) { thresholds = new double[] { params.getDoubleParameterValue("threshold1"), params.getDoubleParameterValue("threshold2") }; } else //if (thresholdMethod.equals("Manual (3)")) { thresholds = new double[] { params.getDoubleParameterValue("threshold1"), params.getDoubleParameterValue("threshold2"), params.getDoubleParameterValue("threshold3") }; } else thresholds = new double[] { median }; double minVal = Double.POSITIVE_INFINITY; double maxVal = Double.NEGATIVE_INFINITY; int numNonNaN = 0; for (double d : newScoreData.scores) { if (Double.isNaN(d)) continue; if (d < minVal) minVal = d; if (d > maxVal) maxVal = d; numNonNaN++; } boolean scoresValid = maxVal > minVal; // If not this, we don't have valid scores that we can work with double maxTimePoint = 0; for (double d : newScoreData.survival) { if (Double.isNaN(d)) continue; if (d > maxTimePoint) maxTimePoint = d; } if (panelParams != null && maxTimePoint > ((IntParameter) params.getParameters().get("censorTimePoints")).getUpperBound()) { panelParams.setNumericParameterValueRange("censorTimePoints", 0, Math.ceil(maxTimePoint)); } // Optionally censor at specified time double censorThreshold = params == null ? maxTimePoint : params.getIntParameterValue("censorTimePoints"); // Compute log-rank p-values for *all* possible thresholds // Simultaneously determine the threshold that yields the lowest p-value, // resolving ties in favour of a more even split between high/low numbers of events boolean pValuesChanged = false; if (calculateAllPValues) { if (!(pValues != null && pValueThresholds != null && newScoreData.equals(scoreData) && censorThreshold == lastPValueCensorThreshold)) { Map<Double, Double> mapLogRank = new TreeMap<>(); Set<Double> setObserved = new HashSet<>(); for (int i = 0; i < newScoreData.scores.length; i++) { Double d = newScoreData.scores[i]; boolean observed = !newScoreData.censored[i] && newScoreData.survival[i] < censorThreshold; if (observed) setObserved.add(d); if (mapLogRank.containsKey(d)) continue; List<KaplanMeierData> kmsTemp = splitByThresholds(newScoreData, new double[] { d }, censorThreshold, false); // if (kmsTemp.get(1).nObserved() == 0 || kmsTemp.get(1).nObserved() == 0) // continue; LogRankResult test = LogRankTest.computeLogRankTest(kmsTemp.get(0), kmsTemp.get(1)); double pValue = test.getPValue(); // double pValue = test.hazardRatio < 1 ? test.hazardRatio : 1.0/test.hazardRatio; // Checking usefulness of Hazard ratios... 
if (!Double.isFinite(pValue)) continue; // if (!Double.isFinite(test.getHazardRatio())) { //// continue; // pValue = Double.NaN; // } mapLogRank.put(d, pValue); } pValueThresholds = new double[mapLogRank.size()]; pValues = new double[mapLogRank.size()]; pValueThresholdsObserved = new boolean[mapLogRank.size()]; int count = 0; for (Entry<Double, Double> entry : mapLogRank.entrySet()) { pValueThresholds[count] = entry.getKey(); pValues[count] = entry.getValue(); if (setObserved.contains(entry.getKey())) pValueThresholdsObserved[count] = true; count++; } // Find the longest 'significant' stretch int maxSigCount = 0; int maxSigInd = -1; int sigCurrent = 0; int[] sigCount = new int[pValues.length]; for (int i = 0; i < pValues.length; i++) { if (pValues[i] < 0.05) { sigCurrent++; sigCount[i] = sigCurrent; if (sigCurrent > maxSigCount) { maxSigCount = sigCurrent; maxSigInd = i; } } else sigCurrent = 0; } if (maxSigCount == 0) { logger.info("No p-values < 0.05"); } else { double minThresh = maxSigInd - maxSigCount < 0 ? pValueThresholds[0] - 0.0000001 : pValueThresholds[maxSigInd - maxSigCount]; double maxThresh = pValueThresholds[maxSigInd]; int nBetween = 0; int nBetweenObserved = 0; for (int i = 0; i < newScoreData.scores.length; i++) { if (newScoreData.scores[i] > minThresh && newScoreData.scores[i] <= maxThresh) { nBetween++; if (newScoreData.survival[i] < censorThreshold && !newScoreData.censored[i]) nBetweenObserved++; } } logger.info("Longest stretch of p-values < 0.05: {} - {} ({} entries, {} observed)", minThresh, maxThresh, nBetween, nBetweenObserved); } pValuesSmoothed = new double[pValues.length]; Arrays.fill(pValuesSmoothed, Double.NaN); int n = (pValues.length / 20) * 2 + 1; logger.info("Smoothing log-rank test p-values by " + n); for (int i = n / 2; i < pValues.length - n / 2; i++) { double sum = 0; for (int k = i - n / 2; k < i - n / 2 + n; k++) { sum += pValues[k]; } pValuesSmoothed[i] = sum / n; } // for (int i = 0; i < pValues.length; i++) { // double sum = 0; // for (int k = Math.max(0, i-n/2); k < Math.min(pValues.length, i-n/2+n); k++) { // sum += pValues[k]; // } // pValuesSmoothed[i] = sum/n; // } // pValues = pValuesSmoothed; lastPValueCensorThreshold = censorThreshold; pValuesChanged = true; } } else { lastPValueCensorThreshold = Double.NaN; pValueThresholds = null; pValues = null; } // if (params != null && !Double.isNaN(bestThreshold) && (params.getChoiceParameterValue("scoreThresholdMethod").equals("Lowest p-value"))) if (params != null && (params.getChoiceParameterValue("scoreThresholdMethod").equals("Lowest p-value"))) { int bestIdx = -1; double bestPValue = Double.POSITIVE_INFINITY; for (int i = pValueThresholds.length / 10; i < pValueThresholds.length * 9 / 10; i++) { if (pValues[i] < bestPValue) { bestIdx = i; bestPValue = pValues[i]; } } thresholds = bestIdx >= 0 ? new double[] { pValueThresholds[bestIdx] } : new double[0]; } else if (params != null && (params.getChoiceParameterValue("scoreThresholdMethod").equals("Lowest smoothed p-value"))) { int bestIdx = -1; double bestPValue = Double.POSITIVE_INFINITY; for (int i = pValueThresholds.length / 10; i < pValueThresholds.length * 9 / 10; i++) { if (pValuesSmoothed[i] < bestPValue) { bestIdx = i; bestPValue = pValuesSmoothed[i]; } } thresholds = bestIdx >= 0 ? 
new double[] { pValueThresholds[bestIdx] } : new double[0]; } // Split into different curves using the provided thresholds List<KaplanMeierData> kms = splitByThresholds(newScoreData, thresholds, censorThreshold, params != null && "Quartiles".equals(params.getChoiceParameterValue("scoreThresholdMethod"))); // for (KaplanMeier km : kms) // km.censorAtTime(censorThreshold); //// kmHigh.censorAtTime(censorThreshold); //// kmLow.censorAtTime(censorThreshold); // logger.info("High: " + kmHigh.toString()); // logger.info("Low: " + kmLow.toString()); // logger.info("Log rank comparison: {}", LogRankTest.computeLogRankTest(kmLow, kmHigh)); if (plotter == null) { plotter = new KaplanMeierChartWrapper(survivalColumn + " time"); // plotter.setBorder(BorderFactory.createTitledBorder("Survival plot")); // plotter.getCanvas().setWidth(300); // plotter.getCanvas().setHeight(300); } KaplanMeierData[] kmArray = new KaplanMeierData[kms.size()]; plotter.setKaplanMeierCurves(survivalColumn + " time", kms.toArray(kmArray)); tableModel.setSurvivalCurves(thresholds, params != null && params.getChoiceParameterValue("scoreThresholdMethod").equals("Lowest p-value"), kmArray); // Bar width determined using 'Freedman and Diaconis' rule' (but overridden if this gives < 16 bins...) double barWidth = (2 * q3 - q1) * Math.pow(numNonNaN, -1.0 / 3.0); int nBins = 100; if (!Double.isNaN(barWidth)) barWidth = (int) Math.max(16, Math.ceil((maxVal - minVal) / barWidth)); Histogram histogram = scoresValid ? new Histogram(newScoreData.scores, nBins) : null; if (histogramPanel == null) { GridPane paneHistogram = new GridPane(); histogramPanel = new HistogramPanelFX(); histogramPanel.getChart().setAnimated(false); histogramWrapper = new ThresholdedChartWrapper(histogramPanel.getChart()); for (ObservableNumberValue val : threshProperties) histogramWrapper.addThreshold(val, ColorToolsFX.getCachedColor(240, 0, 0, 128)); histogramWrapper.getPane().setPrefHeight(150); paneHistogram.add(histogramWrapper.getPane(), 0, 0); Tooltip.install(histogramPanel.getChart(), new Tooltip("Distribution of scores")); GridPane.setHgrow(histogramWrapper.getPane(), Priority.ALWAYS); GridPane.setVgrow(histogramWrapper.getPane(), Priority.ALWAYS); NumberAxis xAxis = new NumberAxis(); xAxis.setLabel("Score threshold"); NumberAxis yAxis = new NumberAxis(); yAxis.setLowerBound(0); yAxis.setUpperBound(1); yAxis.setTickUnit(0.1); yAxis.setAutoRanging(false); yAxis.setLabel("P-value"); chartPValues = new LineChart<>(xAxis, yAxis); chartPValues.setAnimated(false); chartPValues.setLegendVisible(false); // Make chart so it can be navigated ChartToolsFX.makeChartInteractive(chartPValues, xAxis, yAxis); pValuesChanged = true; Tooltip.install(chartPValues, new Tooltip( "Distribution of p-values (log-rank test) comparing low vs. 
high for all possible score thresholds")); // chartPValues.getYAxis().setAutoRanging(false); pValuesWrapper = new ThresholdedChartWrapper(chartPValues); for (ObservableNumberValue val : threshProperties) pValuesWrapper.addThreshold(val, ColorToolsFX.getCachedColor(240, 0, 0, 128)); pValuesWrapper.getPane().setPrefHeight(150); paneHistogram.add(pValuesWrapper.getPane(), 0, 1); GridPane.setHgrow(pValuesWrapper.getPane(), Priority.ALWAYS); GridPane.setVgrow(pValuesWrapper.getPane(), Priority.ALWAYS); ContextMenu popup = new ContextMenu(); ChartToolsFX.addChartExportMenu(chartPValues, popup); RadioMenuItem miZoomY1 = new RadioMenuItem("0-1"); miZoomY1.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(1); yAxis.setTickUnit(0.2); }); RadioMenuItem miZoomY05 = new RadioMenuItem("0-0.5"); miZoomY05.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(0.5); yAxis.setTickUnit(0.1); }); RadioMenuItem miZoomY02 = new RadioMenuItem("0-0.2"); miZoomY02.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(0.2); yAxis.setTickUnit(0.05); }); RadioMenuItem miZoomY01 = new RadioMenuItem("0-0.1"); miZoomY01.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(0.1); yAxis.setTickUnit(0.05); }); RadioMenuItem miZoomY005 = new RadioMenuItem("0-0.05"); miZoomY005.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(0.05); yAxis.setTickUnit(0.01); }); RadioMenuItem miZoomY001 = new RadioMenuItem("0-0.01"); miZoomY001.setOnAction(e -> { yAxis.setAutoRanging(false); yAxis.setUpperBound(0.01); yAxis.setTickUnit(0.005); }); ToggleGroup tgZoom = new ToggleGroup(); miZoomY1.setToggleGroup(tgZoom); miZoomY05.setToggleGroup(tgZoom); miZoomY02.setToggleGroup(tgZoom); miZoomY01.setToggleGroup(tgZoom); miZoomY005.setToggleGroup(tgZoom); miZoomY001.setToggleGroup(tgZoom); Menu menuZoomY = new Menu("Set y-axis range"); menuZoomY.getItems().addAll(miZoomY1, miZoomY05, miZoomY02, miZoomY01, miZoomY005, miZoomY001); MenuItem miCopyData = new MenuItem("Copy chart data"); miCopyData.setOnAction(e -> { String dataString = ChartToolsFX.getChartDataAsString(chartPValues); ClipboardContent content = new ClipboardContent(); content.putString(dataString); Clipboard.getSystemClipboard().setContent(content); }); popup.getItems().addAll(miCopyData, menuZoomY); chartPValues.setOnContextMenuRequested(e -> { popup.show(chartPValues, e.getScreenX(), e.getScreenY()); }); for (int col = 0; col < tableModel.getColumnCount(); col++) { TableColumn<Integer, String> column = new TableColumn<>(tableModel.getColumnName(col)); int colNumber = col; column.setCellValueFactory( new Callback<CellDataFeatures<Integer, String>, ObservableValue<String>>() { @Override public ObservableValue<String> call(CellDataFeatures<Integer, String> p) { return new SimpleStringProperty( (String) tableModel.getValueAt(p.getValue(), colNumber)); } }); column.setCellFactory(new Callback<TableColumn<Integer, String>, TableCell<Integer, String>>() { @Override public TableCell<Integer, String> call(TableColumn<Integer, String> param) { TableCell<Integer, String> cell = new TableCell<Integer, String>() { @Override protected void updateItem(String item, boolean empty) { super.updateItem(item, empty); setText(item); setTooltip(new Tooltip(item)); } }; return cell; } }); table.getColumns().add(column); } table.setPrefHeight(250); table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); table.maxHeightProperty().bind(table.prefHeightProperty()); params = new ParameterList(); // maxTimePoint = 0; // 
for (TMACoreObject core : hierarchy.getTMAGrid().getTMACoreList()) { // double os = core.getMeasurementList().getMeasurementValue(TMACoreObject.KEY_OVERALL_SURVIVAL); // double rfs = core.getMeasurementList().getMeasurementValue(TMACoreObject.KEY_RECURRENCE_FREE_SURVIVAL); // if (os > maxTimePoint) // maxTimePoint = os; // if (rfs > maxTimePoint) // maxTimePoint = rfs; // } params.addIntParameter("censorTimePoints", "Max censored time", (int) (censorThreshold + 0.5), null, 0, (int) Math.ceil(maxTimePoint), "Latest time point beyond which data will be censored"); // params.addChoiceParameter("scoreThresholdMethod", "Threshold method", "Manual", Arrays.asList("Manual", "Median", "Log-rank test")); if (calculateAllPValues) // Don't include "Lowest smoothed p-value" - it's not an established method and open to misinterpretation... params.addChoiceParameter("scoreThresholdMethod", "Threshold method", "Median", Arrays.asList("Manual (1)", "Manual (2)", "Manual (3)", "Median", "Tertiles", "Quartiles", "Lowest p-value")); // params.addChoiceParameter("scoreThresholdMethod", "Threshold method", "Median", Arrays.asList("Manual (1)", "Manual (2)", "Manual (3)", "Median", "Tertiles", "Quartiles", "Lowest p-value", "Lowest smoothed p-value")); else params.addChoiceParameter("scoreThresholdMethod", "Threshold method", "Median", Arrays.asList("Manual (1)", "Manual (2)", "Manual (3)", "Median", "Tertiles", "Quartiles")); params.addDoubleParameter("threshold1", "Threshold 1", thresholds.length > 0 ? thresholds[0] : (minVal + maxVal) / 2, null, "Threshold to distinguish between patient groups"); params.addDoubleParameter("threshold2", "Threshold 2", thresholds.length > 1 ? thresholds[1] : (minVal + maxVal) / 2, null, "Threshold to distinguish between patient groups"); params.addDoubleParameter("threshold3", "Threshold 3", thresholds.length > 2 ? 
thresholds[2] : (minVal + maxVal) / 2, null, "Threshold to distinguish between patient groups"); params.addBooleanParameter("showAtRisk", "Show at risk", plotter.getShowAtRisk(), "Show number of patients at risk below the plot"); params.addBooleanParameter("showTicks", "Show censored ticks", plotter.getShowCensoredTicks(), "Show ticks to indicate censored data"); params.addBooleanParameter("showKey", "Show key", plotter.getShowKey(), "Show key indicating display of each curve"); // params.addBooleanParameter("useColor", "Use color", plotter.getUseColor(), "Show each curve in a different color"); // params.addBooleanParameter("useStrokes", "Use strokes", plotter.getUseStrokes(), "Show each curve with a differed line stroke"); // Hide threshold parameters if threshold can't be used if (!scoresValid) { // params.setHiddenParameters(true, "scoreThresholdMethod", "scoreThreshold"); histogramPanel.getChart().setVisible(false); } panelParams = new ParameterPanelFX(params); panelParams.addParameterChangeListener(this); updateThresholdsEnabled(); for (int i = 0; i < threshProperties.length; i++) { String p = "threshold" + (i + 1); threshProperties[i].addListener((v, o, n) -> { if (interactiveThresholds()) { // Need to do a decent double check with tolerance to text field value changing while typing if (!GeneralTools.almostTheSame(params.getDoubleParameterValue(p), n.doubleValue(), 0.0001)) panelParams.setNumericParameterValue(p, n); } }); } BorderPane paneBottom = new BorderPane(); TitledPane paneOptions = new TitledPane("Options", panelParams.getPane()); // paneOptions.setCollapsible(false); Pane paneCanvas = new StackPane(); paneCanvas.getChildren().add(plotter.getCanvas()); GridPane paneLeft = new GridPane(); paneLeft.add(paneOptions, 0, 0); paneLeft.add(table, 0, 1); GridPane.setHgrow(paneOptions, Priority.ALWAYS); GridPane.setHgrow(table, Priority.ALWAYS); paneBottom.setLeft(paneLeft); paneBottom.setCenter(paneHistogram); paneMain.setCenter(paneCanvas); paneMain.setBottom(paneBottom); paneMain.setPadding(new Insets(10, 10, 10, 10)); } else if (thresholds.length > 0) { // Ensure the sliders/text fields are set sensibly if (!GeneralTools.almostTheSame(thresholds[0], params.getDoubleParameterValue("threshold1"), 0.0001)) { panelParams.setNumericParameterValue("threshold1", thresholds[0]); } if (thresholds.length > 1 && !GeneralTools.almostTheSame(thresholds[1], params.getDoubleParameterValue("threshold2"), 0.0001)) { panelParams.setNumericParameterValue("threshold2", thresholds[1]); } if (thresholds.length > 2 && !GeneralTools.almostTheSame(thresholds[2], params.getDoubleParameterValue("threshold3"), 0.0001)) { panelParams.setNumericParameterValue("threshold3", thresholds[2]); } } if (histogram != null) { histogramPanel.getHistogramData() .setAll(HistogramPanelFX.createHistogramData(histogram, false, (Color) null)); histogramPanel.getChart().getXAxis().setLabel(scoreColumn); histogramPanel.getChart().getYAxis().setLabel("Count"); ChartToolsFX.addChartExportMenu(histogramPanel.getChart(), null); // histogramWrapper.setVerticalLines(thresholds, ColorToolsFX.getCachedColor(240, 0, 0, 128)); // Deal with threshold adjustment // histogramWrapper.getThresholds().addListener((Observable o) -> generatePlot()); } if (pValues != null) { // TODO: Raise earlier where p-value calculation is if (pValuesChanged) { ObservableList<XYChart.Data<Number, Number>> data = FXCollections.observableArrayList(); for (int i = 0; i < pValueThresholds.length; i++) { double pValue = pValues[i]; if (Double.isNaN(pValue)) 
continue; data.add(new XYChart.Data<>(pValueThresholds[i], pValue, pValueThresholdsObserved[i])); } ObservableList<XYChart.Data<Number, Number>> dataSmoothed = null; if (pValuesSmoothed != null) { dataSmoothed = FXCollections.observableArrayList(); for (int i = 0; i < pValueThresholds.length; i++) { double pValueSmoothed = pValuesSmoothed[i]; if (Double.isNaN(pValueSmoothed)) continue; dataSmoothed.add(new XYChart.Data<>(pValueThresholds[i], pValueSmoothed)); } } // Don't bother showing the smoothed data... it tends to get in the way... // if (dataSmoothed != null) // chartPValues.getData().setAll(new XYChart.Series<>("P-values", data), new XYChart.Series<>("Smoothed P-values", dataSmoothed)); // else chartPValues.getData().setAll(new XYChart.Series<>("P-values", data)); // Add line to show 0.05 significance threshold if (pValueThresholds.length > 1) { Data<Number, Number> sigData1 = new Data<>(pValueThresholds[0], 0.05); Data<Number, Number> sigData2 = new Data<>(pValueThresholds[pValueThresholds.length - 1], 0.05); XYChart.Series<Number, Number> dataSignificant = new XYChart.Series<>("Signficance 0.05", FXCollections.observableArrayList(sigData1, sigData2)); chartPValues.getData().add(dataSignificant); sigData1.getNode().setVisible(false); sigData2.getNode().setVisible(false); } // chartPValues.getData().get(0).getNode().setVisible(true); // pValuesWrapper.clearThresholds(); for (XYChart.Data<Number, Number> dataPoint : data) { if (!Boolean.TRUE.equals(dataPoint.getExtraValue())) dataPoint.getNode().setVisible(false); } // if (dataSmoothed != null) { // for (XYChart.Data<Number, Number> dataPoint : dataSmoothed) { // dataPoint.getNode().setVisible(false); // } // chartPValues.getData().get(1).getNode().setOpacity(0.5); // } // int count = 0; // for (int i = 0; i < pValueThresholds.length; i++) { // double pValue = pValues[i]; // if (Double.isNaN(pValue)) // continue; // boolean observed = pValueThresholdsObserved[i]; //// if (observed) //// pValuesWrapper.addThreshold(new ReadOnlyDoubleWrapper(pValueThresholds[i]), Color.rgb(0, 0, 0, 0.05)); // // if (!observed) { //// StackPane pane = (StackPane)data.get(count).getNode(); //// pane.setEffect(new DropShadow()); // data.get(count).getNode().setVisible(false); // } // count++; // } } for (int i = 0; i < threshProperties.length; i++) { if (i < thresholds.length) threshProperties[i].set(thresholds[i]); else threshProperties[i].set(Double.NaN); } boolean isInteractive = interactiveThresholds(); histogramWrapper.setIsInteractive(isInteractive); pValuesWrapper.setIsInteractive(isInteractive); chartPValues.setVisible(true); } // else // chartPValues.setVisible(false); // Store values for next time scoreData = newScoreData; }
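In the p-value loop above, thresholds whose log-rank test yields a non-finite p-value are simply skipped before charting. The same filtering can be expressed over an array with streams; a minimal sketch with illustrative array contents:

import java.util.Arrays;

public class FilterFinite {
    public static void main(String[] args) {
        // Keep only finite p-values before plotting or summarizing them.
        double[] pValues = { 0.03, Double.NaN, 0.2, Double.POSITIVE_INFINITY, 0.7 };
        double[] finiteOnly = Arrays.stream(pValues)
                .filter(Double::isFinite)
                .toArray();
        System.out.println(Arrays.toString(finiteOnly)); // [0.03, 0.2, 0.7]
    }
}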
From source file:gr.cti.android.experimentation.controller.api.RestApiDataController.java
private JSONArray doCalculations(final Set<Result> results, final long end, final DecimalFormat df) { final Map<String, Map<String, Map<String, DescriptiveStatistics>>> dataAggregates = new HashMap<>(); final DescriptiveStatistics wholeDataStatistics = new DescriptiveStatistics(); final Map<String, Map<String, Long>> locationsHeatMap = new HashMap<>(); for (final Result result : results) { try {//w ww .ja va2 s .c o m if (!result.getMessage().startsWith("{")) { continue; } if (end != 0 && result.getTimestamp() > end) { continue; } final JSONObject message = new JSONObject(result.getMessage()); if (message.has(LATITUDE) && message.has(LONGITUDE)) { final String longitude = df.format(message.getDouble(LONGITUDE)); final String latitude = df.format(message.getDouble(LATITUDE)); if (!dataAggregates.containsKey(longitude)) { dataAggregates.put(longitude, new HashMap<>()); } if (!dataAggregates.get(longitude).containsKey(latitude)) { dataAggregates.get(longitude).put(latitude, new HashMap<>()); } //HeatMap if (!locationsHeatMap.containsKey(longitude)) { locationsHeatMap.put(longitude, new HashMap<>()); } if (!locationsHeatMap.get(longitude).containsKey(latitude)) { locationsHeatMap.get(longitude).put(latitude, 0L); } final Long val = locationsHeatMap.get(longitude).get(latitude); locationsHeatMap.get(longitude).put(latitude, val + 1); final Iterator iterator = message.keys(); if (longitude != null && latitude != null) { while (iterator.hasNext()) { final String key = (String) iterator.next(); if (key.equals(LATITUDE) || key.equals(LONGITUDE)) { continue; } if (!dataAggregates.get(longitude).get(latitude).containsKey(key)) { dataAggregates.get(longitude).get(latitude).put(key, new DescriptiveStatistics()); } try { String data = message.getString(key); try { final double doubleData = Double.parseDouble(data); dataAggregates.get(longitude).get(latitude).get(key).addValue(doubleData); wholeDataStatistics.addValue(doubleData); } catch (NumberFormatException ignore) { dataAggregates.get(longitude).get(latitude).get(key).addValue(1); wholeDataStatistics.addValue(1); } } catch (Exception e) { LOGGER.error(e, e); } } } } } catch (Exception e) { LOGGER.error(e, e); } } final JSONArray addressPoints = new JSONArray(); for (final String longitude : dataAggregates.keySet()) { for (final String latitude : dataAggregates.get(longitude).keySet()) { LOGGER.info("{" + longitude + ":" + latitude + "}"); final JSONArray measurement = new JSONArray(); try { measurement.put(Double.parseDouble(latitude)); measurement.put(Double.parseDouble(longitude)); if (locationsHeatMap.containsKey(longitude) && locationsHeatMap.get(longitude).containsKey(latitude)) { measurement.put(String.valueOf(locationsHeatMap.get(longitude).get(latitude))); } else { measurement.put(1); } final JSONObject data = new JSONObject(); measurement.put(data); for (final Object key : dataAggregates.get(longitude).get(latitude).keySet()) { final String keyString = (String) key; final String part = keyString.split("\\.")[keyString.split("\\.").length - 1]; double value = dataAggregates.get(longitude).get(latitude).get(keyString).getMean(); LOGGER.info("value: " + value); if (Double.isFinite(value) && value != 1) { data.put(part, value); } else { value = dataAggregates.get(longitude).get(latitude).get(keyString).getValues().length; data.put(part, value); } } addressPoints.put(measurement); } catch (JSONException e) { LOGGER.error(e, e); } } } return addressPoints; }
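Here isFinite guards the mean computed by DescriptiveStatistics before it is reported; if the mean is not a finite, usable number, the code falls back to the number of recorded values. A compact sketch of that fallback, assuming Apache Commons Math 3 is on the classpath; the method name is illustrative and the original's extra "value != 1" special case is omitted:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

// Report the mean when it is a usable number, otherwise fall back to the sample count.
static double meanOrCount(DescriptiveStatistics stats) {
    double mean = stats.getMean();
    return Double.isFinite(mean) ? mean : stats.getN();
}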
From source file:automenta.vivisect.dimensionalize.HyperassociativeMap.java
/** vertices is passed as a list because the Set iterator from JGraphT is slow */ public ArrayRealVector align(V nodeToAlign, ObjectDoubleHashMap<V> neighbors, V[] vertices) { double nodeSpeed = getSpeedFactor(nodeToAlign); ArrayRealVector originalPosition = new ArrayRealVector(new double[2], true); //getPosition(nodeToAlign); //getCurrentPosition(nodeToAlign); getPosition(nodeToAlign, originalPosition.getDataRef()); if (nodeSpeed == 0) return originalPosition; // calculate equilibrium with neighbors ArrayRealVector position = (ArrayRealVector) originalPosition.mapMultiplyToSelf(1.0 / scale); getNeighbors(nodeToAlign, neighbors); ArrayRealVector delta = newVector(); double radius = getRadius(nodeToAlign); double targetDistance = radius + equilibriumDistance; double learningRate = this.learningRate; ArrayRealVector tmpAttractVector = newVector(); // align with neighbours neighbors.forEachKeyValue((neighbor, distToNeighbor) -> { ArrayRealVector attractVector = getThePosition(neighbor, tmpAttractVector).combineToSelf(1, -1, position);/*w w w . j a v a 2 s. c o m*/ double oldDistance = magnitude(attractVector); double newDistance; double factor = 0; double deltaDist = oldDistance - distToNeighbor; if (oldDistance > distToNeighbor) { newDistance = Math.pow(deltaDist, attractionStrength); } else { newDistance = -targetDistance * atanh((-deltaDist) / distToNeighbor); if (Math.abs(newDistance) > (Math.abs(deltaDist))) { newDistance = -targetDistance * (-deltaDist); } } newDistance *= learningRate; if (oldDistance != 0) { factor = newDistance / oldDistance; } add(delta, attractVector, factor); }); ArrayRealVector repelVector = newVector(); double maxEffectiveDistance = targetDistance * maxRepulsionDistance; ArrayRealVector nodePos = newVector(); // calculate repulsion with all non-neighbors DistanceMetric distanceFunction = this.distanceFunction; double minDistance = this.minDistance; double repulsiveWeakness = this.repulsiveWeakness; for (V node : vertices) { if (node == null) continue; //vertices.forEach((Consumer<N>)node -> { //for (final N node : vertices) { if ((node == nodeToAlign) || (neighbors.containsKey(node))) continue; double oldDistance = distanceFunction.subtractIfLessThan(getThePosition(node, nodePos), position, repelVector, maxEffectiveDistance); if (oldDistance == Double.POSITIVE_INFINITY) continue; //too far to matter if (oldDistance < minDistance) oldDistance = minDistance; //continue; //throw new RuntimeException("invalid oldDistance"); double newDistance = -targetDistance * Math.pow(oldDistance, -repulsiveWeakness); if (Math.abs(newDistance) > targetDistance) { newDistance = Math.copySign(targetDistance, newDistance); } newDistance *= learningRate; add(delta, repelVector, newDistance / oldDistance); } /*if (normalizeRepulsion) nodeSpeed/=delta.getNorm(); //TODO check when norm = 0*/ if (nodeSpeed != 1.0) { delta.mapMultiplyToSelf(nodeSpeed); } double moveDistance = magnitude(delta); if (!Double.isFinite(moveDistance)) throw new RuntimeException("invalid magnitude"); if (moveDistance > targetDistance * acceptableMaxDistanceFactor) { double newLearningRate = ((targetDistance * acceptableMaxDistanceFactor) / moveDistance); if (newLearningRate < learningRate) { this.learningRate = newLearningRate; } else { this.learningRate *= LEARNING_RATE_INCREASE_FACTOR / vertices.length; } moveDistance = DEFAULT_TOTAL_MOVEMENT; } else { add(position, delta); } if (moveDistance > maxMovement) { maxMovement = moveDistance; } totalMovement += moveDistance; 
originalPosition.mapMultiplyToSelf(scale); move(nodeToAlign, originalPosition.getEntry(0), originalPosition.getEntry(1)); return originalPosition; }
From source file:beast.evolution.tree.RandomTree.java
/**
 * @param nodes
 * @param demographic
 * @return the root node of the given array of nodes after simulation of the
 *         coalescent under the given demographic model.
 * @throws beast.evolution.tree.RandomTree.ConstraintViolatedException
 */
public Node simulateCoalescentWithMax(final List<Node> nodes, final PopulationFunction demographic,
        final double maxHeight) throws ConstraintViolatedException {
    // sanity check - disjoint trees
    // if( ! Tree.Utils.allDisjoint(nodes) ) {
    //     throw new RuntimeException("non disjoint trees");
    // }

    if (nodes.size() == 0) {
        throw new IllegalArgumentException("empty nodes set");
    }

    for (int attempts = 0; attempts < 1000; ++attempts) {
        final List<Node> rootNode = simulateCoalescent(nodes, demographic, 0.0, maxHeight);
        if (rootNode.size() == 1) {
            return rootNode.get(0);
        }
    }

    if (Double.isFinite(maxHeight)) {
        double h = -1;
        for (Node n : nodeList) {
            h = Math.max(h, n.getHeight());
        }
        assert h < maxHeight;
        double dt = (maxHeight - h) / (nodeList.size() + 1);
        while (nodeList.size() > 1) {
            int k = nodeList.size() - 1;
            final Node left = nodeList.remove(k);
            final Node right = nodeList.get(k - 1);

            final Node newNode = newNode();
            newNode.setNr(nextNodeNr++); // multiple tries may generate an excess of nodes
            assert (nextNodeNr <= nrOfTaxa * 2 - 1);
            newNode.setHeight(h + dt);
            newNode.setLeft(left);
            left.setParent(newNode);
            newNode.setRight(right);
            right.setParent(newNode);

            nodeList.set(k - 1, newNode);
        }
        assert (nodeList.size() == 1);
        return nodeList.get(0);
    }

    throw new RuntimeException("failed to merge trees after 1000 tries!");
}
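In this example Double.isFinite(maxHeight) distinguishes the case where a height cap was supplied from the unbounded case, with POSITIVE_INFINITY acting as a "no limit" sentinel. A tiny sketch of that sentinel convention; the method and parameter names are illustrative:

// POSITIVE_INFINITY as a "no limit" sentinel; isFinite detects whether a real bound was given.
static double clampHeight(double height, double maxHeight) {
    if (Double.isFinite(maxHeight)) {
        return Math.min(height, maxHeight); // a bound was supplied, enforce it
    }
    return height; // maxHeight == +Infinity means "unbounded"
}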