List of usage examples for java.lang Double NaN
Field: public static final double NaN

The examples below show how java.lang.Double.NaN is used in real source files; each entry names the file it was taken from.
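Before the per-file examples, here is a minimal standalone sketch (not taken from any of the files below) of the Double.NaN behavior these examples rely on: NaN compares unequal to everything, including itself, so the only reliable test is Double.isNaN.

public class NaNDemo {
    public static void main(String[] args) {
        double nan = Double.NaN;

        // NaN is not equal to anything, including itself.
        System.out.println(nan == Double.NaN);             // false
        System.out.println(nan != nan);                    // true

        // The only reliable check is Double.isNaN.
        System.out.println(Double.isNaN(nan));             // true

        // NaN arises from undefined floating-point arithmetic.
        System.out.println(Double.isNaN(0.0 / 0.0));       // true
        System.out.println(Double.isNaN(Math.sqrt(-1.0))); // true
    }
}

Several of the entries below either return Double.NaN as a "no data" sentinel or test for it with Double.isNaN before using a value.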
From source file:eu.crisis_economics.utilities.EmpiricalDistribution.java
/** Get the maximum recorded value (range maximum). */
public double getMaxRecordedValue() {
    // Note: the original compared with == Double.NaN, which is always false;
    // Double.isNaN is required to detect the "no records" sentinel.
    if (Double.isNaN(lastValueInserted))
        throw new IllegalStateException("EmpiricalDistribution.getMaxRecordedValue: no records.");
    return maxRecordValue;
}
From source file:org.ulyssis.ipp.snapshot.TeamState.java
public TeamState addTagSeenEvent(Snapshot snapshot, TagSeenEvent event) {
    int newTagFragmentCount = tagFragmentCount;
    double newSpeed = Double.NaN;
    double newPredictedSpeed = Double.NaN;
    int lastEventId = 0;
    if (lastTagSeenEvent.isPresent()) {
        TagSeenEvent lastEvent = lastTagSeenEvent.get();
        if (lastEvent.getReaderId() == event.getReaderId()
                && Duration.between(lastEvent.getTime(), event.getTime())
                        .minusSeconds(MIN_TIME_BETWEEN_UPDATES).isNegative()) {
            LOG.info("Rejecting event because a tag for this team passed less than {} seconds ago",
                    MIN_TIME_BETWEEN_UPDATES);
            return this;
        }
        lastEventId = lastEvent.getReaderId();
    }
    int diff = (event.getReaderId() - lastEventId);
    if (diff < 0) {
        diff = Config.getCurrentConfig().getNbReaders() + diff;
    } else if (diff == 0 && (lastTagSeenEvent.isPresent() ||
            // TODO: Refactor this fustercluck of comparisons
            (snapshot.getStartTime().isBefore(event.getTime())
                    && !Duration.between(snapshot.getStartTime(), event.getTime())
                            .minusSeconds(MIN_TIME_BETWEEN_UPDATES).isNegative()))) {
        diff = Config.getCurrentConfig().getNbReaders();
    }
    newTagFragmentCount += diff;
    List<ReaderConfig> readers = Config.getCurrentConfig().getReaders();
    double distance = 0;
    if (lastTagSeenEvent.isPresent()) {
        for (int i = tagFragmentCount; i < newTagFragmentCount; i++) {
            int j = i % Config.getCurrentConfig().getNbReaders();
            int k = (i + 1) % Config.getCurrentConfig().getNbReaders();
            if (k > j) {
                distance += readers.get(k).getPosition() - readers.get(j).getPosition();
            } else if (j > k) {
                assert k == 0;
                distance += Config.getCurrentConfig().getTrackLength() - readers.get(j).getPosition();
            } else {
                // This can happen when there is only one reader.
                distance += Config.getCurrentConfig().getTrackLength();
            }
        }
        double time = Duration.between(lastTagSeenEvent.get().getTime(), event.getTime()).toMillis() / 1000D;
        newSpeed = distance / time;
        if (Double.isNaN(predictedSpeed)) {
            newPredictedSpeed = newSpeed;
        } else {
            newPredictedSpeed = newSpeed * ALPHA + predictedSpeed * (1 - ALPHA);
        }
    } else if (snapshot.getStartTime().isBefore(event.getTime())
            && !Duration.between(snapshot.getStartTime(), event.getTime())
                    .minusSeconds(MIN_TIME_BETWEEN_UPDATES).isNegative()) {
        double time = Duration.between(snapshot.getStartTime(), event.getTime()).toMillis() / 1000D;
        distance = readers.get(event.getReaderId()).getPosition();
        newSpeed = distance / time;
        if (Double.isNaN(predictedSpeed)) {
            newPredictedSpeed = newSpeed;
        } else {
            newPredictedSpeed = newSpeed * ALPHA + predictedSpeed * (1 - ALPHA);
        }
    }
    return new TeamState(Optional.of(event), newTagFragmentCount, newSpeed, newPredictedSpeed);
}
From source file:com.fay.statics.SummaryStat.java
public double lastValue() {
    if (numObs == 0)
        return Double.NaN;
    return lastValue;
}
From source file:org.jfree.data.xy.DefaultOHLCDataset.java
/**
 * Returns the low-value (as a double primitive) for an item within a
 * series.
 *
 * @param series  the series (zero-based index).
 * @param item  the item (zero-based index).
 *
 * @return The low-value.
 */
@Override
public double getLowValue(int series, int item) {
    double result = Double.NaN;
    Number low = getLow(series, item);
    if (low != null) {
        result = low.doubleValue();
    }
    return result;
}
From source file:edu.harvard.iq.dataverse.util.SumStatCalculator.java
private static double calculateMean(double[] values, final int begin, final int length) {
    if (values == null || length == 0) {
        return Double.NaN;
    }
    double sampleSize = length;
    // Compute initial estimate using definitional formula
    double xbar = calculateSum(values) / sampleSize;
    // Compute correction factor in second pass
    double correction = 0;
    for (int i = begin; i < begin + length; i++) {
        correction += values[i] - xbar;
    }
    return xbar + (correction / sampleSize);
}
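The method above is the corrected two-pass mean: the second loop sums residuals around the first-pass estimate, which cancel exactly in real arithmetic but absorb floating-point rounding error, and an empty input yields Double.NaN rather than an exception. A minimal self-contained sketch of the same idea (illustrative only, not from the file above):

import java.util.Arrays;

public class TwoPassMeanDemo {
    static double mean(double[] values) {
        if (values == null || values.length == 0) {
            return Double.NaN; // same NaN-as-"no data" convention as above
        }
        // First pass: definitional mean.
        double xbar = Arrays.stream(values).sum() / values.length;
        // Second pass: residual sum corrects accumulated rounding error.
        double correction = 0;
        for (double v : values) {
            correction += v - xbar;
        }
        return xbar + correction / values.length;
    }

    public static void main(String[] args) {
        // A large common offset makes naive summation lose low-order digits.
        double[] data = {1e9 + 0.1, 1e9 + 0.2, 1e9 + 0.3};
        System.out.println(mean(data));          // ~1.0000000002E9
        System.out.println(mean(new double[0])); // NaN for empty input
    }
}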
From source file:com.opengamma.engine.view.calc.SingleNodeExecutor.java
@Override
public Future<ExecutionResult> execute(final DependencyGraph graph,
        final Queue<ExecutionResult> executionResultQueue, final GraphExecutorStatisticsGatherer statistics) {
    long jobId = JobIdSource.getId();
    CalculationJobSpecification jobSpec = new CalculationJobSpecification(_cycle.getUniqueId(),
            graph.getCalculationConfigurationName(), _cycle.getValuationTime(), jobId);
    List<DependencyNode> order = graph.getExecutionOrder();
    List<CalculationJobItem> items = new ArrayList<CalculationJobItem>();
    final Set<ValueSpecification> privateValues = new HashSet<ValueSpecification>();
    final Set<ValueSpecification> sharedValues = new HashSet<ValueSpecification>(
            graph.getTerminalOutputSpecifications());
    for (DependencyNode node : order) {
        final Set<ValueSpecification> inputs = node.getInputValues();
        final CalculationJobItem jobItem = new CalculationJobItem(
                node.getFunction().getFunction().getFunctionDefinition().getUniqueId(),
                node.getFunction().getParameters(), node.getComputationTarget(), inputs,
                node.getOutputValues());
        items.add(jobItem);
        // If node has dependencies which AREN'T in the graph, its outputs for those nodes are "shared" values
        for (ValueSpecification specification : node.getOutputValues()) {
            if (sharedValues.contains(specification)) {
                continue;
            }
            boolean isPrivate = true;
            for (DependencyNode dependent : node.getDependentNodes()) {
                if (!graph.containsNode(dependent)) {
                    isPrivate = false;
                    break;
                }
            }
            if (isPrivate) {
                privateValues.add(specification);
            } else {
                sharedValues.add(specification);
            }
        }
        // If node has inputs which haven't been seen already, they can't have been generated within this graph so are "shared"
        for (ValueSpecification specification : inputs) {
            if (sharedValues.contains(specification) || privateValues.contains(specification)) {
                continue;
            }
            sharedValues.add(specification);
        }
    }
    s_logger.debug("{} private values, {} shared values in graph", privateValues.size(), sharedValues.size());
    final CacheSelectHint cacheHint;
    if (privateValues.size() < sharedValues.size()) {
        cacheHint = CacheSelectHint.privateValues(privateValues);
    } else {
        cacheHint = CacheSelectHint.sharedValues(sharedValues);
    }
    s_logger.info("Enqueuing {} to invoke {} functions", new Object[] { jobSpec, items.size() });
    statistics.graphProcessed(graph.getCalculationConfigurationName(), 1, items.size(), Double.NaN,
            Double.NaN);
    AtomicExecutorCallable runnable = new AtomicExecutorCallable(executionResultQueue);
    AtomicExecutorFuture future = new AtomicExecutorFuture(runnable,
            order.toArray(new DependencyNode[order.size()]), statistics);
    _executingSpecifications.put(jobSpec, future);
    _cycle.getViewProcessContext().getViewProcessorQueryReceiver().addJob(jobSpec, graph);
    Cancelable cancel = _cycle.getViewProcessContext().getComputationJobDispatcher()
            .dispatchJob(new CalculationJob(jobSpec, _cycle.getFunctionInitId(), null, items, cacheHint), this);
    future.setCancel(cancel);
    return future;
}
From source file:com.clustercontrol.performance.operator.RevercePorlishNotation.java
/** Evaluates the expression in reverse Polish notation. */
@Override
public double calc(DataTable currentTable, DataTable previousTable, String deviceName)
        throws CollectedDataNotFoundException, InvalidValueException {
    double right = 0D;
    double left = 0D;
    double result = 0D;
    Stack<Double> _stack = new Stack<Double>();
    if (this.expArray.length == 1) {
        result = getVal(expArray[0], currentTable, previousTable, deviceName);
    } else {
        for (int i = 0; i < this.expArray.length; i++) {
            try {
                if (expArray[i] instanceof OPERATOR) {
                    right = _stack.pop();
                    left = _stack.pop();
                    switch ((OPERATOR) expArray[i]) {
                    case ADDITION:
                        result = left + right;
                        break;
                    case SUBTRACTION:
                        result = left - right;
                        break;
                    case MULTIPLICATION:
                        result = left * right;
                        break;
                    case DIVISION:
                        if (right == 0) {
                            log.warn("0-divided, expression=" + expression);
                            // Return NaN on division by zero.
                            return Double.NaN;
                        }
                        result = left / right;
                        break;
                    }
                    _stack.push(Double.valueOf(result));
                } else {
                    _stack.push(getVal(expArray[i], currentTable, previousTable, deviceName));
                }
            } catch (CollectedDataNotFoundException | IllegalStateException | EmptyStackException e) {
                log.warn("calc [" + expression + "], " + e.getClass().getName() + ", " + e.getMessage());
                throw new InvalidValueException(e.getMessage());
            } catch (Exception e) {
                log.warn("calc [" + expression + "], " + e.getClass().getName() + ", " + e.getMessage(), e);
                throw new InvalidValueException(e.getMessage());
            }
        }
        if (_stack.size() > 1) {
            String messages = "expression is invalid, expression=" + expression;
            log.warn("calc : " + messages);
            throw new InvalidValueException(messages);
        }
    }
    return result;
}
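For context, a minimal standalone reverse-Polish evaluator over doubles (illustrative only, independent of the classes above) showing the same NaN-on-division-by-zero convention:

import java.util.ArrayDeque;
import java.util.Deque;

public class RpnDemo {
    // Evaluates a space-separated RPN expression; division by zero yields NaN.
    static double eval(String expr) {
        Deque<Double> stack = new ArrayDeque<>();
        for (String token : expr.trim().split("\\s+")) {
            switch (token) {
            case "+": case "-": case "*": case "/": {
                double right = stack.pop();
                double left = stack.pop();
                switch (token) {
                case "+": stack.push(left + right); break;
                case "-": stack.push(left - right); break;
                case "*": stack.push(left * right); break;
                default:
                    if (right == 0) {
                        return Double.NaN; // NaN as the error sentinel
                    }
                    stack.push(left / right);
                }
                break;
            }
            default:
                stack.push(Double.parseDouble(token));
            }
        }
        if (stack.size() != 1) {
            throw new IllegalArgumentException("invalid expression: " + expr);
        }
        return stack.pop();
    }

    public static void main(String[] args) {
        System.out.println(eval("3 4 2 * +")); // 11.0
        System.out.println(eval("1 0 /"));     // NaN
    }
}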
From source file:ch.epfl.leb.sass.models.illuminations.internal.SquareUniformIllumination.java
/**
 * Returns the irradiance in the sample at the point (x, y, z).
 *
 * @param x The x-position in the sample.
 * @param y The y-position in the sample.
 * @param z The z-position in the sample.
 */
@Override
public double getIrradiance(double x, double y, double z) {
    // Compute the absorption, if any.
    double abs = Math.exp(-4 * Math.PI * electricField.getRefractiveIndex().getN(x, y, z).getImaginary() * z
            / electricField.getWavelength());
    double irrad = this.power * abs / width / height;

    // TODO: Change this to an exception!
    // Note: the original asserted (irrad != Double.NaN), which is always true;
    // Double.isNaN is required to actually catch a NaN result.
    assert !Double.isNaN(irrad);

    return irrad;
}
From source file:de.unijena.bioinf.FragmentationTreeConstruction.computation.recalibration.HypothesenDrivenRecalibration.java
@Override
public Recalibration recalibrate(final FTree tree, final MassDeviationVertexScorer scorer,
        final boolean force) {
    // get peaks from tree
    final List<Fragment> fragments = new ArrayList<Fragment>(tree.getFragments());
    final FragmentAnnotation<ProcessedPeak> peakAno = tree.getFragmentAnnotationOrThrow(ProcessedPeak.class);
    Collections.sort(fragments, new Comparator<Fragment>() {
        @Override
        public int compare(Fragment o1, Fragment o2) {
            return Double.compare(o1.getFormula().getMass(), o2.getFormula().getMass());
        }
    });
    final SimpleMutableSpectrum spec = new SimpleMutableSpectrum();
    final SimpleMutableSpectrum ref = new SimpleMutableSpectrum();
    final PrecursorIonType ion = tree.getAnnotationOrThrow(PrecursorIonType.class);
    for (Fragment f : fragments) {
        if (peakAno.get(f) == null)
            continue;
        spec.addPeak(new Peak(peakAno.get(f).getOriginalMz(), peakAno.get(f).getRelativeIntensity()));
        final double referenceMass = ion.getIonization().addToMass(f.getFormula().getMass());
        ref.addPeak(new Peak(referenceMass, peakAno.get(f).getRelativeIntensity()));
    }
    final UnivariateFunction recalibrationFunction = method.recalibrate(spec, ref);
    return new Recalibration() {
        private double scoreBonus = Double.NaN;
        private FTree correctedTree = null;
        private boolean recomputeTree = false;

        @Override
        public double getScoreBonus() {
            if (Double.isNaN(scoreBonus)) {
                calculateScoreBonus();
            }
            return scoreBonus;
        }

        @Override
        public FTree getCorrectedTree(FragmentationPatternAnalysis analyzer, FTree oldTree) {
            if (correctedTree != null)
                return correctedTree;
            else
                return recomputeTree(analyzer, oldTree);
        }

        @Override
        public FTree getCorrectedTree(FragmentationPatternAnalysis analyzer) {
            return getCorrectedTree(analyzer, null);
        }

        private FTree recomputeTree(FragmentationPatternAnalysis analyzer, FTree oldTree) {
            getScoreBonus();
            final UnivariateFunction f = recalibrationFunction;
            if (f instanceof Identity && !force) {
                correctedTree = tree;
                return tree;
            }
            final ProcessedInput originalInput = tree.getAnnotationOrThrow(ProcessedInput.class);
            final MutableMeasurementProfile prof = new MutableMeasurementProfile(
                    originalInput.getMeasurementProfile());
            prof.setStandardMs2MassDeviation(prof.getStandardMs2MassDeviation().multiply(deviationScale));
            final TreeScoring treeScoring = tree.getAnnotationOrThrow(TreeScoring.class);
            // TODO: Check if this is working correct
            ProcessedInput pinp = analyzer.preprocessing(originalInput.getOriginalInput(), prof,
                    toPolynomial(f));
            MultipleTreeComputation mtc = analyzer.computeTrees(pinp)
                    .onlyWith(Arrays.asList(tree.getRoot().getFormula()))
                    .withLowerbound(force ? 0 : treeScoring.getOverallScore()).withoutRecalibration();
            if (oldTree != null)
                mtc = mtc.withBackbones(oldTree);
            correctedTree = mtc.optimalTree();
            if (correctedTree == null) {
                //assert !force;
                correctedTree = tree;
            }
            if (deviationScale == 1) {
                if (correctedTree.getAnnotationOrThrow(TreeScoring.class).getOverallScore() >= oldTree
                        .getAnnotationOrThrow(TreeScoring.class).getOverallScore())
                    return correctedTree;
                else
                    return oldTree;
            }
            final FTree ft2 = analyzer
                    .computeTrees(analyzer.preprocessing(originalInput.getOriginalInput(), prof))
                    .onlyWith(Arrays.asList(tree.getRoot().getFormula()))
                    .withLowerbound(0 /*correctedTree.getScore()*/).withoutRecalibration()
                    .withBackbones(correctedTree).optimalTree();
            if (ft2 == null)
                return correctedTree;
            else if (ft2.getAnnotationOrThrow(TreeScoring.class).getOverallScore() > correctedTree
                    .getAnnotationOrThrow(TreeScoring.class).getOverallScore())
                return ft2;
            return correctedTree;
        }

        private void calculateScoreBonus() {
            if (recalibrationFunction instanceof Identity) {
                scoreBonus = 0d;
                return;
            }
            final ProcessedInput input = tree.getAnnotationOrThrow(ProcessedInput.class);
            final Deviation dev = input.getMeasurementProfile().getStandardMs2MassDeviation();
            final PrecursorIonType ion = tree.getAnnotationOrThrow(PrecursorIonType.class);
            double sc = 0d;
            double distance = 0d;
            final FragmentAnnotation<ProcessedPeak> peakAno = tree
                    .getFragmentAnnotationOrThrow(ProcessedPeak.class);
            for (Fragment f : fragments) {
                if (peakAno.get(f) == null)
                    continue;
                final double oldMz = peakAno.get(f).getOriginalMz();
                final double newMz = recalibrationFunction.value(oldMz);
                distance += Math.abs(newMz - oldMz);
                final double theoreticalMz = ion.getIonization().addToMass(f.getFormula().getMass());
                final NormalDistribution dist = scorer.getDistribution(newMz,
                        peakAno.get(f).getRelativeIntensity(), input);
                final double newScore = Math.log(dist.getErrorProbability(newMz - theoreticalMz));
                final double oldScore = Math.log(dist.getErrorProbability(oldMz - theoreticalMz));
                sc += (newScore - oldScore);
            }
            this.scoreBonus = sc;
            final double avgDist = distance / fragments.size();
            recomputeTree = (avgDist >= distanceThreshold);
        }

        @Override
        public boolean shouldRecomputeTree() {
            getScoreBonus();
            return recomputeTree;
        }

        @Override
        public UnivariateFunction recalibrationFunction() {
            return recalibrationFunction;
        }
    };
}
From source file:edu.cudenver.bios.power.glmm.GLMMTestPillaiBartlett.java
/**
 * Calculate the numerator degrees of freedom for the PBT, based on
 * whether the null or alternative hypothesis is assumed true.
 *
 * @param type distribution type
 * @return numerator degrees of freedom
 * @throws IllegalArgumentException
 */
@Override
public double getNumeratorDF(DistributionType type) {
    double a = C.getRowDimension();
    double b = U.getColumnDimension();
    double s = (a < b) ? a : b;
    double df = Double.NaN;
    if (fMethod == FApproximation.PILLAI_ONE_MOMENT
            || fMethod == FApproximation.PILLAI_ONE_MOMENT_OMEGA_MULT) {
        df = a * b;
    } else {
        double mu1 = a * b / (totalN - rank + a);
        double factor1 = (totalN - rank + a - b) / (totalN - rank + a - 1);
        double factor2 = (totalN - rank) / (totalN - rank + a + 2);
        double variance = 2 * a * b * factor1 * factor2 / ((totalN - rank + a) * (totalN - rank + a));
        double mu2 = variance + mu1 * mu1;
        double m1 = mu1 / s;
        double m2 = mu2 / (s * s);
        double denom = m2 - m1 * m1;
        df = 2 * m1 * (m1 - m2) / denom;
    }
    return df;
}