List of usage examples for java.lang.Double.isFinite
public static boolean isFinite(double d)
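The method returns true if the argument is a finite floating-point value, and false otherwise (for NaN and infinity arguments). Before the real-world examples below, here is a minimal, self-contained sketch of that contract; the class name DoubleIsFiniteDemo and the sample values are illustrative only and do not come from the source files that follow.

public class DoubleIsFiniteDemo {
    public static void main(String[] args) {
        System.out.println(Double.isFinite(42.0));                      // true
        System.out.println(Double.isFinite(Double.MAX_VALUE));          // true: large but finite
        System.out.println(Double.isFinite(Double.POSITIVE_INFINITY));  // false
        System.out.println(Double.isFinite(1.0 / 0.0));                 // false: division by zero yields infinity
        System.out.println(Double.isFinite(Double.NaN));                // false
        System.out.println(Double.isFinite(0.0 / 0.0));                 // false: 0.0/0.0 yields NaN
    }
}

A pattern common to the examples below is to use isFinite as a guard before trusting a computed or configured value, falling back to a default or sentinel when the check fails.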
From source file:nl.rivm.cib.episim.model.disease.infection.MSEIRSTest.java
// Uses Double.isFinite to validate the configured step size maxDt before using it;
// falls back to the simulation end time T[1] otherwise.
public static Observable<Entry<Double, long[]>> stochasticSellke(final SIRConfig config, final double maxDt) {
    return Observable.create(sub -> {
        final double beta = config.reproduction() / config.recovery();
        final long[] y = config.population();
        final double[] T = config.t();
        final double dt = Double.isFinite(maxDt) && maxDt > 0 ? maxDt : T[1];

        final Long seed = config.seed();
        final RandomGenerator rng = new MersenneTwister(seed == null ? System.currentTimeMillis() : seed);
        final ExponentialDistribution resistanceDist = new ExponentialDistribution(rng, 1),
                recoverDist = new ExponentialDistribution(rng, config.recovery());

        // pending infections (mapping resistance -> amount)
        final TreeMap<Double, Integer> tInfect = IntStream.range(0, (int) y[0])
                .mapToObj(i -> resistanceDist.sample())
                .collect(Collectors.toMap(r -> r, r -> 1, Integer::sum, TreeMap::new));
        // pending recoveries (mapping time -> amount)
        final TreeMap<Double, Integer> tRecover = new TreeMap<>();
        double cumPres = 0;

        // Re-initialize infectives as susceptibles with zero resistance
        tInfect.put(cumPres, (int) y[1]);
        y[0] += y[1]; // I -> S
        y[1] -= y[1]; // I -> 0

        for (double t = T[0]; t < T[1];) {
            publishCopy(sub, t, y);
            final long localPopSize = y[0] + y[1] + y[2];
            final Double ri = tInfect.isEmpty() ? null : tInfect.firstKey(),
                    ti = ri == null ? null :
                            // now + remaining resistance per relative pressure
                            t + (ri - cumPres) / (beta * Math.max(y[1], 1) / localPopSize),
                    tr = tRecover.isEmpty() ? null : tRecover.firstKey();

            // time of next infection is earliest
            if (ti != null && (tr == null || ti < tr)) {
                final int ni = tInfect.remove(ri);
                cumPres = ri;

                // publish intermediate values
                for (double t1 = Math.min(ti, t + dt), tMax = Math.min(T[1], ti); t1 < tMax; t1 += dt)
                    publishCopy(sub, t1, y);

                // infect
                t = ti;
                y[0] -= ni; // from S
                y[1] += ni; // to I

                // schedule S_t recoveries at t+Exp(1/gamma)
                for (int i = 0; i < ni; i++)
                    tRecover.compute(t + recoverDist.sample(), (k, v) -> v == null ? 1 : v + 1);
            }
            // time of next recovery is earliest
            else if (tr != null) {
                final int nr = tRecover.remove(tr);
                if (ri != null)
                    // advance cumulative pressure by dt * relative pressure
                    cumPres += (tr - t) * beta * y[1] / localPopSize;

                // publish intermediate values
                for (double t1 = Math.min(tr, t + dt), tMax = Math.min(T[1], tr); t1 < tMax; t1 += dt)
                    publishCopy(sub, t1, y);

                // recover
                t = tr;
                y[1] -= nr; // from I
                y[2] += nr; // to R
            }
            // no events remaining
            else {
                // publish intermediate values
                for (double t1 = t + dt; t1 < T[1]; t1 += dt)
                    publishCopy(sub, t1, y);
                // time ends
                break;
            }
        }
        sub.onComplete();
    });
}
From source file:gr.cti.android.experimentation.controller.api.RestApiDataController.java
// Uses Double.isFinite to check that the mean of the aggregated measurements is a
// usable number before adding it to the JSON response; otherwise the sample count
// is reported instead.
private JSONObject getExperimentHourlyData(final String experiment, final int deviceId, final String after,
        final String to, final int accuracy) {
    final String format = getFormat(accuracy);
    final DecimalFormat df = new DecimalFormat(format);
    final long start = parseDateMillis(after);
    final long end = parseDateMillis(to);
    final Set<Result> results;
    if (deviceId == 0) {
        results = resultRepository.findByExperimentIdAndTimestampAfter(Integer.parseInt(experiment), start);
    } else {
        results = resultRepository.findByExperimentIdAndDeviceIdAndTimestampAfterOrderByTimestampAsc(
                Integer.parseInt(experiment), deviceId, start);
    }
    try {
        final Map<Integer, Map<String, Map<String, Map<String, DescriptiveStatistics>>>> dataAggregates = new HashMap<>();
        String longitude;
        String latitude;
        final DescriptiveStatistics wholeDataStatistics = new DescriptiveStatistics();
        final Map<Integer, Map<String, Map<String, Long>>> locationsHeatMap = new HashMap<>();
        for (final Result result : results) {
            try {
                if (!result.getMessage().startsWith("{")) {
                    continue;
                }
                if (end != 0 && result.getTimestamp() > end) {
                    continue;
                }
                final JSONObject message = new JSONObject(result.getMessage());
                int hour = new DateTime(result.getTimestamp()).getHourOfDay();
                if (message.has(LATITUDE) && message.has(LONGITUDE)) {
                    longitude = df.format(message.getDouble(LONGITUDE));
                    latitude = df.format(message.getDouble(LATITUDE));
                    if (!dataAggregates.containsKey(hour)) {
                        dataAggregates.put(hour, new HashMap<>());
                    }
                    if (!dataAggregates.get(hour).containsKey(longitude)) {
                        dataAggregates.get(hour).put(longitude, new HashMap<>());
                    }
                    if (!dataAggregates.get(hour).get(longitude).containsKey(latitude)) {
                        dataAggregates.get(hour).get(longitude).put(latitude, new HashMap<>());
                    }
                    // HeatMap
                    if (!locationsHeatMap.containsKey(hour)) {
                        locationsHeatMap.put(hour, new HashMap<>());
                    }
                    if (!locationsHeatMap.get(hour).containsKey(longitude)) {
                        locationsHeatMap.get(hour).put(longitude, new HashMap<>());
                    }
                    if (!locationsHeatMap.get(hour).get(longitude).containsKey(latitude)) {
                        locationsHeatMap.get(hour).get(longitude).put(latitude, 0L);
                    }
                    final Long val = locationsHeatMap.get(hour).get(longitude).get(latitude);
                    locationsHeatMap.get(hour).get(longitude).put(latitude, val + 1);
                    final Iterator iterator = message.keys();
                    if (longitude != null && latitude != null) {
                        while (iterator.hasNext()) {
                            final String key = (String) iterator.next();
                            if (key.equals(LATITUDE) || key.equals(LONGITUDE)) {
                                continue;
                            }
                            if (!dataAggregates.get(hour).get(longitude).get(latitude).containsKey(key)) {
                                dataAggregates.get(hour).get(longitude).get(latitude).put(key,
                                        new DescriptiveStatistics());
                            }
                            try {
                                String data = message.getString(key);
                                try {
                                    final double doubleData = Double.parseDouble(data);
                                    dataAggregates.get(hour).get(longitude).get(latitude).get(key)
                                            .addValue(doubleData);
                                    wholeDataStatistics.addValue(doubleData);
                                } catch (NumberFormatException ignore) {
                                    // non-numeric values are counted as occurrences
                                    dataAggregates.get(hour).get(longitude).get(latitude).get(key).addValue(1);
                                    wholeDataStatistics.addValue(1);
                                }
                            } catch (Exception e) {
                                LOGGER.error(e, e);
                            }
                        }
                    }
                }
            } catch (Exception e) {
                LOGGER.error(e, e);
            }
        }
        final JSONObject hourlyPoints = new JSONObject();
        for (final Integer hour : dataAggregates.keySet()) {
            final JSONArray addressPoints = new JSONArray();
            for (final String longit : dataAggregates.get(hour).keySet()) {
                for (final String latit : dataAggregates.get(hour).get(longit).keySet()) {
                    LOGGER.info("{" + longit + ":" + latit + "}");
                    final JSONArray measurement = new JSONArray();
                    try {
                        measurement.put(Double.parseDouble(latit));
                        measurement.put(Double.parseDouble(longit));
                        if (locationsHeatMap.containsKey(hour) && locationsHeatMap.get(hour).containsKey(longit)
                                && locationsHeatMap.get(hour).get(longit).containsKey(latit)) {
                            measurement.put(locationsHeatMap.get(hour).get(longit).get(latit));
                        } else {
                            measurement.put(0);
                        }
                        final JSONObject data = new JSONObject();
                        measurement.put(data);
                        for (final Object key : dataAggregates.get(hour).get(longit).get(latit).keySet()) {
                            final String keyString = (String) key;
                            final String part = keyString.split("\\.")[keyString.split("\\.").length - 1];
                            double value = dataAggregates.get(hour).get(longit).get(latit).get(keyString)
                                    .getMean();
                            LOGGER.info("value: " + value);
                            if (Double.isFinite(value) && value != 1) {
                                data.put(part, value);
                            } else {
                                // fall back to the sample count when the mean is not a usable value
                                value = dataAggregates.get(hour).get(longit).get(latit).get(keyString)
                                        .getValues().length;
                                data.put(part, value);
                            }
                        }
                        addressPoints.put(measurement);
                    } catch (JSONException e) {
                        LOGGER.error(e, e);
                    }
                }
            }
            try {
                hourlyPoints.put(String.valueOf(hour), addressPoints);
            } catch (JSONException e) {
                LOGGER.error(e, e);
            }
        }
        LOGGER.info(hourlyPoints.toString());
        return hourlyPoints;
    } catch (Exception e) {
        LOGGER.error(e, e);
    }
    return null;
}
From source file:de.tudarmstadt.lt.ltbot.postprocessor.DecesiveValueProducerPerplexity.java
// Uses Double.isFinite to map infinite or degenerate perplexity values to the
// sentinel value -1 before returning.
double getPerplexity(CrawlURI uri) {
    assert _lmprvdr != null : "String provider service must not be null here. This should have been checked before.";

    String cleaned_plaintext = _textExtractorInstance.getCleanedUtf8PlainText(uri).trim();
    String cleaned_plaintext_abbr = MULTIPLE_SPACES_PATTERN
            .matcher(StringUtils.abbreviate(cleaned_plaintext, 50)).replaceAll(" ");
    addExtraInfo(uri, EXTRA_INFO_PLAINTEXT_ABBREVIATED, cleaned_plaintext_abbr);
    if (cleaned_plaintext.isEmpty())
        return Double.POSITIVE_INFINITY;

    double perplexity = Double.POSITIVE_INFINITY;
    try {
        String docid = "#" + Integer.toHexString(cleaned_plaintext.hashCode());
        LOG.finest(String.format("Sending text with id '%s' to StringProvider: '%s' (length %d).", docid,
                cleaned_plaintext_abbr, cleaned_plaintext.length()));
        perplexity = computePerplexity(cleaned_plaintext);
        // if (Double.isNaN(perplexity)) {
        //     double perplexity_new = -1d;
        //     LOG.log(Level.WARNING, String.format("[%s '%s'] failed to get meaningful perplexity: %g. Setting perplexity to %g.", uri.toString(), cleaned_plaintext_abbr, perplexity, perplexity_new));
        //     perplexity = perplexity_new;
        // }
        LOG.finest(String.format("[%s, '%s'] perplexity: %g.", uri.toString(), cleaned_plaintext_abbr,
                perplexity));
    } catch (Throwable t) {
        for (int i = 1; t != null && i < 10; i++) {
            LOG.log(Level.SEVERE,
                    String.format("Could not compute perplexity for URI '%s' and text: '%s'. (%d %s:%s)",
                            uri.toString(), cleaned_plaintext_abbr, i, t.getClass().getSimpleName(),
                            t.getMessage()),
                    t);
            t = t.getCause();
            LOG.log(Level.SEVERE, "Requesting to pause crawl.");
            getCrawlController().requestCrawlPause();
            _paused_due_to_error = true;
        }
    }
    if (!Double.isFinite(perplexity) || perplexity <= 1) {
        LOG.log(Level.FINE, String.format(
                "[%s '%s'] resetting infinite perplexity to predefined maximum perplexity value (-1).",
                uri.toString(), cleaned_plaintext_abbr));
        perplexity = -1;
    }
    return perplexity;
}
From source file:io.gravitee.repository.elasticsearch.analytics.ElasticAnalyticsRepository.java
// Uses Double.isFinite to skip min/max/avg aggregation results that are not finite
// numbers before collecting them as data points.
private HistogramResponse toHistogramResponse(SearchResponse searchResponse, String key) {
    HistogramResponse histogramResponse = new HistogramResponse();
    if (searchResponse.getAggregations() == null) {
        return histogramResponse;
    }

    // Prepare data
    Bucket histogramBucket = new Bucket(key);
    Histogram dateHistogram = (Histogram) searchResponse.getAggregations().iterator().next();
    for (Histogram.Bucket dateBucket : dateHistogram.getBuckets()) {
        final long keyAsDate = ((DateTime) dateBucket.getKey()).getMillis();
        histogramResponse.timestamps().add(keyAsDate);

        Iterator<Aggregation> subAggregationsIte = dateBucket.getAggregations().iterator();
        if (subAggregationsIte.hasNext()) {
            while (subAggregationsIte.hasNext()) {
                Map<String, List<Data>> bucketData = histogramBucket.data();
                List<Data> data;
                Aggregation subAggregation = subAggregationsIte.next();
                if (subAggregation instanceof InternalAggregation)
                    switch (((InternalAggregation) subAggregation).type().name()) {
                    case "terms":
                        for (Terms.Bucket subTermsBucket : ((Terms) subAggregation).getBuckets()) {
                            data = bucketData.get(subTermsBucket.getKeyAsString());
                            if (data == null) {
                                data = new ArrayList<>();
                                bucketData.put(subTermsBucket.getKeyAsString(), data);
                            }
                            data.add(new Data(keyAsDate, subTermsBucket.getDocCount()));
                        }
                        break;
                    case "min":
                        InternalMin internalMin = ((InternalMin) subAggregation);
                        if (Double.isFinite(internalMin.getValue())) {
                            data = bucketData.get(internalMin.getName());
                            if (data == null) {
                                data = new ArrayList<>();
                                bucketData.put(internalMin.getName(), data);
                            }
                            data.add(new Data(keyAsDate, (long) internalMin.getValue()));
                        }
                        break;
                    case "max":
                        InternalMax internalMax = ((InternalMax) subAggregation);
                        if (Double.isFinite(internalMax.getValue())) {
                            data = bucketData.get(internalMax.getName());
                            if (data == null) {
                                data = new ArrayList<>();
                                bucketData.put(internalMax.getName(), data);
                            }
                            data.add(new Data(keyAsDate, (long) internalMax.getValue()));
                        }
                        break;
                    case "avg":
                        InternalAvg internalAvg = ((InternalAvg) subAggregation);
                        if (Double.isFinite(internalAvg.getValue())) {
                            data = bucketData.get(internalAvg.getName());
                            if (data == null) {
                                data = new ArrayList<>();
                                bucketData.put(internalAvg.getName(), data);
                            }
                            data.add(new Data(keyAsDate, (long) internalAvg.getValue()));
                        }
                        break;
                    default:
                        // nothing to do
                    }
            }
        } else {
            Map<String, List<Data>> bucketData = histogramBucket.data();
            List<Data> data = bucketData.get("hits");
            if (data == null) {
                data = new ArrayList<>();
                bucketData.put("hits", data);
            }
            data.add(new Data(keyAsDate, dateBucket.getDocCount()));
        }
    }
    histogramResponse.values().add(histogramBucket);
    return histogramResponse;
}
From source file:de.tudarmstadt.lt.ltbot.prefetch.DecesiveValuePrioritizer.java
// Uses Double.isFinite twice: once to detect a non-finite perplexity score, and
// once to check whether the NORMAL assignment boundary itself is unbounded.
int getPriorityAsSchedulingDirective(double perplexity) {
    if (perplexity <= 1d)
        return -1; // remove from frontier
    if (!Double.isFinite(perplexity)) {
        if (!Double.isFinite(_assignmentBoundaries[SchedulingConstants.NORMAL]))
            return SchedulingConstants.NORMAL; // default
        else
            return -1; // remove
    }
    // HIGHEST = 0, HIGH = 1, ... but reserve HIGHEST for prerequisites
    if (perplexity <= _assignmentBoundaries[SchedulingConstants.HIGH])
        return SchedulingConstants.HIGH; // higher than medium
    if (perplexity <= _assignmentBoundaries[SchedulingConstants.MEDIUM])
        return SchedulingConstants.MEDIUM; // higher than normal
    if (perplexity <= _assignmentBoundaries[SchedulingConstants.NORMAL])
        return SchedulingConstants.NORMAL; // default
    // else best remove from frontier
    // should not happen
    // assert false : "You should not be here";
    return -1;
}
From source file:org.rhwlab.variationalbayesian.SuperVoxelGaussianMixture.java
// Uses Double.isFinite to watch for divergence of the accumulating lower-bound sum.
public double L7() {
    double sum = 0.0;
    for (int k = 0; k < K; ++k) {
        double s1 = 0.5 * lnLambdaTilde[k];
        double s2 = 0.5 * X.getD() * beta[k] / (2 * Math.PI);
        double s3 = -X.getD() / 2.0;
        double s4 = -H(detW[k], nu[k], lnLambdaTilde[k]);
        sum = sum + s1 + s2 + s3 + s4;
        if (!Double.isFinite(sum)) {
            // no-op; left in the original source as a spot to catch a non-finite sum
            int iusahf = 0;
        }
    }
    return sum;
}
From source file:de.bund.bfr.knime.nls.fitting.FittingNodeModel.java
// Uses Double.isFinite to skip data rows that contain missing or non-finite values
// before running the optimization.
private Map<String, OptimizationResult> doFitting(Function f, BufferedDataTable table, ExecutionContext exec)
        throws ParseException, CanceledExecutionException {
    if (f.getTimeVariable() != null) {
        return new LinkedHashMap<>();
    }

    ListMultimap<String, Double> targetValues = ArrayListMultimap.create();
    Map<String, ListMultimap<String, Double>> argumentValues = new LinkedHashMap<>();

    for (String indep : f.getIndependentVariables()) {
        argumentValues.put(indep, ArrayListMultimap.create());
    }

    loop: for (DataRow row : table) {
        String id = IO.getString(row.getCell(table.getSpec().findColumnIndex(NlsUtils.ID_COLUMN)));
        if (id == null) {
            continue loop;
        }
        Map<String, Double> values = new LinkedHashMap<>();
        for (String var : f.getVariables()) {
            Double value = IO.getDouble(row.getCell(table.getSpec().findColumnIndex(var)));
            if (value == null || !Double.isFinite(value)) {
                continue loop;
            }
            values.put(var, value);
        }
        targetValues.put(id, values.get(f.getDependentVariable()));
        for (String indep : f.getIndependentVariables()) {
            argumentValues.get(indep).put(id, values.get(indep));
        }
    }

    Map<String, OptimizationResult> results = new LinkedHashMap<>();
    List<String> ids = readIds(table);

    numberOfFittings = ids.size();
    currentFitting = 0;

    for (String id : ids) {
        Map<String, List<Double>> argumentLists = new LinkedHashMap<>();
        for (String indep : f.getIndependentVariables()) {
            argumentLists.put(indep, argumentValues.get(indep).get(id));
        }
        Optimization optimizer;
        if (set.getLevelOfDetection() != null) {
            optimizer = MultivariateOptimization.createLodOptimizer(f.getTerms().get(f.getDependentVariable()),
                    f.getParameters(), targetValues.get(id), argumentLists, set.getLevelOfDetection());
        } else {
            optimizer = LeastSquaresOptimization.createVectorOptimizer(
                    f.getTerms().get(f.getDependentVariable()), f.getParameters(), targetValues.get(id),
                    argumentLists);
            if (set.isEnforceLimits()) {
                ((LeastSquaresOptimization) optimizer).getMinValues().putAll(set.getMinStartValues());
                ((LeastSquaresOptimization) optimizer).getMaxValues().putAll(set.getMaxStartValues());
            }
        }
        if (!set.getStartValues().isEmpty()) {
            results.put(id, optimizer.optimize(set.getnParameterSpace(), set.getnLevenberg(),
                    set.isStopWhenSuccessful(), set.getStartValues(), new LinkedHashMap<>(0),
                    set.getMaxLevenbergIterations(), progressListener, exec));
        } else {
            results.put(id, optimizer.optimize(set.getnParameterSpace(), set.getnLevenberg(),
                    set.isStopWhenSuccessful(), set.getMinStartValues(), set.getMaxStartValues(),
                    set.getMaxLevenbergIterations(), progressListener, exec));
        }
        currentFitting++;
    }

    return results;
}
From source file:eu.amidst.core.inference.ImportanceSamplingRobust.java
/**
 * {@inheritDoc}
 */
@Override
public double getExpectedValue(Variable var, Function<Double, Double> function) {
    // if (keepDataOnMemory) {
    //     weightedSampleStream = weightedSampleList.stream().sequential();
    // } else {
    //     computeWeightedSampleStream(false);
    // }
    if (parallelMode) {
        weightedSampleStream.parallel();
    }
    // Uses Double.isFinite to drop samples whose weight or weighted function value
    // overflowed to infinity before computing the weighted average.
    List<Double> sum = weightedSampleStream
            .map(ws -> Arrays.asList(Math.exp(ws.logWeight),
                    Math.exp(ws.logWeight) * function.apply(ws.assignment.getValue(var))))
            .filter(array -> (Double.isFinite(array.get(0)) && Double.isFinite(array.get(1))))
            .reduce(Arrays.asList(0.0, 0.0),
                    (e1, e2) -> Arrays.asList(e1.get(0) + e2.get(0), e1.get(1) + e2.get(1)));

    return sum.get(1) / sum.get(0);
}
From source file:beast.evolution.tree.ConstrainedClusterTree.java
/**
 * Go through MRCAPriors.
 * Since we can easily scale a clade, start with the highest MRCAPrior, then process the nested ones.
 * @throws MathException
 **/
static public void handlebounds(Node node, Map<Node, MRCAPrior> nodeToBoundMap, double EPSILON)
        throws MathException {
    if (!node.isLeaf()) {
        if (nodeToBoundMap.containsKey(node)) {
            MRCAPrior calibration = nodeToBoundMap.get(node);
            if (calibration.distInput.get() != null) {
                ParametricDistribution distr = calibration.distInput.get();
                distr.initAndValidate();
                double lower = distr.inverseCumulativeProbability(0.0) + distr.offsetInput.get();
                double upper = distr.inverseCumulativeProbability(1.0) + distr.offsetInput.get();

                // make sure the timing fits the constraint
                double height = node.getHeight();
                double newHeight = Double.NEGATIVE_INFINITY;
                if (height < lower) {
                    // Double.isFinite distinguishes a two-sided bound (use the midpoint)
                    // from an open-ended one (use the finite bound)
                    if (Double.isFinite(upper)) {
                        newHeight = (lower + upper) / 2.0;
                    } else {
                        newHeight = lower;
                    }
                }
                if (height > upper) {
                    if (Double.isFinite(lower)) {
                        newHeight = (lower + upper) / 2.0;
                    } else {
                        newHeight = upper;
                    }
                }
                if (Double.isFinite(newHeight)) {
                    double scale = newHeight / height;
                    // scale clade
                    node.scale(scale);

                    // adjust parents if necessary
                    Node node2 = node;
                    Node parent = node2.getParent();
                    while (parent != null && parent.getHeight() < node2.getHeight()) {
                        parent.setHeight(node2.getHeight() + EPSILON);
                        node2 = node2.getParent();
                        parent = node2.getParent();
                    }
                }
            }
        }
        for (Node child : node.getChildren()) {
            handlebounds(child, nodeToBoundMap, EPSILON);
        }
    }
}
From source file:org.rhwlab.variationalbayesian.GaussianMixture.java
// Uses Double.isFinite to skip terms where the responsibility is zero, since
// Math.log(0.0) yields negative infinity and would poison the sum.
public double L5() {
    double sum = 0.0;
    for (int n = 0; n < X.getN(); ++n) {
        for (int k = 0; k < K; ++k) {
            double lnr = Math.log(r.getEntry(n, k));
            if (Double.isFinite(lnr)) {
                sum = sum + r.getEntry(n, k) * lnr;
            }
        }
    }
    return sum;
}