List of usage examples for java.lang.Double.isNaN
public static boolean isNaN(double v)
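Returns true if and only if the argument is a NaN value. The key property motivating this method: NaN is the only double value that is not equal to itself, so an == comparison cannot detect it. A minimal illustrative snippet (not taken from any of the projects below):

public class IsNaNDemo {
    public static void main(String[] args) {
        double nan = 0.0 / 0.0; // one way to produce NaN

        System.out.println(nan == Double.NaN);  // false: NaN is not equal to anything, even itself
        System.out.println(Double.isNaN(nan));  // true: the reliable check

        // A common idiom, seen in several of the examples below:
        // substitute a default value when a computation yields NaN
        double percentile = Double.NaN;
        double safe = Double.isNaN(percentile) ? 0.0 : percentile;
        System.out.println(safe);               // 0.0
    }
}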
From source file:es.udc.gii.common.eaf.plugin.parameter.jade.JADECRAdaptiveParameter.java
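Uses isNaN to guard an adaptive update: when no individual contributes a CR value, the population mean is 0.0/0 = NaN, so the running mu_cr is only updated when the mean is finite.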
@Override
public double get(EvolutionaryAlgorithm algorithm) {
    double meana_cr;
    double cr_i;
    List<Individual> individuals;
    int cr_individuals;
    double cr_ind;
    // Check that the individuals are of the JADE type:
    if (algorithm.getGenerations() > this.alg_generation) {
        // Compute mu:
        individuals = algorithm.getPopulation().getIndividuals();
        meana_cr = 0.0;
        cr_individuals = 0;
        for (Individual i : individuals) {
            if (i instanceof JADEIndividual) {
                cr_ind = ((JADEIndividual) i).getCR();
                if (cr_ind != -Double.MAX_VALUE) {
                    meana_cr += cr_ind;
                    cr_individuals++;
                }
            } else {
                throw new ConfigurationException(
                        "JADECRAdaptiveParameter requires individuals of type JADEIndividual");
            }
        }
        meana_cr /= cr_individuals;
        if (!Double.isNaN(meana_cr) && !Double.isInfinite(meana_cr)) {
            this.mu_cr = (1.0 - this.c) * this.mu_cr + this.c * meana_cr;
            this.mu_cr = (this.mu_cr > 1.0 ? 1.0 : (this.mu_cr < 0.0 ? 0.0 : this.mu_cr));
        }
        this.alg_generation++;
    }

    cr_i = 0.0;
    NormalDistributionImpl n = new NormalDistributionImpl(this.mu_cr, this.std_cr);
    try {
        double r = EAFRandom.nextDouble();
        cr_i = n.inverseCumulativeProbability(r);
        cr_i = (cr_i > 1.0 ? 1.0 : (cr_i < 0.0 ? 0.0 : cr_i));
    } catch (MathException ex) {
        Logger.getLogger(JADECRAdaptiveParameter.class.getName()).log(Level.SEVERE, null, ex);
    }
    return cr_i;
}
From source file:geogebra.kernel.AlgoIntegralDefinite.java
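Marks the definite integral as undefined when the integrand is NaN or infinite at either bound.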
protected final void compute() {
    if (!f.isDefined() || !ageo.isDefined() || !bgeo.isDefined()) {
        n.setUndefined();
        return;
    }

    // check for equal bounds
    double lowerLimit = a.getDouble();
    double upperLimit = b.getDouble();
    if (Kernel.isEqual(lowerLimit, upperLimit)) {
        n.setValue(0);
        return;
    }

    // check if f(a) and f(b) are defined
    double fa = f.evaluate(lowerLimit);
    double fb = f.evaluate(upperLimit);
    if (Double.isNaN(fa) || Double.isInfinite(fa) || Double.isNaN(fb) || Double.isInfinite(fb)) {
        n.setUndefined();
        return;
    }

    // return if it should not be evaluated (i.e. is shade-only)
    if (evaluate != null && !evaluate.getBoolean()) {
        n.setValue(0);
        return;
    }

    /*
     * Try to use symbolic integral.
     *
     * We only do this for functions that do NOT include divisions by their variable.
     * Otherwise there might be problems like:
     * Integral[ 1/x, -2, -1 ] would be undefined (log(-1) - log(-2))
     * Integral[ 1/x^2, -1, 1 ] would be defined (-2)
     */
    if (symbIntegral != null && symbIntegral.isDefined() && !f.includesDivisionByVar()) {
        double val = symbIntegral.evaluate(upperLimit) - symbIntegral.evaluate(lowerLimit);
        n.setValue(val);
        if (n.isDefined())
            return;
    }

    // numerical integration
    double integral = numericIntegration(f, lowerLimit, upperLimit);
    n.setValue(integral);
}
From source file:ml.shifu.shifu.core.dtrain.nn.NNParquetWorker.java
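Replaces NaN feature values in training records with 0f, treating them as missing values.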
@Override
public void load(GuaguaWritableAdapter<LongWritable> currentKey, GuaguaWritableAdapter<Tuple> currentValue,
        WorkerContext<NNParams, NNParams> workerContext) {
    // init field list for later read
    this.initFieldList();
    LOG.info("subFeatureSet size: {} ; subFeatureSet: {}", subFeatureSet.size(), subFeatureSet);
    super.count += 1;
    if ((super.count) % 5000 == 0) {
        LOG.info("Read {} records.", super.count);
    }

    float[] inputs = new float[super.featureInputsCnt];
    float[] ideal = new float[super.outputNodeCount];

    if (super.isDry) {
        // dry train, use empty data.
        addDataPairToDataSet(0,
                new BasicFloatMLDataPair(new BasicFloatMLData(inputs), new BasicFloatMLData(ideal)));
        return;
    }

    long hashcode = 0;
    float significance = 1f;

    // use guava Splitter to iterate only once
    // use NNConstants.NN_DEFAULT_COLUMN_SEPARATOR to replace getModelConfig().getDataSetDelimiter();
    // super follows the function in akka mode.
    int index = 0, inputsIndex = 0, outputIndex = 0;

    Tuple tuple = currentValue.getWritable();

    // back from foreach to a for loop because in earlier versions the tuple was not iterable
    for (int i = 0; i < tuple.size(); i++) {
        Object element = null;
        try {
            element = tuple.get(i);
        } catch (ExecException e) {
            throw new GuaguaRuntimeException(e);
        }
        float floatValue = 0f;
        if (element != null) {
            if (element instanceof Float) {
                floatValue = (Float) element;
            } else {
                // check here to avoid bad performance in failed NumberFormatUtils.getFloat(input, 0f)
                floatValue = element.toString().length() == 0 ? 0f
                        : NumberFormatUtils.getFloat(element.toString(), 0f);
            }
        }
        // no idea why NaN appears in the input data; process it as a missing value
        // (TODO: handle according to norm type)
        floatValue = (Float.isNaN(floatValue) || Double.isNaN(floatValue)) ? 0f : floatValue;

        if (index == (super.inputNodeCount + super.outputNodeCount)) {
            // do we need to check if not weighted directly set to 1f; if such logic is non-weight at
            // first, then weight, how to process???
            if (StringUtils.isBlank(modelConfig.getWeightColumnName())) {
                significance = 1f;
                // break here if we reach weight column which is last column
                break;
            }

            assert element != null;
            if (element != null && element instanceof Float) {
                significance = (Float) element;
            } else {
                // check here to avoid bad performance in failed NumberFormatUtils.getFloat(input, 0f)
                significance = element.toString().length() == 0 ? 1f
                        : NumberFormatUtils.getFloat(element.toString(), 1f);
            }
            // if invalid weight, set it to 1f and warn in the log
            if (Float.compare(significance, 0f) < 0) {
                LOG.warn("The {} record in current worker weight {} is less than 0f, it is invalid, set it to 1.",
                        count, significance);
                significance = 1f;
            }
            // break here if we reach weight column which is last column
            break;
        } else {
            int columnIndex = requiredFieldList.getFields().get(index).getIndex();
            if (columnIndex >= super.columnConfigList.size()) {
                assert element != null;
                if (element != null && element instanceof Float) {
                    significance = (Float) element;
                } else {
                    // check here to avoid bad performance in failed NumberFormatUtils.getFloat(input, 0f)
                    significance = element.toString().length() == 0 ? 1f
                            : NumberFormatUtils.getFloat(element.toString(), 1f);
                }
                break;
            } else {
                ColumnConfig columnConfig = super.columnConfigList.get(columnIndex);
                if (columnConfig != null && columnConfig.isTarget()) {
                    if (modelConfig.isRegression()) {
                        ideal[outputIndex++] = floatValue;
                    } else {
                        if (modelConfig.getTrain().isOneVsAll()) {
                            // if one vs all, set the correlated ideal value according to trainerId, which
                            // means in the trainer with id 0, target 0 is treated as 1, others are 0. Such
                            // target values are set to the index of tags like [0, 1, 2, 3] compared with
                            // ["a", "b", "c", "d"]
                            ideal[outputIndex++] = Float.compare(floatValue, trainerId) == 0 ? 1f : 0f;
                        } else {
                            if (modelConfig.getTags().size() == 2) {
                                // if only 2 classes, output is 1 node. if target = 0, meaning 0 is the index
                                // for positive prediction, set positive to 1 and negative to 0
                                int ideaIndex = (int) floatValue;
                                ideal[0] = ideaIndex == 0 ? 1f : 0f;
                            } else {
                                // for multiple classification
                                int ideaIndex = (int) floatValue;
                                ideal[ideaIndex] = 1f;
                            }
                        }
                    }
                } else {
                    if (subFeatureSet.contains(columnIndex)) {
                        inputs[inputsIndex++] = floatValue;
                        hashcode = hashcode * 31 + Double.valueOf(floatValue).hashCode();
                    }
                }
            }
        }
        index += 1;
    }

    // the output delimiter in norm can now be set by the user; if a special one is set and later
    // changed, this exception helps to quickly find such issues
    if (inputsIndex != inputs.length) {
        String delimiter = workerContext.getProps().getProperty(Constants.SHIFU_OUTPUT_DATA_DELIMITER,
                Constants.DEFAULT_DELIMITER);
        throw new RuntimeException("Input length is inconsistent with parsing size. Input original size: "
                + inputs.length + ", parsing size:" + inputsIndex + ", delimiter:" + delimiter + ".");
    }

    // sample-negative-only logic here
    if (modelConfig.getTrain().getSampleNegOnly()) {
        if (this.modelConfig.isFixInitialInput()) {
            // if fixInitialInput, sample out negative records whose hashcode falls in the 1-sampleRate range
            int startHashCode = (100 / this.modelConfig.getBaggingNum()) * this.trainerId;
            // here BaggingSampleRate means how much data will be used in training and validation;
            // if it is 0.8, we should take 1-0.8 to check endHashCode
            int endHashCode = startHashCode
                    + Double.valueOf((1d - this.modelConfig.getBaggingSampleRate()) * 100).intValue();
            if ((modelConfig.isRegression()
                    || (modelConfig.isClassification() && modelConfig.getTrain().isOneVsAll())) // regression or onevsall
                    && (int) (ideal[0] + 0.01d) == 0 // negative record
                    && isInRange(hashcode, startHashCode, endHashCode)) {
                return;
            }
        } else {
            // if not fixed initial input, for regression or onevsall multiple classification,
            // sample out negative records at random
            if ((modelConfig.isRegression()
                    || (modelConfig.isClassification() && modelConfig.getTrain().isOneVsAll())) // regression or onevsall
                    && (int) (ideal[0] + 0.01d) == 0 // negative record
                    && Double.compare(super.sampelNegOnlyRandom.nextDouble(),
                            this.modelConfig.getBaggingSampleRate()) >= 0) {
                return;
            }
        }
    }

    FloatMLDataPair pair = new BasicFloatMLDataPair(new BasicFloatMLData(inputs), new BasicFloatMLData(ideal));

    // up-sampling logic
    if (modelConfig.isRegression() && isUpSampleEnabled() && Double.compare(ideal[0], 1d) == 0) {
        // Double.compare(ideal[0], 1d) == 0 means positive tags; sample + 1 to avoid a sample count of 0
        pair.setSignificance(significance * (super.upSampleRng.sample() + 1));
    } else {
        pair.setSignificance(significance);
    }

    boolean isValidation = false;
    if (workerContext.getAttachment() != null && workerContext.getAttachment() instanceof Boolean) {
        isValidation = (Boolean) workerContext.getAttachment();
    }

    boolean isInTraining = addDataPairToDataSet(hashcode, pair, isValidation);

    // do bagging sampling only for training data
    if (isInTraining) {
        float subsampleWeights = sampleWeights(pair.getIdealArray()[0]);
        if (isPositive(pair.getIdealArray()[0])) {
            this.positiveSelectedTrainCount += subsampleWeights * 1L;
        } else {
            this.negativeSelectedTrainCount += subsampleWeights * 1L;
        }
        // set weights to significance; if 0, significance will be 0, that is bagging sampling
        pair.setSignificance(pair.getSignificance() * subsampleWeights);
    } else {
        // for validation data, according to the bagging sampling logic, we might sample the validation
        // set too, but since it is only used to compute validation error, not doing real sampling is ok
    }
}
From source file:gdsc.smlm.ij.plugins.SummariseResults.java
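Prints dash placeholders for a statistics column whose mean is NaN, i.e. when no values were collected.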
private void addSummary(MemoryPeakResults result) {
    DescriptiveStatistics[] stats = new DescriptiveStatistics[2];
    for (int i = 0; i < stats.length; i++) {
        stats[i] = new DescriptiveStatistics();
    }

    // Only process the statistics if we have a noise component
    final int size = result.size();
    if (size > 0 && result.getResults().get(0).noise > 0) {
        int ii = 0;
        final double nmPerPixel = result.getNmPerPixel();
        final double gain = result.getGain();
        final boolean emCCD = result.isEMCCD();
        for (PeakResult peakResult : result.getResults()) {
            if (peakResult == null) {
                System.out.printf("Null result in summary @ %d\n", ++ii);
                continue;
            }
            stats[0].addValue(peakResult.getPrecision(nmPerPixel, gain, emCCD));
            stats[1].addValue(peakResult.getSignal() / peakResult.noise);
        }
    }

    StringBuilder sb = new StringBuilder();
    sb.append(result.getName());
    sb.append("\t").append(result.size());
    int maxT = getMaxT(result);
    sb.append("\t").append(maxT);
    final double exposureTime = (result.getCalibration() != null) ? result.getCalibration().exposureTime : 0;
    sb.append("\t").append(Utils.timeToString(maxT * exposureTime));
    if (size > 0) {
        boolean includeDeviations = result.getResults().get(0).paramsStdDev != null;
        long memorySize = MemoryPeakResults.estimateMemorySize(size, includeDeviations);
        String memory = MemoryPeakResults.memorySizeString(memorySize);
        sb.append("\t").append(memory);
    } else {
        sb.append("\t-");
    }
    Rectangle bounds = result.getBounds(true);
    sb.append(String.format("\t%d,%d,%d,%d\t%s\t%s\t%s", bounds.x, bounds.y, bounds.x + bounds.width,
            bounds.y + bounds.height, Utils.rounded(result.getNmPerPixel(), 4),
            Utils.rounded(result.getGain(), 4), Utils.rounded(exposureTime, 4)));
    for (int i = 0; i < stats.length; i++) {
        if (Double.isNaN(stats[i].getMean())) {
            sb.append("\t-\t-\t-\t-");
        } else {
            sb.append("\t").append(IJ.d2s(stats[i].getMean(), 3));
            sb.append("\t").append(IJ.d2s(stats[i].getPercentile(50), 3));
            sb.append("\t").append(IJ.d2s(stats[i].getMin(), 3));
            sb.append("\t").append(IJ.d2s(stats[i].getMax(), 3));
        }
    }
    summary.append(sb.toString());
}
From source file:gedi.lfc.LfcAlignedReadsProcessor.java
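Treats a NaN credi value as one of the conditions that switches the processor into multi-mode.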
private boolean multimode() {
    return Double.isNaN(credi) || (after == null && before.getNumMergedConditions() != 2)
            || (after != null && after.getNumMergedConditions() != 2);
}
From source file:com.rapidminer.gui.viewer.metadata.model.NumericalAttributeStatisticsModel.java
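Skips NaN (missing) values when filling a histogram dataset, since JFreeChart would otherwise try to plot them.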
/**
 * Creates a {@link HistogramDataset} for this {@link Attribute}.
 *
 * @param exampleSet
 *            the example set to read values from
 * @return the histogram dataset
 */
private HistogramDataset createHistogramDataset(ExampleSet exampleSet) {
    HistogramDataset dataset = new HistogramDataset();
    double[] array = new double[exampleSet.size()];
    int count = 0;
    for (Example example : exampleSet) {
        double value = example.getDataRow().get(getAttribute());
        // don't use missing values because otherwise JFreeChart tries to plot them too,
        // which can lead to false histograms
        if (!Double.isNaN(value)) {
            array[count++] = value;
        }
    }
    // add points to the data set (if any)
    if (count > 0) {
        // truncate array if necessary
        if (count < array.length) {
            array = Arrays.copyOf(array, count);
        }
        dataset.addSeries(getAttribute().getName(), array, Math.min(array.length, MAX_BINS_HISTOGRAM));
    }
    return dataset;
}
From source file:geogebra.kernel.AlgoRootNewton.java
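Rejects a candidate root that is NaN or whose function value is not within the kernel's minimum precision of zero.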
private boolean checkRoot(Function fun, double root) {
    // check what we got
    return !Double.isNaN(root) && (Math.abs(fun.evaluate(root)) < Kernel.MIN_PRECISION);
}
From source file:net.myrrix.online.som.SelfOrganizingMaps.java
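Accepts NaN as an "unspecified" samplingRate and derives a default that targets one assignment per node on average.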
/**
 * @param vectors user-feature or item-feature matrix from current computation generation
 * @param maxMapSize maximum desired dimension of the (square) 2D map
 * @param samplingRate fraction of input to consider when creating the map; to limit the map
 *        size overall, nodes will be pruned to remove least-matching assignments, and not all
 *        vectors in the input will be assigned
 * @return a square, 2D array of {@link Node} representing the map, with dimension {@code mapSize}
 */
public Node[][] buildSelfOrganizedMap(FastByIDMap<float[]> vectors, int maxMapSize, double samplingRate) {
    Preconditions.checkNotNull(vectors);
    Preconditions.checkArgument(!vectors.isEmpty());
    Preconditions.checkArgument(maxMapSize > 0);
    Preconditions.checkArgument(Double.isNaN(samplingRate) || (samplingRate > 0.0 && samplingRate <= 1.0));

    if (Double.isNaN(samplingRate)) {
        // Compute a sampling rate that shoots for 1 assignment per node on average
        double expectedNodeSize = (double) vectors.size() / (maxMapSize * maxMapSize);
        samplingRate = expectedNodeSize > 1.0 ? 1.0 / expectedNodeSize : 1.0;
    }
    log.debug("Sampling rate: {}", samplingRate);

    int mapSize = FastMath.min(maxMapSize, (int) FastMath.sqrt(vectors.size() * samplingRate));
    Node[][] map = buildInitialMap(vectors, mapSize);

    sketchMapParallel(vectors, samplingRate, map);

    for (Node[] mapRow : map) {
        for (Node node : mapRow) {
            node.clearAssignedIDs();
        }
    }

    assignVectorsParallel(vectors, samplingRate, map);
    sortMembers(map);

    int numFeatures = vectors.entrySet().iterator().next().getValue().length;
    buildProjections(numFeatures, map);

    return map;
}
From source file:com.ning.metrics.collector.util.Stats.java
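Returns 0.0 instead of NaN when the percentile is undefined (e.g. no samples recorded yet).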
/**
 * 99.99th percentile
 *
 * @return 99.99th percentile
 */
@Managed
@SuppressWarnings("unused")
public double getMillisTP9999() {
    double percentile = millisStats.getPercentile(99.99);
    return Double.isNaN(percentile) ? 0.0 : percentile;
}
From source file:org.openfaces.component.chart.impl.renderers.XYLineFillRenderer.java
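Skips drawing a chart item whose x or y value is NaN.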
public void drawItem(Graphics2D g2, XYItemRendererState state, Rectangle2D dataArea, PlotRenderingInfo info,
        XYPlot plot, ValueAxis domainAxis, ValueAxis rangeAxis, XYDataset dataSet, int series, int item,
        CrosshairState crosshairState, int pass) {
    if (!getItemVisible(series, item)) {
        return;
    }

    double itemXValue = dataSet.getXValue(series, item);
    double itemYValue = dataSet.getYValue(series, item);
    if (Double.isNaN(itemYValue) || Double.isNaN(itemXValue)) {
        return;
    }

    double currentItemX = calculateItemXPoint(series, item, dataArea, domainAxis, dataSet, plot);
    double currentItemY = calculateItemYPoint(series, item, dataArea, rangeAxis, dataSet, plot);

    int previousItemIndex = item > 0 ? item - 1 : 0;
    double previousItemX = calculateItemXPoint(series, previousItemIndex, dataArea, domainAxis, dataSet, plot);
    double previousItemY = calculateItemYPoint(series, previousItemIndex, dataArea, rangeAxis, dataSet, plot);

    final int lastItemIndex = dataSet.getItemCount(series) - 1;
    int nextItemIndex = item < lastItemIndex ? item + 1 : lastItemIndex;
    double nextItemX = calculateItemXPoint(series, nextItemIndex, dataArea, domainAxis, dataSet, plot);
    double nextItemY = calculateItemYPoint(series, nextItemIndex, dataArea, rangeAxis, dataSet, plot);

    double zeroRangePoint = rangeAxis.valueToJava2D(0.0, dataArea, plot.getRangeAxisEdge());

    if (isAreaAndLinePass(pass)) {
        XYLineFillItemRendererState rendererState = (XYLineFillItemRendererState) state;
        renderLineArea(g2, info, plot, series, item, rendererState, dataSet, currentItemX, currentItemY,
                previousItemX, previousItemY, zeroRangePoint);
    } else if (isShapesAndLabelsPass(pass)) {
        Shape entityArea = renderShapeAndLabel(g2, dataArea, plot, dataSet, series, item, itemYValue,
                currentItemX, currentItemY, previousItemX, previousItemY, nextItemX, nextItemY,
                zeroRangePoint);

        int domainAxisIndex = plot.getDomainAxisIndex(domainAxis);
        int rangeAxisIndex = plot.getRangeAxisIndex(rangeAxis);
        updateCrosshairValues(crosshairState, itemXValue, itemYValue, domainAxisIndex, rangeAxisIndex,
                currentItemX, currentItemY, plot.getOrientation());

        EntityCollection entityCollection = state.getEntityCollection();
        if (entityCollection != null) {
            addEntity(entityCollection, entityArea, dataSet, series, item, 0.0, 0.0);
        }
    } else {
        throw new IllegalStateException("Unknown pass: " + pass);
    }
}