List of usage examples for java.lang Double MIN_VALUE
public static final double MIN_VALUE: a constant holding the smallest positive nonzero value of type double, 2^-1074 (about 4.9E-324). It is not the most negative double; that is -Double.MAX_VALUE.
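A note on semantics before the examples: Double.MIN_VALUE is the smallest positive double, not the most negative one. Several of the examples below use it to seed a running maximum; that only works when every candidate value is known to be positive. A minimal standalone sketch illustrating the distinction (not from any of the listed projects):

public class DoubleMinValueDemo {
    public static void main(String[] args) {
        System.out.println(Double.MIN_VALUE);     // 4.9E-324 (smallest positive)
        System.out.println(Double.MIN_VALUE > 0); // true: it is NOT negative
        System.out.println(-Double.MAX_VALUE);    // -1.7976931348623157E308 (most negative finite)

        // Seeding a running maximum with Double.MIN_VALUE silently fails
        // for all-negative data; NEGATIVE_INFINITY is the safe identity.
        double[] data = { -3.2, -7.5, -0.1 };
        double wrongMax = Double.MIN_VALUE;
        double safeMax = Double.NEGATIVE_INFINITY;
        for (double d : data) {
            wrongMax = Math.max(wrongMax, d);
            safeMax = Math.max(safeMax, d);
        }
        System.out.println(wrongMax); // 4.9E-324  -- never updated
        System.out.println(safeMax);  // -0.1      -- correct
    }
}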
From source file:org.jlinda.core.coregistration.estimation.utils.JamaUtils.java
/**
 * Gets the maximum value, in the absolute sense, and its [row, col] indices in a matrix.
 *
 * @param m the matrix
 * @return the maximum absolute value and its [row, col] indices in m.
 */
public static double[] getAbsArgMax(Matrix m) {
    int numRows = m.getRowDimension();
    int numCols = m.getColumnDimension();
    double abs;
    double[] maxArray = new double[3]; // value, row, col

    // Compute the max of the matrix. Double.MIN_VALUE is a workable seed here
    // only because absolute values are non-negative; note that for an all-zero
    // matrix the condition maxValue <= abs never holds, so maxArray keeps its
    // default {0, 0, 0}.
    double maxValue = Double.MIN_VALUE;
    for (int i = 0; i < numRows; i++) {
        for (int j = 0; j < numCols; j++) {
            abs = Math.abs(m.get(i, j));
            if (maxValue <= abs) {
                maxValue = abs;
                maxArray[0] = maxValue;
                maxArray[1] = i;
                maxArray[2] = j;
            }
        }
    }
    return maxArray;
}
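A hypothetical usage sketch, assuming the Jama-style Matrix(double[][]) constructor used elsewhere in JamaUtils:

double[][] vals = { { 1.0, -9.5 }, { 3.0, 2.0 } };
Matrix m = new Matrix(vals);
double[] res = JamaUtils.getAbsArgMax(m);
// res == { 9.5, 0.0, 1.0 }  (value, row index, col index, all stored as doubles)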
From source file:uk.org.funcube.fcdw.server.extract.csv.RealTimeCsvExtractor.java
private static void setupPaTemps() {
    // Data from adc 79 to 252 measured; 0-79 continues using the gradient of
    // the last three values, 252 to 255 likewise. The first and last rows use
    // Double.MIN_VALUE / Double.MAX_VALUE as sentinel ADC bounds so every
    // lookup falls inside some interval of the table.
    final double[][] tempToAdc = {
        { 87.983, Double.MIN_VALUE },
        { 87.983, 0 },
        { 55.3, 79 }, // first measured value
        { 49.6, 91 }, { 45.3, 103 }, { 41.1, 115 }, { 37.6, 125 },
        { 35.7, 129 }, { 33.6, 137 }, { 30.6, 145 }, { 27.6, 154 },
        { 25.1, 161 }, { 22.6, 169 }, { 20, 176 }, { 17.6, 183 },
        { 15.1, 189 }, { 12.6, 196 }, { 10, 203 }, { 7.5, 208 },
        { 5, 214 }, { 2.4, 220 }, { 0, 224 }, { -2.9, 230 },
        { -5, 233 }, { -7.5, 237 }, { -10, 241 }, { -12.3, 244 },
        { -15, 247 },
        { -20, 252 }, // last measured value
        { -22.846, 255 },
        { -22.846, Double.MAX_VALUE }
    };

    // Calculate values for all possible 8-bit values by linear interpolation
    // between the two table rows surrounding each adc value. The adc != 0
    // guard prevents matching the j == 0 sentinel row (which has no
    // predecessor to interpolate against), so PA_TEMPS[0] keeps its default.
    for (int adc = 0; adc < 256; ++adc) {
        for (int j = 0; j < tempToAdc.length; j++) {
            if (adc != 0 && adc < tempToAdc[j][1]) {
                double t1 = tempToAdc[j][0];
                double a1 = tempToAdc[j][1];
                double diffa = tempToAdc[j - 1][1] - a1;
                double difft = tempToAdc[j - 1][0] - t1;
                double value = ((adc - a1) * (difft / diffa)) + t1;
                PA_TEMPS[adc] = value;
                break;
            }
        }
    }
}
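A detail worth noting: the sentinel rows use the finite extremes Double.MIN_VALUE and Double.MAX_VALUE rather than infinities. Because the end rows repeat the boundary temperature, the end slope is 0, and an infinite ADC bound would turn the interpolation into Infinity * 0 = NaN; the finite extremes keep the arithmetic well-defined. A standalone check, with values taken from the table above:

// End rows duplicate the boundary temperature, so the end slope is 0.
double t1 = -22.846, tPrev = -22.846;
double diffFinite = 255 - Double.MAX_VALUE;                        // huge but finite
System.out.println(diffFinite * ((tPrev - t1) / diffFinite) + t1); // -22.846: well-defined
double diffInf = 255 - Double.POSITIVE_INFINITY;                   // -Infinity
System.out.println(diffInf * ((tPrev - t1) / diffInf) + t1);       // NaN: Infinity * 0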
From source file:com.rapidminer.operator.generator.ExampleSetGenerator.java
@Override
public List<ParameterType> getParameterTypes() {
    List<ParameterType> types = super.getParameterTypes();

    ParameterType type = new ParameterTypeStringCategory(PARAMETER_TARGET_FUNCTION,
            "Specifies the target function of this example set", KNOWN_FUNCTION_NAMES, KNOWN_FUNCTION_NAMES[0]);
    type.setExpert(false);
    types.add(type);

    type = new ParameterTypeInt(PARAMETER_NUMBER_EXAMPLES, "The number of generated examples.", 1,
            Integer.MAX_VALUE, 100);
    type.setExpert(false);
    types.add(type);

    type = new ParameterTypeInt(PARAMETER_NUMBER_OF_ATTRIBUTES, "The number of attributes.", 1,
            Integer.MAX_VALUE, 5);
    type.setExpert(false);
    types.add(type);

    NonEqualStringCondition useTwoBounds = new NonEqualStringCondition(this, PARAMETER_TARGET_FUNCTION, false,
            (String[]) ArrayUtils.addAll(FUCTIONS_IGNORING_BOUND, FUNCTIONS_USING_SINGLE_BOUND));

    type = new ParameterTypeDouble(PARAMETER_ATTRIBUTES_LOWER_BOUND,
            "The minimum value for the attributes. In case of target functions using Gaussian distribution, the attribute values may exceed this value.",
            Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, -10);
    type.registerDependencyCondition(new OrParameterCondition(this, false,
            new BelowOrEqualOperatorVersionCondition(this, VERSION_TARGET_PARAMETERS_CHANGED), useTwoBounds));
    types.add(type);

    type = new ParameterTypeDouble(PARAMETER_ATTRIBUTES_UPPER_BOUND,
            "The maximum value for the attributes. In case of target functions using Gaussian distribution, the attribute values may exceed this value.",
            Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 10);
    type.registerDependencyCondition(new OrParameterCondition(this, false,
            new BelowOrEqualOperatorVersionCondition(this, VERSION_TARGET_PARAMETERS_CHANGED), useTwoBounds));
    types.add(type);

    // Double.MIN_VALUE as the range minimum means "any strictly positive
    // double": it admits every representable positive value but excludes 0.
    type = new ParameterTypeDouble(PARAMETER_ATTRIBUTES_GAUSSIAN_STDDEV,
            "Standard deviation of the Gaussian distribution used for generating attributes.",
            Double.MIN_VALUE, Double.POSITIVE_INFINITY, 10);
    type.registerDependencyCondition(new AboveOperatorVersionCondition(this, VERSION_TARGET_PARAMETERS_CHANGED));
    type.registerDependencyCondition(
            new EqualStringCondition(this, PARAMETER_TARGET_FUNCTION, false, FUNCTIONS_USING_GAUSSIAN_STDDEV));
    types.add(type);

    type = new ParameterTypeDouble(PARAMETER_ATTRIBUTES_LARGEST_RADIUS,
            "The radius of the outermost ring cluster.", 10.0, Double.POSITIVE_INFINITY, 10);
    type.registerDependencyCondition(new AboveOperatorVersionCondition(this, VERSION_TARGET_PARAMETERS_CHANGED));
    type.registerDependencyCondition(
            new EqualStringCondition(this, PARAMETER_TARGET_FUNCTION, false, FUNCTIONS_USING_LARGEST_RADIUS));
    types.add(type);

    types.addAll(RandomGenerator.getRandomGeneratorParameters(this));
    DataManagementParameterHelper.addParameterTypes(types, this);
    return types;
}
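The stddev parameter above shows the one place Double.MIN_VALUE is a natural bound: "strictly positive". A quick standalone check of why it excludes exactly zero and nothing else:

System.out.println(0.0 >= Double.MIN_VALUE);              // false: zero is rejected
System.out.println(Math.nextUp(0.0) == Double.MIN_VALUE); // true: the very next double is admitted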
From source file:put.ci.cevo.framework.algorithms.ApacheCMAES.java
/** {@inheritDoc} */
@Override
protected PointValuePair doOptimize() {
    // -------------------- Initialization --------------------------------
    isMinimize = getGoalType().equals(GoalType.MINIMIZE);
    final double[] guess = getStartPoint();
    // number of objective variables/problem dimension
    dimension = guess.length;
    initializeCMA(guess);
    iterations = 0;
    // Note: Double.MIN_VALUE is the smallest positive double, not the most
    // negative one; -Double.MAX_VALUE would be the conventional "worst" seed
    // for the maximization branch (see the sketch after this listing).
    double bestValue = (isMinimize ? Double.MAX_VALUE : Double.MIN_VALUE);
    push(fitnessHistory, bestValue);
    PointValuePair optimum = new PointValuePair(getStartPoint(), isMinimize ? bestValue : -bestValue);
    PointValuePair lastResult = null;

    // -------------------- Generation Loop --------------------------------
    EvaluatedPopulation<double[]> evaluatedPopulation = null;
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    generationLoop: for (iterations = 1; iterations <= maxIterations; iterations++) {
        stopwatch.reset();
        stopwatch.start();
        incrementIterationCount();

        // Generate and evaluate lambda offspring
        final RealMatrix arz = randn1(dimension, lambda);
        final RealMatrix arx = zeros(dimension, lambda);
        final double[] fitness = new double[lambda];

        // generate random offspring
        for (int k = 0; k < lambda; k++) {
            RealMatrix arxk = null;
            for (int i = 0; i < checkFeasableCount + 1; i++) {
                if (diagonalOnly <= 0) {
                    // m + sig * Normal(0, C)
                    arxk = xmean.add(BD.multiply(arz.getColumnMatrix(k)).scalarMultiply(sigma));
                } else {
                    arxk = xmean.add(times(diagD, arz.getColumnMatrix(k)).scalarMultiply(sigma));
                }
                // (The feasibility check and the direct fitfun evaluation of the
                // original Apache implementation are commented out in this fork;
                // evaluation is delegated to populationEvaluator below.)
                // regenerate random arguments for row
                arz.setColumn(k, randn(dimension));
            }
            copyColumn(arxk, 0, arx, k);
        }
        double newPopTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;

        stopwatch.reset();
        stopwatch.start();
        ArrayList<double[]> population = new ArrayList<>(lambda);
        // This is mine. I ignore constraints.
        for (int k = 0; k < lambda; ++k) {
            population.add(arx.getColumn(k));
        }
        evaluatedPopulation = populationEvaluator.evaluate(population, iterations - 1, random);
        final ValuePenaltyPair[] valuePenaltyPairs = new ValuePenaltyPair[lambda];
        for (int k = 0; k < lambda; ++k) {
            valuePenaltyPairs[k] = new ValuePenaltyPair(
                    evaluatedPopulation.getPopulation().get(k).getFitness(), 0.0);
        }

        // Compute fitnesses by adding value and penalty after scaling by value range.
        double valueRange = valueRange(valuePenaltyPairs);
        for (int iValue = 0; iValue < valuePenaltyPairs.length; iValue++) {
            fitness[iValue] = valuePenaltyPairs[iValue].value + valuePenaltyPairs[iValue].penalty * valueRange;
            if (!isMinimize) {
                fitness[iValue] = -fitness[iValue];
            }
        }
        double evalTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;

        stopwatch.reset();
        stopwatch.start();
        // Sort by fitness and compute weighted mean into xmean
        final int[] arindex = sortedIndices(fitness);
        // Calculate new xmean; this is selection and recombination
        final RealMatrix xold = xmean; // for speed up of Eq. (2) and (3)
        final RealMatrix bestArx = selectColumns(arx, MathArrays.copyOf(arindex, mu));
        xmean = bestArx.multiply(weights);
        final RealMatrix bestArz = selectColumns(arz, MathArrays.copyOf(arindex, mu));
        final RealMatrix zmean = bestArz.multiply(weights);
        final boolean hsig = updateEvolutionPaths(zmean, xold);

        if (diagonalOnly <= 0) {
            updateCovariance(hsig, bestArx, arz, arindex, xold);
        } else {
            updateCovarianceDiagonalOnly(hsig, bestArz);
        }

        // Adapt step size sigma - Eq. (5)
        sigma *= FastMath.exp(FastMath.min(1, (normps / chiN - 1) * cs / damps));

        final double bestFitness = fitness[arindex[0]];
        final double worstFitness = fitness[arindex[arindex.length - 1]];
        if (bestValue > bestFitness) {
            bestValue = bestFitness;
            lastResult = optimum;
            optimum = new PointValuePair(bestArx.getColumn(0), isMinimize ? bestFitness : -bestFitness);
            if (getConvergenceChecker() != null && lastResult != null
                    && getConvergenceChecker().converged(iterations, optimum, lastResult)) {
                break generationLoop;
            }
        }

        // handle termination criteria
        // Break if fitness is good enough
        if (stopFitness != 0 && bestFitness < (isMinimize ? stopFitness : -stopFitness)) {
            break generationLoop;
        }
        final double[] sqrtDiagC = sqrt(diagC).getColumn(0);
        final double[] pcCol = pc.getColumn(0);
        for (int i = 0; i < dimension; i++) {
            if (sigma * FastMath.max(FastMath.abs(pcCol[i]), sqrtDiagC[i]) > stopTolX) {
                break;
            }
            if (i >= dimension - 1) {
                break generationLoop;
            }
        }
        for (int i = 0; i < dimension; i++) {
            if (sigma * sqrtDiagC[i] > stopTolUpX) {
                break generationLoop;
            }
        }
        final double historyBest = min(fitnessHistory);
        final double historyWorst = max(fitnessHistory);
        if (iterations > 2
                && FastMath.max(historyWorst, worstFitness) - FastMath.min(historyBest, bestFitness) < stopTolFun) {
            break generationLoop;
        }
        if (iterations > fitnessHistory.length && historyWorst - historyBest < stopTolHistFun) {
            break generationLoop;
        }
        // Stop if the condition number of the covariance matrix exceeds 1e14
        // (diagD holds square roots of the eigenvalues, hence the 1e7 test).
        if (max(diagD) / min(diagD) > 1e7) {
            break generationLoop;
        }
        // user defined termination
        if (getConvergenceChecker() != null) {
            final PointValuePair current = new PointValuePair(bestArx.getColumn(0),
                    isMinimize ? bestFitness : -bestFitness);
            if (lastResult != null && getConvergenceChecker().converged(iterations, current, lastResult)) {
                break generationLoop;
            }
            lastResult = current;
        }
        // Adjust step size in case of equal function values (flat fitness)
        if (bestValue == fitness[arindex[(int) (0.1 + lambda / 4.)]]) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        if (iterations > 2
                && FastMath.max(historyWorst, bestFitness) - FastMath.min(historyBest, bestFitness) == 0) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        // store best in history
        push(fitnessHistory, bestFitness);
        if (generateStatistics) {
            statisticsSigmaHistory.add(sigma);
            statisticsFitnessHistory.add(bestFitness);
            statisticsMeanHistory.add(xmean.transpose());
            statisticsDHistory.add(diagD.transpose().scalarMultiply(1E5));
        }
        double cmaesTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;

        stopwatch.reset();
        stopwatch.start();
        listener.onNextIteraction(evaluatedPopulation);
        double listernerTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        logger.info(String.format("NewPop: %.2f, Eval: %.2f, CMAES: %.2f, Listerner: %.2f", newPopTime,
                evalTime, cmaesTime, listernerTime));
    }
    listener.onLastIteraction(evaluatedPopulation);
    return optimum;
}
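Because this code negates fitness for maximization and then always minimizes, the "best so far" seed should be Double.MAX_VALUE in both branches; the isMinimize ? Double.MAX_VALUE : Double.MIN_VALUE seed (which also appears in the Apache Commons Math CMAESOptimizer this class is based on) means a candidate whose true fitness is negative can never replace the seed. A standalone illustration with hypothetical values:

double bestValue = Double.MIN_VALUE;                    // seed from the maximization branch
double negatedFitness = -(-5.0);                        // true fitness -5.0, negated for minimization
System.out.println(bestValue > negatedFitness);         // false: -5.0 is never recorded as best
System.out.println(Double.MAX_VALUE > negatedFitness);  // true: a MAX_VALUE seed works in both branches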
From source file:ubic.gemma.core.analysis.preprocess.ProcessedExpressionDataVectorCreateHelperServiceImpl.java
private DoubleArrayList getRanks(ExpressionDataDoubleMatrix intensities,
        ProcessedExpressionDataVectorDao.RankMethod method) {
    ProcessedExpressionDataVectorCreateHelperServiceImpl.log.debug("Getting ranks");
    assert intensities != null;
    DoubleArrayList result = new DoubleArrayList(intensities.rows());

    for (ExpressionDataMatrixRowElement de : intensities.getRowElements()) {
        double[] rowObj = ArrayUtils.toPrimitive(intensities.getRow(de.getDesignElement()));
        // Double.MIN_VALUE serves as a "missing row" sentinel. Note it is the
        // smallest positive double, so it would still rank above any negative
        // row statistic.
        double valueForRank = Double.MIN_VALUE;
        if (rowObj != null) {
            DoubleArrayList row = new DoubleArrayList(rowObj);
            switch (method) {
            case max:
                valueForRank = DescriptiveWithMissing.max(row);
                break;
            case mean:
                valueForRank = DescriptiveWithMissing.mean(row);
                break;
            default:
                throw new UnsupportedOperationException();
            }
        }
        result.add(valueForRank);
    }
    return Rank.rankTransform(result);
}
From source file:lfsom.visualization.clustering.LFSKMeans.java
/**
 * Utility method to get the min, max, and diff values of the data set. This
 * is used for scaling the (random) values in the initialisation functions.
 */
private void initMinAndMaxValues() {
    minValues = new double[numberOfAttributes];
    maxValues = new double[numberOfAttributes];
    differences = new double[numberOfAttributes];
    // for each attribute
    for (int j = 0; j < numberOfAttributes; j++) {
        // over each instance (i.e. each single value)
        minValues[j] = Double.MAX_VALUE;
        // Caution: Double.MIN_VALUE is the smallest positive double, so this
        // seed is only correct while each attribute has at least one value
        // >= 4.9E-324; for an all-negative attribute the max would stay wrong.
        // -Double.MAX_VALUE (or Double.NEGATIVE_INFINITY) is the safe seed.
        maxValues[j] = Double.MIN_VALUE;
        for (double[] element : data) {
            if (element[j] < minValues[j]) {
                minValues[j] = element[j];
            }
            if (element[j] > maxValues[j]) {
                maxValues[j] = element[j];
            }
        }
        differences[j] = maxValues[j] - minValues[j];
    }
}
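On Java 8+ the same scan can be written with java.util.DoubleSummaryStatistics, which seeds its internal max with Double.NEGATIVE_INFINITY and so avoids the pitfall entirely. A minimal sketch, assuming the same data and numberOfAttributes fields as the class above:

import java.util.DoubleSummaryStatistics;

private void initMinAndMaxValues() {
    minValues = new double[numberOfAttributes];
    maxValues = new double[numberOfAttributes];
    differences = new double[numberOfAttributes];
    for (int j = 0; j < numberOfAttributes; j++) {
        DoubleSummaryStatistics stats = new DoubleSummaryStatistics();
        for (double[] element : data) {
            stats.accept(element[j]); // tracks count, sum, min, and max in one pass
        }
        minValues[j] = stats.getMin();   // +Infinity if data is empty
        maxValues[j] = stats.getMax();   // -Infinity if data is empty
        differences[j] = maxValues[j] - minValues[j];
    }
}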
From source file:org.voltdb.TestParameterSet.java
public void testRoundtrip() throws IOException {
    Byte byteparam = new Byte((byte) 2);
    Short shortparam = new Short(Short.MAX_VALUE);
    Integer intparam = new Integer(Integer.MIN_VALUE);
    Long longparam = new Long(Long.MAX_VALUE - 1);
    // Note: Double.MAX_VALUE - 1 == Double.MAX_VALUE (1 is far below the ulp
    // at that magnitude), so this is effectively just Double.MAX_VALUE.
    Double doubleparam = new Double(Double.MAX_VALUE - 1);
    String stringparam = new String("ABCDE");
    TimestampType dateparam = new TimestampType(); // current time
    BigDecimal bigdecimalparam = new BigDecimal(7654321).setScale(VoltDecimalHelper.kDefaultScale);
    VoltTable volttableparam = new VoltTable(new VoltTable.ColumnInfo("foo", VoltType.INTEGER));
    volttableparam.addRow(Integer.MAX_VALUE);

    byte[] bytearray = new byte[] { (byte) 'f', (byte) 'o', (byte) 'o' };
    short[] shortarray = new short[] { Short.MAX_VALUE, Short.MIN_VALUE, (short) 5 };
    int[] intarray = new int[] { Integer.MAX_VALUE, Integer.MIN_VALUE, 5 };
    double[] doublearray = new double[] { Double.MAX_VALUE, Double.MIN_VALUE, 5.5 };
    String[] stringarray = new String[] { "ABC", "DEF", "HIJ" };
    TimestampType[] datearray = new TimestampType[] { new TimestampType(), new TimestampType(),
            new TimestampType() };

    BigDecimal bdtmp1 = new BigDecimal(7654321).setScale(VoltDecimalHelper.kDefaultScale);
    BigDecimal bdtmp2 = new BigDecimal(654321).setScale(VoltDecimalHelper.kDefaultScale);
    BigDecimal bdtmp3 = new BigDecimal(54321).setScale(VoltDecimalHelper.kDefaultScale);
    BigDecimal[] bigdecimalarray = new BigDecimal[] { bdtmp1, bdtmp2, bdtmp3 };

    VoltTable vttmp1 = new VoltTable(new VoltTable.ColumnInfo("foo", VoltType.INTEGER),
            new VoltTable.ColumnInfo("bar", VoltType.STRING));
    vttmp1.addRow(Integer.MAX_VALUE, "ry@nlikestheyankees");
    VoltTable vttmp2 = new VoltTable(new VoltTable.ColumnInfo("bar", VoltType.INTEGER),
            new VoltTable.ColumnInfo("bar", VoltType.STRING));
    vttmp2.addRow(Integer.MIN_VALUE, null);
    VoltTable vttmp3 = new VoltTable(new VoltTable.ColumnInfo("far", VoltType.INTEGER),
            new VoltTable.ColumnInfo("bar", VoltType.STRING));
    vttmp3.addRow(new Integer(5), "");
    VoltTable[] volttablearray = new VoltTable[] { vttmp1, vttmp2, vttmp3 };

    assertTrue(bigdecimalparam.scale() == VoltDecimalHelper.kDefaultScale);
    assertTrue(bdtmp1.scale() == VoltDecimalHelper.kDefaultScale);
    assertTrue(bdtmp2.scale() == VoltDecimalHelper.kDefaultScale);
    assertTrue(bdtmp3.scale() == VoltDecimalHelper.kDefaultScale);

    ParameterSet pset = ParameterSet.fromArrayNoCopy(byteparam, shortparam, intparam, longparam, doubleparam,
            stringparam, dateparam, bigdecimalparam, volttableparam, bytearray, shortarray, intarray,
            doublearray, stringarray, datearray, bigdecimalarray, volttablearray);
    ByteBuffer buf = ByteBuffer.allocate(pset.getSerializedSize());
    pset.flattenToBuffer(buf);
    buf.flip();

    ParameterSet pset2 = ParameterSet.fromByteBuffer(buf);
    Object[] pset1array = pset.toArray();
    Object[] pset2array = pset2.toArray();
    assertTrue(Arrays.deepEquals(pset1array, pset2array));
}
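Double.MIN_VALUE makes a good serialization probe, as in the double[] above: its bit pattern is the single lowest bit of the 64-bit IEEE 754 encoding, so any lossy conversion (through float, or text with too few digits) destroys it. A quick standalone check:

long bits = Double.doubleToLongBits(Double.MIN_VALUE);
System.out.println(Long.toHexString(bits));      // "1": only the lowest of the 64 bits is set
System.out.println(Double.longBitsToDouble(1L)); // 4.9E-324: round-trips exactly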
From source file:ml.shifu.shifu.core.dtrain.nn.NNMaster.java
@SuppressWarnings("unchecked") @Override/*from w w w .ja v a2s. c o m*/ public void init(MasterContext<NNParams, NNParams> context) { Properties props = context.getProps(); try { SourceType sourceType = SourceType .valueOf(props.getProperty(CommonConstants.MODELSET_SOURCE_TYPE, SourceType.HDFS.toString())); this.modelConfig = CommonUtils.loadModelConfig(props.getProperty(CommonConstants.SHIFU_MODEL_CONFIG), sourceType); this.columnConfigList = CommonUtils .loadColumnConfigList(props.getProperty(CommonConstants.SHIFU_COLUMN_CONFIG), sourceType); } catch (IOException e) { throw new RuntimeException(e); } int trainerId = Integer.valueOf(context.getProps().getProperty(CommonConstants.SHIFU_TRAINER_ID, "0")); GridSearch gs = new GridSearch(modelConfig.getTrain().getParams(), modelConfig.getTrain().getGridConfigFileContent()); validParams = this.modelConfig.getTrain().getParams(); if (gs.hasHyperParam()) { validParams = gs.getParams(trainerId); LOG.info("Start grid search master with params: {}", validParams); } Boolean enabledEarlyStop = DTrainUtils.getBoolean(validParams, CommonConstants.ENABLE_EARLY_STOP, Boolean.FALSE); if (enabledEarlyStop) { Double validTolerance = DTrainUtils.getDouble(validParams, CommonConstants.VALIDATION_TOLERANCE, null); if (validTolerance == null) { LOG.info("Early Stop is enabled. use WindowEarlyStop"); // windowSize default 20, user should could adjust it this.earlyStopStrategy = new WindowEarlyStop(context, this.modelConfig, DTrainUtils .getInt(context.getProps(), CommonConstants.SHIFU_TRAIN_EARLYSTOP_WINDOW_SIZE, 20)); } else { LOG.info("Early Stop is enabled. use ConvergeAndValiToleranceEarlyStop"); Double threshold = this.modelConfig.getTrain().getConvergenceThreshold(); this.earlyStopStrategy = new ConvergeAndValidToleranceEarlyStop( threshold == null ? Double.MIN_VALUE : threshold.doubleValue(), validTolerance); } } Object pObject = validParams.get(CommonConstants.PROPAGATION); this.propagation = pObject == null ? 
"Q" : (String) pObject; this.rawLearningRate = Double.valueOf(validParams.get(CommonConstants.LEARNING_RATE).toString()); Object dropoutRateObj = validParams.get(CommonConstants.DROPOUT_RATE); if (dropoutRateObj != null) { this.dropoutRate = Double.valueOf(dropoutRateObj.toString()); } LOG.info("'dropoutRate' in master is : {}", this.dropoutRate); Object learningDecayO = validParams.get(CommonConstants.LEARNING_DECAY); if (learningDecayO != null) { this.learningDecay = Double.valueOf(learningDecayO.toString()); } LOG.info("'learningDecay' in master is :{}", learningDecay); Object momentumO = validParams.get("Momentum"); if (momentumO != null) { this.momentum = Double.valueOf(momentumO.toString()); } LOG.info("'momentum' in master is :{}", momentum); Object adamBeta1O = validParams.get("AdamBeta1"); if (adamBeta1O != null) { this.adamBeta1 = Double.valueOf(adamBeta1O.toString()); } LOG.info("'adamBeta1' in master is :{}", adamBeta1); Object adamBeta2O = validParams.get("AdamBeta2"); if (adamBeta2O != null) { this.adamBeta2 = Double.valueOf(adamBeta2O.toString()); } LOG.info("'adamBeta2' in master is :{}", adamBeta2); this.wgtInit = "default"; Object wgtInitObj = validParams.get(CommonConstants.WEIGHT_INITIALIZER); if (wgtInitObj != null) { this.wgtInit = wgtInitObj.toString(); } this.isContinuousEnabled = Boolean.TRUE.toString() .equalsIgnoreCase(context.getProps().getProperty(CommonConstants.CONTINUOUS_TRAINING)); Object rconstant = validParams.get(CommonConstants.REGULARIZED_CONSTANT); this.regularizedConstant = NumberFormatUtils.getDouble(rconstant == null ? "" : rconstant.toString(), 0d); // We do not update weight in fixed layers so that we could fine tune other layers of NN Object fixedLayers2O = validParams.get(CommonConstants.FIXED_LAYERS); if (fixedLayers2O != null) { this.fixedLayers = (List<Integer>) fixedLayers2O; } LOG.info("Fixed layers in master is :{}", this.fixedLayers.toString()); Object fixedBiasObj = validParams.getOrDefault(CommonConstants.FIXED_BIAS, true); this.fixedBias = (Boolean) fixedBiasObj; Object hiddenLayerNumObj = validParams.get(CommonConstants.NUM_HIDDEN_LAYERS); if (hiddenLayerNumObj != null && StringUtils.isNumeric(hiddenLayerNumObj.toString())) { this.hiddenLayerNum = Integer.valueOf(hiddenLayerNumObj.toString()); } LOG.info("hiddenLayerNum in master is :{}", this.hiddenLayerNum); // check if variables are set final selected int[] inputOutputIndex = DTrainUtils.getNumericAndCategoricalInputAndOutputCounts(this.columnConfigList); this.isAfterVarSelect = (inputOutputIndex[3] == 1); // cache all feature list for sampling features this.allFeatures = NormalUtils.getAllFeatureList(columnConfigList, isAfterVarSelect); String subsetStr = context.getProps().getProperty(CommonConstants.SHIFU_NN_FEATURE_SUBSET); if (StringUtils.isBlank(subsetStr)) { this.subFeatures = this.allFeatures; } else { String[] splits = subsetStr.split(","); this.subFeatures = new ArrayList<Integer>(splits.length); for (String split : splits) { this.subFeatures.add(Integer.parseInt(split)); } } // recover master states here is globalNNParams // not init but not first iteration, first recover from last master result set from guagua if (!context.isFirstIteration()) { NNParams params = context.getMasterResult(); if (params != null && params.getWeights() != null) { this.globalNNParams.setWeights(params.getWeights()); } else { // else read from checkpoint params = initOrRecoverParams(context); this.globalNNParams.setWeights(params.getWeights()); } } }
From source file:au.org.ala.delta.intkey.directives.invocation.SummaryDirectiveInvocation.java
private List<Object> generateRealSummaryInformation(RealCharacter ch, List<Attribute> attrs) {
    int numUnknown = 0;
    int numInapplicable = 0;
    int numRecorded = 0;

    double minValue = Double.MAX_VALUE;
    // Caution: Double.MIN_VALUE is the smallest positive double; if every
    // recorded range were negative, maxValue would never be updated.
    // -Double.MAX_VALUE would be the symmetric seed.
    double maxValue = Double.MIN_VALUE;
    int minValueTaxonIndex = 0;
    int maxValueTaxonIndex = 0;

    // Collect data points used to calculate the mean and standard deviation
    List<Double> valuesForMeanAndStdDev = new ArrayList<Double>();

    for (Item taxon : _taxa) {
        RealAttribute attr = (RealAttribute) attrs.get(taxon.getItemNumber() - 1);
        if (attr.isUnknown() && !attr.isInapplicable()) {
            numUnknown++;
            continue;
        } else if (attr.isUnknown() && attr.isInapplicable()) {
            numInapplicable++;
            continue;
        } else {
            numRecorded++;
        }

        FloatRange presentRange = attr.getPresentRange();
        if (presentRange.getMinimumDouble() < minValue) {
            minValue = presentRange.getMinimumDouble();
            minValueTaxonIndex = taxon.getItemNumber();
        }
        if (presentRange.getMaximumDouble() > maxValue) {
            maxValue = presentRange.getMaximumDouble();
            maxValueTaxonIndex = taxon.getItemNumber();
        }

        // For the mean and standard deviation, use the average of the two
        // numbers that specify the range.
        valuesForMeanAndStdDev.add((presentRange.getMinimumDouble() + presentRange.getMaximumDouble()) / 2);
    }

    Pair<Double, Double> pairMeanStdDev = calcuateMeanAndStandardDeviation(valuesForMeanAndStdDev);
    double mean = pairMeanStdDev.getFirst();
    double stdDev = pairMeanStdDev.getSecond();

    return Arrays.asList(new Object[] { numUnknown, numInapplicable, numRecorded, minValue, maxValue,
            minValueTaxonIndex, maxValueTaxonIndex, mean, stdDev });
}
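Collecting all midpoints before computing the mean and standard deviation is fine at this scale, but the same statistics can be computed in one pass with Welford's algorithm. A standalone sketch (hypothetical helper, not part of the Intkey code):

/** One-pass mean and sample standard deviation via Welford's algorithm. */
static double[] meanAndStdDev(double[] values) {
    double mean = 0.0;
    double m2 = 0.0; // running sum of squared deviations from the current mean
    int n = 0;
    for (double x : values) {
        n++;
        double delta = x - mean;
        mean += delta / n;
        m2 += delta * (x - mean); // note: uses the updated mean
    }
    double stdDev = (n > 1) ? Math.sqrt(m2 / (n - 1)) : 0.0; // sample std dev
    return new double[] { mean, stdDev };
}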
From source file:gamlss.utilities.MakeLinkFunction.java
/**
 * Calculates the values of the distribution parameter Eta vector according to the log link function.
 *
 * @param eta vector of linear predictor values
 * @return muEta vector
 */
public ArrayRealVector logShiftTo2DistParameterEta(ArrayRealVector eta) {
    int size = eta.getDimension();
    double[] out = new double[size];
    for (int i = 0; i < size; i++) {
        // R: mu.eta <- function(eta) pmax(.Machine$double.eps, exp(eta))
        out[i] = FastMath.exp(eta.getEntry(i));
        // Note: R's .Machine$double.eps is the machine epsilon (~2.22e-16,
        // Math.ulp(1.0) in Java), whereas Double.MIN_VALUE is the smallest
        // positive double (~4.9e-324), a much weaker floor. The original
        // source flags this mismatch with "!!!!".
        if (out[i] < Double.MIN_VALUE) {
            out[i] = Double.MIN_VALUE;
        }
    }
    return new ArrayRealVector(out, false);
}
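A quick standalone check of the three constants involved, with a sketch of what a literal port of the R expression might look like (the last line is a suggestion, not the library's code):

System.out.println(Math.ulp(1.0));      // 2.220446049250313E-16 -- R's .Machine$double.eps
System.out.println(Double.MIN_VALUE);   // 4.9E-324 -- smallest positive (subnormal) double
System.out.println(Double.MIN_NORMAL);  // 2.2250738585072014E-308 -- smallest normalized double
// A faithful port of pmax(.Machine$double.eps, exp(eta)) would be:
// out[i] = Math.max(Math.ulp(1.0), FastMath.exp(eta.getEntry(i)));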