List of usage examples for java.lang.Double.MAX_VALUE

public static final double MAX_VALUE

Double.MAX_VALUE is the largest finite positive value of type double, 1.7976931348623157E308 (equal to (2 - 2^-52) * 2^1023). The examples below show how real projects use it, most often as a sentinel value for "larger than any real input".
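Before the project code, here is a minimal stand-alone sketch (not taken from any of the listed files) of the most common idiom: seeding a running minimum with Double.MAX_VALUE, and a running maximum with -Double.MAX_VALUE, so that the first real value always wins the comparison.

// Minimal sketch: Double.MAX_VALUE as the seed for a running minimum,
// and -Double.MAX_VALUE as the seed for a running maximum.
public class DoubleMaxValueIdiom {
    public static void main(String[] args) {
        double[] samples = { 3.5, -2.0, 7.25, 0.0 };

        double min = Double.MAX_VALUE;   // any finite sample is smaller than this
        double max = -Double.MAX_VALUE;  // any finite sample is larger than this
        for (double v : samples) {
            if (v < min) {
                min = v;
            }
            if (v > max) {
                max = v;
            }
        }
        System.out.println("min = " + min + ", max = " + max); // min = -2.0, max = 7.25
    }
}

Note that the symmetric seed for a maximum is -Double.MAX_VALUE, not Double.MIN_VALUE; Double.MIN_VALUE is the smallest positive double (about 4.9E-324), so using it would silently break the pattern for all-negative data. Double.NEGATIVE_INFINITY and Double.POSITIVE_INFINITY are equally valid seeds when infinities are acceptable in the surrounding arithmetic.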
From source file:edu.stanford.cfuller.imageanalysistools.filter.VoronoiFilter.java
/**
 * Checks whether a given coordinate would be on a boundary of a
 * Voronoi diagram created from the given points.
 */
public boolean isOnEightConnectedBoundary(java.util.List<Vector2D> points, ImageCoordinate ic) {
    int x = ic.get(ImageCoordinate.X);
    int y = ic.get(ImageCoordinate.Y);

    int closestIndex = 0;
    int nextIndex = 0;
    // Both running minima start at the largest finite double, so the first real distance always replaces them.
    double closestDist = Double.MAX_VALUE;
    double nextDist = Double.MAX_VALUE;

    for (int i = 0; i < points.size(); i++) {
        Vector2D pt = points.get(i);
        double dist = Math.hypot(pt.getX() - x, pt.getY() - y);
        if (dist < closestDist) {
            nextDist = closestDist;
            nextIndex = closestIndex;
            closestDist = dist;
            closestIndex = i;
        } else if (dist < nextDist) {
            nextDist = dist;
            nextIndex = i;
        }
    }

    Vector2D projectedCoordinate = this.projectPointOntoVector(points.get(closestIndex), new Vector2D(x, y),
            points.get(nextIndex));

    double distToNext = points.get(nextIndex).subtract(projectedCoordinate).getNorm();
    double distToClosest = points.get(closestIndex).subtract(projectedCoordinate).getNorm();

    final double cutoff = 1.3 * Math.sqrt(2);

    if (distToNext - distToClosest < cutoff) {
        return true;
    }
    return false;
}
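The same bookkeeping, distilled into a stand-alone sketch (illustrative only, not the ImageAnalysisTools API): both the nearest and second-nearest slots start at Double.MAX_VALUE, so the first two distances seen fill them in.

// Track the smallest and second-smallest values of an array.
public class TwoSmallest {
    public static double[] twoSmallest(double[] values) {
        double closest = Double.MAX_VALUE;
        double next = Double.MAX_VALUE;
        for (double v : values) {
            if (v < closest) {
                next = closest;   // previous best becomes second best
                closest = v;
            } else if (v < next) {
                next = v;
            }
        }
        return new double[] { closest, next };
    }

    public static void main(String[] args) {
        double[] r = twoSmallest(new double[] { 5.0, 2.0, 9.0, 3.0 });
        System.out.println(r[0] + " " + r[1]); // 2.0 3.0
    }
}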
From source file:com.joptimizer.util.MPSParserNetlibTest.java
/**
 * Tests the parsing of a netlib problem.
 */
public void xxxtestSingleNetlib() throws Exception {
    log.debug("testSingleNetlib");
    //String problemName = "afiro";
    //String problemName = "afiroPresolved";
    //String problemName = "adlittle";
    //String problemName = "kb2";
    //String problemName = "sc50a";
    //String problemName = "sc50b";
    //String problemName = "blend";
    //String problemName = "scorpion";
    //String problemName = "recipe";
    //String problemName = "recipePresolved";
    //String problemName = "sctap1";
    //String problemName = "fit1d";
    //String problemName = "israel";
    //String problemName = "grow15";
    //String problemName = "etamacro";
    //String problemName = "pilot";
    //String problemName = "pilot4";
    //String problemName = "osa-14";
    //String problemName = "brandyPresolved";
    String problemName = "maros";

    File f = Utils.getClasspathResourceAsFile("lp" + File.separator + "netlib" + File.separator + problemName
            + File.separator + problemName + ".mps");
    MPSParser mpsParser = new MPSParser();
    mpsParser.parse(f);

    Properties expectedSolProps = null;
    try {
        //this is the solution of the mps problem given by Mathematica
        expectedSolProps = load(Utils.getClasspathResourceAsFile(
                "lp" + File.separator + "netlib" + File.separator + problemName + File.separator + "sol.txt"));
    } catch (Exception e) {
    }

    log.debug("name: " + mpsParser.getName());
    log.debug("n : " + mpsParser.getN());
    log.debug("meq : " + mpsParser.getMeq());
    log.debug("mieq: " + mpsParser.getMieq());
    log.debug("meq+mieq: " + (mpsParser.getMeq() + mpsParser.getMieq()));
    List<String> variablesNames = mpsParser.getVariablesNames();
    log.debug("x: " + ArrayUtils.toString(variablesNames));
    // log.debug("c: " + ArrayUtils.toString(p.getC()));
    // log.debug("G: " + ArrayUtils.toString(p.getG()));
    // log.debug("h: " + ArrayUtils.toString(p.getH()));
    // log.debug("A: " + ArrayUtils.toString(p.getA()));
    // log.debug("b: " + ArrayUtils.toString(p.getB()));
    // log.debug("lb:" + ArrayUtils.toString(p.getLb()));
    // log.debug("ub:" + ArrayUtils.toString(p.getUb()));

    //check consistency: if the problem was correctly parsed, the expectedSol must be its solution
    double delta = 1.e-7;
    if (expectedSolProps != null) {
        //key = variable name
        //value = sol value
        assertEquals(expectedSolProps.size(), variablesNames.size());
        RealVector expectedSol = new ArrayRealVector(variablesNames.size());
        for (int i = 0; i < variablesNames.size(); i++) {
            expectedSol.setEntry(i, Double.parseDouble(expectedSolProps.getProperty(variablesNames.get(i))));
        }
        log.debug("expectedSol: " + ArrayUtils.toString(expectedSol.toArray()));

        //check objective function value
        Map<String, LPNetlibProblem> problemsMap = LPNetlibProblem.loadAllProblems();
        LPNetlibProblem problem = problemsMap.get(problemName);
        RealVector c = new ArrayRealVector(mpsParser.getC().toArray());
        double value = c.dotProduct(expectedSol);
        log.debug("optimalValue: " + problem.optimalValue);
        log.debug("value : " + value);
        assertEquals(problem.optimalValue, value, delta);

        //check G.x < h
        if (mpsParser.getG() != null) {
            RealMatrix G = new Array2DRowRealMatrix(mpsParser.getG().toArray());
            RealVector h = new ArrayRealVector(mpsParser.getH().toArray());
            RealVector Gxh = G.operate(expectedSol).subtract(h);
            // running maximum starts at the most negative finite double
            double maxGxh = -Double.MAX_VALUE;
            for (int i = 0; i < Gxh.getDimension(); i++) {
                //log.debug(i);
                maxGxh = Math.max(maxGxh, Gxh.getEntry(i));
                assertTrue(Gxh.getEntry(i) <= 0);
            }
            log.debug("max(G.x - h): " + maxGxh);
        }

        //check A.x = b
        if (mpsParser.getA() != null) {
            RealMatrix A = new Array2DRowRealMatrix(mpsParser.getA().toArray());
            RealVector b = new ArrayRealVector(mpsParser.getB().toArray());
            RealVector Axb = A.operate(expectedSol).subtract(b);
            double norm = Axb.getNorm();
            log.debug("||A.x -b||: " + norm);
            assertEquals(0., norm, delta * mpsParser.getN());//some more tolerance
        }

        //check upper and lower bounds
        // NaN in a parsed bound means "unbounded": substitute the widest finite double values
        for (int i = 0; i < mpsParser.getLb().size(); i++) {
            double di = Double.isNaN(mpsParser.getLb().getQuick(i)) ? -Double.MAX_VALUE
                    : mpsParser.getLb().getQuick(i);
            assertTrue(di <= expectedSol.getEntry(i));
        }
        for (int i = 0; i < mpsParser.getUb().size(); i++) {
            double di = Double.isNaN(mpsParser.getUb().getQuick(i)) ? Double.MAX_VALUE
                    : mpsParser.getUb().getQuick(i);
            assertTrue(di >= expectedSol.getEntry(i));
        }
    }

    Utils.writeDoubleArrayToFile(mpsParser.getC().toArray(), "target" + File.separator + "c.txt");
    Utils.writeDoubleMatrixToFile(mpsParser.getG().toArray(), "target" + File.separator + "G.csv");
    Utils.writeDoubleArrayToFile(mpsParser.getH().toArray(), "target" + File.separator + "h.txt");
    Utils.writeDoubleMatrixToFile(mpsParser.getA().toArray(), "target" + File.separator + "A.csv");
    Utils.writeDoubleArrayToFile(mpsParser.getB().toArray(), "target" + File.separator + "b.txt");
    Utils.writeDoubleArrayToFile(mpsParser.getLb().toArray(), "target" + File.separator + "lb.txt");
    Utils.writeDoubleArrayToFile(mpsParser.getUb().toArray(), "target" + File.separator + "ub.txt");
}
From source file:com.joptimizer.algebra.Matrix1NornRescaler.java
/**
 * Scaling factors for symmetric (not singular) matrices.
 * Just the subdiagonal elements of the matrix are required.
 * @see Daniel Ruiz, "A scaling algorithm to equilibrate both rows and columns norms in matrices"
 * @see Philip A. Knight, Daniel Ruiz, Bora Ucar "A Symmetry Preserving Algorithm for Matrix Scaling"
 */
@Override
public DoubleMatrix1D getMatrixScalingFactorsSymm(DoubleMatrix2D A) {
    DoubleFactory1D F1 = DoubleFactory1D.dense;
    DoubleFactory2D F2 = DoubleFactory2D.sparse;
    int dim = A.columns();
    DoubleMatrix1D D1 = F1.make(dim, 1);
    DoubleMatrix2D AK = A.copy();
    DoubleMatrix2D DR = F2.identity(dim);
    DoubleMatrix1D DRInv = F1.make(dim);
    int maxIteration = 50;
    for (int k = 0; k <= maxIteration; k++) {
        double normR = -Double.MAX_VALUE;
        for (int i = 0; i < dim; i++) {
            double dri = getRowInfinityNorm(AK, i);
            DR.setQuick(i, i, Math.sqrt(dri));
            DRInv.setQuick(i, 1. / Math.sqrt(dri));
            normR = Math.max(normR, Math.abs(1 - dri));
            if (Double.isNaN(normR)) {
                throw new IllegalArgumentException("matrix is singular");
            }
        }
        if (normR < eps) {
            break;
        }
        for (int i = 0; i < dim; i++) {
            double prevD1I = D1.getQuick(i);
            double newD1I = prevD1I * DRInv.getQuick(i);
            D1.setQuick(i, newD1I);
        }
        if (k == maxIteration) {
            log.warn("max iteration reached");
        }
        AK = ColtUtils.diagonalMatrixMult(DRInv, AK, DRInv);
    }
    return D1;
}
From source file:Questao3.java
/**
 * Returns the smallest value in a matrix.
 * @param matriz the input matrix
 * @return double min
 */
public double minValue(double[][] matriz) {
    // Start the running minimum at the largest finite double value.
    double min = Double.MAX_VALUE;
    for (double[] linha : matriz) {
        for (double d : linha) {
            if (d < min) {
                min = d;
            }
        }
    }
    return min;
}
From source file:br.unicamp.ic.recod.gpsi.applications.gpsiJGAPEvolver.java
@Override
public void run() throws InvalidConfigurationException, InterruptedException, Exception {

    int i, j, k;
    byte nFolds = 5;
    gpsiDescriptor descriptor;
    gpsiMLDataset mlDataset;
    gpsiVoxelRawDataset dataset;
    GPGenotype gp;
    double[][] fitnessCurves;
    String[] curveLabels = new String[] { "train", "train_val", "val" };
    double bestScore, currentScore;
    IGPProgram current, bestVal;
    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();
    double validationScore, trainScore, bestValidationScore, bestTrainScore;
    double[][][] samples;

    for (byte f = 0; f < nFolds; f++) {

        System.out.println("\nRun " + (f + 1) + "\n");

        rawDataset.assignFolds(new byte[] { f, (byte) ((f + 1) % nFolds), (byte) ((f + 2) % nFolds) },
                new byte[] { (byte) ((f + 3) % nFolds) }, new byte[] { (byte) ((f + 4) % nFolds) });
        dataset = (gpsiVoxelRawDataset) rawDataset;
        gp = create(config, dataset.getnBands(), fitness);

        // 0: train, 1: train_val, 2: val
        fitnessCurves = new double[super.numGenerations][];
        current = null;
        bestVal = null;
        // -Double.MAX_VALUE guarantees the first evaluated program becomes the initial best
        bestScore = -Double.MAX_VALUE;
        bestValidationScore = -1.0;
        bestTrainScore = -1.0;

        for (int generation = 0; generation < super.numGenerations; generation++) {

            gp.evolve(1);
            gp.getGPPopulation().sortByFitness();

            if (this.dumpGens) {
                double[][][] dists;
                descriptor = new gpsiScalarSpectralIndexDescriptor(
                        new gpsiJGAPVoxelCombiner(fitness.getB(), gp.getGPPopulation().getGPPrograms()[0]));
                mlDataset = new gpsiMLDataset(descriptor);
                mlDataset.loadWholeDataset(rawDataset, true);
                dists = (new gpsiWholeSampler()).sample(mlDataset.getTrainingEntities(), this.classLabels);
                for (i = 0; i < this.classLabels.length; i++) {
                    stream.register(new gpsiDoubleCsvIOElement(dists[i], null,
                            "gens/f" + (f + 1) + "/" + classLabels[i] + "/" + (generation + 1) + ".csv"));
                }
            }

            for (i = 0; i < super.validation; i++) {
                current = gp.getGPPopulation().getGPPrograms()[i];
                descriptor = new gpsiScalarSpectralIndexDescriptor(
                        new gpsiJGAPVoxelCombiner(fitness.getB(), current));
                mlDataset = new gpsiMLDataset(descriptor);
                mlDataset.loadWholeDataset(rawDataset, true);
                samples = this.fitness.getSampler().sample(mlDataset.getValidationEntities(), classLabels);
                validationScore = fitness.getScore().score(samples);
                trainScore = current.getFitnessValue() - 1.0;
                currentScore = mean.evaluate(new double[] { trainScore, validationScore })
                        - sd.evaluate(new double[] { trainScore, validationScore });
                if (currentScore > bestScore) {
                    bestVal = current;
                    bestScore = currentScore;
                    bestTrainScore = trainScore;
                    bestValidationScore = validationScore;
                }
            }

            if (validation > 0) {
                best = new IGPProgram[2];
                best[0] = gp.getAllTimeBest();
                best[1] = bestVal;
                fitnessCurves[generation] = new double[] { best[0].getFitnessValue() - 1.0, bestTrainScore,
                        bestValidationScore };
                System.out.printf("%3dg: %.4f %.4f %.4f\n", generation + 1, fitnessCurves[generation][0],
                        fitnessCurves[generation][1], fitnessCurves[generation][2]);
            } else {
                best = new IGPProgram[1];
                best[0] = gp.getAllTimeBest();
                fitnessCurves[generation] = new double[] { gp.getAllTimeBest().getFitnessValue() - 1.0 };
                System.out.printf("%3dg: %.4f\n", generation + 1, fitnessCurves[generation][0]);
            }
        }

        stream.register(new gpsiDoubleCsvIOElement(fitnessCurves, curveLabels, "curves/f" + (f + 1) + ".csv"));

        System.out.println("Best solution for trainning: " + gp.getAllTimeBest().toStringNorm(0));
        stream.register(new gpsiStringIOElement(gp.getAllTimeBest().toStringNorm(0),
                "programs/f" + (f + 1) + "train.program"));

        if (validation > 0) {
            System.out.println("Best solution for trainning and validation: " + bestVal.toStringNorm(0));
            stream.register(new gpsiStringIOElement(bestVal.toStringNorm(0),
                    "programs/f" + (f + 1) + "train_val.program"));
        }

        descriptor = new gpsiScalarSpectralIndexDescriptor(new gpsiJGAPVoxelCombiner(fitness.getB(), best[0]));
        gpsi1NNToMomentScalarClassificationAlgorithm classificationAlgorithm = new gpsi1NNToMomentScalarClassificationAlgorithm(
                new Mean());
        gpsiClassifier classifier = new gpsiClassifier(descriptor, classificationAlgorithm);

        classifier.fit(this.rawDataset.getTrainingEntities());
        classifier.predict(this.rawDataset.getTestEntities());

        int[][] confusionMatrix = classifier.getConfusionMatrix();

        stream.register(new gpsiIntegerCsvIOElement(confusionMatrix, null,
                "confusion_matrices/f" + (f + 1) + "_train.csv"));

        if (validation > 0) {
            descriptor = new gpsiScalarSpectralIndexDescriptor(
                    new gpsiJGAPVoxelCombiner(fitness.getB(), best[1]));
            classificationAlgorithm = new gpsi1NNToMomentScalarClassificationAlgorithm(new Mean());
            classifier = new gpsiClassifier(descriptor, classificationAlgorithm);
            classifier.fit(this.rawDataset.getTrainingEntities());
            classifier.predict(this.rawDataset.getTestEntities());
            confusionMatrix = classifier.getConfusionMatrix();
            stream.register(new gpsiIntegerCsvIOElement(confusionMatrix, null,
                    "confusion_matrices/f" + (f + 1) + "_train_val.csv"));
        }
    }
}
From source file:com.google.blockly.model.FieldNumber.java
/**
 * Sets the constraints on valid number values.
 * <p/>
 * Changing the constraints may trigger a {@link ChangeEvent}, even if the value does not
 * change.
 *
 * @param min The minimum allowed value, inclusive.
 * @param max The maximum allowed value, inclusive.
 * @param precision The precision of allowed values. Valid values are multiples of this number,
 *                  such as 1, 0.1, 100, or 0.125.
 */
public void setConstraints(double min, double max, double precision) {
    if (max == Double.POSITIVE_INFINITY || Double.isNaN(max)) {
        max = NO_CONSTRAINT;
    } else if (max == Double.NEGATIVE_INFINITY) {
        throw new IllegalArgumentException("Max cannot be -Inf. No valid values would exist.");
    }
    if (min == Double.NEGATIVE_INFINITY || Double.isNaN(min)) {
        min = NO_CONSTRAINT;
    } else if (min == Double.POSITIVE_INFINITY) {
        throw new IllegalArgumentException("Min cannot be Inf. No valid values would exist.");
    }
    if (precision == 0 || Double.isNaN(precision)) {
        precision = NO_CONSTRAINT;
    }
    if (Double.isInfinite(precision)) {
        throw new IllegalArgumentException("Precision cannot be infinite.");
    }
    if (!Double.isNaN(min) && !Double.isNaN(max) && min > max) {
        throw new IllegalArgumentException("Minimum value must be less than max. Found " + min + " > " + max);
    }
    if (!Double.isNaN(precision) && precision <= 0) {
        throw new IllegalArgumentException("Precision must be positive. Found " + precision);
    }

    // Unconstrained (NaN) bounds fall back to the widest representable finite range.
    double effectiveMin = Double.isNaN(min) ? -Double.MAX_VALUE : min;
    double effectiveMax = Double.isNaN(max) ? Double.MAX_VALUE : max;
    if (!Double.isNaN(precision)) {
        if (effectiveMin < 0) {
            double multiplier = Math.floor(-effectiveMin / precision);
            effectiveMin = precision * -multiplier;
        } else {
            double multiplier = Math.ceil(effectiveMin / precision);
            effectiveMin = precision * multiplier;
        }
        if (effectiveMax < 0) {
            double multiplier = Math.ceil(-effectiveMax / precision);
            effectiveMax = precision * -multiplier;
        } else {
            double multiplier = Math.floor(effectiveMax / precision);
            effectiveMax = precision * multiplier;
        }
        if (effectiveMin > effectiveMax) {
            throw new IllegalArgumentException("No valid value in range.");
        }
    }

    mMin = min;
    mMax = max;
    mPrecision = precision;
    mEffectiveMin = effectiveMin;
    mEffectiveMax = effectiveMax;
    mIntegerPrecision = (precision == Math.round(precision));
    if (!hasPrecision()) {
        mFormatter = NAIVE_DECIMAL_FORMAT;
    } else if (mIntegerPrecision) {
        mFormatter = INTEGER_DECIMAL_FORMAT;
    } else {
        String precisionStr = NAIVE_DECIMAL_FORMAT.format(precision);
        int decimalChar = precisionStr.indexOf('.');
        if (decimalChar == -1) {
            mFormatter = INTEGER_DECIMAL_FORMAT;
        } else {
            int significantDigits = precisionStr.length() - decimalChar;
            StringBuilder sb = new StringBuilder("0.");
            char[] sigDigitsFormat = new char[significantDigits];
            Arrays.fill(sigDigitsFormat, '#');
            sb.append(sigDigitsFormat);
            mFormatter = new DecimalFormat(sb.toString());
        }
    }

    setValueImpl(mValue, true);
}
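A reduced sketch of the substitution made above (illustrative names, not Blockly's API): a bound of NaN means "no constraint" and is replaced by the widest finite double range before any clamping.

// NaN means "no bound", so it is replaced by the widest finite double range before clamping.
public class EffectiveRange {
    static double clamp(double value, double min, double max) {
        double effectiveMin = Double.isNaN(min) ? -Double.MAX_VALUE : min;
        double effectiveMax = Double.isNaN(max) ? Double.MAX_VALUE : max;
        return Math.max(effectiveMin, Math.min(effectiveMax, value));
    }

    public static void main(String[] args) {
        System.out.println(clamp(250.0, Double.NaN, 100.0));      // 100.0 (only the upper bound applies)
        System.out.println(clamp(250.0, Double.NaN, Double.NaN)); // 250.0 (no bounds at all)
    }
}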
From source file:com.joptimizer.algebra.Matrix1NormRescaler.java
/**
 * Scaling factors for symmetric (not singular) matrices.
 * Just the subdiagonal elements of the matrix are required.
 * @see Daniel Ruiz, "A scaling algorithm to equilibrate both rows and columns norms in matrices"
 * @see Philip A. Knight, Daniel Ruiz, Bora Ucar "A Symmetry Preserving Algorithm for Matrix Scaling"
 */
public DoubleMatrix1D getMatrixScalingFactorsSymm(DoubleMatrix2D A) {
    DoubleFactory1D F1 = DoubleFactory1D.dense;
    DoubleFactory2D F2 = DoubleFactory2D.sparse;
    Algebra ALG = Algebra.DEFAULT;
    int dim = A.columns();
    DoubleMatrix1D D1 = F1.make(dim, 1);
    DoubleMatrix2D AK = A.copy();
    DoubleMatrix2D DR = F2.identity(dim);
    DoubleMatrix1D DRInv = F1.make(dim);
    //log.debug("eps : " + eps);
    int maxIteration = 50;
    for (int k = 0; k <= maxIteration; k++) {
        double normR = -Double.MAX_VALUE;
        for (int i = 0; i < dim; i++) {
            //double dri = ALG.normInfinity(AK.viewRow(i));
            double dri = this.getRowInfinityNorm(AK, i);
            DR.setQuick(i, i, Math.sqrt(dri));
            DRInv.setQuick(i, 1. / Math.sqrt(dri));
            normR = Math.max(normR, Math.abs(1 - dri));
            if (Double.isNaN(normR)) {
                throw new IllegalArgumentException("matrix is singular");
            }
        }
        //log.debug("normR: " + normR);
        if (normR < eps) {
            break;
        }
        for (int i = 0; i < dim; i++) {
            double prevD1I = D1.getQuick(i);
            double newD1I = prevD1I * DRInv.getQuick(i);
            D1.setQuick(i, newD1I);
        }
        //logger.debug("D1: " + ArrayUtils.toString(D1.toArray()));
        if (k == maxIteration) {
            log.warn("max iteration reached");
        }
        AK = ColtUtils.diagonalMatrixMult(DRInv, AK, DRInv);
    }
    return D1;
}
From source file:ml.shifu.shifu.core.binning.UpdateBinningInfoReducer.java
@Override
protected void reduce(IntWritable key, Iterable<BinningInfoWritable> values, Context context)
        throws IOException, InterruptedException {
    long start = System.currentTimeMillis();
    double sum = 0d;
    double squaredSum = 0d;
    double tripleSum = 0d;
    double quarticSum = 0d;
    double p25th = 0d;
    double median = 0d;
    double p75th = 0d;

    long count = 0L, missingCount = 0L;
    // Running trackers: min is seeded with the largest finite double; note that max is seeded with
    // Double.MIN_VALUE, the smallest positive double (not the most negative finite value).
    double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
    List<Double> binBoundaryList = null;
    List<String> binCategories = null;
    long[] binCountPos = null;
    long[] binCountNeg = null;
    double[] binWeightPos = null;
    double[] binWeightNeg = null;
    long[] binCountTotal = null;

    int columnConfigIndex = key.get() >= this.columnConfigList.size() ? key.get() % this.columnConfigList.size()
            : key.get();

    ColumnConfig columnConfig = this.columnConfigList.get(columnConfigIndex);

    HyperLogLogPlus hyperLogLogPlus = null;
    Set<String> fis = new HashSet<String>();
    long totalCount = 0, invalidCount = 0, validNumCount = 0;

    int binSize = 0;
    for (BinningInfoWritable info : values) {
        if (info.isEmpty()) {
            // mapper has no stats, skip it
            continue;
        }
        CountAndFrequentItemsWritable cfiw = info.getCfiw();
        totalCount += cfiw.getCount();
        invalidCount += cfiw.getInvalidCount();
        validNumCount += cfiw.getValidNumCount();
        fis.addAll(cfiw.getFrequetItems());
        if (hyperLogLogPlus == null) {
            hyperLogLogPlus = HyperLogLogPlus.Builder.build(cfiw.getHyperBytes());
        } else {
            try {
                hyperLogLogPlus = (HyperLogLogPlus) hyperLogLogPlus
                        .merge(HyperLogLogPlus.Builder.build(cfiw.getHyperBytes()));
            } catch (CardinalityMergeException e) {
                throw new RuntimeException(e);
            }
        }

        if (columnConfig.isHybrid() && binBoundaryList == null && binCategories == null) {
            binBoundaryList = info.getBinBoundaries();
            binCategories = info.getBinCategories();
            binSize = binBoundaryList.size() + binCategories.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        } else if (columnConfig.isNumerical() && binBoundaryList == null) {
            binBoundaryList = info.getBinBoundaries();
            binSize = binBoundaryList.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        } else if (columnConfig.isCategorical() && binCategories == null) {
            binCategories = info.getBinCategories();
            binSize = binCategories.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        }

        count += info.getTotalCount();
        missingCount += info.getMissingCount();
        // for numeric, such sums are OK, for categorical, such values are all 0, should be updated by using
        // binCountPos and binCountNeg
        sum += info.getSum();
        squaredSum += info.getSquaredSum();
        tripleSum += info.getTripleSum();
        quarticSum += info.getQuarticSum();
        if (Double.compare(max, info.getMax()) < 0) {
            max = info.getMax();
        }
        if (Double.compare(min, info.getMin()) > 0) {
            min = info.getMin();
        }

        for (int i = 0; i < (binSize + 1); i++) {
            binCountPos[i] += info.getBinCountPos()[i];
            binCountNeg[i] += info.getBinCountNeg()[i];
            binWeightPos[i] += info.getBinWeightPos()[i];
            binWeightNeg[i] += info.getBinWeightNeg()[i];
            binCountTotal[i] += info.getBinCountPos()[i];
            binCountTotal[i] += info.getBinCountNeg()[i];
        }
    }

    if (columnConfig.isNumerical()) {
        long p25Count = count / 4;
        long medianCount = p25Count * 2;
        long p75Count = p25Count * 3;
        p25th = min;
        median = min;
        p75th = min;
        int currentCount = 0;
        for (int i = 0; i < binBoundaryList.size(); i++) {
            double left = getCutoffBoundary(binBoundaryList.get(i), max, min);
            double right = ((i == binBoundaryList.size() - 1) ? max
                    : getCutoffBoundary(binBoundaryList.get(i + 1), max, min));
            if (p25Count >= currentCount && p25Count < currentCount + binCountTotal[i]) {
                p25th = ((p25Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
            }
            if (medianCount >= currentCount && medianCount < currentCount + binCountTotal[i]) {
                median = ((medianCount - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
            }
            if (p75Count >= currentCount && p75Count < currentCount + binCountTotal[i]) {
                p75th = ((p75Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
                // when get 75 percentile stop it
                break;
            }
            currentCount += binCountTotal[i];
        }
        LOG.info("Coloumn num is {}, p25 value is {}, median value is {}, p75 value is {}",
                columnConfig.getColumnNum(), p25th, median, p75th);
    }

    LOG.info("Coloumn num is {}, columnType value is {}, cateMaxNumBin is {}, binCategory size is {}",
            columnConfig.getColumnNum(), columnConfig.getColumnType(), modelConfig.getStats().getCateMaxNumBin(),
            (CollectionUtils.isNotEmpty(columnConfig.getBinCategory()) ? columnConfig.getBinCategory().size()
                    : 0));

    // To merge categorical binning
    if (columnConfig.isCategorical() && modelConfig.getStats().getCateMaxNumBin() > 0
            && CollectionUtils.isNotEmpty(binCategories)
            && binCategories.size() > modelConfig.getStats().getCateMaxNumBin()) {
        // only category size large then expected max bin number
        CateBinningStats cateBinningStats = rebinCategoricalValues(
                new CateBinningStats(binCategories, binCountPos, binCountNeg, binWeightPos, binWeightNeg));
        LOG.info("For variable - {}, {} bins is rebined to {} bins", columnConfig.getColumnName(),
                binCategories.size(), cateBinningStats.binCategories.size());

        binCategories = cateBinningStats.binCategories;
        binCountPos = cateBinningStats.binCountPos;
        binCountNeg = cateBinningStats.binCountNeg;
        binWeightPos = cateBinningStats.binWeightPos;
        binWeightNeg = cateBinningStats.binWeightNeg;
    }

    double[] binPosRate;
    if (modelConfig.isRegression()) {
        binPosRate = computePosRate(binCountPos, binCountNeg);
    } else {
        // for multiple classfication, use rate of categories to compute a value
        binPosRate = computeRateForMultiClassfication(binCountPos);
    }
    String binBounString = null;

    if (columnConfig.isHybrid()) {
        if (binCategories.size() > this.maxCateSize) {
            LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(),
                    binCategories.size());
            return;
        }
        binBounString = binBoundaryList.toString();
        binBounString += Constants.HYBRID_BIN_STR_DILIMETER + Base64Utils.base64Encode(
                "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]");
    } else if (columnConfig.isCategorical()) {
        if (binCategories.size() > this.maxCateSize) {
            LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(),
                    binCategories.size());
            return;
        }
        binBounString = Base64Utils.base64Encode(
                "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]");
        // recompute such value for categorical variables
        min = Double.MAX_VALUE;
        max = Double.MIN_VALUE;
        sum = 0d;
        squaredSum = 0d;
        for (int i = 0; i < binPosRate.length; i++) {
            if (!Double.isNaN(binPosRate[i])) {
                if (Double.compare(max, binPosRate[i]) < 0) {
                    max = binPosRate[i];
                }
                if (Double.compare(min, binPosRate[i]) > 0) {
                    min = binPosRate[i];
                }
                long binCount = binCountPos[i] + binCountNeg[i];
                sum += binPosRate[i] * binCount;
                double squaredVal = binPosRate[i] * binPosRate[i];
                squaredSum += squaredVal * binCount;
                tripleSum += squaredVal * binPosRate[i] * binCount;
                quarticSum += squaredVal * squaredVal * binCount;
            }
        }
    } else {
        if (binBoundaryList.size() == 0) {
            LOG.warn("Column {} {} with invalid bin boundary size.", key.get(), columnConfig.getColumnName(),
                    binBoundaryList.size());
            return;
        }
        binBounString = binBoundaryList.toString();
    }

    ColumnMetrics columnCountMetrics = null;
    ColumnMetrics columnWeightMetrics = null;
    if (modelConfig.isRegression()) {
        columnCountMetrics = ColumnStatsCalculator.calculateColumnMetrics(binCountNeg, binCountPos);
        columnWeightMetrics = ColumnStatsCalculator.calculateColumnMetrics(binWeightNeg, binWeightPos);
    }

    // To make it be consistent with SPDT, missingCount is excluded to compute mean, stddev ...
    long realCount = this.statsExcludeMissingValue ? (count - missingCount) : count;

    double mean = sum / realCount;
    double stdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / (realCount - 1)));
    double aStdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / realCount));

    double skewness = ColumnStatsCalculator.computeSkewness(realCount, mean, aStdDev, sum, squaredSum,
            tripleSum);
    double kurtosis = ColumnStatsCalculator.computeKurtosis(realCount, mean, aStdDev, sum, squaredSum,
            tripleSum, quarticSum);

    sb.append(key.get()) // column id
            .append(Constants.DEFAULT_DELIMITER).append(binBounString) // column bins
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountNeg)) // bin count negative
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountPos)) // bin count positive
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(new double[0])) // deprecated
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binPosRate)) // bin positive rate
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getKs())) // KS
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getIv())) // IV
            .append(Constants.DEFAULT_DELIMITER).append(df.format(max)) // max
            .append(Constants.DEFAULT_DELIMITER).append(df.format(min)) // min
            .append(Constants.DEFAULT_DELIMITER).append(df.format(mean)) // mean
            .append(Constants.DEFAULT_DELIMITER).append(df.format(stdDev)) // standard deviation
            .append(Constants.DEFAULT_DELIMITER).append(columnConfig.getColumnType().toString()) // column type
            .append(Constants.DEFAULT_DELIMITER).append(median) // median value ?
            .append(Constants.DEFAULT_DELIMITER).append(missingCount) // missing count
            .append(Constants.DEFAULT_DELIMITER).append(count) // count
            .append(Constants.DEFAULT_DELIMITER).append(missingCount * 1.0d / count) // missing ratio
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightNeg)) // bin weighted negative
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightPos)) // bin weighted positive
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : columnCountMetrics.getWoe()) // WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getWoe()) // weighted WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getKs()) // weighted KS
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getIv()) // weighted IV
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? Arrays.toString(new double[binSize + 1])
                    : columnCountMetrics.getBinningWoe().toString()) // bin WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? Arrays.toString(new double[binSize + 1])
                    : columnWeightMetrics.getBinningWoe().toString()) // bin weighted WOE
            .append(Constants.DEFAULT_DELIMITER).append(skewness) // skewness
            .append(Constants.DEFAULT_DELIMITER).append(kurtosis) // kurtosis
            .append(Constants.DEFAULT_DELIMITER).append(totalCount) // total count
            .append(Constants.DEFAULT_DELIMITER).append(invalidCount) // invalid count
            .append(Constants.DEFAULT_DELIMITER).append(validNumCount) // valid num count
            .append(Constants.DEFAULT_DELIMITER).append(hyperLogLogPlus.cardinality()) // cardinality
            .append(Constants.DEFAULT_DELIMITER).append(Base64Utils.base64Encode(limitedFrequentItems(fis))) // frequent items
            .append(Constants.DEFAULT_DELIMITER).append(p25th) // the 25 percentile value
            .append(Constants.DEFAULT_DELIMITER).append(p75th);

    outputValue.set(sb.toString());
    context.write(NullWritable.get(), outputValue);
    sb.delete(0, sb.length());
    LOG.debug("Time:{}", (System.currentTimeMillis() - start));
}
From source file:playground.sergioo.networkBusLaneAdder2012.gui.BusLaneAdderPanel.java
public BusLaneAdderPanel(BusLaneAdderWindow busLaneAdderWindow, NetworkPainter networkPainter, File imageFile,
        double[] upLeft, double[] downRight, CoordinateTransformation coordinateTransformation)
        throws IOException {
    super();
    addressLocator = new AddressLocator(coordinateTransformation);
    this.busLaneAdderWindow = busLaneAdderWindow;
    ImagePainter imagePainter = new ImagePainter(imageFile, this);
    imagePainter.setImageCoordinates(upLeft, downRight);
    addLayer(new Layer(imagePainter, false));
    addLayer(new Layer(networkPainter), true);
    addLayer(new Layer(new LinesPainter(), false));
    this.setBackground(backgroundColor);
    calculateBoundaries();
    super.setPreferredSize(Toolkit.getDefaultToolkit().getScreenSize().width,
            Toolkit.getDefaultToolkit().getScreenSize().height);
    addMouseListener(this);
    addMouseMotionListener(this);
    addMouseWheelListener(this);
    addKeyListener(this);
    setFocusable(true);
    TravelDisutility travelMinCost = new TravelDisutility() {
        @Override
        public double getLinkTravelDisutility(final Link link, final double time, final Person person,
                final Vehicle vehicle) {
            return getLinkMinimumTravelDisutility(link);
        }

        @Override
        public double getLinkMinimumTravelDisutility(Link link) {
            if (link.getAllowedModes().contains("bus"))
                return link.getLength() / BUS_SPEED;
            else
                // non-bus links get the largest finite cost, so the router avoids them
                return Double.MAX_VALUE;
        }
    };
    TravelTime timeFunction = new TravelTime() {
        @Override
        public double getLinkTravelTime(Link link, double time, Person person, Vehicle vehicle) {
            if (link.getAllowedModes().contains("bus"))
                return link.getLength() / BUS_SPEED;
            else
                return Double.MAX_VALUE;
        }
    };
    PreProcessDijkstra preProcessData = new PreProcessDijkstra();
    preProcessData.run(busLaneAdderWindow.getNetwork());
    dijkstra = new Dijkstra(busLaneAdderWindow.getNetwork(), travelMinCost, timeFunction, preProcessData);
}
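In this MATSim-based example, links that buses cannot use get Double.MAX_VALUE as both travel cost and travel time, which makes them effectively impassable to the Dijkstra router. A caveat that applies to any finite sentinel cost (illustrative snippet, not project code): the sentinel still takes part in arithmetic, so adding two of them overflows to infinity, while adding a small cost leaves the sentinel unchanged.

// Arithmetic behaviour of a Double.MAX_VALUE "impassable" cost.
public class MaxValueCostDemo {
    public static void main(String[] args) {
        double impassable = Double.MAX_VALUE;
        System.out.println(impassable + 1000.0);                    // 1.7976931348623157E308 (rounds back to MAX_VALUE)
        System.out.println(impassable + impassable);                // Infinity (overflow)
        System.out.println(impassable < Double.POSITIVE_INFINITY);  // true
    }
}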
From source file:geogebra.common.kernel.implicit.PolynomialUtils.java
private static boolean rootPolishing(double[] pair, GeoImplicitPoly p1, GeoImplicitPoly p2, double[] line) {
    double x = pair[0], y = pair[1];
    double p, q;
    if (p1 == null) {
        return false;
    }
    if (p2 == null && (line == null || line.length != 3)) {
        return false;
    }
    p = p1.evalPolyAt(x, y);
    if (p2 != null)
        q = p2.evalPolyAt(x, y);
    else
        q = line[0] + x * line[1] + y * line[2];
    // Double.MAX_VALUE as the initial "previous error" lets the first Newton step always pass the divergence check.
    double lastErr = Double.MAX_VALUE;
    double err = Math.abs(p) + Math.abs(q);
    int n = 0;
    int MAX_ITERATIONS = 20;
    while (err < 10 * lastErr && err > Kernel.STANDARD_PRECISION && ++n < MAX_ITERATIONS) {
        double px, py;
        double qx, qy;
        px = p1.evalDiffXPolyAt(x, y);
        py = p1.evalDiffYPolyAt(x, y);
        if (p2 != null) {
            qx = p2.evalDiffXPolyAt(x, y);
            qy = p2.evalDiffYPolyAt(x, y);
        } else {
            qx = line[1];
            qy = line[2];
        }
        double det = px * qy - py * qx;
        if (Kernel.isZero(det)) {
            break;
        }
        x -= (p * qy - q * py) / det;
        y -= (q * px - p * qx) / det;
        lastErr = err;
        p = p1.evalPolyAt(x, y);
        if (p2 != null) {
            q = p2.evalPolyAt(x, y);
        } else {
            q = line[0] + x * line[1] + y * line[2];
        }
        err = Math.abs(p) + Math.abs(q);
    }
    pair[0] = x;
    pair[1] = y;
    return err < Kernel.STANDARD_PRECISION;
}