List of usage examples for java.lang.Double.compare
public static int compare(double d1, double d2)
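Double.compare imposes the same total ordering as Double.compareTo: it returns a negative value, zero, or a positive value when d1 is less than, equal to, or greater than d2. Unlike the == operator it orders -0.0 below 0.0 and treats NaN as equal to itself and greater than every other value, which is exactly why several of the examples below use it for equality and NaN checks. A minimal, self-contained sketch of that behavior (not taken from any of the projects below):

public class DoubleCompareDemo {
    public static void main(String[] args) {
        System.out.println(Double.compare(1.5, 2.5));                // negative: 1.5 < 2.5
        System.out.println(Double.compare(2.5, 2.5));                // 0
        System.out.println(Double.compare(-0.0, 0.0));               // negative, although -0.0 == 0.0 is true
        System.out.println(Double.compare(Double.NaN, 1e300));       // positive: NaN sorts above everything
        System.out.println(Double.NaN == Double.NaN);                // false, but ...
        System.out.println(Double.compare(Double.NaN, Double.NaN));  // ... 0: NaN equals itself here
    }
}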
From source file: playground.johannes.gsv.matrices.analysis.CalcShares.java

private static List<Tuple<String, String>> getRelations(KeyMatrix m, int num) {
    Map<Double, Tuple<String, String>> map = new TreeMap<>(new Comparator<Double>() {
        @Override
        public int compare(Double o1, Double o2) {
            int result = -Double.compare(o1, o2); // negate for descending order
            if (result == 0)
                return 1; // never report ties, so equal values are kept as separate entries
                          // o1.hashCode() - o2.hashCode();
            else
                return result;
        }
    });

    Set<String> keys = m.keys();
    for (String i : keys) {
        for (String j : keys) {
            // if(i != j) {
            Double val = m.get(i, j);
            if (val != null) {
                map.put(val, new Tuple<String, String>(i, j));
            }
            // }
        }
    }

    List<Tuple<String, String>> list = new ArrayList<>(num);
    int cnt = 0;
    for (Entry<Double, Tuple<String, String>> entry : map.entrySet()) {
        list.add(entry.getValue());
        cnt++;
        if (cnt > num) {
            break;
        }
    }
    return list;
}
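The comparator above negates Double.compare for descending order and reports ties as "greater", so the TreeMap keeps entries whose values are equal instead of overwriting them. Note that this deliberately violates the Comparator contract; lookups such as get or containsKey will no longer find entries in such a map, which is acceptable here because the map is only iterated. A reduced sketch of the same idea with plain strings (hypothetical data, not the KeyMatrix/Tuple types used above):

import java.util.Map;
import java.util.TreeMap;

public class TopEntriesDemo {
    public static void main(String[] args) {
        // Descending by value; ties are reported as "greater" so duplicates survive.
        Map<Double, String> byValue = new TreeMap<>((o1, o2) -> {
            int result = -Double.compare(o1, o2);
            return result == 0 ? 1 : result;
        });
        byValue.put(3.0, "a-b");
        byValue.put(3.0, "c-d"); // kept as a separate entry because of the tie-break
        byValue.put(7.5, "e-f");
        System.out.println(byValue.values()); // [e-f, a-b, c-d]
    }
}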
From source file: com.joptimizer.optimizers.LPStandardConverterTest.java

/**
 * Standardization (to the strictly standard form) of a problem of the form:
 *   min(c) s.t.
 *     G.x < h
 *     A.x = b
 *     lb <= x <= ub
 * @TODO: the strict conversion is not yet ready.
 */
public void xxxtestCGhAbLbUb1Strict() throws Exception {
    log.debug("testCGhAbLbUb1Strict");
    String problemId = "1";

    double[] c = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "c" + problemId + ".txt");
    double[][] G = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "G" + problemId + ".csv", ",".charAt(0));
    double[] h = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "h" + problemId + ".txt");
    double[][] A = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "A" + problemId + ".csv", ",".charAt(0));
    double[] b = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "b" + problemId + ".txt");
    double[] lb = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "lb" + problemId + ".txt");
    double[] ub = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "ub" + problemId + ".txt");
    double[] expectedSol = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "sol" + problemId + ".txt");
    double expectedValue = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "value" + problemId + ".txt")[0];
    double expectedTolerance = MatrixUtils.createRealMatrix(A)
            .operate(MatrixUtils.createRealVector(expectedSol))
            .subtract(MatrixUtils.createRealVector(b)).getNorm();

    int nOfSlackVariables = 0;
    for (int i = 0; i < c.length; i++) {
        double lbi = lb[i];
        int lbCompare = Double.compare(lbi, 0.);
        if (lbCompare != 0 && !Double.isNaN(lbi)) {
            nOfSlackVariables++;
        }
        if (!Double.isNaN(ub[i])) {
            nOfSlackVariables++;
        }
    }
    int expectedS = G.length + nOfSlackVariables;

    // standard form conversion
    boolean strictlyStandardForm = true;
    LPStandardConverter lpConverter = new LPStandardConverter(strictlyStandardForm);
    lpConverter.toStandardForm(c, G, h, A, b, lb, ub);

    int n = lpConverter.getStandardN();
    int s = lpConverter.getStandardS();
    c = lpConverter.getStandardC().toArray();
    A = lpConverter.getStandardA().toArray();
    b = lpConverter.getStandardB().toArray();
    lb = lpConverter.getStandardLB().toArray();
    ub = (lpConverter.getStandardUB() == null) ? null : ub;
    log.debug("n : " + n);
    log.debug("s : " + s);
    log.debug("c : " + ArrayUtils.toString(c));
    log.debug("A : " + ArrayUtils.toString(A));
    log.debug("b : " + ArrayUtils.toString(b));
    log.debug("lb : " + ArrayUtils.toString(lb));
    //log.debug("ub : " + ArrayUtils.toString(ub));

    // check consistency
    assertEquals(expectedS, s);
    assertEquals(lb.length, n);
    assertTrue(ub == null);

    // check constraints
    RealMatrix GOrig = new Array2DRowRealMatrix(G);
    RealVector hOrig = new ArrayRealVector(h);
    RealMatrix AStandard = new Array2DRowRealMatrix(A);
    RealVector bStandard = new ArrayRealVector(b);
    RealVector expectedSolVector = new ArrayRealVector(expectedSol);
    double[] expectedStandardSol = lpConverter.getStandardComponents(expectedSol);
    RealVector expectedStandardSolVector = new ArrayRealVector(expectedStandardSol);
    for (int i = 0; i < expectedStandardSolVector.getDimension(); i++) {
        assertTrue(expectedStandardSolVector.getEntry(i) >= 0.);
    }
    RealVector Axmb = AStandard.operate(expectedStandardSolVector).subtract(bStandard);
    assertEquals(0., Axmb.getNorm(), expectedTolerance);

    Utils.writeDoubleArrayToFile(new double[] { s }, "target" + File.separator + "standardS_" + problemId + ".txt");
    Utils.writeDoubleArrayToFile(c, "target" + File.separator + "standardC_" + problemId + ".txt");
    Utils.writeDoubleMatrixToFile(A, "target" + File.separator + "standardA_" + problemId + ".csv");
    Utils.writeDoubleArrayToFile(b, "target" + File.separator + "standardB_" + problemId + ".txt");
    Utils.writeDoubleArrayToFile(lb, "target" + File.separator + "standardLB_" + problemId + ".txt");
    // ub is null
    Utils.writeDoubleArrayToFile(ub, "target" + File.separator + "standardUB_" + problemId + ".txt");
}
From source file: org.mrgeo.colorscale.ColorScale.java

public boolean equals(final ColorScale cs) {
    if (min == null && cs.min != null || min != null && cs.min == null) {
        return false;
    }
    if ((min != null && cs.min != null) && Double.compare(min, cs.min) != 0) {
        return false;
    }
    if (max == null && cs.max != null || max != null && cs.max == null) {
        return false;
    }
    if ((max != null && cs.max != null) && Double.compare(max, cs.max) != 0) {
        return false;
    }
    if (!scaling.equals(cs.scaling)) {
        return false;
    }
    if (interpolate != cs.interpolate) {
        return false;
    }
    if (forceValuesIntoRange != cs.forceValuesIntoRange) {
        return false;
    }
    if (reliefShading != cs.reliefShading) {
        return false;
    }
    if (nullColor.length != cs.nullColor.length) {
        return false;
    }
    for (int i = 0; i < nullColor.length; i++) {
        if (nullColor[i] != cs.nullColor[i]) {
            return false;
        }
    }
    if (size() != cs.size()) {
        return false;
    }
    final Iterator<Double> iterator1 = cs.keySet().iterator();
    for (final Double d1 : this.keySet()) {
        final Double d2 = iterator1.next();
        if (d1.compareTo(d2) != 0) {
            return false;
        }
        final Color value1 = get(d1);
        final Color value2 = get(d2);
        if (!value1.equals(value2)) {
            return false;
        }
    }
    return true;
}
From source file: org.noise_planet.noisecapture.MeasurementService.java

public void startRecording() {
    canceled.set(false);
    initLocalisationServices();
    isRecording.set(true);
    this.audioProcess = new AudioProcess(isRecording, canceled);
    if (Double.compare(0, dBGain) != 0) {
        audioProcess.setGain((float) Math.pow(10, dBGain / 20));
    }
    audioProcess.getListeners().addPropertyChangeListener(doProcessing);
    // Start measurement
    new Thread(audioProcess).start();
    // Change notification icon message
    showNotification();
}
From source file: org.rhwlab.BHC.NodeBase.java

@Override
public int compareTo(Object o) {
    NodeBase other = (NodeBase) o;
    int ret = Double.compare(this.lnR, other.lnR);
    // int ret = Double.compare(this.lnLike, other.lnLike);
    if (ret == 0) {
        ret = Integer.compare(this.hashCode(), other.hashCode());
    }
    return ret;
}
From source file: org.stockchart.core.AxisRange.java

public boolean expandViewValues(double maxValue, double minValue) {
    double newMinValue = fMinViewValue;
    double newMaxValue = fMaxViewValue;

    if (0 == Double.compare(fMinViewValue, Double.NaN))
        newMinValue = minValue;
    else if (minValue < fMinViewValue)
        newMinValue = minValue;

    if (0 == Double.compare(fMaxViewValue, Double.NaN))
        newMaxValue = maxValue;
    else if (maxValue > fMaxViewValue)
        newMaxValue = maxValue;

    return this.setViewValues(newMinValue, newMaxValue);

    // if(maxValue <= this.getMaxOrAutoValue() && minValue >= this.getMinOrAutoValue())
    // {
    //     if(0 == Double.compare(fMinViewValue, Double.NaN))
    //         fMinViewValue = minValue;
    //     else if(minValue < fMinViewValue)
    //         fMinViewValue = minValue;
    //
    //     if(0 == Double.compare(fMaxViewValue, Double.NaN))
    //         fMaxViewValue = maxValue;
    //     else if(maxValue > fMaxViewValue)
    //         fMaxViewValue = maxValue;
    // }
}
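Because Double.compare treats NaN as equal to NaN, the 0 == Double.compare(value, Double.NaN) checks above are simply NaN tests: the class uses NaN as a "no view value set yet" sentinel, and value == Double.NaN would always be false. A minimal stand-alone illustration of this point (not taken from the stockchart source):

public class NanCheckDemo {
    public static void main(String[] args) {
        double unset = Double.NaN; // "no view value yet" sentinel, as in AxisRange
        System.out.println(unset == Double.NaN);                     // false: == never matches NaN
        System.out.println(0 == Double.compare(unset, Double.NaN));  // true
        System.out.println(Double.isNaN(unset));                     // true: the more idiomatic spelling
    }
}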
From source file: VQVAD.VQVADTrainer.java

/**
 * Matlab equivalent of [~,idx] = sort(energies)
 *
 * @param energies values to sort indirectly
 * @return indices that order the energies ascending
 */
public Integer[] sortedEnergyIndices(final double[] energies) {
    final Integer[] idx = new Integer[energies.length];
    for (int i = 0; i < energies.length; i++)
        idx[i] = i;
    Arrays.sort(idx, new Comparator<Integer>() {
        @Override
        public int compare(final Integer o1, final Integer o2) {
            return Double.compare(energies[o1], energies[o2]);
        }
    });
    return idx;
}
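This is the usual "argsort" idiom: a boxed index array is sorted by comparing the values it points at, so the original array stays untouched. A self-contained sketch of the same idiom using only java.util (hypothetical input values, not part of the VQVAD source):

import java.util.Arrays;
import java.util.Comparator;

public class ArgSortDemo {
    public static void main(String[] args) {
        final double[] energies = { 3.2, 0.5, 1.7 };
        Integer[] idx = new Integer[energies.length];
        for (int i = 0; i < idx.length; i++) idx[i] = i;
        // Same comparator as above, written with Comparator.comparingDouble
        Arrays.sort(idx, Comparator.comparingDouble(i -> energies[i]));
        System.out.println(Arrays.toString(idx)); // [1, 2, 0] — indices of ascending energies
    }
}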
From source file: ml.shifu.shifu.core.binning.UpdateBinningInfoReducer.java

@Override
protected void reduce(IntWritable key, Iterable<BinningInfoWritable> values, Context context)
        throws IOException, InterruptedException {
    long start = System.currentTimeMillis();
    double sum = 0d;
    double squaredSum = 0d;
    double tripleSum = 0d;
    double quarticSum = 0d;
    double p25th = 0d;
    double median = 0d;
    double p75th = 0d;

    long count = 0L, missingCount = 0L;
    double min = Double.MAX_VALUE, max = Double.MIN_VALUE;

    List<Double> binBoundaryList = null;
    List<String> binCategories = null;
    long[] binCountPos = null;
    long[] binCountNeg = null;
    double[] binWeightPos = null;
    double[] binWeightNeg = null;
    long[] binCountTotal = null;

    int columnConfigIndex = key.get() >= this.columnConfigList.size()
            ? key.get() % this.columnConfigList.size() : key.get();
    ColumnConfig columnConfig = this.columnConfigList.get(columnConfigIndex);

    HyperLogLogPlus hyperLogLogPlus = null;
    Set<String> fis = new HashSet<String>();
    long totalCount = 0, invalidCount = 0, validNumCount = 0;
    int binSize = 0;

    for (BinningInfoWritable info : values) {
        if (info.isEmpty()) {
            // mapper has no stats, skip it
            continue;
        }
        CountAndFrequentItemsWritable cfiw = info.getCfiw();
        totalCount += cfiw.getCount();
        invalidCount += cfiw.getInvalidCount();
        validNumCount += cfiw.getValidNumCount();
        fis.addAll(cfiw.getFrequetItems());
        if (hyperLogLogPlus == null) {
            hyperLogLogPlus = HyperLogLogPlus.Builder.build(cfiw.getHyperBytes());
        } else {
            try {
                hyperLogLogPlus = (HyperLogLogPlus) hyperLogLogPlus
                        .merge(HyperLogLogPlus.Builder.build(cfiw.getHyperBytes()));
            } catch (CardinalityMergeException e) {
                throw new RuntimeException(e);
            }
        }

        if (columnConfig.isHybrid() && binBoundaryList == null && binCategories == null) {
            binBoundaryList = info.getBinBoundaries();
            binCategories = info.getBinCategories();
            binSize = binBoundaryList.size() + binCategories.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        } else if (columnConfig.isNumerical() && binBoundaryList == null) {
            binBoundaryList = info.getBinBoundaries();
            binSize = binBoundaryList.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        } else if (columnConfig.isCategorical() && binCategories == null) {
            binCategories = info.getBinCategories();
            binSize = binCategories.size();
            binCountPos = new long[binSize + 1];
            binCountNeg = new long[binSize + 1];
            binWeightPos = new double[binSize + 1];
            binWeightNeg = new double[binSize + 1];
            binCountTotal = new long[binSize + 1];
        }

        count += info.getTotalCount();
        missingCount += info.getMissingCount();
        // for numeric columns such sums are OK; for categorical columns they are all 0 and
        // are recomputed later from binCountPos and binCountNeg
        sum += info.getSum();
        squaredSum += info.getSquaredSum();
        tripleSum += info.getTripleSum();
        quarticSum += info.getQuarticSum();
        if (Double.compare(max, info.getMax()) < 0) {
            max = info.getMax();
        }
        if (Double.compare(min, info.getMin()) > 0) {
            min = info.getMin();
        }

        for (int i = 0; i < (binSize + 1); i++) {
            binCountPos[i] += info.getBinCountPos()[i];
            binCountNeg[i] += info.getBinCountNeg()[i];
            binWeightPos[i] += info.getBinWeightPos()[i];
            binWeightNeg[i] += info.getBinWeightNeg()[i];
            binCountTotal[i] += info.getBinCountPos()[i];
            binCountTotal[i] += info.getBinCountNeg()[i];
        }
    }

    if (columnConfig.isNumerical()) {
        long p25Count = count / 4;
        long medianCount = p25Count * 2;
        long p75Count = p25Count * 3;
        p25th = min;
        median = min;
        p75th = min;
        int currentCount = 0;
        for (int i = 0; i < binBoundaryList.size(); i++) {
            double left = getCutoffBoundary(binBoundaryList.get(i), max, min);
            double right = ((i == binBoundaryList.size() - 1)
                    ? max : getCutoffBoundary(binBoundaryList.get(i + 1), max, min));
            if (p25Count >= currentCount && p25Count < currentCount + binCountTotal[i]) {
                p25th = ((p25Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
            }
            if (medianCount >= currentCount && medianCount < currentCount + binCountTotal[i]) {
                median = ((medianCount - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
            }
            if (p75Count >= currentCount && p75Count < currentCount + binCountTotal[i]) {
                p75th = ((p75Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left;
                // stop once the 75th percentile is found
                break;
            }
            currentCount += binCountTotal[i];
        }
        LOG.info("Coloumn num is {}, p25 value is {}, median value is {}, p75 value is {}",
                columnConfig.getColumnNum(), p25th, median, p75th);
    }

    LOG.info("Coloumn num is {}, columnType value is {}, cateMaxNumBin is {}, binCategory size is {}",
            columnConfig.getColumnNum(), columnConfig.getColumnType(), modelConfig.getStats().getCateMaxNumBin(),
            (CollectionUtils.isNotEmpty(columnConfig.getBinCategory()) ? columnConfig.getBinCategory().size() : 0));

    // To merge categorical binning
    if (columnConfig.isCategorical() && modelConfig.getStats().getCateMaxNumBin() > 0
            && CollectionUtils.isNotEmpty(binCategories)
            && binCategories.size() > modelConfig.getStats().getCateMaxNumBin()) {
        // only if the category size is larger than the expected max bin number
        CateBinningStats cateBinningStats = rebinCategoricalValues(
                new CateBinningStats(binCategories, binCountPos, binCountNeg, binWeightPos, binWeightNeg));
        LOG.info("For variable - {}, {} bins is rebined to {} bins", columnConfig.getColumnName(),
                binCategories.size(), cateBinningStats.binCategories.size());
        binCategories = cateBinningStats.binCategories;
        binCountPos = cateBinningStats.binCountPos;
        binCountNeg = cateBinningStats.binCountNeg;
        binWeightPos = cateBinningStats.binWeightPos;
        binWeightNeg = cateBinningStats.binWeightNeg;
    }

    double[] binPosRate;
    if (modelConfig.isRegression()) {
        binPosRate = computePosRate(binCountPos, binCountNeg);
    } else {
        // for multi-class classification, use rate of categories to compute a value
        binPosRate = computeRateForMultiClassfication(binCountPos);
    }

    String binBounString = null;
    if (columnConfig.isHybrid()) {
        if (binCategories.size() > this.maxCateSize) {
            LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(),
                    binCategories.size());
            return;
        }
        binBounString = binBoundaryList.toString();
        binBounString += Constants.HYBRID_BIN_STR_DILIMETER + Base64Utils.base64Encode(
                "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]");
    } else if (columnConfig.isCategorical()) {
        if (binCategories.size() > this.maxCateSize) {
            LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(),
                    binCategories.size());
            return;
        }
        binBounString = Base64Utils.base64Encode(
                "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]");
        // recompute such values for categorical variables
        min = Double.MAX_VALUE;
        max = Double.MIN_VALUE;
        sum = 0d;
        squaredSum = 0d;
        for (int i = 0; i < binPosRate.length; i++) {
            if (!Double.isNaN(binPosRate[i])) {
                if (Double.compare(max, binPosRate[i]) < 0) {
                    max = binPosRate[i];
                }
                if (Double.compare(min, binPosRate[i]) > 0) {
                    min = binPosRate[i];
                }
                long binCount = binCountPos[i] + binCountNeg[i];
                sum += binPosRate[i] * binCount;
                double squaredVal = binPosRate[i] * binPosRate[i];
                squaredSum += squaredVal * binCount;
                tripleSum += squaredVal * binPosRate[i] * binCount;
                quarticSum += squaredVal * squaredVal * binCount;
            }
        }
    } else {
        if (binBoundaryList.size() == 0) {
            LOG.warn("Column {} {} with invalid bin boundary size.", key.get(), columnConfig.getColumnName(),
                    binBoundaryList.size());
            return;
        }
        binBounString = binBoundaryList.toString();
    }

    ColumnMetrics columnCountMetrics = null;
    ColumnMetrics columnWeightMetrics = null;
    if (modelConfig.isRegression()) {
        columnCountMetrics = ColumnStatsCalculator.calculateColumnMetrics(binCountNeg, binCountPos);
        columnWeightMetrics = ColumnStatsCalculator.calculateColumnMetrics(binWeightNeg, binWeightPos);
    }

    // To be consistent with SPDT, missingCount is excluded when computing mean, stddev ...
    long realCount = this.statsExcludeMissingValue ? (count - missingCount) : count;
    double mean = sum / realCount;
    double stdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / (realCount - 1)));
    double aStdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / realCount));
    double skewness = ColumnStatsCalculator.computeSkewness(realCount, mean, aStdDev, sum, squaredSum, tripleSum);
    double kurtosis = ColumnStatsCalculator.computeKurtosis(realCount, mean, aStdDev, sum, squaredSum, tripleSum,
            quarticSum);

    sb.append(key.get()) // column id
            .append(Constants.DEFAULT_DELIMITER).append(binBounString) // column bins
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountNeg)) // bin count negative
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountPos)) // bin count positive
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(new double[0])) // deprecated
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binPosRate)) // bin positive rate
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getKs())) // KS
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getIv())) // IV
            .append(Constants.DEFAULT_DELIMITER).append(df.format(max)) // max
            .append(Constants.DEFAULT_DELIMITER).append(df.format(min)) // min
            .append(Constants.DEFAULT_DELIMITER).append(df.format(mean)) // mean
            .append(Constants.DEFAULT_DELIMITER).append(df.format(stdDev)) // standard deviation
            .append(Constants.DEFAULT_DELIMITER).append(columnConfig.getColumnType().toString()) // column type
            .append(Constants.DEFAULT_DELIMITER).append(median) // median value
            .append(Constants.DEFAULT_DELIMITER).append(missingCount) // missing count
            .append(Constants.DEFAULT_DELIMITER).append(count) // count
            .append(Constants.DEFAULT_DELIMITER).append(missingCount * 1.0d / count) // missing ratio
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightNeg)) // bin weighted negative
            .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightPos)) // bin weighted positive
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? "" : columnCountMetrics.getWoe()) // WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getWoe()) // weighted WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getKs()) // weighted KS
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getIv()) // weighted IV
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnCountMetrics == null ? Arrays.toString(new double[binSize + 1])
                    : columnCountMetrics.getBinningWoe().toString()) // bin WOE
            .append(Constants.DEFAULT_DELIMITER)
            .append(columnWeightMetrics == null ? Arrays.toString(new double[binSize + 1])
                    : columnWeightMetrics.getBinningWoe().toString()) // bin weighted WOE
            .append(Constants.DEFAULT_DELIMITER).append(skewness) // skewness
            .append(Constants.DEFAULT_DELIMITER).append(kurtosis) // kurtosis
            .append(Constants.DEFAULT_DELIMITER).append(totalCount) // total count
            .append(Constants.DEFAULT_DELIMITER).append(invalidCount) // invalid count
            .append(Constants.DEFAULT_DELIMITER).append(validNumCount) // valid num count
            .append(Constants.DEFAULT_DELIMITER).append(hyperLogLogPlus.cardinality()) // cardinality
            .append(Constants.DEFAULT_DELIMITER).append(Base64Utils.base64Encode(limitedFrequentItems(fis))) // frequent items
            .append(Constants.DEFAULT_DELIMITER).append(p25th) // the 25th percentile value
            .append(Constants.DEFAULT_DELIMITER).append(p75th); // the 75th percentile value

    outputValue.set(sb.toString());
    context.write(NullWritable.get(), outputValue);
    sb.delete(0, sb.length());
    LOG.debug("Time:{}", (System.currentTimeMillis() - start));
}
From source file: bme.iclef.weka.featureselection.InfoGain.java

public AttributeInfoGain[] topAttributes(final int n) {
    Queue<AttributeInfoGain> all = new PriorityQueue<AttributeInfoGain>(m_InfoGains.length,
            new Comparator<AttributeInfoGain>() {
                @Override
                public int compare(AttributeInfoGain o1, AttributeInfoGain o2) {
                    return Double.compare(o2.infoGain, o1.infoGain); // descending
                }
            });

    for (int i = 0; i < m_InfoGains.length; i++)
        all.add(new AttributeInfoGain(i, m_InfoGains[i]));

    AttributeInfoGain[] best = new AttributeInfoGain[n];
    for (int i = 0; i < best.length; i++) {
        best[i] = all.remove();
    }
    return best;
}
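Swapping the arguments, Double.compare(o2.infoGain, o1.infoGain), is the standard way to get a descending order; on Java 8+ the same comparator can also be written declaratively. A small equivalent sketch with a hypothetical value class (not the weka types used above):

import java.util.Comparator;
import java.util.PriorityQueue;

public class DescendingQueueDemo {
    static final class Scored {
        final int index;
        final double infoGain;
        Scored(int index, double infoGain) { this.index = index; this.infoGain = infoGain; }
    }

    public static void main(String[] args) {
        // Highest infoGain first; equivalent to (o1, o2) -> Double.compare(o2.infoGain, o1.infoGain)
        PriorityQueue<Scored> queue =
                new PriorityQueue<>(Comparator.comparingDouble((Scored s) -> s.infoGain).reversed());
        queue.add(new Scored(0, 0.12));
        queue.add(new Scored(1, 0.80));
        queue.add(new Scored(2, 0.45));
        System.out.println(queue.remove().index); // 1 — the attribute with the largest gain comes out first
    }
}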
From source file: org.opensha.commons.util.XMLUtils.java

/**
 * Returns a list of sub elements sorted by the numerical value of the given attribute
 *
 * @param parentEl
 * @param subElName name of sub elements, or null to consider any subelements
 * @param sortAttributeName
 * @return
 */
public static List<Element> getSortedChildElements(Element parentEl, String subElName,
        final String sortAttributeName) {
    Iterator<Element> it;
    if (subElName != null && !subElName.isEmpty())
        it = parentEl.elementIterator(subElName);
    else
        it = parentEl.elementIterator();
    List<Element> elems = Lists.newArrayList(it);
    // now sort
    Collections.sort(elems, new Comparator<Element>() {
        @Override
        public int compare(Element e1, Element e2) {
            double d1 = Double.parseDouble(e1.attributeValue(sortAttributeName));
            double d2 = Double.parseDouble(e2.attributeValue(sortAttributeName));
            return Double.compare(d1, d2);
        }
    });
    return elems;
}