Usage examples for java.lang.Double.compare
public static int compare(double d1, double d2)
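The contract of Double.compare: the result is negative if d1 is numerically less than d2, zero if they are equal, and positive if d1 is greater, with the same ordering as Double.compareTo. Two cases deliberately differ from the == and < operators: Double.NaN is treated as equal to itself and greater than every other value, and 0.0d is treated as greater than -0.0d. A minimal self-contained sketch (the values are chosen only for illustration):

    public class DoubleCompareDemo {
        public static void main(String[] args) {
            System.out.println(Double.compare(1.5, 2.5));   // negative: 1.5 < 2.5
            System.out.println(Double.compare(2.5, 2.5));   // 0: numerically equal
            System.out.println(Double.compare(2.5, 1.5));   // positive: 2.5 > 1.5

            // NaN: == is always false, but Double.compare defines a stable total order.
            System.out.println(Double.NaN == Double.NaN);                         // false
            System.out.println(Double.compare(Double.NaN, Double.NaN));           // 0
            System.out.println(Double.compare(Double.NaN, Double.MAX_VALUE) > 0); // true: NaN sorts last

            // Signed zero: numerically equal, but ordered by Double.compare.
            System.out.println(0.0d == -0.0d);               // true
            System.out.println(Double.compare(-0.0d, 0.0d)); // negative: -0.0 sorts before 0.0
        }
    }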
From source file:io.druid.query.aggregation.hyperloglog.HyperLogLogCollectorTest.java
@Test
public void testCompare1() throws Exception {
    HyperLogLogCollector collector1 = HyperLogLogCollector.makeLatestCollector();
    HyperLogLogCollector collector2 = HyperLogLogCollector.makeLatestCollector();
    collector1.add(fn.hashLong(0).asBytes());
    HyperUniquesAggregatorFactory factory = new HyperUniquesAggregatorFactory("foo", "bar");
    Comparator comparator = factory.getComparator();
    for (int i = 1; i < 100; i = i + 2) {
        collector1.add(fn.hashLong(i).asBytes());
        collector2.add(fn.hashLong(i + 1).asBytes());
        Assert.assertEquals(1, comparator.compare(collector1, collector2));
        Assert.assertEquals(1,
                Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()));
    }
}
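Note that Double.compare is only specified to return a negative, zero, or positive int; asserting the exact value 1, as the test does, leans on the fact that the JDK implementation happens to return -1, 0, or 1. A hedged sketch of a sign-only assertion (the expectedSign variable is illustrative, not part of the original test):

    // Portable variant: assert only the sign of the result rather than the exact value.
    // Integer.signum normalizes any positive result to 1 and any negative result to -1.
    int expectedSign = 1; // collector1 is expected to estimate the higher cardinality
    Assert.assertEquals(expectedSign,
            Integer.signum(Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality())));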
From source file:com.aurel.track.report.dashboard.AverageTimeToCloseItem.java
public Double getMaxPlannedValue(SortedMap<Date, EarnedValueTimeSlice> earnedValueTimeSliceMap) {
    Double maxValue = -1.0;
    for (Map.Entry<Date, EarnedValueTimeSlice> entry : earnedValueTimeSliceMap.entrySet()) {
        Double d1 = entry.getValue().getPlannedValue();
        if (Double.compare(d1, maxValue) > 0) {
            maxValue = entry.getValue().getPlannedValue();
        }
    }
    return maxValue;
}
From source file:com.aurel.track.report.dashboard.AverageTimeToCloseItem.java
public Double getMaxEarnedValue(SortedMap<Date, EarnedValueTimeSlice> earnedValueTimeSliceMap) {
    Double maxValue = -1.0;
    for (Map.Entry<Date, EarnedValueTimeSlice> entry : earnedValueTimeSliceMap.entrySet()) {
        if (entry.getValue().getEarnedvalue() != null) {
            Double d1 = entry.getValue().getEarnedvalue();
            if (Double.compare(d1, maxValue) > 0) {
                maxValue = entry.getValue().getEarnedvalue();
            }
        }
    }
    return maxValue;
}
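Both of these methods pass boxed Double values to Double.compare(double, double), so the arguments are auto-unboxed at the call site; the second method guards against a null earned value, while the first would throw a NullPointerException if getPlannedValue() ever returned null. A minimal null-safe, stream-based sketch of the same maximum search (it reuses the EarnedValueTimeSlice type and getter from the example, needs java.util.Objects, and is an illustration rather than the project's actual code):

    // Null-safe maximum over the planned values, using Double.compare as the comparator.
    // Falls back to -1.0 when the map contains no non-null planned values.
    public Double getMaxPlannedValueSafe(SortedMap<Date, EarnedValueTimeSlice> earnedValueTimeSliceMap) {
        return earnedValueTimeSliceMap.values().stream()
                .map(EarnedValueTimeSlice::getPlannedValue)
                .filter(Objects::nonNull)
                .max(Double::compare)   // method reference to Double.compare(double, double)
                .orElse(-1.0);
    }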
From source file:io.druid.query.aggregation.hyperloglog.HyperLogLogCollectorTest.java
@Test
public void testCompare2() throws Exception {
    Random rand = new Random(0);
    HyperUniquesAggregatorFactory factory = new HyperUniquesAggregatorFactory("foo", "bar");
    Comparator comparator = factory.getComparator();
    for (int i = 1; i < 1000; ++i) {
        HyperLogLogCollector collector1 = HyperLogLogCollector.makeLatestCollector();
        int j = rand.nextInt(50);
        for (int l = 0; l < j; ++l) {
            collector1.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        HyperLogLogCollector collector2 = HyperLogLogCollector.makeLatestCollector();
        int k = j + 1 + rand.nextInt(5);
        for (int l = 0; l < k; ++l) {
            collector2.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        Assert.assertEquals(Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()),
                comparator.compare(collector1, collector2));
    }
    for (int i = 1; i < 100; ++i) {
        HyperLogLogCollector collector1 = HyperLogLogCollector.makeLatestCollector();
        int j = rand.nextInt(500);
        for (int l = 0; l < j; ++l) {
            collector1.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        HyperLogLogCollector collector2 = HyperLogLogCollector.makeLatestCollector();
        int k = j + 2 + rand.nextInt(5);
        for (int l = 0; l < k; ++l) {
            collector2.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        Assert.assertEquals(Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()),
                comparator.compare(collector1, collector2));
    }
    for (int i = 1; i < 10; ++i) {
        HyperLogLogCollector collector1 = HyperLogLogCollector.makeLatestCollector();
        int j = rand.nextInt(100000);
        for (int l = 0; l < j; ++l) {
            collector1.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        HyperLogLogCollector collector2 = HyperLogLogCollector.makeLatestCollector();
        int k = j + 20000 + rand.nextInt(100000);
        for (int l = 0; l < k; ++l) {
            collector2.add(fn.hashLong(rand.nextLong()).asBytes());
        }
        Assert.assertEquals(Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()),
                comparator.compare(collector1, collector2));
    }
}
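The test asserts that the factory's comparator orders collectors exactly as Double.compare orders their cardinality estimates. A comparator with that contract can be built directly from the extracted double key; a minimal sketch, reusing the HyperLogLogCollector type from the example purely for illustration:

    // Comparator.comparingDouble builds a comparator from a double-valued key extractor;
    // in the reference implementation it delegates to Double.compare, so NaN and -0.0
    // are ordered consistently instead of hitting the pitfalls of '<' and '=='.
    Comparator<HyperLogLogCollector> byEstimate =
            Comparator.comparingDouble(HyperLogLogCollector::estimateCardinality);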
From source file:nlmt.topicmodels.HierarchicalLDAModel.java
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if ((o == null) || (getClass() != o.getClass())) {
        return false;
    }
    HierarchicalLDAModel that = (HierarchicalLDAModel) o;
    if (maxDepth != that.maxDepth) {
        return false;
    }
    if (Double.compare(that.gamma, gamma) != 0) {
        return false;
    }
    if (Double.compare(that.m, m) != 0) {
        return false;
    }
    if (Double.compare(that.pi, pi) != 0) {
        return false;
    }
    if (!Arrays.equals(eta, that.eta)) {
        return false;
    }
    // Probably incorrect - comparing Object[] arrays with Arrays.equals
    if (!Arrays.equals(documents, that.documents)) {
        return false;
    }
    if (!vocabulary.equals(that.vocabulary)) {
        return false;
    }
    if (!nodeMapper.equals(that.nodeMapper)) {
        return false;
    }
    // Probably incorrect - comparing Object[] arrays with Arrays.equals
    return Arrays.equals(documentPaths, that.documentPaths)
            && !(rootNode != null ? !rootNode.equals(that.rootNode) : that.rootNode != null);
}
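Using Double.compare(x, y) != 0 in equals, rather than x != y, makes a NaN field compare equal to itself and distinguishes -0.0 from 0.0, which keeps equals consistent with a hashCode built on Double.doubleToLongBits. A minimal sketch of a matching hashCode for the fields shown above (maxDepth is assumed to be an int, as the != comparison suggests; the object-typed fields are omitted for brevity, and the 31-based combining is the usual convention rather than the project's actual code):

    @Override
    public int hashCode() {
        // Double.hashCode(value) hashes Double.doubleToLongBits(value), so any two
        // values that Double.compare treats as equal produce the same hash.
        int result = maxDepth;
        result = 31 * result + Double.hashCode(gamma);
        result = 31 * result + Double.hashCode(m);
        result = 31 * result + Double.hashCode(pi);
        result = 31 * result + Arrays.hashCode(eta);
        return result;
    }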
From source file:com.joptimizer.optimizers.LPPrimalDualMethod.java
/** * Solves a presolved standard form LP problem in the form of * min(c) s.t./*from ww w . j ava 2s .c o m*/ * A.x = b * lb <= x <= ub */ protected int optimizePresolvedStandardLP() throws Exception { log.info("optimizePresolvedStandardLP"); long tStart = System.currentTimeMillis(); LPOptimizationRequest lpRequest = getLPOptimizationRequest(); if (log.isDebugEnabled() && lpRequest.isDumpProblem()) { log.debug("LP problem: " + lpRequest.toString()); } if (this.dim <= -1) { if (getLb().size() != getUb().size()) { log.error("Lower and upper bounds must have the same dimension"); throw new IllegalArgumentException("Lower and upper bounds must have the same dimension"); } this.dim = getLb().size(); double minDeltaBoundsValue = Double.MAX_VALUE; int minDeltaBoundsIndex = -1; for (int i = 0; i < getDim(); i++) { double deltai = getUb().getQuick(i) - getLb().getQuick(i); if (deltai < minDeltaBoundsValue) { minDeltaBoundsValue = deltai; minDeltaBoundsIndex = i; } } log.info("min delta bounds index: " + minDeltaBoundsIndex); log.info("min delta bounds value: " + minDeltaBoundsValue); } //this.boundedLb = new boolean[getDim()]; //this.boundedUb = new boolean[getDim()]; // for(int i=0; i<getDim(); i++){ // if(!isLbUnbounded(getLb().getQuick(i))){ // boundedLb[i] = true; // nOfBoundedLb++; // } // if(!isUbUnbounded(getUb().getQuick(i))){ // boundedUb[i] = true; // nOfBoundedUb++; // } // } this.meq = (this.meq > -1) ? this.meq : ((getA() != null) ? getA().rows() : 0); //this.mieq = (this.mieq>-1)? this.mieq : (nOfBoundedLb+nOfBoundedUb); this.mieq = (this.mieq > -1) ? this.mieq : (2 * getDim()); if (log.isDebugEnabled()) { log.debug("dim : " + getDim()); log.debug("meq : " + getMeq()); log.debug("mieq: " + getMieq()); } LPOptimizationResponse lpResponse = new LPOptimizationResponse(); DoubleMatrix1D X0 = getInitialPoint(); if (X0 == null) { DoubleMatrix1D X0NF = getNotFeasibleInitialPoint(); if (X0NF != null) { double rPriX0NFNorm = Math.sqrt(ALG.norm2(rPri(X0NF))); DoubleMatrix1D fiX0NF = getFi(X0NF); int maxIndex = Utils.getMaxIndex(fiX0NF); double maxValue = fiX0NF.get(maxIndex); if (log.isDebugEnabled()) { log.debug("rPriX0NFNorm : " + rPriX0NFNorm); log.debug("X0NF : " + ArrayUtils.toString(X0NF.toArray())); log.debug("fiX0NF : " + ArrayUtils.toString(fiX0NF.toArray())); } if (maxValue < 0 && rPriX0NFNorm <= getToleranceFeas()) { //the provided not-feasible starting point is already feasible log.debug("the provided initial point is already feasible"); X0 = X0NF; } } if (X0 == null) { BasicPhaseILPPDM bf1 = new BasicPhaseILPPDM(this); X0 = bf1.findFeasibleInitialPoint(); } } //check X0 feasibility DoubleMatrix1D fiX0 = getFi(X0); int maxIndex = Utils.getMaxIndex(fiX0); double maxValue = fiX0.get(maxIndex); double rPriX0Norm = Math.sqrt(ALG.norm2(rPri(X0))); if (maxValue >= 0. || rPriX0Norm > getToleranceFeas()) {//must be fi STRICTLY < 0 log.warn("rPriX0Norm : " + rPriX0Norm); log.warn("ineqX0 : " + ArrayUtils.toString(fiX0.toArray())); log.warn("max ineq index: " + maxIndex); log.warn("max ineq value: " + maxValue); //the point must be INTERNAL, fi are used as denominators throw new Exception("initial point must be strictly feasible"); } DoubleMatrix1D V0 = F1.make(getMeq()); if (getYlb() != null && getYub() != null) { //NB: the Lagrangian multipliers for eq. constraints used in this interior point method (v) //are the opposite of the Lagrangian multipliers for eq. 
constraints used in the presolver (y) //and so Ylb<=y<=Yub becomes -Yub<=v<=-Ylb for (int i = 0; i < getMeq(); i++) { double v0i = 0; if (!isLbUnbounded(getYlb().getQuick(i))) { if (!isUbUnbounded(getYub().getQuick(i))) { v0i = -(getYub().getQuick(i) + getYlb().getQuick(i)) / 2; } else { v0i = -getYlb().getQuick(i); } } else { if (!isUbUnbounded(getYub().getQuick(i))) { v0i = -getYub().getQuick(i); } else { v0i = 0; } } V0.setQuick(i, v0i); } } DoubleMatrix1D L0 = getInitialLagrangian(); if (L0 != null) { for (int j = 0; j < L0.size(); j++) { // must be >0 if (L0.get(j) <= 0) { throw new IllegalArgumentException("initial lagrangian must be strictly > 0"); } } } else { L0 = F1.make(getMieq(), 1.);// must be >0 strictly if (getZlb() != null && getZub() != null) { //Zlb<= L <=Zub, meaning that: //zlb[i] and zub[i] are the bounds on the Lagrangian of the constraint associated with lb[i]<x[i]<ub[i] //note that zlb.size = zub.size = lb.size = ub.size (and = n of variables of the problem (= getDim()) //and that L.size = nOfBoundedLb + nOfBoundedUb (and in general < 2*getDim()) int cntLB = 0; int cntUB = 0; for (int i = 0; i < getDim(); i++) { double zlbi = (isLbUnbounded(getZlb().getQuick(i))) ? 0 : getZlb().getQuick(i);//L must be > 0 double zubi = (isUbUnbounded(getZub().getQuick(i))) ? 1 : getZub().getQuick(i); L0.setQuick(cntLB, (zubi - zlbi) / 2); cntLB++; L0.setQuick(getDim() + cntUB, (zubi - zlbi) / 2); cntUB++; } } else { //inequalities comes from the pairs lower bounds-upper bounds //in the calculation of the H matrix fro the KKT system, each pairs gives terms of the form: //t = tl + tu //tl = -L[i] / fi[i] for the lower bound //tu = L[dim+i] / fi[dim+i] for the upper bound //we want t = 1, and hence //L[i] > -cc * fi[i] //L[dim+i] = (1 + L[i] / fi[i]) * fi[dim+i] // double cc = 10; // int nOfLB = getMieq()/2; // for (int i = 0; i < nOfLB; i++) { // L0.setQuick(i, -cc * fiX0.getQuick(i)); // L0.setQuick(nOfLB + i, (1 - 10) * fiX0.getQuick(nOfLB + i)); // double sum = -L0.getQuick(i)/fiX0.getQuick(i)+L0.getQuick(nOfLB + i)/fiX0.getQuick(nOfLB + i); // log.debug("sum["+i+"]: " + sum); // } } } if (log.isDebugEnabled()) { log.debug("X0: " + ArrayUtils.toString(X0.toArray())); log.debug("V0: " + ArrayUtils.toString(V0.toArray())); log.debug("L0: " + ArrayUtils.toString(L0.toArray())); } if (log.isInfoEnabled()) { log.info("toleranceFeas: " + getToleranceFeas()); log.info("tolerance : " + getTolerance()); } DoubleMatrix1D X = X0; DoubleMatrix1D V = V0; DoubleMatrix1D L = L0; double previousF0X = Double.NaN; double previousRPriXNorm = Double.NaN; double previousRDualXLVNorm = Double.NaN; double previousSurrDG = Double.NaN; double t; int iteration = 0; //List<DoubleMatrix1D> XList = new ArrayList<DoubleMatrix1D>(); //List<double[]> SList = new ArrayList<double[]>(); while (true) { iteration++; // iteration limit condition if (iteration == getMaxIteration() + 1) { lpResponse.setReturnCode(OptimizationResponse.FAILED); log.error("Max iterations limit reached"); throw new Exception("Max iterations limit reached"); } //XList.add(XList.size(), X); double F0X = getF0(X); if (log.isInfoEnabled()) { log.info("iteration: " + iteration); log.info("f0(X)=" + F0X); } if (log.isDebugEnabled()) { log.debug("X=" + ArrayUtils.toString(X.toArray())); log.debug("L=" + ArrayUtils.toString(L.toArray())); log.debug("V=" + ArrayUtils.toString(V.toArray())); } // determine functions evaluations DoubleMatrix1D gradF0X = getGradF0(X); DoubleMatrix1D fiX = getFi(X); log.debug("fiX=" + 
ArrayUtils.toString(fiX.toArray())); //DoubleMatrix2D GradFiX = getGradFi(X); //DoubleMatrix2D GradFiXOLD = getGradFiOLD(X); // determine t double surrDG = getSurrogateDualityGap(fiX, L); t = getMu() * getMieq() / surrDG; log.debug("t: " + t); // determine residuals DoubleMatrix1D rPriX = rPri(X); DoubleMatrix1D rCentXLt = rCent(fiX, L, t); DoubleMatrix1D rDualXLV = rDual(gradF0X, L, V); //DoubleMatrix1D rDualXLVOLD = rDualOLD(GradFiXOLD, gradF0X, L, V); //log.debug("delta: " + ALG.normInfinity(rDualXLVOLD.assign(rDualXLV, Functions.minus))); double rPriXNorm = Math.sqrt(ALG.norm2(rPriX)); double rCentXLtNorm = Math.sqrt(ALG.norm2(rCentXLt)); double rDualXLVNorm = Math.sqrt(ALG.norm2(rDualXLV)); double normRXLVt = Math .sqrt(Math.pow(rPriXNorm, 2) + Math.pow(rCentXLtNorm, 2) + Math.pow(rDualXLVNorm, 2)); //@TODO: set log.debug not log.info log.info("rPri norm: " + rPriXNorm); log.info("rCent norm: " + rCentXLtNorm); log.info("rDual norm: " + rDualXLVNorm); log.info("surrDG : " + surrDG); // custom exit condition if (checkCustomExitConditions(X)) { lpResponse.setReturnCode(OptimizationResponse.SUCCESS); break; } // exit condition if (rPriXNorm <= getToleranceFeas() && rDualXLVNorm <= getToleranceFeas() && surrDG <= getTolerance()) { lpResponse.setReturnCode(OptimizationResponse.SUCCESS); break; } // progress conditions if (isCheckProgressConditions()) { if (!Double.isNaN(previousRPriXNorm) && !Double.isNaN(previousRDualXLVNorm) && !Double.isNaN(previousSurrDG)) { if ((previousRPriXNorm <= rPriXNorm && rPriXNorm >= getToleranceFeas()) || (previousRDualXLVNorm <= rDualXLVNorm && rDualXLVNorm >= getToleranceFeas())) { log.error("No progress achieved, exit iterations loop without desired accuracy"); lpResponse.setReturnCode(OptimizationResponse.FAILED); throw new Exception("No progress achieved, exit iterations loop without desired accuracy"); } } previousRPriXNorm = rPriXNorm; previousRDualXLVNorm = rDualXLVNorm; previousSurrDG = surrDG; } // compute primal-dual search direction // a) prepare 11.55 system DoubleMatrix2D Hpd = GradLSum(L, fiX); //DoubleMatrix2D HpdOLD = GradLSumOLD(GradFiXOLD, L, fiX); //log.debug("delta: " + ALG.normInfinity(HpdOLD.assign(Hpd, Functions.minus))); DoubleMatrix1D gradSum = gradSum(t, fiX); DoubleMatrix1D g = null; //if(getAT()==null){ if (getA() == null) { g = ColtUtils.add(gradF0X, gradSum); } else { //g = ColtUtils.add(ColtUtils.add(gradF0X, gradSum), ALG.mult(getAT(), V)); g = ColtUtils.add(ColtUtils.add(gradF0X, gradSum), ColtUtils.zMultTranspose(getA(), V, F1.make(getDim()), 0)); } // b) solving 11.55 system if (this.kktSolver == null) { this.kktSolver = new UpperDiagonalHKKTSolver(getDim(), lpRequest.isRescalingDisabled()); //this.kktSolver = new DiagonalHKKTSolver(getDim(), lpRequest.isRescalingDisabled()); } if (isCheckKKTSolutionAccuracy()) { kktSolver.setCheckKKTSolutionAccuracy(true); kktSolver.setToleranceKKT(getToleranceKKT()); } kktSolver.setHMatrix(Hpd); kktSolver.setGVector(g); if (getA() != null) { kktSolver.setAMatrix(getA()); //kktSolver.setATMatrix(getAT()); kktSolver.setHVector(rPriX); // if(rPriXNorm > getToleranceFeas()){ // kktSolver.setHVector(rPriX); // } } DoubleMatrix1D[] sol = kktSolver.solve(); DoubleMatrix1D stepX = sol[0]; //double[] signa = new double[stepX.size()]; // for(int p=0; p<stepX.size(); p++){ // signa[p] = Math.signum(stepX.getQuick(p)); // } //SList.add(SList.size(), signa); DoubleMatrix1D stepV = (sol[1] != null) ? 
sol[1] : F1.make(0); if (log.isDebugEnabled()) { log.debug("stepX: " + ArrayUtils.toString(stepX.toArray())); log.debug("stepV: " + ArrayUtils.toString(stepV.toArray())); } // c) solving for L DoubleMatrix1D stepL = F1.make(getMieq()); DoubleMatrix1D gradFiStepX = gradFiStepX(stepX); for (int i = 0; i < getMieq(); i++) { stepL.setQuick(i, (-L.getQuick(i) * gradFiStepX.getQuick(i) + rCentXLt.getQuick(i)) / fiX.getQuick(i)); } if (log.isDebugEnabled()) { log.debug("stepL: " + ArrayUtils.toString(stepL.toArray())); } // line search and update // a) sMax computation double sMax = Double.MAX_VALUE; for (int j = 0; j < getMieq(); j++) { if (stepL.get(j) < 0) { sMax = Math.min(-L.get(j) / stepL.get(j), sMax); } } sMax = Math.min(1, sMax); double s = 0.99 * sMax; // b) backtracking with f DoubleMatrix1D X1 = F1.make(X.size()); DoubleMatrix1D L1 = F1.make(L.size()); DoubleMatrix1D V1 = F1.make(V.size()); DoubleMatrix1D fiX1 = null; DoubleMatrix1D gradF0X1 = null; //DoubleMatrix2D GradFiX1 = null; //DoubleMatrix2D GradFiX1 = null; DoubleMatrix1D rPriX1 = null; DoubleMatrix1D rCentX1L1t = null; DoubleMatrix1D rDualX1L1V1 = null; int cnt = 0; boolean areAllNegative = true; while (cnt < 500) { cnt++; // X1 = X + s*stepX X1 = stepX.copy().assign(Mult.mult(s)).assign(X, Functions.plus); DoubleMatrix1D ineqValueX1 = getFi(X1); areAllNegative = true; for (int j = 0; areAllNegative && j < getMieq(); j++) { areAllNegative = (Double.compare(ineqValueX1.get(j), 0.) < 0); } if (areAllNegative) { break; } s = getBeta() * s; } if (!areAllNegative) { //exited from the feasible region throw new Exception("Optimization failed: impossible to remain within the faesible region"); } log.debug("s: " + s); // c) backtracking with norm double previousNormRX1L1V1t = Double.NaN; cnt = 0; while (cnt < 500) { cnt++; X1 = ColtUtils.add(X, stepX, s); L1 = ColtUtils.add(L, stepL, s); V1 = ColtUtils.add(V, stepV, s); if (isInDomainF0(X1)) { fiX1 = getFi(X1); gradF0X1 = getGradF0(X1); //GradFiX1 = getGradFi(X1); rPriX1 = rPri(X1); rCentX1L1t = rCent(fiX1, L1, t); rDualX1L1V1 = rDual(gradF0X1, L1, V1); double normRX1L1V1t = Math .sqrt(ALG.norm2(rPriX1) + ALG.norm2(rCentX1L1t) + ALG.norm2(rDualX1L1V1)); //log.debug("normRX1L1V1t: "+normRX1L1V1t); if (normRX1L1V1t <= (1 - getAlpha() * s) * normRXLVt) { break; } if (!Double.isNaN(previousNormRX1L1V1t)) { if (previousNormRX1L1V1t <= normRX1L1V1t) { log.warn("No progress achieved in backtracking with norm"); break; } } previousNormRX1L1V1t = normRX1L1V1t; } s = getBeta() * s; //log.debug("s: " + s); } // update X = X1; V = V1; L = L1; } // if(lpRequest.isCheckOptimalDualityConditions()){ // //check duality conditions: // if(!checkDualityConditions(X, L, V)){ // log.error("duality conditions not satisfied"); // lpResponse.setReturnCode(OptimizationResponse.FAILED); // throw new Exception("duality conditions not satisfied"); // } // } if (lpRequest.isCheckOptimalLagrangianBounds()) { //check equality constraints Lagrangian bounds // if(!checkEqConstraintsLagrangianBounds(V)){ // log.error("equality constraints Lagrangian multipliers bounds not satisfied"); // lpResponse.setReturnCode(OptimizationResponse.FAILED); // throw new Exception("equality constraints Lagrangian multipliers bounds not satisfied"); // } //check inequality constraints Lagrangian bounds // if(!checkIneqConstraintsLagrangianBounds(X, L)){ // log.error("inequality constraints Lagrangian multipliers bounds not satisfied"); // lpResponse.setReturnCode(OptimizationResponse.FAILED); // throw new Exception("inequality 
constraints Lagrangian multipliers bounds not satisfied"); // } } long tStop = System.currentTimeMillis(); log.debug("time: " + (tStop - tStart)); log.debug("sol : " + ArrayUtils.toString(X.toArray())); log.debug("ret code: " + lpResponse.getReturnCode()); // log.debug("XList : " + ArrayUtils.toString(XList)); // for(int s=0; s<SList.size(); s++){ // log.debug("SList : " + ArrayUtils.toString(SList.get(s))); // } lpResponse.setSolution(X.toArray()); setLPOptimizationResponse(lpResponse); return lpResponse.getReturnCode(); }
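In the backtracking line search above, each inequality value is tested for strict negativity with Double.compare(ineqValueX1.get(j), 0.) < 0. For ordinary finite values this is equivalent to value < 0., but the two differ on signed zero (Double.compare orders -0.0 strictly before 0.0) and Double.compare gives a defined total order when NaN appears. A small illustrative sketch, unrelated to the solver itself:

    double nan = Double.NaN;
    System.out.println(nan < 0.0);                    // false: every relational test with NaN is false
    System.out.println(Double.compare(nan, 0.0) < 0); // false too, because NaN sorts above 0.0
    System.out.println(Double.compare(nan, 0.0));     // positive: NaN is ordered after every other value

    double negZero = -0.0;
    System.out.println(negZero < 0.0);                    // false: -0.0 == 0.0 numerically
    System.out.println(Double.compare(negZero, 0.0) < 0); // true: Double.compare orders -0.0 before 0.0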
From source file:org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.java
/**
 * Compare two objects with their respective ObjectInspectors.
 */
public static int compare(Object o1, ObjectInspector oi1, Object o2, ObjectInspector oi2,
        MapEqualComparer mapEqualComparer) {
    if (oi1.getCategory() != oi2.getCategory()) {
        return oi1.getCategory().compareTo(oi2.getCategory());
    }
    if (o1 == null) {
        return o2 == null ? 0 : -1;
    } else if (o2 == null) {
        return 1;
    }
    switch (oi1.getCategory()) {
    case PRIMITIVE: {
        PrimitiveObjectInspector poi1 = ((PrimitiveObjectInspector) oi1);
        PrimitiveObjectInspector poi2 = ((PrimitiveObjectInspector) oi2);
        if (poi1.getPrimitiveCategory() != poi2.getPrimitiveCategory()) {
            return poi1.getPrimitiveCategory().compareTo(poi2.getPrimitiveCategory());
        }
        switch (poi1.getPrimitiveCategory()) {
        case VOID:
            return 0;
        case BOOLEAN: {
            int v1 = ((BooleanObjectInspector) poi1).get(o1) ? 1 : 0;
            int v2 = ((BooleanObjectInspector) poi2).get(o2) ? 1 : 0;
            return v1 - v2;
        }
        case BYTE: {
            int v1 = ((ByteObjectInspector) poi1).get(o1);
            int v2 = ((ByteObjectInspector) poi2).get(o2);
            return v1 - v2;
        }
        case SHORT: {
            int v1 = ((ShortObjectInspector) poi1).get(o1);
            int v2 = ((ShortObjectInspector) poi2).get(o2);
            return v1 - v2;
        }
        case INT: {
            int v1 = ((IntObjectInspector) poi1).get(o1);
            int v2 = ((IntObjectInspector) poi2).get(o2);
            return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case LONG: {
            long v1 = ((LongObjectInspector) poi1).get(o1);
            long v2 = ((LongObjectInspector) poi2).get(o2);
            return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case FLOAT: {
            float v1 = ((FloatObjectInspector) poi1).get(o1);
            float v2 = ((FloatObjectInspector) poi2).get(o2);
            // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
            if (v1 == 0.0f && v2 == 0.0f) {
                return 0;
            } else {
                // Float.compare() treats -0.0 and 0.0 as different
                return Float.compare(v1, v2);
            }
        }
        case DOUBLE: {
            double v1 = ((DoubleObjectInspector) poi1).get(o1);
            double v2 = ((DoubleObjectInspector) poi2).get(o2);
            // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
            if (v1 == 0.0d && v2 == 0.0d) {
                return 0;
            } else {
                // Double.compare() treats -0.0 and 0.0 as different
                return Double.compare(v1, v2);
            }
        }
        case STRING: {
            if (poi1.preferWritable() || poi2.preferWritable()) {
                Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
                Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
                return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1 : t1.compareTo(t2));
            } else {
                String s1 = (String) poi1.getPrimitiveJavaObject(o1);
                String s2 = (String) poi2.getPrimitiveJavaObject(o2);
                return s1 == null ? (s2 == null ? 0 : -1) : (s2 == null ? 1 : s1.compareTo(s2));
            }
        }
        case CHAR: {
            HiveCharWritable t1 = ((HiveCharObjectInspector) poi1).getPrimitiveWritableObject(o1);
            HiveCharWritable t2 = ((HiveCharObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return t1.compareTo(t2);
        }
        case VARCHAR: {
            HiveVarcharWritable t1 = ((HiveVarcharObjectInspector) poi1).getPrimitiveWritableObject(o1);
            HiveVarcharWritable t2 = ((HiveVarcharObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return t1.compareTo(t2);
        }
        case BINARY: {
            BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
            BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return bw1.compareTo(bw2);
        }
        case DATE: {
            DateWritable d1 = ((DateObjectInspector) poi1).getPrimitiveWritableObject(o1);
            DateWritable d2 = ((DateObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return d1.compareTo(d2);
        }
        case TIMESTAMP: {
            TimestampWritable t1 = ((TimestampObjectInspector) poi1).getPrimitiveWritableObject(o1);
            TimestampWritable t2 = ((TimestampObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return t1.compareTo(t2);
        }
        case INTERVAL_YEAR_MONTH: {
            HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1)
                    .getPrimitiveWritableObject(o1);
            HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2)
                    .getPrimitiveWritableObject(o2);
            return i1.compareTo(i2);
        }
        case INTERVAL_DAY_TIME: {
            HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1)
                    .getPrimitiveWritableObject(o1);
            HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2)
                    .getPrimitiveWritableObject(o2);
            return i1.compareTo(i2);
        }
        case DECIMAL: {
            HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1).getPrimitiveWritableObject(o1);
            HiveDecimalWritable t2 = ((HiveDecimalObjectInspector) poi2).getPrimitiveWritableObject(o2);
            return t1.compareTo(t2);
        }
        default: {
            throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory());
        }
        }
    }
    case STRUCT: {
        StructObjectInspector soi1 = (StructObjectInspector) oi1;
        StructObjectInspector soi2 = (StructObjectInspector) oi2;
        List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
        List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
        int minimum = Math.min(fields1.size(), fields2.size());
        for (int i = 0; i < minimum; i++) {
            int r = compare(soi1.getStructFieldData(o1, fields1.get(i)),
                    fields1.get(i).getFieldObjectInspector(),
                    soi2.getStructFieldData(o2, fields2.get(i)),
                    fields2.get(i).getFieldObjectInspector(), mapEqualComparer);
            if (r != 0) {
                return r;
            }
        }
        return fields1.size() - fields2.size();
    }
    case LIST: {
        ListObjectInspector loi1 = (ListObjectInspector) oi1;
        ListObjectInspector loi2 = (ListObjectInspector) oi2;
        int minimum = Math.min(loi1.getListLength(o1), loi2.getListLength(o2));
        for (int i = 0; i < minimum; i++) {
            int r = compare(loi1.getListElement(o1, i), loi1.getListElementObjectInspector(),
                    loi2.getListElement(o2, i), loi2.getListElementObjectInspector(), mapEqualComparer);
            if (r != 0) {
                return r;
            }
        }
        return loi1.getListLength(o1) - loi2.getListLength(o2);
    }
    case MAP: {
        if (mapEqualComparer == null) {
            throw new RuntimeException("Compare on map type not supported!");
        } else {
            return mapEqualComparer.compare(o1, (MapObjectInspector) oi1, o2, (MapObjectInspector) oi2);
        }
    }
    case UNION: {
        UnionObjectInspector uoi1 = (UnionObjectInspector) oi1;
        UnionObjectInspector uoi2 = (UnionObjectInspector) oi2;
        byte tag1 = uoi1.getTag(o1);
        byte tag2 = uoi2.getTag(o2);
        if (tag1 != tag2) {
            return tag1 - tag2;
        }
        return compare(uoi1.getField(o1), uoi1.getObjectInspectors().get(tag1),
                uoi2.getField(o2), uoi2.getObjectInspectors().get(tag2), mapEqualComparer);
    }
    default:
        throw new RuntimeException("Compare on unknown type: " + oi1.getCategory());
    }
}
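The DOUBLE case special-cases v1 == 0.0d && v2 == 0.0d before calling Double.compare because Double.compare imposes a total order in which -0.0 sorts strictly below 0.0, whereas SQL semantics (and the IEEE 754 == operator) treat the two zeros as equal. A minimal sketch of the discrepancy being worked around:

    // == follows IEEE 754: signed zeros are numerically equal.
    System.out.println(0.0d == -0.0d);               // true
    // Double.compare follows the total order of Double.compareTo: -0.0 < 0.0.
    System.out.println(Double.compare(-0.0d, 0.0d)); // negative (-1 in the reference implementation)
    // Hence the explicit "both zero" check above, which forces the two zeros to compare as equal.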
From source file:uk.ac.diamond.scisoft.analysis.rcp.views.HistogramView.java
private void autoRangeHistogram() {
    histogramPlotter.clearZoomHistory();
    double[] m;
    if (autoContrast && data.getRank() == 2) {
        try {
            final int[] shape = data.getShape();
            if (shape[0] > 512 && shape[1] > 512) {
                int yReduce = (int) Math.ceil(shape[0] / 512.0);
                int xReduce = (int) Math.ceil(shape[1] / 512.0);
                Downsample sample = new Downsample(DownsampleMode.MAXIMUM, xReduce, yReduce);
                m = Stats.quantile((Dataset) sample.value(data).get(0), getPreferenceAutoContrastLo(),
                        getPreferenceAutoContrastHi());
            } else
                m = Stats.quantile(data, getPreferenceAutoContrastLo(), getPreferenceAutoContrastHi());
        } catch (Exception e) {
            m = new double[] { data.min().doubleValue(), data.max().doubleValue() };
        }
    } else {
        m = new double[] { data.min().doubleValue(), data.max().doubleValue() };
    }
    if (Double.compare(m[1], m[0]) <= 0)
        m[1] = m[0] + PreferenceConstants.MINIMUM_CONTRAST_DELTA / 100.0;
    currentMaxMin.max = m[1];
    currentMaxMin.min = m[0];
}
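The Double.compare(m[1], m[0]) <= 0 guard catches both an inverted and a degenerate (equal) range before the histogram is configured, and widens the maximum by a fixed preference-driven delta. Where no such display constant is available, Math.nextUp gives the smallest possible widening; this is an alternative sketch, not the SciSoft code:

    // Ensure max is strictly greater than min before configuring a plot range.
    // Math.nextUp returns the smallest double greater than its argument, so the
    // widened range is minimal; a fixed delta (as above) is usually friendlier for display.
    double min = m[0];
    double max = m[1];
    if (Double.compare(max, min) <= 0) {
        max = Math.nextUp(min);
    }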
From source file:gov.opm.scrd.batchprocessing.jobs.BatchProcessingJob.java
/**
 * Import a single lock box file.
 *
 * @param procMessage The process message. Used to build the mail message.
 * @param inputFile The lock box file to import
 * @return The import status
 * @throws BatchProcessingException If major error occurred.
 */
private ImportStatus importFile(StringBuilder procMessage, File inputFile) throws BatchProcessingException {
    // Create the import status
    ImportStatus importStatus = new ImportStatus();
    importStatus.setAuditBatchId(todayAuditBatch.getId());
    // Load file text content
    logger.info("Start loading file content from: " + inputFile);
    List<MainframeImport> mainFrameImports = loadFileContent(inputFile, importStatus);
    logger.info("End loading file content from: " + inputFile);
    try {
        if (!mainFrameImports.isEmpty()) {
            // Process the MainframeImport data
            logger.info("Start processing MainframeImport data from: " + inputFile);
            processMainframeData(mainFrameImports, importStatus);
            logger.info("End processing MainframeImport data from: " + inputFile);
            // Collate new mainframe payments
            if (importStatus.getNumberDiscreteRecords() > 0) {
                logger.info("Start collating new mainframe payments from: " + inputFile);
                collateNewMainframePayments(importStatus);
                logger.info("End collating new mainframe payments from: " + inputFile);
            }
        }
    } finally {
        if (Double.compare(importStatus.getTransactionsTotal().doubleValue(),
                importStatus.getFileSummaryTotal().doubleValue()) != 0) {
            importStatus.setSuccessful(false);
        }
        // Log import status
        logger.info(logImportStatus(importStatus));
        sendFileImportEmail(procMessage, importStatus);
    }
    return importStatus;
}
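Here Double.compare(...) != 0 requires the two totals to be exactly equal after conversion with doubleValue(), so any rounding introduced by that conversion fails the check. If the totals are exact decimal amounts held in BigDecimal (the snippet does not show the getter types, so this is an assumption), comparing them directly avoids the lossy conversion; a hedged sketch needing java.math.BigDecimal:

    // Compare monetary totals without converting to double.
    // BigDecimal.compareTo ignores scale, so 10.0 and 10.00 are treated as equal.
    BigDecimal transactionsTotal = importStatus.getTransactionsTotal(); // assumed BigDecimal
    BigDecimal fileSummaryTotal = importStatus.getFileSummaryTotal();   // assumed BigDecimal
    if (transactionsTotal.compareTo(fileSummaryTotal) != 0) {
        importStatus.setSuccessful(false);
    }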
From source file:org.pentaho.plugin.jfreereport.reportcharts.AbstractChartExpression.java
/**
 * Reduces standard tick unit array to meet formatting precision and avoid duplicated values (PRD-5821)
 */
protected void standardTickUnitsApplyFormat(NumberAxis numberAxis, NumberFormat format) {
    final TickUnits standardTickUnits = (TickUnits) numberAxis.getStandardTickUnits();
    TickUnits cutTickUnits = new TickUnits();
    double formatterMinSize = 1 / Math.pow(10, format.getMaximumFractionDigits());
    for (int i = 0; i < standardTickUnits.size(); i++) {
        if (Double.compare(standardTickUnits.get(i).getSize(), formatterMinSize) >= 0) {
            cutTickUnits.add(new NumberTickUnit(standardTickUnits.get(i).getSize()));
        }
    }
    numberAxis.setStandardTickUnits(cutTickUnits);
}
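formatterMinSize is the smallest increment the axis label format can still display (two fraction digits give 1 / 10^2 = 0.01), and Double.compare(size, formatterMinSize) >= 0 keeps only tick units at or above that threshold, so adjacent labels never render as duplicates. A small self-contained sketch of the threshold computation using only JDK classes (the JFreeChart types from the example are not needed):

    import java.text.NumberFormat;

    public class TickThresholdDemo {
        public static void main(String[] args) {
            NumberFormat format = NumberFormat.getNumberInstance();
            format.setMaximumFractionDigits(2);

            // Smallest spacing the format can distinguish: 1 / 10^2 = 0.01.
            double formatterMinSize = 1 / Math.pow(10, format.getMaximumFractionDigits());

            double[] candidateSizes = { 0.001, 0.005, 0.01, 0.05, 0.1 };
            for (double size : candidateSizes) {
                // Keep the unit only if it is at least as coarse as what the format can show.
                boolean keep = Double.compare(size, formatterMinSize) >= 0;
                System.out.println(size + " -> " + (keep ? "keep" : "drop"));
            }
        }
    }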