List of usage examples for java.lang.Double#compare(double, double)
public static int compare(double d1, double d2)
From source file:org.apache.pulsar.broker.loadbalance.impl.OverloadShedder.java
/** * Attempt to shed some bundles off every broker which is overloaded. * * @param loadData/* w w w. j ava 2 s . c o m*/ * The load data to used to make the unloading decision. * @param conf * The service configuration. * @return A map from bundles to unload to the brokers on which they are loaded. */ public Multimap<String, String> findBundlesForUnloading(final LoadData loadData, final ServiceConfiguration conf) { selectedBundlesCache.clear(); final double overloadThreshold = conf.getLoadBalancerBrokerOverloadedThresholdPercentage() / 100.0; final Map<String, Long> recentlyUnloadedBundles = loadData.getRecentlyUnloadedBundles(); // Check every broker and select loadData.getBrokerData().forEach((broker, brokerData) -> { final LocalBrokerData localData = brokerData.getLocalData(); final double currentUsage = localData.getMaxResourceUsage(); if (currentUsage < overloadThreshold) { if (log.isDebugEnabled()) { log.debug("[{}] Broker is not overloaded, ignoring at this point", broker); } return; } // We want to offload enough traffic such that this broker will go below the overload threshold // Also, add a small margin so that this broker won't be very close to the threshold edge. 
double percentOfTrafficToOffload = currentUsage - overloadThreshold + ADDITIONAL_THRESHOLD_PERCENT_MARGIN; double brokerCurrentThroughput = localData.getMsgThroughputIn() + localData.getMsgThroughputOut(); double minimumThroughputToOffload = brokerCurrentThroughput * percentOfTrafficToOffload; log.info( "Attempting to shed load on {}, which has max resource usage above threshold {}% > {}% -- Offloading at least {} MByte/s of traffic", broker, currentUsage, overloadThreshold, minimumThroughputToOffload / 1024 / 1024); MutableDouble trafficMarkedToOffload = new MutableDouble(0); MutableBoolean atLeastOneBundleSelected = new MutableBoolean(false); if (localData.getBundles().size() > 1) { // Sort bundles by throughput, then pick the biggest N which combined make up for at least the minimum throughput to offload loadData.getBundleData().entrySet().stream().map((e) -> { // Map to throughput value // Consider short-term byte rate to address system resource burden String bundle = e.getKey(); BundleData bundleData = e.getValue(); TimeAverageMessageData shortTermData = bundleData.getShortTermData(); double throughput = shortTermData.getMsgThroughputIn() + shortTermData.getMsgThroughputOut(); return Pair.of(bundle, throughput); }).filter(e -> { // Only consider bundles that were not already unloaded recently return !recentlyUnloadedBundles.containsKey(e.getLeft()); }).sorted((e1, e2) -> { // Sort by throughput in reverse order return Double.compare(e2.getRight(), e1.getRight()); }).forEach(e -> { if (trafficMarkedToOffload.doubleValue() < minimumThroughputToOffload || atLeastOneBundleSelected.isFalse()) { selectedBundlesCache.put(broker, e.getLeft()); trafficMarkedToOffload.add(e.getRight()); atLeastOneBundleSelected.setTrue(); } }); } else if (localData.getBundles().size() == 1) { log.warn( "HIGH USAGE WARNING : Sole namespace bundle {} is overloading broker {}. 
" + "No Load Shedding will be done on this broker", localData.getBundles().iterator().next(), broker); } else { log.warn("Broker {} is overloaded despite having no bundles", broker); } }); return selectedBundlesCache; }
From source file:gsn.http.rest.AndroidPushDelivery.java
/**
 * Equality is based on {@code notificationId} and the URI of {@code httpPost}.
 *
 * <p>Fix: the original ternary
 * {@code httpPost != null ? !httpPost.getURI().equals(that.httpPost.getURI()) : ...}
 * dereferenced {@code that.httpPost} without a null check, throwing
 * NullPointerException whenever this object had a non-null {@code httpPost}
 * but the other one did not. equals must return {@code false} in that case.
 */
@Override
public boolean equals(Object o) {
    if (this == o)
        return true;
    if (o == null || getClass() != o.getClass())
        return false;
    AndroidPushDelivery that = (AndroidPushDelivery) o;
    // Double.compare gives a total order (handles NaN/-0.0 consistently).
    if (Double.compare(that.notificationId, notificationId) != 0)
        return false;
    // Null-safe URI comparison; no new imports needed.
    if (httpPost == null)
        return that.httpPost == null;
    return that.httpPost != null && httpPost.getURI().equals(that.httpPost.getURI());
}
From source file:com.yahoo.egads.utilities.SpectralMethods.java
/**
 * De-noises a time-series matrix by truncating the SVD of its Hankel
 * embedding: only the leading singular components selected by {@code method}
 * are kept, then the truncated Hankel matrix is averaged back into a series.
 *
 * @param data input series (n rows, m columns)
 * @param windowSize Hankel embedding window length
 * @param method rule for choosing how many singular values to keep
 * @param methodParameter meaning depends on {@code method}: a variance
 *        fraction, an explicit component count, a gap count, a smoothness
 *        target, or a ratio threshold
 * @return the reconstructed (filtered) series
 */
public static RealMatrix mFilter(RealMatrix data, int windowSize, FilteringMethod method, double methodParameter) {
    int n = data.getRowDimension();
    int m = data.getColumnDimension();
    // Number of windows in the Hankel embedding.
    int k = n - windowSize + 1;
    // ind = index of the last singular component to keep (inclusive).
    int i = 0, ind = 0;
    double[] temp;
    double sum = 0;
    RealMatrix hankelMat = SpectralMethods.createHankelMatrix(data, windowSize);
    SingularValueDecomposition svd = new SingularValueDecomposition(hankelMat);
    double[] singularValues = svd.getSingularValues();

    switch (method) {
    case VARIANCE:
        // Normalize the squared singular values 1..L-1 (component 0 excluded),
        // then scan from the tail until the cumulative mass of the *discarded*
        // components reaches 1 - methodParameter; keep components 0..ind.
        temp = new double[singularValues.length - 1];

        for (i = 1; i < singularValues.length; ++i) {
            sum += (singularValues[i] * singularValues[i]);
        }

        for (i = 0; i < temp.length; ++i) {
            temp[i] = (singularValues[i + 1] * singularValues[i + 1]) / sum;
        }

        sum = 0;
        for (i = temp.length - 1; i >= 0; --i) {
            sum += temp[i];
            if (sum >= 1 - methodParameter) {
                ind = i;
                break;
            }
        }

        break;

    case EXPLICIT:
        // Keep exactly methodParameter components (1-based), clamped to range.
        ind = (int) Math.max(Math.min(methodParameter - 1, singularValues.length - 1), 0);
        break;

    case K_GAP:
        // Sort gap indexes by ascending gap size; among the methodParameter
        // largest gaps, take the highest index, capped at length / 3.
        final double[] eigenGaps = new double[singularValues.length - 1];
        Integer[] index = new Integer[singularValues.length - 1];

        for (i = 0; i < eigenGaps.length; ++i) {
            eigenGaps[i] = singularValues[i] - singularValues[i + 1];
            index[i] = i;
        }

        Arrays.sort(index, new Comparator<Integer>() {
            @Override
            public int compare(Integer o1, Integer o2) {
                return Double.compare(eigenGaps[o1], eigenGaps[o2]);
            }
        });

        int maxIndex = 0;
        for (i = index.length - (int) methodParameter; i < index.length; ++i) {
            if (index[i] > maxIndex) {
                maxIndex = index[i];
            }
        }

        ind = Math.min(maxIndex, singularValues.length / 3);
        break;

    case SMOOTHNESS:
        // Binary-search the smallest prefix of variance components whose
        // zero-padded profile reaches the target smoothness.
        double[] variances = new double[singularValues.length];
        for (i = 1; i < singularValues.length; ++i) {
            variances[i] = (singularValues[i] * singularValues[i]);
        }
        // Component 0 is copied from component 1 to damp the dominant value.
        variances[0] = variances[1];

        double smoothness = SpectralMethods
                .computeSmoothness(Arrays.copyOfRange(variances, 1, variances.length));

        // Ensure the target is strictly above the full-spectrum smoothness so
        // the search below can terminate.
        if (methodParameter - smoothness < 0.01) {
            methodParameter += 0.01;
        }

        double invalidS = smoothness;
        int validIndex = 1, invalidIndex = singularValues.length;

        while (true) {
            if (invalidS >= methodParameter) {
                ind = invalidIndex - 1;
                break;
            } else if (invalidIndex - validIndex <= 1) {
                ind = validIndex - 1;
                break;
            }

            int ii = (validIndex + invalidIndex) / 2;

            // Keep the first ii+1 variances, zero-pad the rest.
            double[] tempVariances = Arrays.copyOf(Arrays.copyOfRange(variances, 0, ii + 1),
                    singularValues.length);
            double s = SpectralMethods.computeSmoothness(tempVariances);

            if (s >= methodParameter) {
                validIndex = ii;
            } else {
                invalidIndex = ii;
                invalidS = s;
            }
        }

        break;

    case EIGEN_RATIO:
        // Binary-search the last component whose ratio to the largest singular
        // value is still >= methodParameter.
        // NOTE(review): this loop assumes singularValues is non-increasing (as
        // returned by the SVD) so the ratio sequence is monotone; it could spin
        // otherwise — confirm against the SVD contract.
        int startIndex = 0, endIndex = singularValues.length - 1;

        if (singularValues[endIndex] / singularValues[0] >= methodParameter) {
            ind = endIndex;
        } else {
            while (true) {
                int midIndex = (startIndex + endIndex) / 2;
                if (singularValues[midIndex] / singularValues[0] >= methodParameter) {
                    if (singularValues[midIndex + 1] / singularValues[0] < methodParameter) {
                        ind = midIndex;
                        break;
                    } else {
                        startIndex = midIndex;
                    }
                } else {
                    endIndex = midIndex;
                }
            }
        }

        break;

    case GAP_RATIO:
        // Keep everything up to the last gap that is large relative to the
        // dominant singular value.
        double[] gaps = new double[singularValues.length - 1];
        for (i = 0; i < gaps.length; ++i) {
            gaps[i] = singularValues[i] - singularValues[i + 1];
        }

        ind = 0;
        for (i = gaps.length - 1; i >= 0; --i) {
            if (gaps[i] / singularValues[0] >= methodParameter) {
                ind = i;
                break;
            }
        }

        break;

    default:
        // Unknown method: keep the full spectrum.
        ind = singularValues.length - 1;
        break;
    }

    // Clamp to a valid component index.
    ind = Math.max(0, Math.min(ind, singularValues.length - 1));

    // Rank-(ind+1) reconstruction: sum of sigma_i * u_i * v_i^T.
    RealMatrix truncatedHankelMatrix = MatrixUtils.createRealMatrix(k, m * windowSize);
    RealMatrix mU = svd.getU();
    RealMatrix mVT = svd.getVT();

    for (i = 0; i <= ind; ++i) {
        truncatedHankelMatrix = truncatedHankelMatrix
                .add(mU.getColumnMatrix(i).multiply(mVT.getRowMatrix(i)).scalarMultiply(singularValues[i]));
    }

    return SpectralMethods.averageHankelMatrix(truncatedHankelMatrix, windowSize);
}
From source file:com.perl5.lang.perl.internals.PerlVersion.java
/**
 * Orders Perl versions by their numeric (double) representation, delegating
 * to {@link Double#compare} for a total order.
 */
@Override
public int compareTo(@NotNull PerlVersion o) {
    final double mine = getDoubleVersion();
    final double theirs = o.getDoubleVersion();
    return Double.compare(mine, theirs);
}
From source file:edu.txstate.dmlab.clusteringwiki.cluster.KMeansClusterer.java
/** * Cluster a set of documentsToCluster provided as an array of indexes within the * term document matrix/*from ww w. j a va 2 s . c o m*/ * @param docs * @return */ public List<ICluster> levelCluster(ICluster parent, int[] docs) { if (docs == null) { //cluster all docs return levelCluster(parent); } documentsToCluster = docs; setNumClusters(); //choose initial cluster seeds String parentId = parent.getId(); List<IClusterDocument> seeds = chooseSeeds(parentId); //create clusters and assign initial centroids/seeds List<ICluster> clusters = new ArrayList<ICluster>(); for (int i = 0; i < numClusters; i++) { KMeansCluster cluster = new KMeansCluster(getNextClusterId(), context, parent); clusters.add(cluster); } //initial cluster assignments int numDocs = docs.length; int bestCluster; int currentCluster; double similarity; double maxSimilarity; // For every document d, find the cluster j whose centroid is // most similar, assign d to cluster j. for (int i = 0; i < numDocs; i++) { bestCluster = 0; maxSimilarity = Double.MIN_VALUE; IClusterDocument d = allDocs.get(docs[i]); for (int j = 0; j < numClusters; j++) { similarity = seeds.get(j).computeSimilarity(d); if (Double.compare(similarity, maxSimilarity) > 0) { bestCluster = j; maxSimilarity = similarity; } } for (ICluster cluster : clusters) cluster.removeDocument(docs[i]); clusters.get(bestCluster).addDocument(docs[i]); } // Repeat until termination conditions are satisfied int iteration = 0; for (;;) { // For every cluster, re-compute the centroid based on the // current member documentsToCluster. for (int j = 0; j < numClusters; j++) { ((KMeansCluster) clusters.get(j)).computeCentroid(); } // For every document d, find the cluster i whose centroid is // most similar, assign d to cluster i. (If a document is // equally similar from all centroids, then just dump it into // cluster 0). 
int numChanged = 0; for (int i = 0; i < numDocs; i++) { bestCluster = 0; currentCluster = -1; maxSimilarity = Double.MIN_VALUE; IClusterDocument d = allDocs.get(docs[i]); for (int j = 0; j < numClusters; j++) { similarity = ((KMeansCluster) clusters.get(j)).getSimilarity(d); if (Double.compare(similarity, maxSimilarity) > 0) { bestCluster = j; maxSimilarity = similarity; } if (clusters.get(j).contains(docs[i])) currentCluster = j; } //if another cluster is better if (bestCluster != currentCluster) { clusters.get(currentCluster).removeDocument(docs[i]); clusters.get(bestCluster).addDocument(docs[i]); numChanged++; } } iteration++; if (iteration > maxIterations || numChanged == 0) break; } //set cluster labels for (int j = 0; j < numClusters; j++) { ICluster c = clusters.get(j); c.deduceLabel(); if (c.getLabel().equals("") && c.getDocuments() == null) { //if empty cluster, trim clusters.remove(j); j--; numClusters--; } } return clusters; }
From source file:org.noise_planet.noisecapture.MeasurementService.java
/**
 * Stores the gain and, when audio processing is active and the gain is
 * non-zero, applies it as a linear factor.
 *
 * @param dBGain gain in dB
 */
public void setdBGain(double dBGain) {
    this.dBGain = dBGain;
    // Double.compare (rather than !=) keeps the original -0.0/NaN semantics.
    final boolean nonZeroGain = Double.compare(0, dBGain) != 0;
    if (audioProcess != null && nonZeroGain) {
        // Convert dB to a linear amplitude factor: 10^(dB / 20).
        final float linearGain = (float) Math.pow(10, dBGain / 20);
        audioProcess.setGain(linearGain);
    }
}
From source file:com.opengamma.analytics.financial.equity.option.EquityOption.java
/**
 * Field-by-field equality with another {@link EquityOption}. Double fields
 * are compared via {@link Double#compare} for a total order; the evaluation
 * order and short-circuiting match the original if-chain exactly.
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof EquityOption)) {
        return false;
    }
    final EquityOption other = (EquityOption) obj;
    return Double.compare(_strike, other._strike) == 0
            && Double.compare(_timeToExpiry, other._timeToExpiry) == 0
            && _isCall == other._isCall
            && _exerciseType == other._exerciseType
            && _settlementType == other._settlementType
            && Double.compare(_timeToSettlement, other._timeToSettlement) == 0
            && Double.compare(_unitAmount, other._unitAmount) == 0
            && ObjectUtils.equals(_currency, other._currency);
}
From source file:com.mesosphere.dcos.cassandra.common.config.ClusterTaskConfig.java
/**
 * Equality over the three resource fields: cpus (compared via
 * {@link Double#compare}), memoryMb, and diskMb.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    ClusterTaskConfig other = (ClusterTaskConfig) o;
    return Double.compare(other.cpus, cpus) == 0
            && memoryMb == other.memoryMb
            && diskMb == other.diskMb;
}
From source file:com.opengamma.analytics.financial.equity.option.EquityIndexOption.java
/**
 * Field-by-field equality with another {@link EquityIndexOption}. Double
 * fields are compared via {@link Double#compare}; the evaluation order and
 * short-circuiting match the original if-chain exactly.
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof EquityIndexOption)) {
        return false;
    }
    final EquityIndexOption other = (EquityIndexOption) obj;
    return Double.compare(_strike, other._strike) == 0
            && Double.compare(_timeToExpiry, other._timeToExpiry) == 0
            && _isCall == other._isCall
            && _exerciseType == other._exerciseType
            && _settlementType == other._settlementType
            && Double.compare(_timeToSettlement, other._timeToSettlement) == 0
            && Double.compare(_unitAmount, other._unitAmount) == 0
            && ObjectUtils.equals(_currency, other._currency);
}
From source file:r.lang.DoubleVector.java
/**
 * Compares the vector elements at the two given positions using
 * {@link Double#compare}, which yields a total order over doubles.
 */
@Override
public int compare(int index1, int index2) {
    final double first = values[index1];
    final double second = values[index2];
    return Double.compare(first, second);
}