List of usage examples for java.lang.Double.compare
public static int compare(double d1, double d2)
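Before the examples, a minimal self-contained sketch of the contract (class name is illustrative): the sign of the result follows the numerical order of the arguments, and the comparison defines a total order that places -0.0 before 0.0 and NaN after every other value, which is what makes it safe for sorting and for the equals() implementations shown below.

import java.util.Arrays;

public class DoubleCompareDemo {
    public static void main(String[] args) {
        // Sign of the result follows the numerical order of the arguments.
        System.out.println(Double.compare(1.5, 2.5)); // negative
        System.out.println(Double.compare(2.5, 2.5)); // 0
        System.out.println(Double.compare(2.5, 1.5)); // positive

        // Unlike '<' and '==', Double.compare defines a total order:
        // -0.0 is ordered before 0.0, and NaN is ordered above every other value.
        Double[] values = { Double.NaN, 1.0, Double.NEGATIVE_INFINITY, -0.0, 0.0 };
        Arrays.sort(values, Double::compare);
        System.out.println(Arrays.toString(values));
        // [-Infinity, -0.0, 0.0, 1.0, NaN]
    }
}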
From source file:io.pravega.client.stream.impl.ReaderGroupStateManager.java
/**
 * Given a set of segments returns one to release. The one returned is arbitrary.
 */
private Segment findSegmentToRelease() {
    Set<Segment> segments = sync.getState().getSegments(readerId);
    return segments.stream()
            .max((s1, s2) -> Double.compare(hashHelper.hashToRange(s1.getScopedName()),
                    hashHelper.hashToRange(s2.getScopedName())))
            .orElse(null);
}
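The explicit lambda above can also be expressed with Comparator.comparingDouble, which compares the extracted keys via Double.compare. A minimal generic sketch (class and method names are illustrative, not part of the Pravega source):

import java.util.Comparator;
import java.util.Optional;
import java.util.Set;
import java.util.function.ToDoubleFunction;

class MaxByDoubleKey {
    // Equivalent selection: Comparator.comparingDouble compares the extracted
    // double keys with Double.compare, matching the explicit lambda above.
    static <T> Optional<T> maxByKey(Set<T> items, ToDoubleFunction<T> key) {
        return items.stream().max(Comparator.comparingDouble(key));
    }
}

Applied to the snippet above, this would read segments.stream().max(Comparator.comparingDouble(s -> hashHelper.hashToRange(s.getScopedName()))).orElse(null).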
From source file:ca.mudar.parkcatcher.ui.fragments.DetailsFragment.java
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final Resources res = getResources();

    if (item.getItemId() == R.id.menu_favorites_toggle) {
        onCheckedChanged(mIsStarred);
        mIsStarred = (mIsStarred ? false : true); // Toggle value
        getSherlockActivity().invalidateOptionsMenu();
        return true;
    } else if (item.getItemId() == R.id.menu_map) {
        final Intent intent = new Intent(getActivity(), MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
        intent.putExtra(Const.INTENT_EXTRA_GEO_LAT, mGeoLat);
        intent.putExtra(Const.INTENT_EXTRA_GEO_LNG, mGeoLng);
        intent.putExtra(Const.INTENT_EXTRA_POST_ID, mIdPost);
        startActivity(intent);
        return true;
    } else if (item.getItemId() == R.id.menu_reminder) {
        parkingApp.showToastText(R.string.toast_todo_reminder, Toast.LENGTH_LONG);
        return true;
    } else if (item.getItemId() == R.id.menu_directions) {
        if ((Double.compare(mGeoLat, Double.MIN_VALUE) != 0)
                && (Double.compare(mGeoLng, Double.MIN_VALUE) != 0)) {
            /** Get directions using Intents. */
            try {
                final Uri uriNavigation = Uri
                        .parse(String.format(Const.URI_INTENT_NAVIGATION, mGeoLat, mGeoLng));
                final Intent intent = new Intent(Intent.ACTION_VIEW, uriNavigation);
                startActivity(intent);
            } catch (Exception e) {
                e.printStackTrace();

                String sAddr = "";
                Location userLocation = parkingApp.getLocation();
                if (userLocation != null) {
                    sAddr = Double.toString(userLocation.getLatitude()) + ","
                            + Double.toString(userLocation.getLongitude());
                }
                final String urlGmaps = String.format(Const.URL_GMAPS_DIRECTIONS, sAddr,
                        mGeoLat + "," + mGeoLng);
                final Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(urlGmaps));
                startActivity(intent);
            }
        }
        return true;
    } else if (item.getItemId() == R.id.menu_streetview) {
        if ((Double.compare(mGeoLat, Double.MIN_VALUE) != 0)
                && (Double.compare(mGeoLng, Double.MIN_VALUE) != 0)) {
            try {
                final Uri uriStreetView = Uri
                        .parse(String.format(Const.URI_INTENT_STREETVIEW, mGeoLat, mGeoLng));
                final Intent intent = new Intent(Intent.ACTION_VIEW, uriStreetView);
                startActivity(intent);
            } catch (NullPointerException e) {
                e.printStackTrace();
                return false;
            } catch (Exception e) {
                parkingApp.showToastText(R.string.toast_streetview_error, Toast.LENGTH_LONG);
                e.printStackTrace();

                final Uri uriInstallStreetView = Uri.parse(Const.URI_INSTALL_STREETVIEW);
                final Intent intent = new Intent(Intent.ACTION_VIEW, uriInstallStreetView);
                startActivity(intent);
                return false;
            }
        }
        return true;
    } else if (item.getItemId() == R.id.menu_share) {
        final GregorianCalendar parkingCalendar = parkingApp.getParkingCalendar();

        final int dayOfWeek = (parkingCalendar.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY ? 7
                : parkingCalendar.get(Calendar.DAY_OF_WEEK) - 1);
        final double parkingHour = parkingCalendar.get(Calendar.HOUR_OF_DAY)
                + Math.round(parkingCalendar.get(Calendar.MINUTE) / 0.6) / 100.00d;
        // final int duration = parkingApp.getParkingDuration();

        final String url = String.format(res.getString(R.string.url_share_post_id), mIdPost, dayOfWeek,
                parkingHour, parkingApp.getParkingDuration());
        final String subject = String.format(res.getString(R.string.details_share_title), url);
        final String desc = String.format(res.getString(R.string.details_share_subtitle), mShareDesc);

        final Intent intent = new Intent(Intent.ACTION_SEND);
        intent.setType("text/plain");
        // EXTRA_SUBJECT is not used to allow sharing with SMS instead of MMS
        // intent.putExtra(Intent.EXTRA_SUBJECT, subject);
        intent.putExtra(Intent.EXTRA_TEXT, subject + Const.LINE_SEPARATOR + desc);
        startActivity(intent);
        return true;
    }

    return (activityHelper.onOptionsItemSelected(item) || super.onOptionsItemSelected(item));
}
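The directions and street-view branches compare the coordinates against Double.MIN_VALUE with Double.compare to detect an unset value. A small sketch of that sentinel pattern (the class is hypothetical; field names mirror the example above). Note that Double.MIN_VALUE is the smallest positive double (about 4.9e-324), not the most negative value, so it only works as an "unset" marker, never as a lower bound:

class LocationHolder {
    // Coordinates stay at the sentinel until a real location is known.
    private double mGeoLat = Double.MIN_VALUE;
    private double mGeoLng = Double.MIN_VALUE;

    boolean hasCoordinates() {
        // Exact comparison against the sentinel; a result of 0 means "still unset".
        return Double.compare(mGeoLat, Double.MIN_VALUE) != 0
                && Double.compare(mGeoLng, Double.MIN_VALUE) != 0;
    }
}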
From source file:org.ut.biolab.medsavant.client.view.genetics.variantinfo.GenemaniaInfoRetriever.java
public List<Gene> getRelatedGenesByScore()
        throws ApplicationException, DataStoreException, NoRelatedGenesInfoException {
    options = runGeneManiaAlgorithm();
    final Map<Gene, Double> scores = options.getScores();
    ArrayList<Gene> relatedGenes = new ArrayList<Gene>(scores.keySet());
    Collections.sort(relatedGenes, new Comparator<Gene>() {
        public int compare(Gene gene1, Gene gene2) {
            return -Double.compare(scores.get(gene1), scores.get(gene2));
        }
    });
    return relatedGenes;
}
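Negating the result of Double.compare produces a descending order here, but the contract only guarantees the sign of the returned value, so negation technically assumes the result is never Integer.MIN_VALUE. A hedged alternative sketch (generic instead of the Gene type) that reverses the order simply by swapping the arguments:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class ScoreSort {
    // Descending sort without negating the comparator's result: swapping the
    // arguments to Double.compare reverses the order and relies only on the
    // sign contract of the return value.
    static <T> List<T> byScoreDescending(Map<T, Double> scores) {
        List<T> items = new ArrayList<>(scores.keySet());
        items.sort((a, b) -> Double.compare(scores.get(b), scores.get(a)));
        return items;
    }
}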
From source file:net.daboross.bukkitdev.skywars.api.location.SkyPlayerLocation.java
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(final Object o) {
    if (this == o) return true;
    if (!(o instanceof SkyPlayerLocation)) return false;

    SkyPlayerLocation location = (SkyPlayerLocation) o;

    if (Double.compare(location.pitch, pitch) != 0) return false;
    if (Double.compare(location.x, x) != 0) return false;
    if (Double.compare(location.y, y) != 0) return false;
    if (Double.compare(location.yaw, yaw) != 0) return false;
    if (Double.compare(location.z, z) != 0) return false;
    if (world != null ? !world.equals(location.world) : location.world != null) return false;

    return true;
}
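Double.compare rather than == is the standard idiom for double fields in equals(): it treats NaN as equal to itself and distinguishes 0.0 from -0.0, keeping equals() reflexive and consistent with Double.equals and Double.hashCode. A minimal demonstration (class name is illustrative):

public class DoubleFieldEqualityDemo {
    public static void main(String[] args) {
        double nan = Double.NaN;
        // '==' is not reflexive for NaN, so an equals() built on '==' could
        // report an object as not equal to itself.
        System.out.println(nan == nan);                      // false
        System.out.println(Double.compare(nan, nan) == 0);   // true

        // '==' treats 0.0 and -0.0 as equal; Double.compare does not, which keeps
        // equals() consistent with Double.equals and Double.hashCode.
        System.out.println(0.0 == -0.0);                     // true
        System.out.println(Double.compare(0.0, -0.0) == 0);  // false
    }
}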
From source file:ml.shifu.shifu.core.binning.EqualPopulationBinning.java
/**
 * Generate data bin by expected bin number
 *
 * @param toBinningNum
 *            toBinningNum
 * @return list of data binning
 */
private List<Double> getDataBin(int toBinningNum) {
    List<Double> binBorders = new ArrayList<Double>();
    binBorders.add(Double.NEGATIVE_INFINITY);

    double totalCnt = getTotalInHistogram();

    // merge extra small bins
    // extra small bin means : binCount less than 3% of average bin count
    // binCount < ( total * (1/toBinningNum) * (3/100))
    mergeExtraSmallBins(totalCnt, toBinningNum);

    if (this.currentHistogramUnitCnt <= toBinningNum) {
        // if the count of histogram unit is less than expected bin number
        // return each histogram unit as a bin. The boundary will be middle value
        // of every two histogram unit values
        convertHistogramUnitIntoBin(binBorders);
        return binBorders;
    }

    LinkNode<HistogramUnit> currStartPos = null;

    // To improve time performance
    sumCacheGen();

    for (int j = 1; j < toBinningNum; j++) {
        double s = (j * totalCnt) / toBinningNum;
        LinkNode<HistogramUnit> pos = locateHistogram(s, currStartPos);
        if (pos == null || pos == currStartPos || pos.next() == null) {
            continue;
        } else {
            HistogramUnit chu = pos.data();
            HistogramUnit nhu = pos.next().data();

            // double d = s - sum(chu.getHval());
            double d = s - sumCache.get(pos);
            if (d < 0) {
                double u = (chu.getHval() + nhu.getHval()) / 2;
                binBorders.add(u);
                currStartPos = pos;
                continue;
            }

            double a = nhu.getHcnt() - chu.getHcnt();
            double b = 2 * chu.getHcnt();
            double c = -2 * d;
            double z = 0.0;
            if (Double.compare(a, 0) == 0) {
                z = -1 * c / b;
            } else {
                z = (-1 * b + Math.sqrt(b * b - 4 * a * c)) / (2 * a);
            }

            double u = chu.getHval() + (nhu.getHval() - chu.getHval()) * z;
            binBorders.add(u);
            currStartPos = pos;
        }
    }

    return binBorders;
}
From source file:de.tudarmstadt.ukp.dkpro.core.mallet.topicmodel.MalletTopicModelInferencer.java
/**
 * Assign topics according to the following formula:
 * <p>
 * Topic proportion must be at least the maximum topic's proportion divided by the maximum
 * number of topics to be assigned. In addition, the topic proportion must not lie under the
 * minTopicProb. If more topics comply with these criteria, only retain the n
 * (maxTopicAssignments) largest values.
 *
 * @param topicDistribution
 *            a double array containing the document's topic proportions
 * @return an array of integers pointing to the topics assigned to the document
 */
private int[] assignTopics(final double[] topicDistribution) {
    /*
     * threshold is the largest value divided by the maximum number of topics or the fixed
     * number set as minTopicProb parameter.
     */
    double threshold = Math.max(
            Collections.max(Arrays.asList(ArrayUtils.toObject(topicDistribution))) / maxTopicAssignments,
            minTopicProb);

    /*
     * assign indexes for values that are above threshold
     */
    List<Integer> indexes = new ArrayList<>(topicDistribution.length);
    for (int i = 0; i < topicDistribution.length; i++) {
        if (topicDistribution[i] >= threshold) {
            indexes.add(i);
        }
    }

    /*
     * Reduce assignments to maximum number of allowed assignments.
     */
    if (indexes.size() > maxTopicAssignments) {
        /* sort index list by corresponding values */
        Collections.sort(indexes, new Comparator<Integer>() {
            @Override
            public int compare(Integer aO1, Integer aO2) {
                return Double.compare(topicDistribution[aO1], topicDistribution[aO2]);
            }
        });
        while (indexes.size() > maxTopicAssignments) {
            indexes.remove(0);
        }
    }

    return ArrayUtils.toPrimitive(indexes.toArray(new Integer[indexes.size()]));
}
From source file:org.wso2.carbon.analytics.dataservice.core.indexing.sort.RecordSortUtils.java
private static int compareValues(AnalyticsSchema.ColumnType type, Object value1, Object value2)
        throws AnalyticsException {
    int compareInt;
    switch (type) {
        case STRING:
            compareInt = ((String) value1).compareTo(((String) value2));
            break;
        case INTEGER:
            compareInt = Integer.compare((Integer) value1, (Integer) value2);
            break;
        case LONG:
            compareInt = Long.compare((Long) value1, (Long) value2);
            break;
        case FLOAT:
            compareInt = Float.compare((Float) value1, (Float) value2);
            break;
        case DOUBLE:
            compareInt = Double.compare((Double) value1, (Double) value2);
            break;
        case BOOLEAN:
            compareInt = Boolean.compare((Boolean) value1, (Boolean) value2);
            break;
        default:
            throw new AnalyticsException("Cannot sort values of type: " + type);
    }
    return compareInt;
}
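All of the static compare methods used above (Integer, Long, Float, Double, Boolean) follow the same sign contract, and they are preferred over subtraction-based comparators, which can overflow for integral types. A small illustrative sketch (class name is hypothetical):

public class CompareVsSubtractDemo {
    public static void main(String[] args) {
        int a = Integer.MIN_VALUE;
        int b = 1;
        // A subtraction-based comparator overflows here and reports the wrong sign...
        System.out.println(a - b);                 // 2147483647 (positive)
        // ...while the static compare method always returns the correct sign.
        System.out.println(Integer.compare(a, b)); // -1
    }
}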
From source file:org.eumetsat.usd.gcp.server.data.NetCDFCalibrationDataManager.java
/**
 * {@inheritDoc}
 */
@Override
public void addDataFromDatasetForUser(String userID, String datasetURL, String channelName, double userSceneTb)
        throws DatasetReadException, InvalidFormatException, InvalidFilenameException {
    // Download file to speed reading up.
    NetcdfFile ncfile = NetcdfUtils.downloadAndOpenFile(datasetURL);

    // Extract the conversion formulas.
    Set<String> convVarsNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER); // init conversion var names list, case-insensitive.

    GlobalAttributesNames globalAttrNames = configManager.getGlobalAttributesNames();

    Attribute radToTbFormulaAttr = ncfile.findGlobalAttributeIgnoreCase(globalAttrNames.getRadToTbConvFormula());
    Attribute tbToRadFormulaAttr = ncfile.findGlobalAttributeIgnoreCase(globalAttrNames.getTbToRadConvFormula());

    String radToTbConvFormula = configManager.getGlobalAttributesDefaults().getRadToTbConvFormula(); // default.
    if (radToTbFormulaAttr != null) {
        try {
            radToTbConvFormula = processRadianceToTbFormula(radToTbFormulaAttr.getStringValue().split("=")[1],
                    convVarsNames);
        } catch (FormulaException fe) {
            LOGGER.warn("invalid radiance to tb conversion formula in <" + FilenameUtils.getName(datasetURL)
                    + ">. Using default.", fe);
        }
    } else {
        LOGGER.warn("radiance to tb conversion formula not found. Using default.");
    }

    String tbToRadConvFormula = configManager.getGlobalAttributesDefaults().getRadToTbConvFormula(); // default.
    if (tbToRadFormulaAttr != null) {
        try {
            tbToRadConvFormula = processTbToRadianceFormula(tbToRadFormulaAttr.getStringValue().split("=")[1],
                    convVarsNames);
        } catch (FormulaException fe) {
            LOGGER.warn("invalid tb to radiance conversion formula in <" + FilenameUtils.getName(datasetURL)
                    + ">. Using default.", fe);
        }
    } else {
        LOGGER.warn("tb to radiance conversion formula not found. Using default.");
    }

    // Extract the variables from the NetCDF file -------------------
    try {
        VariablesNames varNames = configManager.getVariablesNames();

        // Get list of channel indexes to retrieve (All channels or single channel)
        Array channelNames = NetcdfUtils.readVariable(ncfile, varNames.getChannelName());

        int firstChannelNum = 0;
        int lastChannelNum = 0;

        if (channelName.equalsIgnoreCase("All")) {
            firstChannelNum = 0;
            lastChannelNum = NetcdfUtils.getNumRowsOf(channelNames) - 1;
        } else {
            firstChannelNum = NetcdfUtils.getIndexOf(channelName, channelNames);
            if (firstChannelNum == -1) {
                throw new InvalidFormatException("'" + channelName + "' not found in the NetCDF file <"
                        + FilenameUtils.getName(datasetURL) + ">.");
            }
            lastChannelNum = firstChannelNum;
        }

        for (int channelNum = firstChannelNum; channelNum <= lastChannelNum; channelNum++) {
            // Get data array from the netCDF file.
            Array dateArray = NetcdfUtils.readVariable(ncfile, varNames.getDate());
            Array offsetArray = NetcdfUtils.readVariable(ncfile, varNames.getOffset(), channelNum);
            Array slopeArray = NetcdfUtils.readVariable(ncfile, varNames.getSlope(), channelNum);
            Array offsetSeArray = NetcdfUtils.readVariable(ncfile, varNames.getOffsetSe(), channelNum);
            Array slopeSeArray = NetcdfUtils.readVariable(ncfile, varNames.getSlopeSe(), channelNum);

            // Flag with Double.POSITIVE_INFINITY if slope is equal to 0.
            for (int i = 0; i < slopeArray.getSize(); i++) {
                if (Double.compare(slopeArray.getDouble(i), 0) == 0) {
                    slopeArray.setDouble(i, Double.POSITIVE_INFINITY);
                }
            }

            // TODO: check if this workaround to support inconsistent datasets can be removed.
            Array covarianceArray = null;
            try {
                covarianceArray = NetcdfUtils.readVariable(ncfile, varNames.getCovariance(), channelNum);
            } catch (VariableNotFoundException vnfe) {
                try {
                    covarianceArray = NetcdfUtils.readVariable(ncfile, "covar_of_offset_and_slope", channelNum);
                } catch (VariableNotFoundException vnfe2) {
                    covarianceArray = NetcdfUtils.readVariable(ncfile, "covar", channelNum);
                }
            }

            // Get stdSceneTb if not defined.
            double sceneTb = 0.0;
            if (Double.compare(userSceneTb, -1.0) == 0) {
                sceneTb = NetcdfUtils.readVariable(ncfile, varNames.getStdSceneTb()).getDouble(channelNum);
            } else {
                sceneTb = userSceneTb;
            }

            NetcdfFilename ncfilename = NetcdfFilename.parse(FilenameUtils.getName(datasetURL));

            String currentChannelName = NetcdfUtils.getStringFrom(channelNum, channelNames);

            // Format timestamp.
            String timestamp = DateUtils.format(
                    DateUtils.parse(ncfilename.getTimestamp(), "yyyyMMddHHmmss", "GMT"),
                    "yyyy/MM/dd HH:mm:ss", "GMT");

            // Construct the dataset name.
            String datasetName = ncfilename.getSatellite() + "/" + ncfilename.getInstrument()
                    + " referenced with " + ncfilename.getRefSatellite() + "/" + ncfilename.getRefInstrument()
                    + " [" + ncfilename.getLocationIndication().split("-")[1] + "]["
                    + ncfilename.getCorrectionType() + "][" + ncfilename.getMode() + "][" + timestamp + "][v"
                    + ncfilename.getVersion() + "][" + currentChannelName + "][" + sceneTb + "K]";

            // Add new records.
            addCalibrationRecords(userID, ncfile, datasetName, datasetURL, dateArray, offsetArray, offsetSeArray,
                    slopeArray, slopeSeArray, covarianceArray, channelNum, sceneTb, radToTbConvFormula,
                    tbToRadConvFormula, convVarsNames);
        }

    } catch (BadArgumentException bae) {
        throw new InvalidFormatException(
                "Format of NetCDF file <" + FilenameUtils.getName(datasetURL) + "> is invalid.", bae);
    } catch (ParseException pe) {
        throw new InvalidFormatException("Timestamp with invalid format.", pe);
    } catch (VariableNotFoundException vnfe) {
        throw new InvalidFormatException("Variable '" + vnfe.getVariableName() + "' not found in NetCDF file <"
                + FilenameUtils.getName(datasetURL) + ">.", vnfe);
    } catch (VariableReadException vre) {
        throw new InvalidFormatException("Variable '" + vre.getVariableName() + "' not found in NetCDF file <"
                + FilenameUtils.getName(datasetURL) + ">.", vre);
    } catch (ChannelNotFoundException cnfe) {
        throw new InvalidFormatException("Channel number '" + cnfe.getChannelNum() + "' not found in variable '"
                + cnfe.getVariableName() + "' in in NetCDF file <" + FilenameUtils.getName(datasetURL) + ">.",
                cnfe);
    } finally {
        // Clean-up
        if (ncfile != null) {
            try {
                ncfile.close();
            } catch (IOException ioe) {
                LOGGER.error("trying to close the NetcdfFile", ioe);
            }
        }
    }
}
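This example uses Double.compare against exact constants twice: to flag a slope of exactly 0 and to recognise -1.0 as the "no user-supplied scene temperature" sentinel. A brief sketch of the two idioms this distinguishes (class and method names are illustrative): exact checks suit sentinels and true zeros, while computed quantities usually call for a tolerance.

class DoubleChecks {
    // Exact check, as used above for the slope == 0 flag and the -1.0 sentinel;
    // Double.compare also treats -0.0 and NaN consistently.
    static boolean isExactly(double value, double expected) {
        return Double.compare(value, expected) == 0;
    }

    // For quantities that carry floating-point rounding error, a tolerance is usually safer.
    static boolean nearlyEqual(double a, double b, double epsilon) {
        return Math.abs(a - b) <= epsilon;
    }
}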
From source file:org.kalypso.jts.JTSUtilities.java
private static double interpolateX(final double x1, final double x2, final int y1, final double y2,
        final double y) {
    if (Double.isNaN(x1) || Double.isNaN(x2))
        return Double.NaN;

    if (Double.compare(x1, x2) == 0)
        return x1;
    else {
        try {
            final LinearEquation computeX = new LinearEquation(x1, y1, x2, y2);
            return computeX.computeX(y);
        } catch (final SameXValuesException e) {
            // should never happen, as we test this explicitly above
            e.printStackTrace();
            return x1;
        }
    }
}
From source file:org.sleuthkit.autopsy.timeline.ui.AbstractVisualization.java
/** iterate through the list of tick-marks building a two level structure of * replacement tick marl labels. (Visually) upper level has most * detailed/highest frequency part of date/time. Second level has rest of * date/time grouped by unchanging part. * eg:// w ww . j a va 2s.c o m * * * october-30_october-31_september-01_september-02_september-03 * * becomes * * _________30_________31___________01___________02___________03 * * _________october___________|_____________september___________ * * * NOTE: This method should only be invoked on the JFX thread */ public synchronized void layoutDateLabels() { //clear old labels branchPane.getChildren().clear(); leafPane.getChildren().clear(); //since the tickmarks aren't necessarily in value/position order, //make a clone of the list sorted by position along axis ObservableList<Axis.TickMark<X>> tickMarks = FXCollections.observableArrayList(getXAxis().getTickMarks()); tickMarks.sort( (Axis.TickMark<X> t, Axis.TickMark<X> t1) -> Double.compare(t.getPosition(), t1.getPosition())); if (tickMarks.isEmpty() == false) { //get the spacing between ticks in the underlying axis double spacing = getTickSpacing(); //initialize values from first tick TwoPartDateTime dateTime = new TwoPartDateTime(getTickMarkLabel(tickMarks.get(0).getValue())); String lastSeenBranchLabel = dateTime.branch; //cumulative width of the current branch label //x-positions (pixels) of the current branch and leaf labels double leafLabelX = 0; if (dateTime.branch.equals("")) { //if there is only one part to the date (ie only year), just add a label for each tick for (Axis.TickMark<X> t : tickMarks) { assignLeafLabel(new TwoPartDateTime(getTickMarkLabel(t.getValue())).leaf, spacing, leafLabelX, isTickBold(t.getValue())); leafLabelX += spacing; //increment x } } else { //there are two parts so ... //initialize additional state double branchLabelX = 0; double branchLabelWidth = 0; for (Axis.TickMark<X> t : tickMarks) { //for each tick //split the label into a TwoPartDateTime dateTime = new TwoPartDateTime(getTickMarkLabel(t.getValue())); //if we are still on the same branch if (lastSeenBranchLabel.equals(dateTime.branch)) { //increment branch width branchLabelWidth += spacing; } else {// we are on to a new branch, so ... assignBranchLabel(lastSeenBranchLabel, branchLabelWidth, branchLabelX); //and then update label, x-pos, and width lastSeenBranchLabel = dateTime.branch; branchLabelX += branchLabelWidth; branchLabelWidth = spacing; } //add the label for the leaf (highest frequency part) assignLeafLabel(dateTime.leaf, spacing, leafLabelX, isTickBold(t.getValue())); //increment leaf position leafLabelX += spacing; } //we have reached end so add branch label for current branch assignBranchLabel(lastSeenBranchLabel, branchLabelWidth, branchLabelX); } } //request layout since we have modified scene graph structure requestParentLayout(); }