Example usage for java.lang Double compare

List of usage examples for java.lang Double compare

Introduction

On this page you can find example usages of java.lang Double.compare.

Prototype

public static int compare(double d1, double d2) 

Source Link

Document

Compares the two specified double values.

Usage

From source file:com.opengamma.analytics.financial.equity.option.EquityIndexFutureOption.java

@Override
public boolean equals(final Object obj) {
    // Reflexive: the same reference is always equal.
    if (this == obj) {
        return true;
    }
    // instanceof also rejects null, so no separate null check is needed.
    if (!(obj instanceof EquityIndexFutureOption)) {
        return false;
    }
    final EquityIndexFutureOption other = (EquityIndexFutureOption) obj;
    // Doubles are compared via Double.compare for bit-exact equality semantics;
    // the underlying is compared null-safely.
    return _exerciseType == other._exerciseType
            && _isCall == other._isCall
            && Double.compare(_strike, other._strike) == 0
            && Double.compare(_referencePrice, other._referencePrice) == 0
            && Double.compare(_expiry, other._expiry) == 0
            && Double.compare(_pointValue, other._pointValue) == 0
            && ObjectUtils.equals(_underlying, other._underlying);
}

From source file:ml.shifu.dtrain.NNTest.java

@Test
public void testNNApp() throws IOException {
    Properties props = new Properties();

    // Wire up the Guagua distributed-training driver: master/worker classes,
    // result types, iteration budget, input data and output locations.
    LOG.info("Set property for Guagua driver");
    props.setProperty(GuaguaConstants.MASTER_COMPUTABLE_CLASS, NNMaster.class.getName());
    props.setProperty(GuaguaConstants.WORKER_COMPUTABLE_CLASS, NNWorker.class.getName());
    props.setProperty(GuaguaConstants.GUAGUA_ITERATION_COUNT, "30");
    props.setProperty(GuaguaConstants.GUAGUA_MASTER_RESULT_CLASS, NNParams.class.getName());
    props.setProperty(GuaguaConstants.GUAGUA_WORKER_RESULT_CLASS, NNParams.class.getName());
    props.setProperty(GuaguaConstants.GUAGUA_INPUT_DIR,
            getClass().getResource("/data/wdbc/wdbc.normalized").toString());
    props.setProperty(GuaguaConstants.GUAGUA_MASTER_INTERCEPTERS, NNOutput.class.getName());
    props.setProperty(DtrainConstants.GUAGUA_NN_OUTPUT, OUTPUT);
    props.setProperty(DtrainConstants.NN_PROGRESS_FILE, PROGRESS_FILE_STRING);
    props.setProperty(DtrainConstants.NN_TRAINER_ID, "#1");
    props.setProperty(DtrainConstants.NN_TMP_MODELS_FOLDER, TMP_MODELS_FOLDER);

    // Since many parameter settings in NNMaster/NNWorker lack default values,
    // all of them must be specified here or the master/worker won't work
    // properly. The settings below contain all indispensable and optional
    // parameters: a 30-20-1 sigmoid network trained with quick propagation.
    LOG.info("Set property for NN trainer");
    props.setProperty(DtrainConstants.NN_DATA_DELIMITER, ",");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_BAGGING_NUM, "1");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_IS_TRAIN_ON_DISK, "false");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_BAGGING_SAMPLE_RATE, "1.0");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_CROSS_VALIDATION_RATE, "0.2");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_INPUT_NODES, "30");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_OUTPUT_NODES, "1");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_HIDDEN_LAYERS, "2");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_HIDDEN_NODES, "30,20");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_ACT_FUNCS,
            DtrainConstants.NN_SIGMOID + "," + DtrainConstants.NN_SIGMOID);
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_PROPAGATION, NNUtils.QUICK_PROPAGATION);
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_NN_LEARNING_RATE, "0.2");
    props.setProperty(DtrainConstants.SHIFU_DTRAIN_PARALLEL, "true");

    GuaguaUnitDriver<NNParams, NNParams> driver = new GuaguaMRUnitDriver<NNParams, NNParams>(props);
    driver.run();

    // Check output files exist.
    File finalModel = new File(NN_TEST);
    Assert.assertTrue(finalModel.exists());

    File progressFile = new File(PROGRESS_FILE_STRING);
    Assert.assertTrue(progressFile.exists());

    // Check final output error less than threshold, parsed from the last
    // progress line of the form "Train Error:x.x Validation Error:y.y".
    List<String> errorList = FileUtils.readLines(progressFile);
    String errorLine = errorList.get(errorList.size() - 1);

    Pattern resultPattern = Pattern
            .compile("Train\\s+Error:(\\d+\\.\\d+)\\s+Validation\\s+Error:(\\d+\\.\\d+)");
    Matcher errorMatcher = resultPattern.matcher(errorLine);
    Assert.assertTrue(errorMatcher.find());

    double trainErr = Double.parseDouble(errorMatcher.group(1));
    double testErr = Double.parseDouble(errorMatcher.group(2));
    double threshold = 0.2;
    // Mean of train and validation error must not exceed the threshold.
    Assert.assertTrue(Double.compare((trainErr + testErr) / 2, threshold) <= 0);

    // Check final model.
    // Here only simply check the output weight size:
    // (30 inputs + bias) * 30 + (30 + bias) * 20 + (20 + bias) * 1.
    BasicNetwork model = (BasicNetwork) EncogDirectoryPersistence.loadObject(new File(OUTPUT));
    Assert.assertEquals(model.getFlat().getWeights().length, 31 * 30 + 31 * 20 + 21);
}

From source file:Business.InvertedIndex.java

/**
 * Scores every indexed document against the given query terms and returns
 * the documents sorted by descending relevance score.
 *
 * The query itself is indexed as a pseudo-document (id "queryTerm") so its
 * term weights are length-normalized the same way as real documents; each
 * document's score is the sum over query terms of the product of the two
 * normalized term coefficients.
 *
 * @param queryTerms the tokenized query terms
 * @return documents ordered from most to least relevant
 */
public List<DocInfo> findReleventDocs(String[] queryTerms) {

    DocInfo queryDoc = new DocInfo("queryTerm", null, null);

    // NOTE(review): "qureyTerm" looks like a typo of "queryTerm" — confirm
    // whether populateInvertedIndex depends on this exact key before fixing.
    populateInvertedIndex(queryTerms, "qureyTerm", queryDoc);
    documents.put("queryTerm", queryDoc);
    queryDoc.lengthNormalize(documents.size());

    // Accumulate one similarity score per document across all query terms.
    final HashMap<DocInfo, Double> scores = new HashMap<>();
    for (DocTermInfo queryTerm : queryDoc.getTermsInADocument().values()) {
        for (Object candidate : queryTerm.getTermInfo().getDocuments().values()) {
            DocInfo doc = (DocInfo) candidate;
            // BUG FIX: was `doc.getDocId() != "queryTerm"`, a reference
            // comparison on Strings; use equals() so the query
            // pseudo-document is reliably excluded from the results.
            if (!"queryTerm".equals(doc.getDocId())) {
                doc.lengthNormalize(documents.size());
                double contribution = doc.getTermsInADocument().get(queryTerm.getTerm()).getFinalCoffecient()
                        * queryTerm.getFinalCoffecient();
                Double previous = scores.get(doc);
                scores.put(doc, (previous == null ? 0.0 : previous) + contribution);
            }
        }
    }

    // Sort documents by descending score (higher score first).
    List<DocInfo> sortedDocs = new ArrayList<>(scores.keySet());
    Collections.sort(sortedDocs, new Comparator<DocInfo>() {
        @Override
        public int compare(DocInfo s1, DocInfo s2) {
            return Double.compare(scores.get(s2), scores.get(s1));
        }
    });
    return sortedDocs;
}

From source file:com.tealcube.minecraft.bukkit.mythicdrops.api.enchantments.MythicEnchantment.java

@Override
public boolean equals(Object o) {
    // Identity short-circuit.
    if (this == o) {
        return true;
    }
    // instanceof handles null as well.
    if (!(o instanceof MythicEnchantment)) {
        return false;
    }

    MythicEnchantment that = (MythicEnchantment) o;

    // Level bounds must match bit-for-bit.
    if (Double.compare(that.maximumLevel, maximumLevel) != 0) {
        return false;
    }
    if (Double.compare(that.minimumLevel, minimumLevel) != 0) {
        return false;
    }
    // Enchantment compared null-safely: equal only when both null or equal.
    if (enchantment != null ? !enchantment.equals(that.enchantment) : that.enchantment != null) {
        return false;
    }
    return true;
}

From source file:lisong_mechlab.view.graphs.SustainedDpsGraph.java

/**
 * Builds the sustained-DPS dataset: one XY series per weapon, sampling each
 * weapon's DPS contribution (ratio * dps * range effectivity) at every
 * interesting range, with series ordered by descending maximum range.
 */
private TableXYDataset getSeries() {
    final Collection<Modifier> modifiers = loadout.getModifiers();

    // Order weapons by descending max range; break ties by natural order so
    // distinct weapons never compare equal (TreeMap would otherwise merge them).
    final Comparator<Weapon> byRangeDescending = new Comparator<Weapon>() {
        @Override
        public int compare(Weapon left, Weapon right) {
            final int byRange = Double.compare(right.getRangeMax(modifiers), left.getRangeMax(modifiers));
            return byRange != 0 ? byRange : left.compareTo(right);
        }
    };
    final SortedMap<Weapon, List<Pair<Double, Double>>> seriesPoints = new TreeMap<Weapon, List<Pair<Double, Double>>>(
            byRangeDescending);

    // Sample each weapon's damage contribution at every relevant range.
    for (final double range : WeaponRanges.getRanges(loadout)) {
        for (final Map.Entry<Weapon, Double> share : maxSustainedDPS.getWeaponRatios(range).entrySet()) {
            final Weapon weapon = share.getKey();
            final double contribution = share.getValue() * weapon.getStat("d/s", modifiers)
                    * weapon.getRangeEffectivity(range, modifiers);

            List<Pair<Double, Double>> points = seriesPoints.get(weapon);
            if (points == null) {
                points = new ArrayList<Pair<Double, Double>>();
                seriesPoints.put(weapon, points);
            }
            points.add(new Pair<Double, Double>(range, contribution));
        }
    }

    final DefaultTableXYDataset dataset = new DefaultTableXYDataset();
    final List<Weapon> orderedWeapons = new ArrayList<>();
    for (final Map.Entry<Weapon, List<Pair<Double, Double>>> entry : seriesPoints.entrySet()) {
        final XYSeries series = new XYSeries(entry.getKey().getName(), true, false);
        for (final Pair<Double, Double> point : entry.getValue()) {
            series.add(point.first, point.second);
        }
        dataset.addSeries(series);
        orderedWeapons.add(entry.getKey());
    }
    // Colours are matched against ascending-range order, hence the reverse.
    Collections.reverse(orderedWeapons);
    colours.updateColoursToMatch(orderedWeapons);

    return dataset;
}

From source file:org.akvo.caddisfly.model.TestInfo.java

/**
 * Orders this test's swatches in ascending order of their result value.
 */
private void sort() {
    Collections.sort(swatches, new Comparator<Swatch>() {
        @Override
        public int compare(Swatch lhs, Swatch rhs) {
            return Double.compare(lhs.getValue(), rhs.getValue());
        }
    });
}

From source file:twitter4j.internal.json.QueryResultJSONImpl.java

@Override
public boolean equals(Object o) {
    // Identity short-circuit.
    if (this == o) {
        return true;
    }
    // Strict class match: subclasses are never equal to this type.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }

    QueryResult that = (QueryResult) o;

    // Field-by-field comparison; completedIn uses Double.compare for
    // bit-exact semantics, nullable fields are compared null-safely.
    return Double.compare(that.getCompletedIn(), completedIn) == 0
            && maxId == that.getMaxId()
            && count == that.getCount()
            && sinceId == that.getSinceId()
            && query.equals(that.getQuery())
            && (refreshUrl != null ? refreshUrl.equals(that.getRefreshUrl()) : that.getRefreshUrl() == null)
            && (tweets != null ? tweets.equals(that.getTweets()) : that.getTweets() == null);
}

From source file:mase.mason.world.DistanceSensorArcs.java

/**
 * Reads one distance value per sensor arc.
 *
 * Candidates within range are collected as (distance, object) pairs with
 * optional range noise applied, sorted by ascending distance, and then each
 * is assigned to every still-empty arc whose angular interval contains it —
 * so each arc ends up holding its closest detected object. Arcs with no hit
 * report {@link Double#POSITIVE_INFINITY}.
 */
@Override
public double[] readValues() {
    // Default state: no detection in any arc.
    lastDistances = new double[valueCount()];
    Arrays.fill(lastDistances, Double.POSITIVE_INFINITY);
    Arrays.fill(closestObjects, null);
    if (range < 0.001) {
        return lastDistances;
    }
    // Absolute noise magnitude: scaled by the field diagonal when the range
    // is unbounded, otherwise by the range itself.
    double rangeNoiseAbs = Double.isInfinite(range) ? rangeNoise * fieldDiagonal : range * rangeNoise;

    WorldObject[] candidates = getCandidates();

    // Collect (distance, object) pairs for every candidate within range.
    Pair<Double, WorldObject>[] distances = new Pair[candidates.length];
    int index = 0;
    for (WorldObject o : candidates) {
        // Overlapping an object (non center-to-center mode) saturates every
        // arc at distance 0 immediately.
        if (!centerToCenter && o.isInside(ag.getLocation())) {
            Arrays.fill(lastDistances, 0);
            Arrays.fill(closestObjects, o);
            return lastDistances;
        }

        double dist = centerToCenter ? ag.getLocation().distance(o.getLocation())
                : Math.max(0, ag.distanceTo(o));
        if (rangeNoiseAbs > 0) {
            // Uniform noise in [-1, 1] or a standard Gaussian draw, scaled;
            // clamped so distances never go negative.
            dist += rangeNoiseAbs
                    * (noiseType == UNIFORM ? state.random.nextDouble() * 2 - 1 : state.random.nextGaussian());
            dist = Math.max(dist, 0);
        }
        if (dist <= range) {
            distances[index++] = Pair.of(dist, o);
        }
    }
    // Trim the array down to the in-range candidates actually recorded.
    if (index < distances.length) {
        distances = Arrays.copyOf(distances, index);
    }

    // Sort by ascending distance so each arc sees its closest hit first.
    Arrays.sort(distances, new Comparator<Pair<Double, WorldObject>>() {
        @Override
        public int compare(Pair<Double, WorldObject> a, Pair<Double, WorldObject> b) {
            return Double.compare(a.getLeft(), b.getLeft());
        }
    });

    int filled = 0;
    for (Pair<Double, WorldObject> e : distances) {
        // Stop as soon as every arc has a reading.
        if (filled == arcStart.length) {
            break;
        }
        double angle = ag.angleTo(e.getRight().getLocation());
        if (orientationNoise > 0) {
            angle += orientationNoise
                    * (noiseType == UNIFORM ? state.random.nextDouble() * 2 - 1 : state.random.nextGaussian());
            angle = EmboddiedAgent.normalizeAngle(angle);
        }
        for (int a = 0; a < arcStart.length; a++) {
            // An arc accepts the hit when it is still empty and the angle lies
            // inside it; the second disjunct handles arcs wrapping around +/-PI.
            if (Double.isInfinite(lastDistances[a]) && ((angle >= arcStart[a] && angle <= arcEnd[a])
                    || (arcStart[a] > arcEnd[a] && (angle >= arcStart[a] || angle <= arcEnd[a])))) {
                filled++;
                lastDistances[a] = e.getKey();
                closestObjects[a] = e.getValue();
            }
        }
    }
    return lastDistances;
}

From source file:org.jcurl.math.CurveCombined.java

/**
 * Search only part of an array. Could be more general operating with
 * {@link Comparable} and {@link Object}s.
 * /*from  w w w.ja  v a 2  s .  co m*/
 * @param a
 * @param fromIndex
 * @param toIndex
 * @param key
 * 
 * @return found index
 */
static <V extends R1RNFunction> int binarySearch(final List<Entry<Double, V>> a, int fromIndex, int toIndex,
        final double key) {
    if (false) {
        if (fromIndex > toIndex)
            throw new IllegalArgumentException("fromIndex(" + fromIndex + ") > toIndex(" + toIndex + ")");
        if (fromIndex < 0)
            throw new ArrayIndexOutOfBoundsException(fromIndex);
        if (toIndex > a.size())
            throw new ArrayIndexOutOfBoundsException(toIndex);

        int low = fromIndex;
        int high = toIndex - 1;
        while (low <= high) {
            final int mid = low + high >>> 1;
            final double midVal = a.get(mid).getKey().doubleValue();
            final int cmp = Double.compare(midVal, key);
            if (cmp < 0)
                low = mid + 1;
            else if (cmp > 0)
                high = mid - 1;
            else
                return mid; // done
        }
        return -(low + 1); // no such key
    } else {
        double fromKey = a.get(fromIndex).getKey().doubleValue();
        double toKey = a.get(toIndex).getKey().doubleValue();
        for (;;) {
            if (key == fromKey)
                return fromIndex;
            if (key == toKey)
                return toIndex;
            final int midIndex = (toIndex + fromIndex) / 2;
            final double midKey = a.get(midIndex).getKey().doubleValue();
            if (key == midKey)
                return midIndex;
            if (fromIndex + 1 >= toIndex) {
                if (fromKey < key && key < toKey)
                    return -1 - toIndex;
                return -1;
            }
            if (key < midKey) {
                toIndex = midIndex;
                toKey = midKey;
                continue;
            } else if (key > midKey) {
                fromIndex = midIndex;
                fromKey = midKey;
                continue;
            }
        }
    }
}

From source file:org.apache.hadoop.mapreduce.lib.output.TestMRSequenceFileAsBinaryOutputFormat.java

/**
 * Round-trip test: writes random (IntWritable, DoubleWritable) records
 * through SequenceFileAsBinaryOutputFormat as raw bytes, then reads them
 * back with SequenceFileInputFormat and checks every key/value matches the
 * regenerated random sequence.
 */
public void testBinary() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    Job job = new Job(conf);

    Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "outseq");
    Random r = new Random();
    // Remember the seed so the identical random sequence can be replayed
    // during the read-back verification below.
    long seed = r.nextLong();
    r.setSeed(seed);

    FileOutputFormat.setOutputPath(job, outdir);

    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class);
    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class);

    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    BytesWritable bkey = new BytesWritable();
    BytesWritable bval = new BytesWritable();

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    OutputFormat<BytesWritable, BytesWritable> outputFormat = new SequenceFileAsBinaryOutputFormat();
    OutputCommitter committer = outputFormat.getOutputCommitter(context);
    committer.setupJob(job);
    RecordWriter<BytesWritable, BytesWritable> writer = outputFormat.getRecordWriter(context);

    IntWritable iwritable = new IntWritable();
    DoubleWritable dwritable = new DoubleWritable();
    DataOutputBuffer outbuf = new DataOutputBuffer();
    LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
    try {
        for (int i = 0; i < RECORDS; ++i) {
            // Serialize each writable into raw bytes so the binary output
            // format writes them without further interpretation.
            iwritable = new IntWritable(r.nextInt());
            iwritable.write(outbuf);
            bkey.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            dwritable = new DoubleWritable(r.nextDouble());
            dwritable.write(outbuf);
            bval.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            writer.write(bkey, bval);
        }
    } finally {
        writer.close(context);
    }
    committer.commitTask(context);
    committer.commitJob(job);

    InputFormat<IntWritable, DoubleWritable> iformat = new SequenceFileInputFormat<IntWritable, DoubleWritable>();
    int count = 0;
    // Replay the same random sequence to regenerate the expected values.
    r.setSeed(seed);
    SequenceFileInputFormat.setInputPaths(job, outdir);
    LOG.info("Reading data by SequenceFileInputFormat");
    for (InputSplit split : iformat.getSplits(job)) {
        RecordReader<IntWritable, DoubleWritable> reader = iformat.createRecordReader(split, context);
        MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> mcontext = new MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
            int sourceInt;
            double sourceDouble;
            while (reader.nextKeyValue()) {
                sourceInt = r.nextInt();
                sourceDouble = r.nextDouble();
                iwritable = reader.getCurrentKey();
                dwritable = reader.getCurrentValue();
                assertEquals("Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*", sourceInt,
                        iwritable.get());
                assertTrue("Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*",
                        Double.compare(dwritable.get(), sourceDouble) == 0);
                ++count;
            }
        } finally {
            reader.close();
        }
    }
    assertEquals("Some records not found", RECORDS, count);
}