Example usage for java.lang Double NEGATIVE_INFINITY

List of usage examples for java.lang Double NEGATIVE_INFINITY

Introduction

In this page you can find the example usage for java.lang Double NEGATIVE_INFINITY.

Prototype

double NEGATIVE_INFINITY

To view the source code for java.lang Double NEGATIVE_INFINITY, use the Source Link below.

Click Source Link

Document

A constant holding the negative infinity of type double.

Usage

From source file:net.sf.json.TestJSONArray.java

public void testConstructor_primitive_array_double_Infinity() {
    // Neither infinity is a valid JSON number, so fromObject must reject both.
    double[][] invalidInputs = { { Double.NEGATIVE_INFINITY }, { Double.POSITIVE_INFINITY } };
    for (double[] input : invalidInputs) {
        try {
            JSONArray.fromObject(input);
            fail("Should have thrown a JSONException");
        } catch (JSONException expected) {
            // expected: infinite doubles cannot be serialized to JSON
        }
    }
}

From source file:ucar.unidata.idv.control.chart.MyScatterPlot.java

/**
 * Calculates the Y data range./*from  ww w.j  a v  a2s.  c  o m*/
 *
 * @param data  the data.
 *
 * @return The range.
 */
private Range calculateYDataRange(double[][] data) {

    Range result = null;
    //      double[][] data =  (double[][]) series.get(0);
    if (data != null) {
        double lowest = Double.POSITIVE_INFINITY;
        double highest = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < data[0].length; i++) {
            double v = data[1][i];
            if (v < lowest) {
                lowest = v;
            }
            if (v > highest) {
                highest = v;
            }
        }
        if (lowest <= highest) {
            result = new Range(lowest, highest);
        }
    }
    return result;

}

From source file:clus.statistic.ClassificationStat.java

public int getMajorityClassDiff(int attr, ClassificationStat other) {
    // Class whose count difference (this - other) is largest for attribute 'attr'.
    double[] mine = m_ClassCounts[attr];
    double[] theirs = other.m_ClassCounts[attr];
    int bestClass = -1;
    double bestDiff = Double.NEGATIVE_INFINITY;
    for (int cls = 0; cls < mine.length; cls++) {
        double delta = mine[cls] - theirs[cls];
        if (delta > bestDiff) {
            bestDiff = delta;
            bestClass = cls;
        }
    }
    // Largest difference is numerically zero -> no examples covered here;
    // fall back to the whole training set's majority class when available.
    if (bestDiff <= MathUtil.C1E_9 && m_Training != null) {
        return m_Training.getMajorityClass(attr);
    }
    return bestClass;
}

From source file:com.opengamma.core.historicaltimeseries.impl.NonVersionedRedisHistoricalTimeSeriesSource.java

protected LocalDateDoubleTimeSeries loadTimeSeriesFromRedis(String redisKey, LocalDate start, LocalDate end) {
    // Loads a time series from Redis: a sorted set (the "days" key) stores the
    // dates as scores, and a hash (the "datapoint" key) maps date -> value.
    // Returns null when no data points fall within [start, end].
    // This is the only method that needs implementation.
    try (Timer.Context context = _getSeriesTimer.time()) {
        Jedis jedis = getJedisPool().getResource();
        LocalDateDoubleTimeSeries ts = null;
        try {
            String redisHtsDaysKey = toRedisHtsDaysKey(redisKey);
            // Unbounded by default: null start/end means no lower/upper limit.
            double min = Double.NEGATIVE_INFINITY;
            double max = Double.POSITIVE_INFINITY;
            if (start != null) {
                min = localDateToDouble(start);
            }
            if (end != null) {
                max = localDateToDouble(end);
            }
            // Dates (as int-like text) within the requested score range.
            Set<String> dateTexts = jedis.zrangeByScore(redisHtsDaysKey, min, max);
            if (!dateTexts.isEmpty()) {
                String redisHtsDatapointKey = toRedisHtsDatapointKey(redisKey);
                // Batch-fetch the values for all matched dates in one round trip.
                List<String> valueTexts = jedis.hmget(redisHtsDatapointKey,
                        dateTexts.toArray(new String[dateTexts.size()]));

                List<Integer> times = Lists.newArrayListWithCapacity(dateTexts.size());
                List<Double> values = Lists.newArrayListWithCapacity(valueTexts.size());

                // hmget preserves field order, so the two iterators stay aligned.
                Iterator<String> dateItr = dateTexts.iterator();
                Iterator<String> valueItr = valueTexts.iterator();

                while (dateItr.hasNext()) {
                    String dateAsIntText = dateItr.next();
                    String valueText = StringUtils.trimToNull(valueItr.next());
                    // Skip dates whose value is missing/blank in the hash.
                    if (valueText != null) {
                        times.add(Integer.parseInt(dateAsIntText));
                        values.add(Double.parseDouble(valueText));
                    }
                }
                ts = ImmutableLocalDateDoubleTimeSeries.of(
                        ArrayUtils.toPrimitive(times.toArray(new Integer[times.size()])),
                        ArrayUtils.toPrimitive(values.toArray(new Double[values.size()])));
            }
            // NOTE(review): returnResource/returnBrokenResource is the pre-Jedis-3
            // pool protocol; newer Jedis versions use try-with-resources instead.
            getJedisPool().returnResource(jedis);
        } catch (Exception e) {
            s_logger.error("Unable to load points from redis for " + redisKey, e);
            // Discard the connection — it may be in an inconsistent state.
            getJedisPool().returnBrokenResource(jedis);
            throw new OpenGammaRuntimeException("Unable to load points from redis for " + redisKey, e);
        }
        return ts;
    }
}

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.algorithm.DBEA.java

/**
 * Returns the solution with the largest objective value for the given
 * objective./*from ww  w  . ja v  a  2s. c  o m*/
 * 
 * @param objective the objective
 * @param population the population of solutions
 * @return the solution with the largest objective value
 */
private Solution largestObjectiveValue(int objective, Population population) {
    Solution largest = null;
    double value = Double.NEGATIVE_INFINITY;

    for (Solution solution : population) {
        if (solution.getObjective(objective) > value) {
            largest = solution;
            value = solution.getObjective(objective);
        }
    }

    return largest;
}

From source file:gate.plugin.learningframework.engines.EngineServer.java

/**
 * Classifies (or performs regression on) every instance annotation by
 * extracting its feature vector, sending all vectors to an external
 * prediction server as one JSON request, and converting the returned
 * predictions back into {@link GateClassification} objects.
 *
 * @param instanceAS annotations to classify, processed in document order
 * @param inputAS annotation set used for feature extraction
 * @param sequenceAS unused in this implementation
 * @param parms parameter string; supports "d:dense:b" to send dense vectors
 * @return one GateClassification per instance annotation
 */
@Override
public List<GateClassification> classify(AnnotationSet instanceAS, AnnotationSet inputAS,
        AnnotationSet sequenceAS, String parms) {
    Parms ps = new Parms(parms, "d:dense:b");
    boolean dense = (boolean) ps.getValueOrElse("dense", false);

    CorpusRepresentationMalletTarget data = (CorpusRepresentationMalletTarget) corpusRepresentationMallet;
    // Freeze the alphabet so classification cannot add new features.
    data.stopGrowth();
    int nrCols = data.getPipe().getDataAlphabet().size();
    //System.err.println("Running EngineSklearn.classify on document "+instanceAS.getDocument().getName());
    List<GateClassification> gcs = new ArrayList<GateClassification>();
    LFPipe pipe = (LFPipe) data.getRepresentationMallet().getPipe();
    ArrayList<String> classList = null;
    // If we have a classification problem, pre-calculate the class label list
    if (pipe.getTargetAlphabet() != null) {
        classList = new ArrayList<String>();
        for (int i = 0; i < pipe.getTargetAlphabet().size(); i++) {
            String labelstr = pipe.getTargetAlphabet().lookupObject(i).toString();
            classList.add(labelstr);
        }
    }
    // For now create a single request per document
    // eventually we could allow a parameter for sending a maximum number of 
    // instances per request.

    List<Annotation> instances = instanceAS.inDocumentOrder();
    List<double[]> valuesvec = new ArrayList<double[]>();
    List<int[]> indicesvec = new ArrayList<int[]>();
    List<Double> weights = new ArrayList<Double>();
    ObjectMapper mapper = new ObjectMapper();
    boolean haveWeights = false;
    // Phase 1: extract a (dense or sparse) feature vector per instance.
    for (Annotation instAnn : instances) {
        Instance inst = data.extractIndependentFeatures(instAnn, inputAS);

        inst = pipe.instanceFrom(inst);
        FeatureVector fv = (FeatureVector) inst.getData();
        //System.out.println("Mallet instance, fv: "+fv.toString(true)+", len="+fv.numLocations());

        // Convert to the sparse vector we use to send to the process
        // TODO: depending on a parameter send sparse or dense vectors, for now always send sparse

        if (dense) {
            double[] values = new double[nrCols];
            for (int i = 0; i < nrCols; i++) {
                values[i] = fv.value(i);
            }
            valuesvec.add(values);
        } else {
            // To send a sparse vector, we need the indices and the values      
            int locs = fv.numLocations();
            int[] indices = new int[locs];
            double[] values = new double[locs];
            for (int i = 0; i < locs; i++) {
                indices[i] = fv.indexAtLocation(i);
                values[i] = fv.valueAtLocation(i);
            }
            valuesvec.add(values);
            indicesvec.add(indices);
        }
        // NaN marks "no weight" for this instance; weights are only sent
        // if at least one instance actually carried an instanceWeight.
        double weight = Double.NaN;
        Object weightObj = inst.getProperty("instanceWeight");
        if (weightObj != null) {
            weight = (double) weightObj;
            haveWeights = true;
        }
        weights.add(weight);
    }
    // Phase 2: create the JSON for the request
    Map data4json = new HashMap<String, Object>();
    if (!dense)
        data4json.put("indices", indicesvec);
    data4json.put("values", valuesvec);
    data4json.put("n", nrCols);
    if (haveWeights)
        data4json.put("weights", weights);
    String json = null;
    try {
        json = mapper.writeValueAsString(data4json);
    } catch (JsonProcessingException ex) {
        throw new GateRuntimeException("Could not convert instances to json", ex);
    }
    //System.err.println("GOT JSON: "+json);

    // Phase 3: POST the request and check the HTTP status.
    HttpResponse<String> response;
    try {
        response = Unirest.post(serverUrl).header("accept", "application/json")
                .header("content-type", "application/json").body(json).asString();
    } catch (UnirestException ex) {
        throw new GateRuntimeException("Exception when connecting to the server", ex);
    }

    // The response should be either OK and JSON or not OK and an error message
    int status = response.getStatus();
    if (status != 200) {
        throw new GateRuntimeException(
                "Response von server is NOK, status=" + status + " msg=" + response.getBody());
    }
    //System.err.println("Got response, status is OK, data is: "+response.getBody());
    Map responseMap = null;
    try {
        // Parse the json
        responseMap = mapper.readValue(response.getBody(), HashMap.class);
    } catch (IOException ex) {
        // NOTE(review): a parse failure is only logged; the NPE on
        // responseMap below would then be the visible symptom.
        Logger.getLogger(EngineServer.class.getName()).log(Level.SEVERE, null, ex);
    }

    // NOTE: the json created by the weka server currently automatically creates 1 instead
    // of 1.0 if the value is 1.0, and the parser then creates an Integer from this.
    // We could probably change the parsing behaviour into always creating doubles somehow but
    // for now we simply first parse the arrays into Number, then convert each vector into
    // a vector of Double
    ArrayList<ArrayList<Number>> targets = (ArrayList<ArrayList<Number>>) responseMap.get("preds");

    GateClassification gc = null;

    // Phase 4: now go through all the instances again and do the target
    // assignment from the vector(s) we got back.
    int instNr = 0;
    for (Annotation instAnn : instances) {
        if (pipe.getTargetAlphabet() == null) { // we have regression        
            gc = new GateClassification(instAnn, (double) targets.get(instNr).get(0));
        } else {
            ArrayList<Number> valsN = targets.get(instNr);
            ArrayList<Double> vals = new ArrayList<Double>(valsN.size());
            for (Number valN : valsN)
                vals.add(valN.doubleValue());
            double target = vals.get(0); // if vals contains just one value, this will be what to use
            if (vals.size() > 1) {
                // find the maximum probability and use the index as target
                double maxProb = Double.NEGATIVE_INFINITY;
                double bestIndex = -1;
                int curIdx = 0;
                for (double val : vals) {
                    if (val > maxProb) {
                        maxProb = val;
                        bestIndex = (double) curIdx;
                    }
                    curIdx++;
                } // for
                target = bestIndex;
            }
            int bestlabel = (int) target;
            String cl = pipe.getTargetAlphabet().lookupObject(bestlabel).toString();
            double bestprob = Double.NaN;
            if (vals.size() > 1) {
                bestprob = Collections.max(vals);
                gc = new GateClassification(instAnn, cl, bestprob, classList, vals);
            } else {
                // create a fake probability distribution with 1.0/0.0 probabilities
                ArrayList<Double> probs = new ArrayList<Double>(classList.size());
                for (int i = 0; i < classList.size(); i++) {
                    if (i == bestlabel)
                        probs.add(1.0);
                    else
                        probs.add(0.0);
                }
                gc = new GateClassification(instAnn, cl, bestprob, classList, probs);

            }
        }
        gcs.add(gc);
        instNr++;
    }
    // Re-enable alphabet growth for subsequent training runs.
    data.startGrowth();
    return gcs;
}

From source file:org.apache.drill.exec.fn.impl.TestNewMathFunctions.java

@Test
public void testLog10WithInt() throws Throwable {
    // Verifies log10() over int-cast inputs with NaN/Infinity JSON support:
    // log10(0) = -Infinity, log10(1) = 0, log10(-1) = NaN, log10(10) = 1.
    String json = "{" + "\"num1\": 0.0," + "\"num3\": 1.0," + "\"num5\": -1.0," + "\"num6\": 10.0" + "}";
    String query = "select " + "log10(cast(num1 as int)) as num1, " + "log10(cast(num3 as int)) as num3, "
            + "log10(cast(num5 as int)) as num5, " + "log10(cast(num6 as int)) as num6 "
            + "from dfs.`data.json`";
    File file = new File(dirTestWatcher.getRootDir(), "data.json");
    try {
        FileUtils.writeStringToFile(file, json);
        // NaN/Infinity are non-standard JSON; the reader must be told to accept them.
        setSessionOption(ExecConstants.JSON_READER_NAN_INF_NUMBERS, true);
        testBuilder().sqlQuery(query).ordered().baselineColumns("num1", "num3", "num5", "num6")
                .baselineValues(Double.NEGATIVE_INFINITY, 0d, Double.NaN, 1.0d).go();
    } finally {
        // Restore session state and clean up the fixture regardless of outcome.
        resetSessionOption(ExecConstants.JSON_READER_NAN_INF_NUMBERS);
        FileUtils.deleteQuietly(file);
    }
}

From source file:clus.algo.tdidt.tune.CDTuneSizeConstrPruning.java

public int findOptimalSize(ArrayList graph, boolean shouldBeLow) {
    // Locate the index of the best-scoring element (minimum or maximum Y,
    // depending on shouldBeLow).
    int bestIdx = -1;
    double bestVal = shouldBeLow ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
    for (int i = 0; i < graph.size(); i++) {
        double y = ((SingleStatList) graph.get(i)).getY();
        boolean improves = shouldBeLow ? (y < bestVal) : (y > bestVal);
        if (improves) {
            bestVal = y;
            bestIdx = i;
        }
    }
    if (bestIdx == -1) {
        return 1;
    }
    // double max_diff = getRange(graph);
    SingleStatList bestElem = (SingleStatList) graph.get(bestIdx);
    System.out.print("[" + bestElem.getX() + "," + bestElem.getY() + "]");
    // Walk towards smaller sizes, preferring the smallest element (with
    // X >= 3) whose score is within m_RelErrAcc of the optimum.
    SingleStatList chosen = bestElem;
    for (int pos = bestIdx - 1; pos >= 0; pos--) {
        SingleStatList smaller = (SingleStatList) graph.get(pos);
        if (smaller.getX() >= 3 && Math.abs(smaller.getY() - bestElem.getY()) < m_RelErrAcc) {
            chosen = smaller;
            System.out.print(" < " + smaller.getX());
        }
    }
    return (int) chosen.getX();
}

From source file:com.rapidminer.gui.plotter.charts.MultipleScatterPlotter.java

private void prepareData() {
    // Rebuilds the scatter-plot dataset: collects (x, y) points per selected
    // column, computes global axis bounds, optionally applies jitter, and
    // rebuilds the series/item -> id lookup map.
    idMap.clear();
    this.plotIndexToColumnIndexMap.clear();
    dataSet = new DefaultXYDataset();

    if (xAxis >= 0) {
        // One entry per plotted column: series name -> list of {x, y} pairs / ids.
        Map<String, List<double[]>> dataCollection = new LinkedHashMap<String, List<double[]>>();
        Map<String, List<String>> idCollection = new LinkedHashMap<String, List<String>>();

        synchronized (dataTable) {
            for (int column = 0; column < plotColumns.length; column++) {
                if (plotColumns[column]) {
                    plotIndexToColumnIndexMap.add(column);
                    String columnName = this.dataTable.getColumnName(column);
                    Iterator<DataTableRow> i = this.dataTable.iterator();
                    int index = 0;
                    while (i.hasNext()) {
                        DataTableRow row = i.next();

                        double xValue = row.getValue(xAxis);
                        double yValue = row.getValue(column);

                        // Skip rows with missing values on either axis.
                        if (!Double.isNaN(xValue) && !Double.isNaN(yValue)) {
                            addPoint(dataCollection, idCollection, row.getId(), xValue, yValue, columnName);
                        }
                        index++; // NOTE(review): 'index' is incremented but never used
                    }
                }
            }
        }

        // Compute the overall x/y bounds across all series (used for jitter scaling).
        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        double minY = Double.POSITIVE_INFINITY;
        double maxY = Double.NEGATIVE_INFINITY;

        Iterator<Map.Entry<String, List<double[]>>> i = dataCollection.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, List<double[]>> entry = i.next();
            List<double[]> dataList = entry.getValue();
            Iterator<double[]> j = dataList.iterator();
            while (j.hasNext()) {
                double[] current = j.next();
                minX = Math.min(minX, current[0]);
                maxX = Math.max(maxX, current[0]);
                minY = Math.min(minY, current[1]);
                maxY = Math.max(maxY, current[1]);
            }
        }

        // Fixed seed so the jitter is reproducible between repaints.
        Random jitterRandom = new Random(2001);
        double oldXRange = maxX - minX;
        double oldYRange = maxY - minY;

        // No data points -> the ranges are infinite/NaN; disable jitter then.
        if (Double.isInfinite(oldXRange) || Double.isNaN(oldXRange)) {
            oldXRange = 0;
        }
        if (Double.isInfinite(oldYRange) || Double.isNaN(oldYRange)) {
            oldYRange = 0;
        }

        // Convert each series' point list into the 2xN array DefaultXYDataset expects,
        // applying Gaussian jitter proportional to the axis range if requested.
        i = dataCollection.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, List<double[]>> entry = i.next();
            String seriesName = entry.getKey();
            List<double[]> dataList = entry.getValue();

            double[][] data = new double[2][dataList.size()];
            int listCounter = 0;
            Iterator<double[]> j = dataList.iterator();
            while (j.hasNext()) {
                double[] current = j.next();
                data[0][listCounter] = current[0];
                data[1][listCounter] = current[1];

                if (this.jitterAmount > 0) {
                    double pertX = oldXRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                    double pertY = oldYRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                    data[0][listCounter] += pertX;
                    data[1][listCounter] += pertY;
                }

                listCounter++;
            }
            ((DefaultXYDataset) dataSet).addSeries(seriesName, data);
        }

        // Rebuild the (series, item) -> row id map; relies on idCollection
        // iterating in the same order as dataCollection (both LinkedHashMap).
        int seriesCounter = 0;
        Iterator<List<String>> v = idCollection.values().iterator();
        while (v.hasNext()) {
            List<String> idList = v.next();
            int itemCounter = 0;
            Iterator<String> j = idList.iterator();
            while (j.hasNext()) {
                idMap.put(new SeriesAndItem(seriesCounter, itemCounter++), j.next());
            }
            seriesCounter++;
        }
    }
}

From source file:com.clust4j.algo.AffinityPropagationTests.java

@Test
public void testSimMatFormulation() {
    // Verifies the similarity-matrix setup and the three in-place pieces of
    // the affinity-propagation update (responsibility/availability matrices).

    // Similarity of a 3x3 arange matrix: negative squared Euclidean distances,
    // with the diagonal set to the median similarity (-27 here).
    double[][] X = MatUtils.reshape(VecUtils.asDouble(VecUtils.arange(9)), 3, 3);
    double[][] S = AffinityPropagation.computeSmoothedSimilarity(X, Distance.EUCLIDEAN,
            GlobalState.DEFAULT_RANDOM_STATE, false);

    assertTrue(MatUtils.equalsExactly(S, new double[][] { new double[] { -27, -27, -108 },
            new double[] { -27, -27, -27 }, new double[] { -108, -27, -27 } }));

    // With noise enabled the result should match only within tolerance.
    double[][] S_noise = AffinityPropagation.computeSmoothedSimilarity(X, Distance.EUCLIDEAN,
            GlobalState.DEFAULT_RANDOM_STATE, true);

    assertTrue(MatUtils.equalsWithTolerance(S_noise, new double[][] { new double[] { -27, -27, -108 },
            new double[] { -27, -27, -27 }, new double[] { -108, -27, -27 } }, 1e-12));

    // ==== DIFF EXAMPLE ====

    X = new double[][] { new double[] { 0.1, 0.2, 0.3, 0.4 }, new double[] { 0.2, 0.2, 0.3, 0.1 },
            new double[] { 12.1, 18.1, 34, 12 }, new double[] { 15, 23.2, 32.1, 14 } };

    S_noise = AffinityPropagation.computeSmoothedSimilarity(X, Distance.EUCLIDEAN,
            GlobalState.DEFAULT_RANDOM_STATE, true);

    assertTrue(MatUtils.equalsWithTolerance(S_noise,
            new double[][] {
                    new double[] { -8.88345000e+02, -1.00000000e-01, -1.73466000e+03, -1.94721000e+03 },
                    new double[] { -1.00000000e-01, -8.88345000e+02, -1.73932000e+03, -1.95249000e+03 },
                    new double[] { -1.73466000e+03, -1.73932000e+03, -8.88345000e+02, -4.20300000e+01 },
                    new double[] { -1.94721000e+03, -1.95249000e+03, -4.20300000e+01, -8.88345000e+02 } },
            1e-8));

    // Workspace matrices/vectors shared by the three update pieces.
    final int m = S_noise.length;
    double[][] A = new double[m][m];
    double[][] R = new double[m][m];
    double[][] tmp = new double[m][m];
    int[] I = new int[m];
    double[] Y = new double[m];
    double[] Y2 = new double[m];

    // Performs the work IN PLACE
    AffinityPropagation.affinityPiece1(A, S_noise, tmp, I, Y, Y2);

    // Piece 1: A and R untouched (still zero); I holds per-row argmax of S,
    // Y the row maxima, Y2 the second-best values; tmp is S with each row's
    // maximum replaced by -Infinity.
    assertTrue(MatUtils.equalsExactly(A, MatUtils.rep(0.0, m, m)));
    assertTrue(MatUtils.equalsExactly(R, MatUtils.rep(0.0, m, m)));
    assertTrue(VecUtils.equalsExactly(I, new int[] { 1, 0, 3, 2 }));
    assertTrue(VecUtils.equalsWithTolerance(Y, new double[] { -0.1, -0.1, -42.03, -42.03 }, 1e-12));
    assertTrue(
            VecUtils.equalsWithTolerance(Y2, new double[] { -888.345, -888.345, -888.345, -888.345 }, 1e-12));

    assertTrue(MatUtils.equalsWithTolerance(tmp, new double[][] {
            new double[] { -8.88345000e+02, Double.NEGATIVE_INFINITY, -1.73466000e+03, -1.94721000e+03 },
            new double[] { Double.NEGATIVE_INFINITY, -8.88345000e+02, -1.73932000e+03, -1.95249000e+03 },
            new double[] { -1.73466000e+03, -1.73932000e+03, -8.88345000e+02, Double.NEGATIVE_INFINITY },
            new double[] { -1.94721000e+03, -1.95249000e+03, Double.NEGATIVE_INFINITY, -8.88345000e+02 } },
            1e-8));

    // Performs the work IN PLACE
    // Piece 2: responsibility update with damping factor 0.5.
    double[] colSums = new double[m];
    AffinityPropagation.affinityPiece2(colSums, tmp, I, S_noise, R, Y, Y2, 0.5);

    assertTrue(MatUtils.equalsWithTolerance(R,
            new double[][] { new double[] { -444.1225, 444.1225, -867.28, -973.555 },
                    new double[] { 444.1225, -444.1225, -869.61, -976.195 },
                    new double[] { -846.315, -848.645, -423.1575, 423.1575 },
                    new double[] { -952.59, -955.23, 423.1575, -423.1575 } },
            1e-12));

    assertTrue(MatUtils.equalsWithTolerance(tmp,
            new double[][] { new double[] { -444.1225, 444.1225, 0., 0. },
                    new double[] { 444.1225, -444.1225, 0., 0. }, new double[] { 0., 0., -423.1575, 423.1575 },
                    new double[] { 0., 0., 423.1575, -423.1575 } },
            1e-12));

    // Performs the work IN PLACE
    // Piece 3: availability update (R must remain unchanged by this step).
    double[] mask = new double[m];
    AffinityPropagation.affinityPiece3(tmp, colSums, A, R, mask, 0.5);

    assertTrue(MatUtils.equalsWithTolerance(R,
            new double[][] { new double[] { -444.1225, 444.1225, -867.28, -973.555 },
                    new double[] { 444.1225, -444.1225, -869.61, -976.195 },
                    new double[] { -846.315, -848.645, -423.1575, 423.1575 },
                    new double[] { -952.59, -955.23, 423.1575, -423.1575 } },
            1e-12));

    assertTrue(MatUtils.equalsWithTolerance(A,
            new double[][] { new double[] { 2.22061250e+02, -2.22061250e+02, -2.84217094e-14, 0.00000000e+00 },
                    new double[] { -2.22061250e+02, 2.22061250e+02, -2.84217094e-14, 0.00000000e+00 },
                    new double[] { -8.52651283e-14, 0.00000000e+00, 2.11578750e+02, -2.11578750e+02 },
                    new double[] { -8.52651283e-14, 0.00000000e+00, -2.11578750e+02, 2.11578750e+02 } },
            1e-12));

    assertTrue(MatUtils.equalsWithTolerance(tmp,
            new double[][] { new double[] { -2.22061250e+02, 2.22061250e+02, 2.84217094e-14, 0.00000000e+00 },
                    new double[] { 2.22061250e+02, -2.22061250e+02, 2.84217094e-14, 0.00000000e+00 },
                    new double[] { 8.52651283e-14, 0.00000000e+00, -2.11578750e+02, 2.11578750e+02 },
                    new double[] { 8.52651283e-14, 0.00000000e+00, 2.11578750e+02, -2.11578750e+02 } },
            1e-12));
}