Example usage for java.lang Double NEGATIVE_INFINITY

Introduction

On this page you can find example usages of java.lang.Double.NEGATIVE_INFINITY, drawn from a variety of real-world source files.

Prototype

public static final double NEGATIVE_INFINITY

Document

A constant holding the negative infinity of type double.
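
Before the project examples, here is a short, self-contained sketch (not taken from any of the projects below) of how NEGATIVE_INFINITY behaves in comparisons and arithmetic, and why it makes a convenient starting value when searching for a maximum:

public class NegativeInfinityDemo {
    public static void main(String[] args) {
        double negInf = Double.NEGATIVE_INFINITY;

        // NEGATIVE_INFINITY is smaller than every finite double,
        // so it is a safe identity element for max-style reductions.
        System.out.println(negInf < -Double.MAX_VALUE);          // true
        System.out.println(Math.max(negInf, -1e308));            // -1.0E308

        // Arithmetic follows IEEE 754 rules.
        System.out.println(negInf + negInf);                     // -Infinity
        System.out.println(1.0 / negInf);                        // -0.0
        System.out.println(negInf - negInf);                     // NaN

        // Infinities can be compared with ==, unlike NaN,
        // but Double.isInfinite is the idiomatic check.
        System.out.println(negInf == Double.NEGATIVE_INFINITY);  // true
        System.out.println(Double.isInfinite(negInf));           // true
    }
}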

Usage

From source file:net.myrrix.online.som.SelfOrganizingMaps.java

/**
 * @return coordinates of {@link Node} in map whose center is "closest" to the given vector. Here closeness
 *  is defined as smallest angle between the vectors
 */
private static int[] findBestMatchingUnit(float[] vector, Node[][] map) {
    int mapSize = map.length;
    double vectorNorm = SimpleVectorMath.norm(vector);
    double bestScore = Double.NEGATIVE_INFINITY;
    int bestI = -1;
    int bestJ = -1;
    for (int i = 0; i < mapSize; i++) {
        Node[] mapRow = map[i];
        for (int j = 0; j < mapSize; j++) {
            float[] center = mapRow[j].getCenter();
            double currentScore = SimpleVectorMath.dot(vector, center)
                    / (SimpleVectorMath.norm(center) * vectorNorm);
            if (LangUtils.isFinite(currentScore) && currentScore > bestScore) {
                bestScore = currentScore;
                bestI = i;
                bestJ = j;
            }
        }
    }
    return bestI == -1 || bestJ == -1 ? null : new int[] { bestI, bestJ };
}

From source file:de.tudarmstadt.lt.lm.app.LineProbPerp.java

void processLine(String line, ModelPerplexity<String> perp, ModelPerplexity<String> perp_oov) {

    if (line.trim().isEmpty()) {
        println(getOutputLine(line, 0, 0, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
        return;
    }

    List<String>[] ngrams;
    try {
        List<String> tokens = _lm_prvdr.tokenizeSentence(line);
        if (tokens == null || tokens.isEmpty()) {
            println(getOutputLine(line, 0, 0, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                    Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
            return;
        }
        ngrams = _lm_prvdr.getNgramSequence(tokens);
        if (ngrams == null || ngrams.length == 0) {
            println(getOutputLine(line, 0, 0, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                    Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
            return;
        }
    } catch (Exception e) {
        LOG.error("{}: Could not get ngrams from line '{}'.", _rmi_string, StringUtils.abbreviate(line, 100),
                e);
        return;
    }

    perp.reset();
    perp_oov.reset();
    for (List<String> ngram : ngrams) {
        if (ngram.isEmpty())
            continue;
        try {
            perp_oov.addLog10Prob(ngram);
            if (!_lm_prvdr.ngramEndsWithOOV(ngram))
                perp.addLog10Prob(ngram);

        } catch (Exception e) {
            LOG.error("{}: Could not add ngram '{}' to perplexity.", _rmi_string, ngram);
            continue;
        }
    }
    println(getOutputLine(line, perp_oov.getN(), perp_oov.getN() - perp.getN(), perp_oov.getLog10Probs(),
            perp_oov.get(), perp.getLog10Probs(), perp.get()));

}

From source file:edu.cornell.med.icb.learning.MinMaxScalingRowProcessor.java

private double getMax(final double[] values) {
    double max = Double.NEGATIVE_INFINITY;
    for (final double value : values) {
        max = Math.max(value, max);
    }
    return max;
}

From source file:edu.cuny.cat.stat.HistoricalReport.java

private void initializePriceRanges() {
    highestBidPrice = Double.NEGATIVE_INFINITY;
    lowestAskPrice = Double.POSITIVE_INFINITY;

    highestUnmatchedBid = null;
    lowestUnmatchedAsk = null;
}
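
HistoricalReport resets its running price bounds with opposite infinities so that the first bid or ask observed immediately replaces both sentinels; the BubbleChartPlotter example below uses the same trick for its axis and color ranges. Here is a minimal, self-contained sketch of that pattern (class and field names are illustrative, not from either project):

class RangeTracker {
    private double max = Double.NEGATIVE_INFINITY;
    private double min = Double.POSITIVE_INFINITY;

    void observe(double value) {
        // The first call always updates both bounds, because every
        // finite value beats the infinite sentinels.
        if (value > max) { max = value; }
        if (value < min) { min = value; }
    }

    boolean hasObservations() {
        // If nothing was ever observed, the sentinels are still in place.
        return max >= min;
    }
}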

From source file:com.rapidminer.gui.plotter.charts.BubbleChartPlotter.java

private void prepareNominalData() {
    DataTable dataTable = getDataTable();
    this.nominal = true;
    xyzDataSet = new DefaultXYZDataset();

    if (axis[X_AXIS] >= 0 && axis[Y_AXIS] >= 0 && axis[BUBBLE_SIZE_AXIS] >= 0) {

        this.bubbleSizeMin = Double.POSITIVE_INFINITY;
        this.bubbleSizeMax = Double.NEGATIVE_INFINITY;
        this.xAxisMin = Double.POSITIVE_INFINITY;
        this.xAxisMax = Double.NEGATIVE_INFINITY;
        this.yAxisMin = Double.POSITIVE_INFINITY;
        this.yAxisMax = Double.NEGATIVE_INFINITY;
        this.minColor = Double.POSITIVE_INFINITY;
        this.maxColor = Double.NEGATIVE_INFINITY;

        Map<String, List<double[]>> dataCollection = new LinkedHashMap<>();

        synchronized (dataTable) {
            Iterator<DataTableRow> i = dataTable.iterator();
            while (i.hasNext()) {
                DataTableRow row = i.next();

                double xValue = row.getValue(axis[X_AXIS]);
                double yValue = row.getValue(axis[Y_AXIS]);
                double bubbleSizeValue = row.getValue(axis[BUBBLE_SIZE_AXIS]);

                double colorValue = Double.NaN;
                if (colorColumn >= 0) {
                    colorValue = row.getValue(colorColumn);
                }

                if (!Double.isNaN(xValue) && !Double.isNaN(yValue) && !Double.isNaN(bubbleSizeValue)) {
                    addPoint(dataTable, dataCollection, xValue, yValue, bubbleSizeValue, colorValue);
                }
            }
        }

        Iterator<Map.Entry<String, List<double[]>>> i = dataCollection.entrySet().iterator();
        double scaleFactor = Math.min(this.xAxisMax - this.xAxisMin, this.yAxisMax - this.yAxisMin) / 4.0d;
        while (i.hasNext()) {
            Map.Entry<String, List<double[]>> entry = i.next();
            String seriesName = entry.getKey();
            List<double[]> dataList = entry.getValue();
            double[][] data = new double[3][dataList.size()];
            int listCounter = 0;
            Iterator<double[]> j = dataList.iterator();
            while (j.hasNext()) {
                double[] current = j.next();
                data[X_AXIS][listCounter] = current[X_AXIS];
                data[Y_AXIS][listCounter] = current[Y_AXIS];
                data[BUBBLE_SIZE_AXIS][listCounter] = ((current[BUBBLE_SIZE_AXIS] - bubbleSizeMin)
                        / (bubbleSizeMax - bubbleSizeMin) + 0.1) * scaleFactor;
                listCounter++;
            }
            xyzDataSet.addSeries(seriesName, data);
        }
    }
}

From source file:com.linkedin.pinot.query.aggregation.DefaultAggregationExecutorTest.java

/**
 * Helper method to compute max of a given array of values.
 * @param values values to compute the maximum over
 * @return the maximum of the given values, or Double.NEGATIVE_INFINITY if the array is empty
 */
private double computeMax(double[] values) {
    double max = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < values.length; i++) {
        max = Math.max(max, values[i]);
    }
    return max;
}
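
Because the accumulator starts at Double.NEGATIVE_INFINITY, this helper returns NEGATIVE_INFINITY for an empty array instead of throwing. A brief, illustrative usage note (not part of the Pinot test itself):

double max = computeMax(new double[0]);   // yields Double.NEGATIVE_INFINITY
if (Double.isInfinite(max)) {
    // no values were present; fall back to a default or report an error
}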

From source file:guineu.modules.filter.Alignment.SerumHuNormalization.SerumHuNormalizationTask.java

private void normalize(Dataset data, Dataset newData) {
    // First row => ids of the samples ( 0 == standard serum, 1 == normal sample)
    this.ids = data.getRow(0).clone();

    // Second row => run order
    this.runOrder = data.getRow(0).clone();

    // Third row => different data sets
    this.batches = data.getRow(0).clone();

    for (String name : data.getAllColumnNames()) {
        ids.setPeak(name, data.getParametersValue(name, this.id));
        runOrder.setPeak(name, data.getParametersValue(name, this.order));
        batches.setPeak(name, data.getParametersValue(name, this.batchesName));
    }

    int numBatches = 1;
    double n = (Double) batches.getPeak(data.getAllColumnNames().get(0));

    for (String name : data.getAllColumnNames()) {
        if ((Double) batches.getPeak(name) > n) {
            numBatches++;
            n = (Double) batches.getPeak(name);
        }
    }

    this.createCurves(data, numBatches);
    for (int batch = 0; batch < numBatches; batch++) {
        message = "Normalizing";
        this.totalRows = data.getNumberRows();
        this.processedRows = 0;
        List<String> names = data.getAllColumnNames();
        for (int i = 0; i < data.getNumberRows(); i++) {
            this.processedRows++;
            PeakListRow row = data.getRow(i);
            PeakListRow newrow = newData.getRow(i);
            try {
                // Get the interpolation of all the human serum points using Loess 
                PolynomialSplineFunction function = functions.get(row.getID()).get(batch);

                if (function != null) {
                    // Prepare the points for the extrapolation
                    PolynomialFunction extrapolationFunction = null;
                    if (this.extrapolation) {
                        List<Double> points = new ArrayList<Double>();
                        for (int e = 0; e < row.getNumberPeaks(); e++) {
                            if ((Double) batches.getPeak(names.get(e)) == batch) {
                                try {
                                    points.add(function.value((Double) runOrder.getPeak(names.get(e))));
                                } catch (ArgumentOutsideDomainException ex) {
                                    Logger.getLogger(SerumHuNormalizationTask.class.getName()).log(Level.SEVERE,
                                            null, ex);
                                }
                            }
                        }

                        // Extrapolation function
                        extrapolationFunction = this.fittPolinomialFunction(batches, runOrder, names, batch,
                                points);
                    }
                    double lastPoint = 0;
                    for (int e = 0; e < row.getNumberPeaks(); e++) {
                        String sampleName = names.get(e);
                        if ((Double) ids.getPeak(sampleName) > 0.0) {
                            if ((Double) batches.getPeak(sampleName) == batch) {
                                try {

                                    if ((Double) ids.getPeak(sampleName) == 0) {
                                        lastPoint = function.value((Double) runOrder.getPeak(sampleName));
                                    }
                                    double value = 0;
                                    try {
                                        Double controlMol = function
                                                .value((Double) runOrder.getPeak(names.get(e)));
                                        if (controlMol < 0.0 || Double.isNaN(controlMol)
                                                || controlMol == Double.POSITIVE_INFINITY
                                                || controlMol == Double.NEGATIVE_INFINITY) {
                                            controlMol = getAverage(ids, row, e, names);
                                        }

                                        value = (Double) row.getPeak(sampleName) / controlMol;

                                        if (value < 0.0 || Double.isNaN(value)
                                                || value == Double.POSITIVE_INFINITY
                                                || value == Double.NEGATIVE_INFINITY) {
                                            controlMol = getAverage(ids, row, e, names);
                                        }

                                        value = (Double) row.getPeak(sampleName) / controlMol;
                                    } catch (ClassCastException exception) {
                                        value = -100;
                                    }
                                    newrow.setPeak(sampleName, value);
                                } catch (ArgumentOutsideDomainException ex) {
                                    // ex.printStackTrace();
                                    //if the value has to be extrapolated
                                    if (extrapolation && extrapolationFunction != null) {
                                        double value = 0;
                                        try {

                                            Double controlMol = extrapolationFunction
                                                    .value((Double) runOrder.getPeak(names.get(e)));
                                            if (controlMol < 0.0 || Double.isNaN(controlMol)
                                                    || controlMol == Double.POSITIVE_INFINITY
                                                    || controlMol == Double.NEGATIVE_INFINITY) {
                                                controlMol = getAverage(ids, row, e, names);
                                            }
                                            value = (Double) row.getPeak(sampleName) / controlMol;

                                            if (value < 0.0 || Double.isNaN(value)
                                                    || value == Double.POSITIVE_INFINITY
                                                    || value == Double.NEGATIVE_INFINITY) {
                                                controlMol = getAverage(ids, row, e, names);
                                            }

                                            value = (Double) row.getPeak(sampleName) / controlMol;
                                        } catch (ClassCastException exception) {
                                            value = -100;
                                        }
                                        newrow.setPeak(sampleName, value);
                                    } else {
                                        double value = 0;
                                        try {
                                            value = (Double) row.getPeak(sampleName) / lastPoint;//extrapolationFunction.value((Double) runOrder.getPeak(names.elementAt(e)));
                                        } catch (ClassCastException exception) {
                                            value = -100;
                                        }
                                        newrow.setPeak(sampleName, value);
                                    }

                                }
                            }
                        }
                    }
                } else {
                    System.out.println("Function is null" + row.getID());
                }
            } catch (Exception exception) {
                exception.printStackTrace();
                System.out.println(row.getID());

            }
        }
    }

}

From source file:beast.structuredCoalescent.distribution.Masco.java

public double calculateLogP() {
    // newly calculate tree intervals
    treeIntervalsInput.get().calculateIntervals();
    // correctly calculate the daughter nodes at coalescent intervals in the case of
    // bifurcation or in case two nodes are at the same height
    treeIntervalsInput.get().swap();

    // Set up ArrayLists for the indices of active lineages and the lineage state probabilities
    activeLineages = new ArrayList<Integer>();
    lineStateProbs = new ArrayList<Double>();

    // Compute likelihood at each integration time and tree event starting at final sampling time and moving backwards
    logP = 0;

    // set the current time
    double currTime = 0.0;
    // total number of intervals
    final int intervalCount = treeIntervalsInput.get().getIntervalCount();
    // interval time counter
    int t = 0;
    // initialize the number of lineages
    nr_lineages = 0;
    // Captures the probabilities of lineages being in a state
    double[] p;

    // Initialize the migration rates matrix
    double[][] migration_rates = new double[states][states];
    int c = 0;

    for (int k = 0; k < states; k++) {
        for (int l = 0; l < states; l++) {
            if (k != l) {
                migration_rates[k][l] = migrationRatesInput.get().getArrayValue(c);
                c++;
            } else { // diagonal
                migration_rates[k][l] = 0.0;
            }

        }
    }

    // Initialize the coalescent rates
    double[] coalescent_rates = new double[states];
    for (int k = 0; k < states; k++) {
        coalescent_rates[k] = coalescentRatesInput.get().getArrayValue(k) / 2;//(epiModelInput.get().getF(t,k,k) / (Y.get(k)*Y.get(k)));
    }

    // integrate until there are no more tree intervals
    do {
        double nextIntervalTime = treeIntervalsInput.get().getInterval(t);

        // Length of the current interval
        final double duration = nextIntervalTime;// - currTime;
        // if the current interval has a length greater than 0, integrate
        if (duration > 0) {
            if (dependentHistory)
                p = new double[lineStateProbs.size()]; // Captures the probabilities of lineages being in a state
            else
                p = new double[lineStateProbs.size() + 1]; // Captures the probabilities of lineages being in a state, last one keeps track of the probability

            // convert the array list to double[]
            for (int i = 0; i < lineStateProbs.size(); i++)
                p[i] = lineStateProbs.get(i);

            // not needed
            if (!dependentHistory)
                p[lineStateProbs.size()] = 1;

            double[] p_for_ode = new double[p.length];
            double ts = 0.0;

            // If proportional time step is true, set the integration time for the given interval
            // inversely proportional to the number of lineages
            if (propTimeStep)
                ts = timeStep / lineStateProbs.size();
            else
                ts = timeStep;

            // Never choose a longer time step than the integration window
            if (duration < (ts / 2))
                ts = duration / 2;

            FirstOrderIntegrator integrator = new ClassicalRungeKuttaIntegrator(ts);
            // set the odes
            FirstOrderDifferentialEquations ode = new ode_masco(migration_rates, coalescent_rates, nr_lineages,
                    states);
            // integrate                   
            integrator.integrate(ode, 0, p, duration, p_for_ode);

            // If the Dimension is larger than the maximum integer, at least one state prob is below 0 and the step is rejected
            if (ode.getDimension() == Integer.MAX_VALUE) {
                System.out.println(lineStateProbs.size());
                System.out.println("lalalallal");
                return Double.NEGATIVE_INFINITY;
            }

            for (int i = 0; i < lineStateProbs.size(); i++)
                lineStateProbs.set(i, p_for_ode[i]);
        }

        // update the time
        currTime = nextIntervalTime;
        // event is coalescent event
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.COALESCENT) {
            logP += coalesce(t);
            nr_lineages--;
        }

        // event is sampling event
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.SAMPLE) {
            logP += normalizeLineages();
            addLineages(t);
            nr_lineages++;
        }

        // update the interval number
        t++;
    } while (t < intervalCount);

    //Compute likelihood of remaining tree intervals (coal events occurring before origin)
    if (Double.isInfinite(logP))
        logP = Double.NEGATIVE_INFINITY;
    if (max_posterior < logP && logP < 0) {
        max_posterior = logP;
        max_mig = new double[states * (states - 1)];
        max_coal = new double[states];
        for (int i = 0; i < 1; i++)
            max_mig[i] = migrationRatesInput.get().getArrayValue(i);
        for (int i = 0; i < 1; i++)
            max_coal[i] = coalescentRatesInput.get().getArrayValue(i);
    }
    //        System.exit(0);

    return logP;

}
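
Both this example and the next (IndependentStructuredCoalescent) use Double.NEGATIVE_INFINITY as the log-space encoding of probability zero: the rejection branch inside the integration loop returns it directly, and an infinite logP is clamped to it before returning, so the enclosing sampler simply rejects the state. A minimal sketch of that clamping convention (safeLogP is a hypothetical helper, not part of the BEAST code):

// Map any invalid log probability to log(0) = Double.NEGATIVE_INFINITY,
// which a sampler treats as an automatic rejection of the proposed state.
static double safeLogP(double rawLogP) {
    if (Double.isNaN(rawLogP) || rawLogP == Double.POSITIVE_INFINITY) {
        return Double.NEGATIVE_INFINITY; // probability zero
    }
    return rawLogP;
}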

From source file:beast.structuredCoalescent.distribution.IndependentStructuredCoalescent.java

public double calculateLogP() {
    // newly calculate tree intervals
    treeIntervalsInput.get().calculateIntervals();
    // correctly calculate the daughter nodes at coalescent intervals in the case of
    // bifurcation or in case two nodes are at the same height
    treeIntervalsInput.get().swap();

    // Set up ArrayLists for the indices of active lineages and the lineage state probabilities
    activeLineages = new ArrayList<Integer>();
    lineStateProbs = new ArrayList<Double>();

    // Compute likelihood at each integration time and tree event starting at final sampling time and moving backwards
    logP = 0;

    // set the current time
    double currTime = 0.0;
    // total number of intervals
    final int intervalCount = treeIntervalsInput.get().getIntervalCount();
    // interval time counter
    int t = 0;
    // initialize the number of lineages
    nr_lineages = 0;
    // Captures the probabilities of lineages being in a state
    double[] p;

    // Initialize the migration rates matrix
    double[][] migration_rates = new double[states][states];
    int c = 0;

    for (int k = 0; k < states; k++) {
        for (int l = 0; l < states; l++) {
            if (k != l) {
                migration_rates[k][l] = migrationRatesInput.get().getArrayValue(c);
                c++;
            } else { // diagonal
                migration_rates[k][l] = 0.0;
            }

        }
    }

    // Initialize the coalescent rates
    double[] coalescent_rates = new double[states];
    for (int k = 0; k < states; k++) {
        coalescent_rates[k] = coalescentRatesInput.get().getArrayValue(k) / 2;//(epiModelInput.get().getF(t,k,k) / (Y.get(k)*Y.get(k)));
    }

    // integrate until there are no more tree intervals
    do {
        double nextIntervalTime = treeIntervalsInput.get().getInterval(t);

        // Length of the current interval
        final double duration = nextIntervalTime;// - currTime;
        // if the current interval has a length greater than 0, integrate
        if (duration > 0) {
            if (dependentHistory)
                p = new double[lineStateProbs.size()]; // Captures the probabilities of lineages being in a state
            else
                p = new double[lineStateProbs.size() + 1]; // Captures the probabilities of lineages being in a state, last one keeps track of the probability

            // convert the array list to double[]
            for (int i = 0; i < lineStateProbs.size(); i++)
                p[i] = lineStateProbs.get(i);

            // not needed
            if (!dependentHistory)
                p[lineStateProbs.size()] = 1;

            double[] p_for_ode = new double[p.length];
            double ts = 0.0;

            // If proportional time step is true, set the integration time for the given interval
            // inversely proportional to the number of lineages
            if (propTimeStep)
                ts = timeStep / lineStateProbs.size();
            else
                ts = timeStep;

            // Never choose a longer time step than the integration window
            if (duration < (ts / 2))
                ts = duration / 2;

            FirstOrderIntegrator integrator = new ClassicalRungeKuttaIntegrator(ts);
            // set the odes
            FirstOrderDifferentialEquations ode = new independent_ode_integrator(migration_rates,
                    coalescent_rates, nr_lineages, states);
            // integrate                   
            integrator.integrate(ode, 0, p, duration, p_for_ode);

            // If the Dimension is larger than the maximum integer, at least one state prob is below 0 and the step is rejected
            if (ode.getDimension() == Integer.MAX_VALUE) {
                System.out.println(lineStateProbs.size());
                System.out.println("lalalallal");
                return Double.NEGATIVE_INFINITY;
            }

            for (int i = 0; i < lineStateProbs.size(); i++)
                lineStateProbs.set(i, p_for_ode[i]);
        }

        // update the time
        currTime = nextIntervalTime;
        // event is coalescent event
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.COALESCENT) {
            logP += coalesce(t);
            nr_lineages--;
        }

        // event is sampling event
        if (treeIntervalsInput.get().getIntervalType(t) == IntervalType.SAMPLE) {
            addLineages(t);
            nr_lineages++;
        }

        // update the interval number
        t++;
    } while (t < intervalCount);

    //Compute likelihood of remaining tree intervals (coal events occurring before origin)
    if (Double.isInfinite(logP))
        logP = Double.NEGATIVE_INFINITY;
    if (max_posterior < logP && logP < 0) {
        max_posterior = logP;
        max_mig = new double[states * (states - 1)];
        max_coal = new double[states];
        for (int i = 0; i < 1; i++)
            max_mig[i] = migrationRatesInput.get().getArrayValue(i);
        for (int i = 0; i < 1; i++)
            max_coal[i] = coalescentRatesInput.get().getArrayValue(i);
    }

    return logP;

}

From source file:egat.cli.strategyregret.StrategyRegretCommandHandler.java

protected void findRegret(Profile profile, StrategicGame game) {
    Player[] players = profile.players().toArray(new Player[0]);
    Strategy[] strategies = new Strategy[players.length];

    for (int i = 0; i < players.length; i++) {
        strategies[i] = profile.getStrategy(players[i]);
    }

    int playerIndex = 0;

    if (playerId != null) {
        for (int i = 0; i < players.length; i++) {
            if (playerId.equals(players[i].getID())) {
                playerIndex = i;
                break;
            }
        }
    }

    System.out.print("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
    System.out.print(String.format("<strategy-regret player=\"%s\">", players[playerIndex].getID()));

    double maxPayoff = Double.NEGATIVE_INFINITY;

    Action[] actions = game.getActions(players[playerIndex]).toArray(new Action[0]);
    double[] payoffs = new double[actions.length];

    for (int i = 0; i < actions.length; i++) {
        strategies[playerIndex] = Games.createStrategy(new Action[] { actions[i] }, new Number[] { 1.0 });
        double response = game.payoff(Games.createProfile(players, strategies)).getPayoff(players[0])
                .getValue();
        payoffs[i] = response;
        maxPayoff = Math.max(response, maxPayoff);
    }

    for (int i = 0; i < actions.length; i++) {
        System.out.print(String.format("<action id=\"%s\" regret=\"%f\" />", actions[i].getID(),
                maxPayoff - payoffs[i]));
    }

    System.out.print("</strategy-regret>");
}