List of usage examples for java.lang Double NEGATIVE_INFINITY
public static final double NEGATIVE_INFINITY — a constant holding the negative infinity of type double, equal to Double.longBitsToDouble(0xfff0000000000000L).
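Before the project examples, here is a minimal standalone sketch of the most common idiom that recurs below: seeding a running maximum with NEGATIVE_INFINITY so that any real value replaces it. The class name and sample values are made up for illustration and are not taken from any of the listed projects.

public class NegativeInfinityExample {
    public static void main(String[] args) {
        double[] values = {3.5, -2.0, 7.25, 0.0};
        double max = Double.NEGATIVE_INFINITY; // every finite double compares greater than this seed
        for (double v : values) {
            if (v > max) {
                max = v;
            }
        }
        System.out.println("max = " + max);                              // 7.25
        System.out.println(1.0 / 0.0 == Double.POSITIVE_INFINITY);       // true: overflow yields infinity
        System.out.println(Math.log(0.0) == Double.NEGATIVE_INFINITY);   // true: log of zero is -Infinity
    }
}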
From source file:org.logisticPlanning.utils.graphics.chart.impl.jfree._JFCXYItemRenderer.java
/**
 * create a wrapped renderer
 *
 * @param out
 *          the output renderer
 */
_JFCXYItemRenderer(final XYItemRenderer out) {
    this.m_out = out;
    // this.m_maxX = Double.MAX_VALUE;
    // this.m_maxY = Double.MAX_VALUE;
    // this.m_minX = Double.NEGATIVE_INFINITY;
    // this.m_minY = Double.NEGATIVE_INFINITY;
    this.m_maxX = Double.NEGATIVE_INFINITY;
    this.m_maxY = Double.NEGATIVE_INFINITY;
    this.m_minX = Double.POSITIVE_INFINITY;
    this.m_minY = Double.POSITIVE_INFINITY;
    // this.m_src = new double[8];
    // this.m_dst = new double[8];
}
From source file:gedi.util.math.stat.distributions.NormalMixtureDistribution.java
public static NormalMixtureDistribution fit(NormalMixtureDistribution initialMixture, double[] data,
        final int maxIterations, final double threshold) {
    if (maxIterations < 1) {
        throw new NotStrictlyPositiveException(maxIterations);
    }
    if (threshold < Double.MIN_VALUE) {
        throw new NotStrictlyPositiveException(threshold);
    }

    final int n = data.length;
    final int k = initialMixture.getNumComponents();

    if (k == 1)
        return new NormalMixtureDistribution(new NormalDistribution[] {
                new NormalDistribution(new Mean().evaluate(data), new StandardDeviation().evaluate(data)) },
                new double[] { 1 });

    int numIterations = 0;
    double previousLogLikelihood = 0d;
    double logLikelihood = Double.NEGATIVE_INFINITY;

    // Initialize model to fit to initial mixture.
    NormalMixtureDistribution fittedModel = new NormalMixtureDistribution(initialMixture.components,
            initialMixture.mixing);

    while (numIterations++ <= maxIterations
            && FastMath.abs(previousLogLikelihood - logLikelihood) > threshold) {

        previousLogLikelihood = logLikelihood;
        logLikelihood = 0d;

        // E-step: compute the data dependent parameters of the expectation function.
        // The percentage of row's total density between a row and a component
        final double[][] gamma = new double[n][k];
        // Sum of gamma for each component
        final double[] gammaSums = new double[k];

        for (int i = 0; i < n; i++) {
            final double rowDensity = fittedModel.density(data[i]);
            logLikelihood += FastMath.log(rowDensity);

            for (int j = 0; j < k; j++) {
                gamma[i][j] = fittedModel.mixing[j] * fittedModel.components[j].density(data[i]) / rowDensity;
                gammaSums[j] += gamma[i][j];
            }
        }
        logLikelihood /= n;
        // System.out.println(logLikelihood);

        // M-step: compute the new parameters based on the expectation function.
        final double[] newWeights = gammaSums.clone();
        ArrayUtils.mult(newWeights, 1.0 / n);

        NormalDistribution[] comp = new NormalDistribution[k];
        for (int j = 0; j < k; j++) {
            double m = 0;
            for (int i = 0; i < n; i++) {
                m += gamma[i][j] * data[i];
            }
            m /= gammaSums[j];

            double var = 0;
            for (int i = 0; i < n; i++) {
                double d = m - data[i];
                var += gamma[i][j] * d * d;
            }
            var /= gammaSums[j];

            comp[j] = new NormalDistribution(m, Math.sqrt(var));
        }

        // Update current model
        fittedModel = new NormalMixtureDistribution(comp, newWeights);
    }

    if (FastMath.abs(previousLogLikelihood - logLikelihood) > threshold) {
        // Did not converge before the maximum number of iterations
        throw new ConvergenceException();
    }

    return fittedModel;
}
From source file:edu.gslis.ts.ChunkToFile.java
/**
 * @param thriftFile
 */
public void filter(File infile, Map<Integer, FeatureVector> queries, IndexWrapper index, Stopper stopper,
        String outputPath) {
    try {
        InputStream in = null;

        if (infile.getName().endsWith(".gz"))
            in = new GZIPInputStream(new FileInputStream(infile));
        else if (infile.getName().endsWith("xz"))
            in = new XZInputStream(new FileInputStream(infile));
        else {
            System.err.println("Regular FileInputStream");
            in = new FileInputStream(infile);
        }

        TTransport inTransport = new TIOStreamTransport(new BufferedInputStream(in));
        TBinaryProtocol inProtocol = new TBinaryProtocol(inTransport);
        inTransport.open();

        Pairtree ptree = new Pairtree();

        try {
            // Run through items in the chunk file
            while (true) {
                final StreamItem item = new StreamItem();
                item.read(inProtocol);

                FeatureVector dv = new FeatureVector(item.body.clean_visible, stopper);

                double maxScore = Double.NEGATIVE_INFINITY;
                int qid = -1;
                for (int id : queries.keySet()) {
                    FeatureVector qv = queries.get(id);
                    double score = kl(dv, qv, index, MU);
                    if (score > maxScore) {
                        qid = id;
                        maxScore = score;
                    }
                }

                String streamId = item.stream_id;
                System.out.println(streamId + "=" + qid);
                //System.out.println(streamId);
                String ppath = ptree.mapToPPath(streamId.replace("-", ""));
                //System.out.println(streamId + "=>" + ppath);

                File dir = new File(outputPath + File.separator + qid + File.separator + ppath);
                dir.mkdirs();

                XZOutputStream xos = new XZOutputStream(
                        new FileOutputStream(dir.getAbsolutePath() + File.separator + streamId + ".xz"),
                        new LZMA2Options());
                TTransport outTransport = new TIOStreamTransport(xos);
                TBinaryProtocol outProtocol = new TBinaryProtocol(outTransport);
                outTransport.open();
                item.write(outProtocol);
                outTransport.close();
            }
        } catch (TTransportException te) {
            if (te.getType() == TTransportException.END_OF_FILE) {
            } else {
                throw te;
            }
        }

        inTransport.close();
    } catch (Exception e) {
        System.err.println("Error processing " + infile.getAbsolutePath() + " " + infile.getName());
        e.printStackTrace();
    }
}
From source file:emlab.role.market.AbstractMarketRole.java
private double markAcceptedBids(ClearingPoint point, boolean isSupply) {
    long time = point.getTime();
    DecarbonizationMarket market = point.getAbstractMarket();
    double clearedPrice = point.getPrice();
    double clearedVolume = point.getVolume();
    double totalBidVolume = 0d;
    double previousPrice = Double.NEGATIVE_INFINITY;
    double accpetedSamePriceVolume = 0d;
    Iterable<Bid> bids = isSupply
            ? reps.bidRepository.findOffersForMarketForTimeBelowPrice(market, time, clearedPrice)
            : market.isAuction() ? reps.bidRepository.findDemandBidsForMarketForTime(market, time)
                    : reps.bidRepository.findDemandBidsForMarketForTimeAbovePrice(market, time, clearedPrice);
    for (Bid bid : bids) {
        double amount = bid.getAmount();
        totalBidVolume += amount;
        accpetedSamePriceVolume = bid.getPrice() == previousPrice ? accpetedSamePriceVolume + amount : amount;
        if (totalBidVolume < clearedVolume) {
            bid.setStatus(Bid.ACCEPTED);
            bid.setAcceptedAmount(bid.getAmount());
        } else {
            double lastAvailableBidSize = clearedVolume - (totalBidVolume - accpetedSamePriceVolume);
            double samePriceVolume = calculateBidsForMarketForTimeForPrice(market, time, bid.getPrice(),
                    isSupply);
            double adjustRatio = lastAvailableBidSize / samePriceVolume;
            for (Bid partBid : isSupply
                    ? reps.bidRepository.findOffersForMarketForTimeForPrice(market, time, bid.getPrice())
                    : reps.bidRepository.findDemandBidsForMarketForTimeForPrice(market, time, bid.getPrice())) {
                partBid.setStatus(Bid.PARTLY_ACCEPTED);
                partBid.setAcceptedAmount(partBid.getAmount() * adjustRatio);
            }
            break;
        }
        previousPrice = bid.getPrice();
    }
    return previousPrice;
}
From source file:beast.math.distributions.ParametricDistribution.java
private double logDensity(double x, final double offset) {
    // if( x >= offset ) {
    x -= offset;
    final org.apache.commons.math.distribution.Distribution dist = getDistribution();
    if (dist instanceof ContinuousDistribution) {
        return ((ContinuousDistribution) dist).logDensity(x);
    } else if (dist instanceof IntegerDistribution) {
        final double probability = ((IntegerDistribution) dist).probability(x);
        if (probability > 0) {
            return Math.log(probability);
        }
    }
    // }
    return Double.NEGATIVE_INFINITY;
}
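The fallback return value above is consistent with how Java itself handles the logarithm of an impossible outcome. A minimal standalone check, independent of the BEAST code, illustrating why NEGATIVE_INFINITY is the natural log-density for a zero probability:

double zeroProbability = 0.0;
System.out.println(Math.log(zeroProbability));                              // -Infinity
System.out.println(Math.log(zeroProbability) == Double.NEGATIVE_INFINITY);  // true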
From source file:net.myrrix.online.eval.ParameterOptimizer.java
/**
 * @return a {@link Map} between the values of the given {@link System} properties and the best value found
 *         during search
 * @throws ExecutionException if an error occurs while calling {@code evaluator}; the cause is the
 *         underlying exception
 */
public Map<String, Number> findGoodParameterValues() throws ExecutionException {

    int numProperties = parameterRanges.size();
    String[] propertyNames = new String[numProperties];
    Number[][] parameterValuesToTry = new Number[numProperties][];
    int index = 0;
    for (Map.Entry<String, ParameterRange> entry : parameterRanges.entrySet()) {
        propertyNames[index] = entry.getKey();
        parameterValuesToTry[index] = entry.getValue().buildSteps(numSteps);
        index++;
    }

    int numTests = 1;
    for (Number[] toTry : parameterValuesToTry) {
        numTests *= toTry.length;
    }

    List<Pair<Double, String>> testResultLinesByValue = Lists.newArrayListWithCapacity(numTests);

    Map<String, Number> bestParameterValues = Maps.newHashMap();
    double bestValue = minimize ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;

    for (int test = 0; test < numTests; test++) {

        StringBuilder testResultLine = new StringBuilder();
        for (int prop = 0; prop < numProperties; prop++) {
            String property = propertyNames[prop];
            Number parameterValue = getParameterValueToTry(parameterValuesToTry, test, prop);
            String propertyString = parameterValue.toString();
            log.info("Setting {}={}", property, propertyString);
            System.setProperty(property, propertyString);
            testResultLine.append('[').append(property).append('=').append(propertyString).append("] ");
        }

        Number evaluatorResult;
        try {
            evaluatorResult = evaluator.call();
        } catch (Exception e) {
            throw new ExecutionException(e);
        }
        if (evaluatorResult == null) {
            continue;
        }
        double testValue = evaluatorResult.doubleValue();
        testResultLine.append("= ").append(testValue);
        testResultLinesByValue.add(new Pair<Double, String>(testValue, testResultLine.toString()));
        log.info("{}", testResultLine);

        if (minimize ? testValue < bestValue : testValue > bestValue) {
            log.info("New best value {}", testValue);
            bestValue = testValue;
            for (int prop = 0; prop < numProperties; prop++) {
                String property = propertyNames[prop];
                Number parameterValue = getParameterValueToTry(parameterValuesToTry, test, prop);
                bestParameterValues.put(property, parameterValue);
            }
        }

        Collections.sort(testResultLinesByValue, new Comparator<Pair<Double, String>>() {
            @Override
            public int compare(Pair<Double, String> a, Pair<Double, String> b) {
                if (a.getFirst() > b.getFirst()) {
                    return -1;
                }
                if (a.getFirst() < b.getFirst()) {
                    return 1;
                }
                return 0;
            }
        });

        for (Pair<Double, String> result : testResultLinesByValue) {
            log.info("{}", result.getSecond());
        }
        log.info("Best parameter values so far are {}", bestParameterValues);
    }

    log.info("Final best parameter values are {}", bestParameterValues);
    return bestParameterValues;
}
From source file:edu.cornell.med.icb.learning.weka.WekaClassifier.java
public double predict(final ClassificationModel trainingModel, final ClassificationProblem problem,
        final int instanceIndex, final double[] probabilities) {
    assert trainingModel instanceof WekaModel : "Model must be a weka model.";
    final double[] probs;
    try {
        probs = getWekaClassifier(this)
                .distributionForInstance(getWekaProblem(problem).instance(instanceIndex));
    } catch (Exception e) {
        LOG.error("Weka classifier has thrown exception.", e);
        return Double.NaN;
    }
    System.arraycopy(probs, 0, probabilities, 0, probs.length);
    if (LOG.isDebugEnabled()) {
        LOG.debug("decision values: " + ArrayUtils.toString(probabilities));
    }
    double maxProb = Double.NEGATIVE_INFINITY;
    int maxIndex = -1;
    for (int labelIndex = 0; labelIndex < probabilities.length; labelIndex++) {
        if (probabilities[labelIndex] > maxProb) {
            maxProb = probabilities[labelIndex];
            maxIndex = labelIndex;
        }
    }
    final double decision;
    if (maxIndex == -1) {
        decision = Double.NaN;
    } else {
        decision = labelIndex2LabelValue[maxIndex];
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("decision: " + decision);
    }
    return decision;
}
From source file:ch.algotrader.simulation.SimulationResultFormatter.java
public void formatLong(final Appendable buffer, final SimulationResultVO resultVO,
        final CommonConfig commonConfig) throws IOException {

    buffer.append("execution time (min): " + (new DecimalFormat("0.00")).format(resultVO.getMins()) + "\r\n");

    if (resultVO.getAllTrades().getCount() == 0) {
        buffer.append("no trades took place! \r\n");
        return;
    }

    buffer.append("dataSet: " + commonConfig.getDataSet() + "\r\n");

    double netLiqValue = resultVO.getNetLiqValue();
    buffer.append("netLiqValue=" + twoDigitFormat.format(netLiqValue) + "\r\n");

    // monthlyPerformances
    Collection<PeriodPerformanceVO> monthlyPerformances = resultVO.getMonthlyPerformances();
    double maxDrawDownM = 0d;
    double bestMonthlyPerformance = Double.NEGATIVE_INFINITY;
    int positiveMonths = 0;
    int negativeMonths = 0;
    if ((monthlyPerformances != null)) {
        StringBuilder dateBuffer = new StringBuilder("month-year: ");
        StringBuilder performanceBuffer = new StringBuilder("monthlyPerformance: ");
        for (PeriodPerformanceVO monthlyPerformance : monthlyPerformances) {
            maxDrawDownM = Math.min(maxDrawDownM, monthlyPerformance.getValue());
            bestMonthlyPerformance = Math.max(bestMonthlyPerformance, monthlyPerformance.getValue());
            monthFormat.formatTo(DateTimeLegacy.toLocalDate(monthlyPerformance.getDate()), dateBuffer);
            performanceBuffer.append(
                    StringUtils.leftPad(twoDigitFormat.format(monthlyPerformance.getValue() * 100), 6) + "% ");
            if (monthlyPerformance.getValue() > 0) {
                positiveMonths++;
            } else {
                negativeMonths++;
            }
        }
        buffer.append(dateBuffer.toString() + "\r\n");
        buffer.append(performanceBuffer.toString() + "\r\n");
    }

    // yearlyPerformances
    int positiveYears = 0;
    int negativeYears = 0;
    Collection<PeriodPerformanceVO> yearlyPerformances = resultVO.getYearlyPerformances();
    if ((yearlyPerformances != null)) {
        StringBuilder dateBuffer = new StringBuilder("year: ");
        StringBuilder performanceBuffer = new StringBuilder("yearlyPerformance: ");
        for (PeriodPerformanceVO yearlyPerformance : yearlyPerformances) {
            yearFormat.formatTo(DateTimeLegacy.toGMTDate(yearlyPerformance.getDate()), dateBuffer);
            performanceBuffer.append(
                    StringUtils.leftPad(twoDigitFormat.format(yearlyPerformance.getValue() * 100), 6) + "% ");
            if (yearlyPerformance.getValue() > 0) {
                positiveYears++;
            } else {
                negativeYears++;
            }
        }
        buffer.append(dateBuffer.toString() + "\r\n");
        buffer.append(performanceBuffer.toString() + "\r\n");
    }

    if ((monthlyPerformances != null)) {
        buffer.append("posMonths=" + positiveMonths + " negMonths=" + negativeMonths);
        if ((yearlyPerformances != null)) {
            buffer.append(" posYears=" + positiveYears + " negYears=" + negativeYears);
        }
        buffer.append("\r\n");
    }

    PerformanceKeysVO performanceKeys = resultVO.getPerformanceKeys();
    MaxDrawDownVO maxDrawDownVO = resultVO.getMaxDrawDown();
    if (performanceKeys != null && maxDrawDownVO != null) {
        buffer.append("avgM=" + twoDigitFormat.format(performanceKeys.getAvgM() * 100) + "%");
        buffer.append(" stdM=" + twoDigitFormat.format(performanceKeys.getStdM() * 100) + "%");
        buffer.append(" avgY=" + twoDigitFormat.format(performanceKeys.getAvgY() * 100) + "%");
        buffer.append(" stdY=" + twoDigitFormat.format(performanceKeys.getStdY() * 100) + "% ");
        buffer.append(" sharpeRatio=" + twoDigitFormat.format(performanceKeys.getSharpeRatio()) + "\r\n");

        buffer.append("maxMonthlyDrawDown=" + twoDigitFormat.format(-maxDrawDownM * 100) + "%");
        buffer.append(" bestMonthlyPerformance=" + twoDigitFormat.format(bestMonthlyPerformance * 100) + "%");
        buffer.append(" maxDrawDown=" + twoDigitFormat.format(maxDrawDownVO.getAmount() * 100) + "%");
        buffer.append(
                " maxDrawDownPeriod=" + twoDigitFormat.format(maxDrawDownVO.getPeriod() / 86400000) + "days");
        buffer.append(
                " colmarRatio=" + twoDigitFormat.format(performanceKeys.getAvgY() / maxDrawDownVO.getAmount()));
        buffer.append("\r\n");
    }

    buffer.append("WinningTrades:");
    convertTrades(buffer, resultVO.getWinningTrades(), resultVO.getAllTrades().getCount());

    buffer.append("LoosingTrades:");
    convertTrades(buffer, resultVO.getLoosingTrades(), resultVO.getAllTrades().getCount());

    buffer.append("AllTrades:");
    convertTrades(buffer, resultVO.getAllTrades(), resultVO.getAllTrades().getCount());

    for (Map.Entry<String, Object> entry : resultVO.getStrategyResults().entrySet()) {
        buffer.append(entry.getKey() + "=" + entry.getValue() + " ");
    }
}
From source file:org.f3.tools.framework.Reporter.java
private String generateImage(String refName, String name, Number changeFactor) {
    DefaultCategoryDataset dataset = new DefaultCategoryDataset();
    dataset.addValue(changeFactor, 0, 0);
    JFreeChart chart = ChartFactory.createBarChart("", "", "%change", dataset, PlotOrientation.HORIZONTAL,
            false, false, false);
    try {
        Color bgcolor = null;
        double value = changeFactor.doubleValue();
        if (value == Double.POSITIVE_INFINITY || value == Double.NEGATIVE_INFINITY) {
            bgcolor = Color.YELLOW;
        } else if (value > 5) {
            bgcolor = Color.GREEN;
        } else if (value >= -5 && value <= 5) {
            bgcolor = Color.WHITE;
        } else {
            bgcolor = Color.RED;
        }
        chart.setBackgroundPaint(bgcolor);
        File dirFile = new File(IMAGE_DIRNAME);
        if (!dirFile.exists()) {
            dirFile.mkdirs();
        }
        File ofile = new File(dirFile, name);
        ChartUtilities.saveChartAsPNG(ofile, chart, 300, 100);
        return getImageRef(refName, name);
    } catch (IOException ioe) {
        Utils.logger.severe(ioe.getMessage());
    }
    return null;
}
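The two-branch comparison against POSITIVE_INFINITY and NEGATIVE_INFINITY in the example above can also be expressed with the standard library helper Double.isInfinite. A small standalone aside, not part of the Reporter code:

double value = Double.NEGATIVE_INFINITY;
boolean explicit = value == Double.POSITIVE_INFINITY || value == Double.NEGATIVE_INFINITY;
boolean viaHelper = Double.isInfinite(value); // true for either infinity, false for NaN and finite values
System.out.println(explicit == viaHelper);    // true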
From source file:org.jfree.data.general.DefaultHeatMapDatasetTest.java
/**
 * Confirm that cloning works.
 */
@Test
public void testCloning() throws CloneNotSupportedException {
    DefaultHeatMapDataset d1 = new DefaultHeatMapDataset(2, 3, -1.0, 4.0, -2.0, 5.0);
    d1.setZValue(0, 0, 10.0);
    d1.setZValue(0, 1, Double.NEGATIVE_INFINITY);
    d1.setZValue(0, 2, Double.POSITIVE_INFINITY);
    d1.setZValue(1, 0, Double.NaN);
    DefaultHeatMapDataset d2 = (DefaultHeatMapDataset) d1.clone();
    assertTrue(d1 != d2);
    assertTrue(d1.getClass() == d2.getClass());
    assertTrue(d1.equals(d2));

    // simple check for independence
    d1.setZValue(0, 0, 11.0);
    assertFalse(d1.equals(d2));
    d2.setZValue(0, 0, 11.0);
    assertTrue(d1.equals(d2));
}
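A brief standalone aside on why equality checks like those in the test above are well-defined when a dataset holds infinities: unlike NaN, NEGATIVE_INFINITY compares equal to itself, both with == and through boxed equals. The snippet below is illustrative only and not taken from the JFreeChart sources.

System.out.println(Double.NEGATIVE_INFINITY == Double.NEGATIVE_INFINITY);            // true
System.out.println(Double.valueOf(Double.NEGATIVE_INFINITY)
        .equals(Double.valueOf(Double.NEGATIVE_INFINITY)));                           // true
System.out.println(Double.NaN == Double.NaN);                                        // false: NaN is never == itself
System.out.println(Double.valueOf(Double.NaN).equals(Double.valueOf(Double.NaN)));   // true: equals compares bit patterns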