List of usage examples for java.util.TreeMap.floorKey
public K floorKey(K key)
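Returns the greatest key less than or equal to the given key, or null if there is no such key. Throws ClassCastException if the specified key cannot be compared with the keys currently in the map, and NullPointerException if the key is null and the map uses natural ordering or its comparator does not permit null keys.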
From source file:Main.java
public static void main(String[] args) {
    TreeMap<Integer, String> treemap = new TreeMap<Integer, String>();
    // populating tree map
    treemap.put(2, "two");
    treemap.put(1, "one");
    treemap.put(3, "three");
    treemap.put(6, "six");
    treemap.put(5, "five");
    System.out.println("Checking greatest key less than or equal to 4");
    System.out.println("Value is: " + treemap.floorKey(4)); // prints "Value is: 3"
}
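A minimal companion sketch (not part of the example above) showing the null return and how floorKey compares with lowerKey and ceilingKey:

import java.util.TreeMap;

public class FloorKeyDemo {
    public static void main(String[] args) {
        TreeMap<Integer, String> map = new TreeMap<>();
        map.put(10, "ten");
        map.put(20, "twenty");
        map.put(30, "thirty");

        System.out.println(map.floorKey(20));   // 20 (greatest key <= 20)
        System.out.println(map.lowerKey(20));   // 10 (greatest key strictly < 20)
        System.out.println(map.ceilingKey(21)); // 30 (smallest key >= 21)
        System.out.println(map.floorKey(5));    // null (no key <= 5)
    }
}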
From source file:com.opengamma.bloombergexample.loader.DemoEquityOptionCollarPortfolioLoader.java
private void addNodes(ManageablePortfolioNode rootNode, String underlying, boolean includeUnderlying,
        Period[] expiries) {
    ExternalId ticker = ExternalSchemes.bloombergTickerSecurityId(underlying);
    ManageableSecurity underlyingSecurity = null;
    if (includeUnderlying) {
        underlyingSecurity = getOrLoadEquity(ticker);
    }
    ExternalIdBundle bundle = underlyingSecurity == null ? ExternalIdBundle.of(ticker)
            : underlyingSecurity.getExternalIdBundle();
    HistoricalTimeSeriesInfoDocument timeSeriesInfo = getOrLoadTimeSeries(ticker, bundle);
    double estimatedCurrentStrike = getOrLoadMostRecentPoint(timeSeriesInfo);
    Set<ExternalId> optionChain = getOptionChain(ticker);
    //TODO: reuse positions/nodes?
    String longName = underlyingSecurity == null ? "" : underlyingSecurity.getName();
    String formattedName = MessageFormatter.format("[{}] {}", underlying, longName);
    ManageablePortfolioNode equityNode = new ManageablePortfolioNode(formattedName);
    BigDecimal underlyingAmount = VALUE_OF_UNDERLYING.divide(BigDecimal.valueOf(estimatedCurrentStrike),
            BigDecimal.ROUND_HALF_EVEN);
    if (includeUnderlying) {
        addPosition(equityNode, underlyingAmount, ticker);
    }
    TreeMap<LocalDate, Set<BloombergTickerParserEQOption>> optionsByExpiry = new TreeMap<LocalDate, Set<BloombergTickerParserEQOption>>();
    for (ExternalId optionTicker : optionChain) {
        s_logger.debug("Got option {}", optionTicker);
        BloombergTickerParserEQOption optionInfo = BloombergTickerParserEQOption.getOptionParser(optionTicker);
        s_logger.debug("Got option info {}", optionInfo);
        LocalDate key = optionInfo.getExpiry();
        Set<BloombergTickerParserEQOption> set = optionsByExpiry.get(key);
        if (set == null) {
            set = new HashSet<BloombergTickerParserEQOption>();
            optionsByExpiry.put(key, set);
        }
        set.add(optionInfo);
    }
    Set<ExternalId> tickersToLoad = new HashSet<ExternalId>();
    BigDecimal expiryCount = BigDecimal.valueOf(expiries.length);
    BigDecimal defaultAmountAtExpiry = underlyingAmount.divide(expiryCount, BigDecimal.ROUND_DOWN);
    BigDecimal spareAmountAtExpiry = defaultAmountAtExpiry.add(BigDecimal.ONE);
    int spareCount = underlyingAmount.subtract(defaultAmountAtExpiry.multiply(expiryCount)).intValue();
    for (int i = 0; i < expiries.length; i++) {
        Period bucketPeriod = expiries[i];
        ManageablePortfolioNode bucketNode = new ManageablePortfolioNode(bucketPeriod.toString().substring(1));
        LocalDate nowish = LocalDate.now().withDayOfMonth(20); //This avoids us picking different options every time this script is run
        LocalDate targetExpiry = nowish.plus(bucketPeriod);
        LocalDate chosenExpiry = optionsByExpiry.floorKey(targetExpiry);
        if (chosenExpiry == null) {
            s_logger.warn("No options for {} on {}", targetExpiry, underlying);
            continue;
        }
        s_logger.info("Using time {} for bucket {} ({})", new Object[] { chosenExpiry, bucketPeriod, targetExpiry });
        Set<BloombergTickerParserEQOption> optionsAtExpiry = optionsByExpiry.get(chosenExpiry);
        TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> optionsByStrike = new TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (BloombergTickerParserEQOption option : optionsAtExpiry) {
            // s_logger.info("option {}", option);
            double key = option.getStrike();
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(key);
            if (pair == null) {
                pair = Pair.of(null, null);
            }
            if (option.getOptionType() == OptionType.CALL) {
                pair = Pair.of(option, pair.getSecond());
            } else {
                pair = Pair.of(pair.getFirst(), option);
            }
            optionsByStrike.put(key, pair);
        }
        //cascading collar?
        BigDecimal amountAtExpiry = spareCount-- > 0 ? spareAmountAtExpiry : defaultAmountAtExpiry;
        s_logger.info(" est strike {}", estimatedCurrentStrike);
        Double[] strikes = optionsByStrike.keySet().toArray(new Double[0]);
        int strikeIndex = Arrays.binarySearch(strikes, estimatedCurrentStrike);
        if (strikeIndex < 0) {
            strikeIndex = -(1 + strikeIndex);
        }
        s_logger.info("strikes length {} index {} strike of index {}", new Object[] {
                Integer.valueOf(strikes.length), Integer.valueOf(strikeIndex), Double.valueOf(strikes[strikeIndex]) });
        int minIndex = strikeIndex - _numOptions;
        minIndex = Math.max(0, minIndex);
        int maxIndex = strikeIndex + _numOptions;
        maxIndex = Math.min(strikes.length - 1, maxIndex);
        s_logger.info("min {} max {}", Integer.valueOf(minIndex), Integer.valueOf(maxIndex));
        StringBuffer sb = new StringBuffer("strikes: [");
        for (int j = minIndex; j <= maxIndex; j++) {
            sb.append(" ");
            sb.append(strikes[j]);
        }
        sb.append(" ]");
        s_logger.info(sb.toString());
        //Short Calls
        ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> calls = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = minIndex; j < strikeIndex; j++) {
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            calls.add(pair);
        }
        spreadOptions(bucketNode, calls, OptionType.CALL, -1, tickersToLoad, amountAtExpiry, includeUnderlying, calls.size());
        // Long Puts
        ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> puts = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>();
        for (int j = strikeIndex + 1; j <= maxIndex; j++) {
            Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(strikes[j]);
            if (pair == null) {
                throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]);
            }
            puts.add(pair);
        }
        spreadOptions(bucketNode, puts, OptionType.PUT, 1, tickersToLoad, amountAtExpiry, includeUnderlying, puts.size());
        if (bucketNode.getChildNodes().size() + bucketNode.getPositionIds().size() > 0) {
            equityNode.addChildNode(bucketNode); //Avoid generating empty nodes
        }
    }
    for (ExternalId optionTicker : tickersToLoad) {
        ManageableSecurity loaded = getOrLoadSecurity(optionTicker);
        if (loaded == null) {
            throw new OpenGammaRuntimeException("Unexpected option type " + loaded);
        }
        //TODO [LAPANA-29] Should be able to do this for index options too
        if (includeUnderlying) {
            try {
                HistoricalTimeSeriesInfoDocument loadedTs = getOrLoadTimeSeries(optionTicker, loaded.getExternalIdBundle());
                if (loadedTs == null) {
                    throw new OpenGammaRuntimeException("Failed to get time series for " + loaded);
                }
            } catch (Exception ex) {
                s_logger.error("Failed to get time series for " + loaded, ex);
            }
        }
    }
    if (equityNode.getPositionIds().size() + equityNode.getChildNodes().size() > 0) {
        rootNode.addChildNode(equityNode);
    }
}
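The floorKey idiom the loader relies on, reduced to a self-contained sketch; the class name and option data below are illustrative, not taken from the OpenGamma code:

import java.time.LocalDate;
import java.time.Period;
import java.util.List;
import java.util.TreeMap;

public class NearestExpiry {
    public static void main(String[] args) {
        // Options grouped by expiry date; TreeMap keeps the expiries sorted.
        TreeMap<LocalDate, List<String>> optionsByExpiry = new TreeMap<>();
        optionsByExpiry.put(LocalDate.of(2024, 3, 15), List.of("MAR calls/puts"));
        optionsByExpiry.put(LocalDate.of(2024, 6, 21), List.of("JUN calls/puts"));

        // Pick the latest expiry that falls on or before the target date.
        LocalDate targetExpiry = LocalDate.now().withDayOfMonth(20).plus(Period.ofMonths(3));
        LocalDate chosenExpiry = optionsByExpiry.floorKey(targetExpiry);
        if (chosenExpiry == null) {
            System.out.println("No expiry on or before " + targetExpiry + "; skip this bucket");
        } else {
            System.out.println("Using expiry " + chosenExpiry + " -> " + optionsByExpiry.get(chosenExpiry));
        }
    }
}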
From source file:ch.algotrader.service.algo.VWAPOrderService.java
VWAPOrderStateVO createAlgoOrderState(final VWAPOrder algoOrder, final Date dateTime) throws OrderValidationException {
    Validate.notNull(algoOrder, "vwapOrder missing");
    Security security = algoOrder.getSecurity();
    SecurityFamily family = security.getSecurityFamily();
    Exchange exchange = family.getExchange();
    HistoricalDataService historicalDataService = this.applicationContext.getBean(HistoricalDataService.class);
    List<Bar> bars = historicalDataService.getHistoricalBars(security.getId(), //
            DateUtils.truncate(new Date(), Calendar.DATE), //
            algoOrder.getLookbackPeriod(), //
            TimePeriod.DAY, //
            algoOrder.getBucketSize(), //
            MarketDataEventType.TRADES, //
            Collections.emptyMap());
    TreeMap<LocalTime, Long> buckets = new TreeMap<>();
    Set<LocalDate> tradingDays = new HashSet<>();
    for (Bar bar : bars) {
        int vol = bar.getVol();
        LocalTime time = DateTimeLegacy.toLocalTime(bar.getDateTime());
        tradingDays.add(DateTimeLegacy.toLocalDate(bar.getDateTime()));
        if (buckets.containsKey(time)) {
            buckets.put(time, buckets.get(time) + vol);
        } else {
            buckets.put(time, (long) vol);
        }
    }
    // verify start and end time
    if (algoOrder.getStartTime() == null) {
        if (this.calendarService.isOpen(exchange.getId())) {
            algoOrder.setStartTime(dateTime);
        } else {
            Date nextOpenTime = this.calendarService.getNextOpenTime(exchange.getId());
            algoOrder.setStartTime(nextOpenTime);
        }
    }
    Date closeTime = this.calendarService.getNextCloseTime(exchange.getId());
    if (algoOrder.getEndTime() == null) {
        algoOrder.setEndTime(closeTime);
    }
    if (algoOrder.getStartTime().compareTo(dateTime) < 0) {
        throw new OrderValidationException("startTime needs to be in the future " + algoOrder);
    } else if (algoOrder.getEndTime().compareTo(dateTime) <= 0) {
        throw new OrderValidationException("endTime needs to be in the future " + algoOrder);
    } else if (algoOrder.getEndTime().compareTo(closeTime) > 0) {
        throw new OrderValidationException("endTime needs to be before next market closeTime for " + algoOrder);
    } else if (algoOrder.getEndTime().compareTo(algoOrder.getStartTime()) <= 0) {
        throw new OrderValidationException("endTime needs to be after startTime for " + algoOrder);
    }
    int historicalVolume = 0;
    LocalTime startTime = DateTimeLegacy.toLocalTime(algoOrder.getStartTime());
    LocalTime endTime = DateTimeLegacy.toLocalTime(algoOrder.getEndTime());
    LocalTime firstBucketStart = buckets.floorKey(startTime);
    LocalTime lastBucketStart = buckets.floorKey(endTime);
    SortedMap<LocalTime, Long> subBuckets = buckets.subMap(firstBucketStart, true, lastBucketStart, true);
    for (Map.Entry<LocalTime, Long> bucket : subBuckets.entrySet()) {
        long vol = bucket.getValue() / tradingDays.size();
        bucket.setValue(vol);
        if (bucket.getKey().equals(firstBucketStart)) {
            LocalTime firstBucketEnd = firstBucketStart.plus(algoOrder.getBucketSize().getValue(), ChronoUnit.MILLIS);
            double fraction = (double) ChronoUnit.MILLIS.between(startTime, firstBucketEnd) / algoOrder.getBucketSize().getValue();
            historicalVolume += vol * fraction;
        } else if (bucket.getKey().equals(lastBucketStart)) {
            double fraction = (double) ChronoUnit.MILLIS.between(lastBucketStart, endTime) / algoOrder.getBucketSize().getValue();
            historicalVolume += vol * fraction;
        } else {
            historicalVolume += vol;
        }
    }
    double participation = algoOrder.getQuantity() / (double) historicalVolume;
    if (participation > MAX_PARTICIPATION) {
        throw new OrderValidationException("participation rate " + twoDigitFormat.format(participation * 100.0)
                + "% is above 50% of historical market volume for " + algoOrder);
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("participation of {} is {}%", algoOrder.getDescription(), twoDigitFormat.format(participation * 100.0));
    }
    return new VWAPOrderStateVO(participation, buckets);
}
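The bucket-slicing idiom from the example above in isolation: floorKey locates the bucket that contains each boundary time, and an inclusive subMap then yields every bucket the order window touches. A simplified sketch with made-up volume data:

import java.time.LocalTime;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class BucketSlice {
    public static void main(String[] args) {
        // Historical volume per 30-minute bucket, keyed by bucket start time.
        TreeMap<LocalTime, Long> buckets = new TreeMap<>();
        buckets.put(LocalTime.of(9, 30), 1_000L);
        buckets.put(LocalTime.of(10, 0), 800L);
        buckets.put(LocalTime.of(10, 30), 600L);
        buckets.put(LocalTime.of(11, 0), 700L);

        LocalTime startTime = LocalTime.of(9, 45);  // order starts mid-bucket
        LocalTime endTime = LocalTime.of(10, 45);   // and ends mid-bucket

        // floorKey finds the start of the bucket containing each boundary.
        LocalTime firstBucketStart = buckets.floorKey(startTime); // 09:30
        LocalTime lastBucketStart = buckets.floorKey(endTime);    // 10:30

        SortedMap<LocalTime, Long> subBuckets = buckets.subMap(firstBucketStart, true, lastBucketStart, true);
        long total = 0;
        for (Map.Entry<LocalTime, Long> bucket : subBuckets.entrySet()) {
            total += bucket.getValue();
        }
        System.out.println(subBuckets.keySet() + " -> total volume " + total); // [09:30, 10:00, 10:30] -> 2400
    }
}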
From source file:com.opengamma.examples.bloomberg.loader.DemoEquityOptionCollarPortfolioLoader.java
private void addNodes(final ManageablePortfolioNode rootNode, final String underlying, final boolean includeUnderlying, final Period[] expiries) { final ExternalId ticker = ExternalSchemes.bloombergTickerSecurityId(underlying); ManageableSecurity underlyingSecurity = null; if (includeUnderlying) { underlyingSecurity = getOrLoadEquity(ticker); }/*from ww w . j a v a 2 s . c o m*/ final ExternalIdBundle bundle = underlyingSecurity == null ? ExternalIdBundle.of(ticker) : underlyingSecurity.getExternalIdBundle(); final HistoricalTimeSeriesInfoDocument timeSeriesInfo = getOrLoadTimeSeries(ticker, bundle); final double estimatedCurrentStrike = getOrLoadMostRecentPoint(timeSeriesInfo); final Set<ExternalId> optionChain = getOptionChain(ticker); //TODO: reuse positions/nodes? final String longName = underlyingSecurity == null ? "" : underlyingSecurity.getName(); final String formattedName = MessageFormatter.format("[{}] {}", underlying, longName).getMessage(); final ManageablePortfolioNode equityNode = new ManageablePortfolioNode(formattedName); final BigDecimal underlyingAmount = VALUE_OF_UNDERLYING.divide(BigDecimal.valueOf(estimatedCurrentStrike), BigDecimal.ROUND_HALF_EVEN); if (includeUnderlying) { addPosition(equityNode, underlyingAmount, ticker); } final TreeMap<LocalDate, Set<BloombergTickerParserEQOption>> optionsByExpiry = new TreeMap<LocalDate, Set<BloombergTickerParserEQOption>>(); for (final ExternalId optionTicker : optionChain) { s_logger.debug("Got option {}", optionTicker); final BloombergTickerParserEQOption optionInfo = BloombergTickerParserEQOption .getOptionParser(optionTicker); s_logger.debug("Got option info {}", optionInfo); final LocalDate key = optionInfo.getExpiry(); Set<BloombergTickerParserEQOption> set = optionsByExpiry.get(key); if (set == null) { set = new HashSet<BloombergTickerParserEQOption>(); optionsByExpiry.put(key, set); } set.add(optionInfo); } final Set<ExternalId> tickersToLoad = new HashSet<ExternalId>(); final BigDecimal expiryCount = BigDecimal.valueOf(expiries.length); final BigDecimal defaultAmountAtExpiry = underlyingAmount.divide(expiryCount, BigDecimal.ROUND_DOWN); final BigDecimal spareAmountAtExpiry = defaultAmountAtExpiry.add(BigDecimal.ONE); int spareCount = underlyingAmount.subtract(defaultAmountAtExpiry.multiply(expiryCount)).intValue(); for (final Period bucketPeriod : expiries) { final ManageablePortfolioNode bucketNode = new ManageablePortfolioNode( bucketPeriod.toString().substring(1)); final LocalDate nowish = LocalDate.now().withDayOfMonth(20); //This avoids us picking different options every time this script is run final LocalDate targetExpiry = nowish.plus(bucketPeriod); final LocalDate chosenExpiry = optionsByExpiry.floorKey(targetExpiry); if (chosenExpiry == null) { s_logger.info("No options for {} on {}", targetExpiry, underlying); continue; } s_logger.info("Using time {} for bucket {} ({})", new Object[] { chosenExpiry, bucketPeriod, targetExpiry }); final Set<BloombergTickerParserEQOption> optionsAtExpiry = optionsByExpiry.get(chosenExpiry); final TreeMap<Double, Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> optionsByStrike = new TreeMap<>(); for (final BloombergTickerParserEQOption option : optionsAtExpiry) { // s_logger.info("option {}", option); final double key = option.getStrike(); Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike.get(key); if (pair == null) { pair = Pair.of(null, null); } if (option.getOptionType() == OptionType.CALL) { pair = Pair.of(option, 
pair.getSecond()); } else { pair = Pair.of(pair.getFirst(), option); } optionsByStrike.put(key, pair); } //cascading collar? final BigDecimal amountAtExpiry = spareCount-- > 0 ? spareAmountAtExpiry : defaultAmountAtExpiry; s_logger.info(" est strike {}", estimatedCurrentStrike); final Double[] strikes = optionsByStrike.keySet().toArray(new Double[0]); int strikeIndex = Arrays.binarySearch(strikes, estimatedCurrentStrike); if (strikeIndex < 0) { strikeIndex = -(1 + strikeIndex); } s_logger.info("strikes length {} index {} strike of index {}", new Object[] { Integer.valueOf(strikes.length), Integer.valueOf(strikeIndex), Double.valueOf(strikes[strikeIndex]) }); int minIndex = strikeIndex - _numOptions; minIndex = Math.max(0, minIndex); int maxIndex = strikeIndex + _numOptions; maxIndex = Math.min(strikes.length - 1, maxIndex); s_logger.info("min {} max {}", Integer.valueOf(minIndex), Integer.valueOf(maxIndex)); final StringBuffer sb = new StringBuffer("strikes: ["); for (int j = minIndex; j <= maxIndex; j++) { sb.append(" "); sb.append(strikes[j]); } sb.append(" ]"); s_logger.info(sb.toString()); //Short Calls final ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> calls = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>(); for (int j = minIndex; j < strikeIndex; j++) { final Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike .get(strikes[j]); if (pair == null) { throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]); } calls.add(pair); } spreadOptions(bucketNode, calls, OptionType.CALL, -1, tickersToLoad, amountAtExpiry, includeUnderlying, calls.size()); // Long Puts final ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>> puts = new ArrayList<Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption>>(); for (int j = strikeIndex + 1; j <= maxIndex; j++) { final Pair<BloombergTickerParserEQOption, BloombergTickerParserEQOption> pair = optionsByStrike .get(strikes[j]); if (pair == null) { throw new OpenGammaRuntimeException("no pair for strike" + strikes[j]); } puts.add(pair); } spreadOptions(bucketNode, puts, OptionType.PUT, 1, tickersToLoad, amountAtExpiry, includeUnderlying, puts.size()); if (bucketNode.getChildNodes().size() + bucketNode.getPositionIds().size() > 0) { equityNode.addChildNode(bucketNode); //Avoid generating empty nodes } } for (final ExternalId optionTicker : tickersToLoad) { final ManageableSecurity loaded = getOrLoadSecurity(optionTicker); if (loaded == null) { throw new OpenGammaRuntimeException("Unexpected option type " + loaded); } //TODO [LAPANA-29] Should be able to do this for index options too if (includeUnderlying) { try { final HistoricalTimeSeriesInfoDocument loadedTs = getOrLoadTimeSeries(optionTicker, loaded.getExternalIdBundle()); if (loadedTs == null) { throw new OpenGammaRuntimeException("Failed to get time series for " + loaded); } } catch (final Exception ex) { s_logger.info("Failed to get time series for " + loaded, ex); } } } if (equityNode.getPositionIds().size() + equityNode.getChildNodes().size() > 0) { rootNode.addChildNode(equityNode); } }
From source file:umich.ms.batmass.gui.viewers.map2d.components.BaseMap2D.java
/** * Fills the map given a scan collection. * @param scans//from ww w. j ava 2 s . c o m * @return True, if filling was done successfully. * False if something bad happened, e.g. scanCollection didn't contain any scans between rtStart & rtEnd */ public boolean fillMapFromScans(IScanCollection scans) { int pixelsVertical = availableHeight; height = pixelsVertical; width = availableWidth; NavigableMap<Integer, IScan> scansByRtSpanAtMsLevel = scans.getScansByRtSpanAtMsLevel(rtLo, rtHi, msLevel); ; if (!precursorMzRange.equals(Map2DPanel.OPT_DISPLAY_ALL_MZ_REGIONS)) { // if only scans from specific precursor m/z window were requested IntervalST<Double, TreeMap<Integer, IScan>> precursorRanges = scans.getMapMsLevel2rangeGroups() .get(msLevel); if (precursorRanges != null) { IntervalST.Node<Double, TreeMap<Integer, IScan>> node = precursorRanges.get(precursorMzRange); if (node != null) { // these are all the scans at proper MS level and in proper precursor m/z range TreeMap<Integer, IScan> scansInMzRange = node.getValue(); // now filter this TreeMap to only leave scans that are in our RT range Integer numLo = scansByRtSpanAtMsLevel.firstKey(); Integer numHi = scansByRtSpanAtMsLevel.lastKey(); numLo = scansInMzRange.ceilingKey(numLo); numHi = scansInMzRange.floorKey(numHi); scansByRtSpanAtMsLevel = scansInMzRange.subMap(numLo, true, numHi, true); } } } if (scansByRtSpanAtMsLevel == null || scansByRtSpanAtMsLevel.size() == 0) { initErrorFillingState(); return false; } scanNumLo = scansByRtSpanAtMsLevel.firstKey(); scanNumHi = scansByRtSpanAtMsLevel.lastKey(); // compare the number of scans to available vertical pixels int scanCount = scansByRtSpanAtMsLevel.size(); this.map = new double[height][width]; this.mapRaw = new double[height][width]; this.maxValInFullRow = new double[height]; IScan scan; TreeMap<Integer, IScan> mapNum2scan = scans.getMapNum2scan(); IScan[] scansToAverage = new IScan[4]; ISpectrum spectrum; Integer mzIdxLo, mzIdxHi; int x, y; boolean hasProfile = true; double[] masses, intensities; filledRowIds = new int[scansByRtSpanAtMsLevel.size()]; int idx = 0; double denoisingTimeCounter = 0; for (Map.Entry<Integer, IScan> num2scan : scansByRtSpanAtMsLevel.entrySet()) { scan = num2scan.getValue(); if (doProfileModeGapFilling && !scan.isCentroided()) { hasProfile = true; } spectrum = null; try { spectrum = scan.fetchSpectrum(); } catch (FileParsingException ex) { Exceptions.printStackTrace(ex); } if (spectrum == null) { continue; } y = extrapolateRtToY(scan.getRt()); filledRowIds[idx] = y; idx++; if (y > this.map.length - 1) { OutputWndPrinter.printErr("DEBUG", String.format( "BaseMap2D: (y > this.map.length-1) for scan #%d.\n" + "\ty=%d, len-1=%d, height=%d\n" + "\trt=%.20f, rtStart=%.20f, rtEnd=%.20f, rtSpan=%.20f", scan.getNum(), y, this.map.length - 1, height, scan.getRt(), rtLo, rtHi, rtSpan)); } masses = spectrum.getMZs(); intensities = spectrum.getIntensities(); mzIdxLo = spectrum.findMzIdxCeiling(mzLo); mzIdxHi = spectrum.findMzIdxFloor(mzHi); if (mzIdxLo == null || mzIdxHi == null) { OutputWndPrinter.printErr("DEBUG", String.format( "BaseMap2D: mzIdxLo or mzIdxHi were null for scan #%d. 
" + "Not filling the map from them.", scan.getNum())); continue; } if (mzIdxLo < 0 || mzIdxLo > masses.length - 1) { OutputWndPrinter.printErr("DEBUG", String.format( "BaseMap2D: (mzIdxLo < 0 || mzIdxLo > masses.length-1) for scan #%d", scan.getNum())); } if (mzIdxHi < 0 || mzIdxHi > masses.length - 1) { OutputWndPrinter.printErr("DEBUG", String.format( "BaseMap2D: (mzIdxHi < 0 || mzIdxHi > masses.length-1) for scan #%d", scan.getNum())); } double denoiseThreshold = Double.NaN; boolean applyDenoise = isDoDenoise(); if (applyDenoise) { long start = System.nanoTime(); denoiseThreshold = findDenoiseThreshold(masses, intensities); double denoisingTime = (System.nanoTime() - start) / 1e6; denoisingTimeCounter = denoisingTimeCounter + denoisingTime; if (Double.isNaN(denoiseThreshold)) { applyDenoise = false; } } double maxInt = spectrum.getMaxInt(); for (int i = mzIdxLo; i <= mzIdxHi; i++) { x = extrapolateMzToX(masses[i]); addPeakRaw(x, y, intensities[i]); if (applyDenoise && intensities[i] < denoiseThreshold) { continue; } if (x > this.map[0].length - 1) { OutputWndPrinter.printErr("DEBUG", String.format( "BaseMap2D: (x > this.map[0].length-1) for scan #%d.\n" + "\tx=%d, len-1=%d, width=%d,\n" + "\ti=%d, masses[i]=%.20f, mzStart=%.20f, mzEnd=%.20f, mzSpan=%.20f", scan.getNum(), x, this.map[0].length - 1, width, i, masses[i], mzLo, mzHi, mzSpan)); } // boost if present in previous/next scan // boost if present in previous/next scan// boost if present in previous/next scan// boost if present in previous/next scan// boost if present in previous/next scan// boost if present in previous/next scan // double curIntensity = intensities[i]; // final int maxScanSpan = 2000; // int numScansDisplayed = scansByRtSpanAtMsLevel.size(); // if (false && numScansDisplayed <= maxScanSpan) { // double maxIntInVicinity; // double intensityUpdateFactor = 1; // double dm, dmPpm, dmUpdateFactor; // int maxIntIdx; // double[] curInts, curMzs; // // final int scanNumShift = 1; // final double ppmTolerance = 15d; // // if (scan.getNum() % 1000 == 0) { // System.out.printf("Averaging for scan %d\n", scan.getNum()); // } // scansToAverage[0] = mapNum2scan.get(scan.getNum() - scanNumShift*2); // scansToAverage[1] = mapNum2scan.get(scan.getNum() - scanNumShift); // scansToAverage[2] = mapNum2scan.get(scan.getNum() + scanNumShift); // scansToAverage[3] = mapNum2scan.get(scan.getNum() + scanNumShift*2); // double curMass = masses[i]; // // for (IScan avgScan : scansToAverage) { // if (avgScan == null) { // continue; // } // ISpectrum s = avgScan.getSpectrum(); // if (s == null) { // continue; // } // int[] mzIdxs = s.findMzIdxsWithinPpm(curMass, ppmTolerance); // dm = Double.NEGATIVE_INFINITY; // dmUpdateFactor = 1; // intensityUpdateFactor = 1; // if (mzIdxs != null) { // curInts = s.getIntensities(); // curMzs = s.getMZs(); // maxIntIdx = -1; // maxIntInVicinity = Double.NEGATIVE_INFINITY; // for (int j = mzIdxs[0]; j <= mzIdxs[1]; j++) { // if (curInts[j] > maxIntInVicinity) { // maxIntIdx = j; // } // } // if (maxIntIdx != -1) { // intensityUpdateFactor = curInts[maxIntIdx]; // dm = Math.abs(curMass - curMzs[maxIntIdx]); // // dmPpm = dm / (curMass / 1e6d); // if (dmPpm > ppmTolerance) { // dmUpdateFactor = 0d; // throw new IllegalStateException("dmUpdateFactor set to zero, should not happen"); // } else { // dmUpdateFactor = (1 - Math.pow(dmPpm / ppmTolerance, 2d)); // } // } else { // throw new IllegalStateException("Strange condition, should never be triggered"); // } // } else { // // if masses in the vicinity not 
found, then penalize // // TODO: this should be dependent on the chosen distribution for mass deviations // // see dmFactor // intensityUpdateFactor = 1; // dmUpdateFactor = (1 - Math.pow(0.5d, 2d)); // } // // curIntensity = curIntensity * (intensityUpdateFactor * dmUpdateFactor); // } // } // addPeak(x, y, curIntensity); addPeak(x, y, intensities[i]); maxValInFullRow[y] = maxInt; // if (curIntensity > 1e6) { // addPeak(x, y, curIntensity); // } } if (hasProfile && doProfileModeGapFilling) { double pixelSizeMz = getMzSpan() / availableWidth; if (pixelSizeMz < 0.05) { fillProfileGaps(0, y, pixelSizeMz); } } } if (isDoDenoise()) { OutputWndPrinter.printErr("DEBUG", String.format("Denoising took on average: %.2fms (%d scans)\n", (denoisingTimeCounter) / scansByRtSpanAtMsLevel.size(), scansByRtSpanAtMsLevel.size())); } if (hasProfile) { // profile mode spectrum if (!doProfileModeGapFilling && doMzCloseZoomGapFilling) { applySavitzkyGolay(map); } } else { // !hasProfile => centroided spectrum if (doMzCloseZoomGapFilling) { applySavitzkyGolay(map); } } findMinMaxIntensities(); // if we created the full-sized version of the map, then a lot of rows might // be zero, because no scan actually mapped to this row of pixels // so we just fill it with the same pixels as in the previous filled row. if (doInterpRt) { for (int filledRowIdx = 0; filledRowIdx < filledRowIds.length - 1; filledRowIdx++) { int rowLo = filledRowIds[filledRowIdx]; int rowHi = filledRowIds[filledRowIdx + 1]; for (int rowToFillIdx = rowLo + 1; rowToFillIdx < rowHi; rowToFillIdx++) { System.arraycopy(map[rowLo], 0, map[rowToFillIdx], 0, width); maxValInFullRow[rowToFillIdx] = maxValInFullRow[rowLo]; } } } // add a tiny bit to the total intensity, allows not to care about // edge values when mapping intensities to colors. // Adding MIN_NORMAL, as totalIntensity shoule be a value > 1.0 totalIntensityMax += 1e-8; return true; }
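The range-clamping idiom used above, shown on its own: when the first and last keys of one map are used to slice another map, ceilingKey and floorKey snap those bounds to keys that actually exist in the second map before calling subMap. A sketch with hypothetical scan numbers; unlike the original, it also guards against a null bound, which subMap would otherwise reject with a NullPointerException:

import java.util.NavigableMap;
import java.util.TreeMap;

public class ClampRange {
    public static void main(String[] args) {
        // Scans in the requested retention-time window, keyed by scan number.
        TreeMap<Integer, String> scansByRtSpan = new TreeMap<>();
        for (int num = 100; num <= 200; num++) {
            scansByRtSpan.put(num, "scan " + num);
        }
        // Scans in the requested precursor m/z range (only every 5th scan, say).
        TreeMap<Integer, String> scansInMzRange = new TreeMap<>();
        for (int num = 52; num <= 302; num += 5) {
            scansInMzRange.put(num, "scan " + num);
        }

        Integer numLo = scansByRtSpan.firstKey(); // 100
        Integer numHi = scansByRtSpan.lastKey();  // 200

        // Snap the bounds to keys that exist in scansInMzRange.
        numLo = scansInMzRange.ceilingKey(numLo); // smallest key >= 100 -> 102
        numHi = scansInMzRange.floorKey(numHi);   // greatest key <= 200 -> 197
        if (numLo == null || numHi == null || numLo > numHi) {
            System.out.println("No overlap between the two scan ranges");
            return;
        }
        NavigableMap<Integer, String> overlap = scansInMzRange.subMap(numLo, true, numHi, true);
        System.out.println("Overlap: " + overlap.firstKey() + ".." + overlap.lastKey()
                + " (" + overlap.size() + " scans)");
    }
}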