List of usage examples for java.util.NavigableMap.size()
int size();
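Before the real-world examples, a minimal self-contained sketch of the method (the class name and sample values here are illustrative, not taken from the examples below). size() returns the number of key-value mappings, and it also works on the navigable views returned by methods such as headMap and tailMap, which reflect the backing map.

import java.util.NavigableMap;
import java.util.TreeMap;

public class NavigableMapSizeDemo {
    public static void main(String[] args) {
        NavigableMap<Integer, String> map = new TreeMap<>();
        map.put(1, "a");
        map.put(2, "b");
        map.put(3, "c");

        // size() on the map itself
        System.out.println(map.size());                   // 3

        // size() on navigable views returned by tailMap/headMap
        System.out.println(map.tailMap(2, true).size());  // 2
        System.out.println(map.headMap(2, false).size()); // 1
    }
}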
From source file:org.apache.tajo.storage.hbase.HBaseScanner.java
private Datum getDatum(Result result, int fieldId) throws IOException {
    byte[] value = null;
    if (isRowKeyMappings[fieldId]) {
        value = result.getRow();
        if (!isBinaryColumns[fieldId] && rowKeyFieldIndexes[fieldId] >= 0) {
            int rowKeyFieldIndex = rowKeyFieldIndexes[fieldId];
            byte[][] rowKeyFields = BytesUtils.splitPreserveAllTokens(value, rowKeyDelimiter,
                    columnMapping.getNumColumns());
            if (rowKeyFields.length < rowKeyFieldIndex) {
                return NullDatum.get();
            } else {
                value = rowKeyFields[rowKeyFieldIndex];
            }
        }
    } else {
        if (isColumnKeys[fieldId]) {
            NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
            if (cfMap != null) {
                Set<byte[]> keySet = cfMap.keySet();
                if (keySet.size() == 1) {
                    try {
                        return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId],
                                keySet.iterator().next());
                    } catch (Exception e) {
                        LOG.error(e.getMessage(), e);
                        throw new RuntimeException(e.getMessage(), e);
                    }
                } else {
                    StringBuilder sb = new StringBuilder();
                    sb.append("[");
                    int count = 0;
                    for (byte[] eachKey : keySet) {
                        if (count > 0) {
                            sb.append(", ");
                        }
                        Datum datum = HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], eachKey);
                        sb.append("\"").append(datum.asChars()).append("\"");
                        count++;
                        if (count > MAX_LIST_SIZE) {
                            break;
                        }
                    }
                    sb.append("]");
                    return new TextDatum(sb.toString());
                }
            }
        } else if (isColumnValues[fieldId]) {
            NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
            if (cfMap != null) {
                Collection<byte[]> valueList = cfMap.values();
                if (valueList.size() == 1) {
                    try {
                        return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId],
                                valueList.iterator().next());
                    } catch (Exception e) {
                        LOG.error(e.getMessage(), e);
                        throw new RuntimeException(e.getMessage(), e);
                    }
                } else {
                    StringBuilder sb = new StringBuilder();
                    sb.append("[");
                    int count = 0;
                    for (byte[] eachValue : valueList) {
                        if (count > 0) {
                            sb.append(", ");
                        }
                        Datum datum = HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], eachValue);
                        sb.append("\"").append(datum.asChars()).append("\"");
                        count++;
                        if (count > MAX_LIST_SIZE) {
                            break;
                        }
                    }
                    sb.append("]");
                    return new TextDatum(sb.toString());
                }
            }
        } else {
            if (mappingColumnFamilies[fieldId][1] == null) {
                NavigableMap<byte[], byte[]> cfMap = result.getFamilyMap(mappingColumnFamilies[fieldId][0]);
                if (cfMap != null && !cfMap.isEmpty()) {
                    int count = 0;
                    String delim = "";

                    if (cfMap.size() == 0) {
                        return NullDatum.get();
                    } else if (cfMap.size() == 1) {
                        // If a column family is mapped without a column name like "cf1:" and the number of cells is one,
                        // the return value is in flat format, not JSON format.
                        Map.Entry<byte[], byte[]> entry = cfMap.entrySet().iterator().next();
                        byte[] entryKey = entry.getKey();
                        byte[] entryValue = entry.getValue();
                        if (entryKey == null || entryKey.length == 0) {
                            try {
                                if (isBinaryColumns[fieldId]) {
                                    return HBaseBinarySerializerDeserializer.deserialize(schemaColumns[fieldId],
                                            entryValue);
                                } else {
                                    return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId],
                                            entryValue);
                                }
                            } catch (Exception e) {
                                LOG.error(e.getMessage(), e);
                                throw new RuntimeException(e.getMessage(), e);
                            }
                        }
                    }

                    StringBuilder sb = new StringBuilder();
                    sb.append("{");
                    for (Map.Entry<byte[], byte[]> entry : cfMap.entrySet()) {
                        byte[] entryKey = entry.getKey();
                        byte[] entryValue = entry.getValue();

                        String keyText = new String(entryKey);
                        String valueText = null;
                        if (entryValue != null) {
                            try {
                                if (isBinaryColumns[fieldId]) {
                                    valueText = HBaseBinarySerializerDeserializer
                                            .deserialize(schemaColumns[fieldId], entryValue).asChars();
                                } else {
                                    valueText = HBaseTextSerializerDeserializer
                                            .deserialize(schemaColumns[fieldId], entryValue).asChars();
                                }
                            } catch (Exception e) {
                                LOG.error(e.getMessage(), e);
                                throw new RuntimeException(e.getMessage(), e);
                            }
                        }
                        sb.append(delim).append("\"").append(keyText).append("\":\"").append(valueText).append("\"");
                        delim = ", ";
                        count++;
                        if (count > MAX_LIST_SIZE) {
                            break;
                        }
                    } // end of for
                    sb.append("}");
                    return new TextDatum(sb.toString());
                } else {
                    value = null;
                }
            } else {
                value = result.getValue(mappingColumnFamilies[fieldId][0], mappingColumnFamilies[fieldId][1]);
            }
        }
    }

    if (value == null) {
        return NullDatum.get();
    } else {
        try {
            if (isBinaryColumns[fieldId]) {
                return HBaseBinarySerializerDeserializer.deserialize(schemaColumns[fieldId], value);
            } else {
                return HBaseTextSerializerDeserializer.deserialize(schemaColumns[fieldId], value);
            }
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
From source file:org.commonvox.hbase_column_manager.ColumnInvalidityReport.java
private void outputReport() throws IOException {
    CSVFormat csvFormat = (verboseReport ? VERBOSE_CSV_FORMAT : SUMMARY_CSV_FORMAT);
    try (ResultScanner rows = tempReportTable.getScanner(new Scan().setMaxVersions());
            CSVPrinter csvPrinter = csvFormat.withHeaderComments(
                    (verboseReport ? "VERBOSE" : "SUMMARY") + " Report on Invalid Column " + this.reportType
                            + "S in Table <" + sourceTable.getName().getNameAsString()
                            + (sourceColFamily == null ? "" : ">, ColumnFamily <" + Bytes.toString(sourceColFamily))
                            + "> -- Generated by " + Repository.PRODUCT_NAME + ":" + this.getClass().getSimpleName(),
                    new Date()).print(new FileWriter(targetFile))) {
        for (Result row : rows) {
            String[] reportLineComponents = parseRowId(row.getRow());
            NavigableMap<byte[], NavigableMap<Long, byte[]>> tempReportColumnMap
                    = row.getMap().firstEntry().getValue(); // .get(TEMP_REPORT_CF);
            if (verboseReport) { // print line for each invalid occurrence found
                for (Entry<byte[], NavigableMap<Long, byte[]>> tempReportColumn : tempReportColumnMap.entrySet()) {
                    for (Entry<Long, byte[]> tempReportCell : tempReportColumn.getValue().entrySet()) {
                        for (String reportLineComponent : reportLineComponents) {
                            csvPrinter.print(reportLineComponent);
                        }
                        csvPrinter.print(Repository.getPrintableString(tempReportColumn.getKey())); // userRowId
                        csvPrinter.print(tempReportCell.getKey()); // cell timestamp
                        csvPrinter.print(Repository.getPrintableString(tempReportCell.getValue())); // colVal
                        csvPrinter.println();
                    }
                }
            } else { // print summary line giving count of invalid occurrences
                for (String reportLineComponent : reportLineComponents) {
                    csvPrinter.print(reportLineComponent);
                }
                csvPrinter.print(String.valueOf(tempReportColumnMap.size()));
                csvPrinter.println();
            }
        }
    }
}
From source file:org.hbasene.index.HBaseIndexReader.java
@Override
public int docFreq(Term t) throws IOException {
    // same as in TermEnum. Avoid duplication.
    final String rowKey = t.field() + "/" + t.text();
    Get get = new Get(Bytes.toBytes(rowKey));
    get.addFamily(HBaseneConstants.FAMILY_TERMVECTOR);
    HTableInterface table = this.getTablePool().getTable(this.indexName);
    try {
        Result result = table.get(get);
        if (result == null) {
            return 0;
        }
        NavigableMap<byte[], byte[]> map = result.getFamilyMap(HBaseneConstants.FAMILY_TERMVECTOR);
        if (map == null) {
            return 0;
        }
        return map.size();
    } finally {
        this.getTablePool().putTable(table);
    }
}
From source file:org.opennms.netmgt.rrd.model.RRDv3IT.java
/**
 * Test samples for a single RRA
 *
 * @throws Exception the exception
 */
@Test
public void testSamplesSingleRRA() throws Exception {
    File source = new File("src/test/resources/sample-counter.xml");
    RRDv3 rrd = JaxbUtils.unmarshal(RRDv3.class, source);
    Assert.assertNotNull(rrd);
    NavigableMap<Long, List<Double>> samples = rrd.generateSamples(rrd.getRras().get(0));
    Assert.assertFalse(samples.isEmpty());
    long ts = 1441748400L;
    Double v1 = 600.0;
    Double v2 = 2.0;
    Assert.assertEquals(rrd.getRras().get(0).getRows().size(), samples.size());
    for (Map.Entry<Long, List<Double>> s : samples.entrySet()) {
        System.out.println(s);
        Assert.assertEquals(2, s.getValue().size());
        Assert.assertEquals(ts, (long) s.getKey());
        Assert.assertEquals(v1, s.getValue().get(0));
        Assert.assertEquals(v2, s.getValue().get(1));
        ts += 300L;
        v1 += 300.0 * v2;
        v2 += 1.0;
    }
}
From source file:org.opennms.netmgt.rrd.model.RRDv3IT.java
/**
 * Test samples for multiple RRAs (1)
 *
 * @throws Exception the exception
 */
@Test
public void testSamplesMultipleRRAs1() throws Exception {
    File source = new File("src/test/resources/sample-counter-rras.xml");
    RRDv3 rrd = JaxbUtils.unmarshal(RRDv3.class, source);
    Assert.assertNotNull(rrd);
    NavigableMap<Long, List<Double>> samples = rrd.generateSamples(rrd.getRras().get(1));
    Assert.assertFalse(samples.isEmpty());
    Assert.assertEquals(rrd.getRras().get(1).getRows().size(), samples.size());
}
From source file:org.opennms.netmgt.snmp.mock.PropertyOidContainer.java
public SnmpObjId findNextOidForOid(final SnmpObjId oid) {
    final NavigableMap<SnmpObjId, SnmpValue> next = m_tree.tailMap(oid, false);
    if (next.size() == 0) {
        return null;
    } else {
        return next.firstKey();
    }
}
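A hedged sketch of the same lookup pattern in isolation (the class, types, and values below are illustrative, not from the OpenNMS source). On TreeMap-backed sub-map views, isEmpty() is often preferable to size() == 0, since a view's size may be computed by walking its entries; NavigableMap.higherKey(key) expresses the same "next key after" query in a single call.

import java.util.NavigableMap;
import java.util.TreeMap;

public final class NextKeyLookup {
    // Returns the smallest key strictly greater than the given key, or null if none exists.
    static Integer findNextKey(NavigableMap<Integer, String> tree, Integer key) {
        NavigableMap<Integer, String> next = tree.tailMap(key, false);
        return next.isEmpty() ? null : next.firstKey();
        // Equivalent shorthand: return tree.higherKey(key);
    }

    public static void main(String[] args) {
        NavigableMap<Integer, String> tree = new TreeMap<>();
        tree.put(10, "a");
        tree.put(20, "b");
        System.out.println(findNextKey(tree, 10)); // 20
        System.out.println(findNextKey(tree, 20)); // null
    }
}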
From source file:sadl.modellearner.rtiplus.SimplePDRTALearner.java
public List<Interval> checkDistribution(PDRTAState s, int alphIdx, DistributionCheckType type, StateColoring sc) {

    final NavigableMap<Integer, Interval> ins = s.getIntervals(alphIdx);
    if (ins.size() != 1) {
        return Collections.emptyList();
    }

    final Interval in = ins.firstEntry().getValue();
    if (in.isEmpty()) {
        return Collections.emptyList();
    }

    int tolerance;
    if (type.equals(DistributionCheckType.DISABLED)) {
        return Collections.emptyList();
    } else if (type.equals(DistributionCheckType.STRICT_BORDER) || type.equals(DistributionCheckType.STRICT)) {
        tolerance = 0;
    } else if (type.equals(DistributionCheckType.MAD_BORDER) || type.equals(DistributionCheckType.MAD)) {
        tolerance = getToleranceMAD(in, PDRTA.getMinData());
    } else if (type.equals(DistributionCheckType.OUTLIER_BORDER) || type.equals(DistributionCheckType.OUTLIER)) {
        tolerance = getToleranceOutliers(in, PDRTA.getMinData());
    } else {
        throw new IllegalArgumentException("Nonexistent type used!");
    }

    final NavigableMap<Integer, Collection<TimedTail>> tails = in.getTails().asMap();
    final List<Integer> splits = new ArrayList<>();

    if ((type.ordinal() - 1) % 2 != 0) {
        // The types without border
        final Iterator<Entry<Integer, Collection<TimedTail>>> it = tails.entrySet().iterator();
        if (it.hasNext()) {
            Entry<Integer, Collection<TimedTail>> ePrev = it.next();
            int t = ePrev.getKey().intValue();
            if (in.getBegin() <= t - tolerance - 1) {
                splits.add(new Integer(t - tolerance - 1));
            }
            while (it.hasNext()) {
                final Entry<Integer, Collection<TimedTail>> eCurr = it.next();
                t = ePrev.getKey().intValue();
                final int t2 = eCurr.getKey().intValue();
                final int diff = t2 - t - 1;
                if (diff > 2 * tolerance) {
                    splits.add(new Integer(t + tolerance));
                    splits.add(new Integer(t2 - tolerance - 1));
                }
                ePrev = eCurr;
            }
            t = ePrev.getKey().intValue();
            if (in.getEnd() > t + tolerance) {
                splits.add(new Integer(t + tolerance));
            }
        }
    } else {
        int t = tails.firstKey().intValue();
        if (in.getBegin() <= t - tolerance - 1) {
            splits.add(new Integer(t - tolerance - 1));
        }
        t = tails.lastKey().intValue();
        if (in.getEnd() > t + tolerance) {
            splits.add(new Integer(t + tolerance));
        }
    }

    // Interval cIn = new Interval(in);
    // for (int i = 0; i < splits.size(); i++) {
    //     cIn.split(splits.get(i));
    //     // TODO test resulting intervals for containing more than minData
    //     // tails otherwise remove split
    // }

    if (splits.size() == 0) {
        return Collections.emptyList();
    }

    final List<Interval> resultingIns = new ArrayList<>(splits.size() + 1);
    Pair<Interval, Interval> splittedIns = null;
    for (int i = 0; i < splits.size(); i++) {
        splittedIns = OperationUtil.split(s, alphIdx, splits.get(i).intValue(), sc);
        if (!splittedIns.getLeft().isEmpty()) {
            resultingIns.add(splittedIns.getLeft());
        }
    }
    if (splittedIns != null && !splittedIns.getRight().isEmpty()) {
        resultingIns.add(splittedIns.getRight());
    }
    return resultingIns;
}
From source file:sadl.modellearner.rtiplus.SimplePDRTALearner.java
/**
 * Calculates the maximum allowed size for an empty interval part when only few {@link TimedTail}s use this
 * interval. The allowed size depends on the parameter for the minimum amount of {@link TimedTail}s and the
 * distance between the occupied slots.
 *
 * @param minData
 *            The minimum amount of {@link TimedTail}s
 * @return The maximum allowed size for an empty interval part
 */
private int getToleranceFewSlots(Interval in, int minData) {

    final NavigableMap<Integer, Collection<TimedTail>> tails = in.getTails().asMap();
    final int slots = tails.size();
    assert (slots > 0 && slots <= 2);

    if (slots == 1) {
        final int size = tails.firstEntry().getValue().size();
        if (size < (minData / 2.0)) {
            return (int) Math.ceil((in.getEnd() - in.getBegin() + 1) * 0.05);
        } else {
            return 0;
        }
    } else {
        final Integer t1Int = tails.firstKey();
        final int s1 = tails.get(t1Int).size();
        final Integer t2Int = tails.lastKey();
        final int s2 = tails.get(t2Int).size();
        final int t1 = t1Int.intValue();
        final int t2 = t2Int.intValue();
        final double perc = (double) (t2 - t1 - 1) / (double) (in.getEnd() - in.getBegin() - 1);
        if (s1 >= minData && s2 >= minData && perc >= 0.2) {
            return (int) Math.ceil((in.getEnd() - in.getBegin() + 1) * 0.05);
        } else if ((s1 >= minData || s2 >= minData) && perc >= 0.2) {
            return (int) Math.ceil((in.getEnd() - in.getBegin() + 1) * 0.075);
        } else {
            return (int) Math.ceil((t2 - t1 - 1) / 2.0);
        }
    }
}
From source file:umich.ms.batmass.gui.viewers.map2d.components.BaseMap2D.java
/**
 * Fills the map given a scan collection.
 * @param scans
 * @return True, if filling was done successfully.
 *         False if something bad happened, e.g. scanCollection didn't contain any scans between rtStart & rtEnd
 */
public boolean fillMapFromScans(IScanCollection scans) {
    int pixelsVertical = availableHeight;
    height = pixelsVertical;
    width = availableWidth;

    NavigableMap<Integer, IScan> scansByRtSpanAtMsLevel = scans.getScansByRtSpanAtMsLevel(rtLo, rtHi, msLevel);

    if (!precursorMzRange.equals(Map2DPanel.OPT_DISPLAY_ALL_MZ_REGIONS)) {
        // if only scans from a specific precursor m/z window were requested
        IntervalST<Double, TreeMap<Integer, IScan>> precursorRanges = scans.getMapMsLevel2rangeGroups().get(msLevel);
        if (precursorRanges != null) {
            IntervalST.Node<Double, TreeMap<Integer, IScan>> node = precursorRanges.get(precursorMzRange);
            if (node != null) {
                // these are all the scans at the proper MS level and in the proper precursor m/z range
                TreeMap<Integer, IScan> scansInMzRange = node.getValue();
                // now filter this TreeMap to only leave scans that are in our RT range
                Integer numLo = scansByRtSpanAtMsLevel.firstKey();
                Integer numHi = scansByRtSpanAtMsLevel.lastKey();
                numLo = scansInMzRange.ceilingKey(numLo);
                numHi = scansInMzRange.floorKey(numHi);
                scansByRtSpanAtMsLevel = scansInMzRange.subMap(numLo, true, numHi, true);
            }
        }
    }

    if (scansByRtSpanAtMsLevel == null || scansByRtSpanAtMsLevel.size() == 0) {
        initErrorFillingState();
        return false;
    }
    scanNumLo = scansByRtSpanAtMsLevel.firstKey();
    scanNumHi = scansByRtSpanAtMsLevel.lastKey();

    // compare the number of scans to available vertical pixels
    int scanCount = scansByRtSpanAtMsLevel.size();

    this.map = new double[height][width];
    this.mapRaw = new double[height][width];
    this.maxValInFullRow = new double[height];

    IScan scan;
    TreeMap<Integer, IScan> mapNum2scan = scans.getMapNum2scan();
    IScan[] scansToAverage = new IScan[4];
    ISpectrum spectrum;
    Integer mzIdxLo, mzIdxHi;
    int x, y;
    boolean hasProfile = true;
    double[] masses, intensities;
    filledRowIds = new int[scansByRtSpanAtMsLevel.size()];
    int idx = 0;
    double denoisingTimeCounter = 0;

    for (Map.Entry<Integer, IScan> num2scan : scansByRtSpanAtMsLevel.entrySet()) {
        scan = num2scan.getValue();
        if (doProfileModeGapFilling && !scan.isCentroided()) {
            hasProfile = true;
        }
        spectrum = null;
        try {
            spectrum = scan.fetchSpectrum();
        } catch (FileParsingException ex) {
            Exceptions.printStackTrace(ex);
        }
        if (spectrum == null) {
            continue;
        }

        y = extrapolateRtToY(scan.getRt());
        filledRowIds[idx] = y;
        idx++;
        if (y > this.map.length - 1) {
            OutputWndPrinter.printErr("DEBUG", String.format(
                    "BaseMap2D: (y > this.map.length-1) for scan #%d.\n"
                            + "\ty=%d, len-1=%d, height=%d\n"
                            + "\trt=%.20f, rtStart=%.20f, rtEnd=%.20f, rtSpan=%.20f",
                    scan.getNum(), y, this.map.length - 1, height, scan.getRt(), rtLo, rtHi, rtSpan));
        }

        masses = spectrum.getMZs();
        intensities = spectrum.getIntensities();
        mzIdxLo = spectrum.findMzIdxCeiling(mzLo);
        mzIdxHi = spectrum.findMzIdxFloor(mzHi);
        if (mzIdxLo == null || mzIdxHi == null) {
            OutputWndPrinter.printErr("DEBUG", String.format(
                    "BaseMap2D: mzIdxLo or mzIdxHi were null for scan #%d. "
                            + "Not filling the map from them.", scan.getNum()));
            continue;
        }
        if (mzIdxLo < 0 || mzIdxLo > masses.length - 1) {
            OutputWndPrinter.printErr("DEBUG", String.format(
                    "BaseMap2D: (mzIdxLo < 0 || mzIdxLo > masses.length-1) for scan #%d", scan.getNum()));
        }
        if (mzIdxHi < 0 || mzIdxHi > masses.length - 1) {
            OutputWndPrinter.printErr("DEBUG", String.format(
                    "BaseMap2D: (mzIdxHi < 0 || mzIdxHi > masses.length-1) for scan #%d", scan.getNum()));
        }

        double denoiseThreshold = Double.NaN;
        boolean applyDenoise = isDoDenoise();
        if (applyDenoise) {
            long start = System.nanoTime();
            denoiseThreshold = findDenoiseThreshold(masses, intensities);
            double denoisingTime = (System.nanoTime() - start) / 1e6;
            denoisingTimeCounter = denoisingTimeCounter + denoisingTime;
            if (Double.isNaN(denoiseThreshold)) {
                applyDenoise = false;
            }
        }

        double maxInt = spectrum.getMaxInt();

        for (int i = mzIdxLo; i <= mzIdxHi; i++) {
            x = extrapolateMzToX(masses[i]);
            addPeakRaw(x, y, intensities[i]);
            if (applyDenoise && intensities[i] < denoiseThreshold) {
                continue;
            }
            if (x > this.map[0].length - 1) {
                OutputWndPrinter.printErr("DEBUG", String.format(
                        "BaseMap2D: (x > this.map[0].length-1) for scan #%d.\n"
                                + "\tx=%d, len-1=%d, width=%d,\n"
                                + "\ti=%d, masses[i]=%.20f, mzStart=%.20f, mzEnd=%.20f, mzSpan=%.20f",
                        scan.getNum(), x, this.map[0].length - 1, width, i, masses[i], mzLo, mzHi, mzSpan));
            }

            // boost if present in previous/next scan
            // double curIntensity = intensities[i];
            // final int maxScanSpan = 2000;
            // int numScansDisplayed = scansByRtSpanAtMsLevel.size();
            // if (false && numScansDisplayed <= maxScanSpan) {
            //     double maxIntInVicinity;
            //     double intensityUpdateFactor = 1;
            //     double dm, dmPpm, dmUpdateFactor;
            //     int maxIntIdx;
            //     double[] curInts, curMzs;
            //
            //     final int scanNumShift = 1;
            //     final double ppmTolerance = 15d;
            //
            //     if (scan.getNum() % 1000 == 0) {
            //         System.out.printf("Averaging for scan %d\n", scan.getNum());
            //     }
            //     scansToAverage[0] = mapNum2scan.get(scan.getNum() - scanNumShift * 2);
            //     scansToAverage[1] = mapNum2scan.get(scan.getNum() - scanNumShift);
            //     scansToAverage[2] = mapNum2scan.get(scan.getNum() + scanNumShift);
            //     scansToAverage[3] = mapNum2scan.get(scan.getNum() + scanNumShift * 2);
            //     double curMass = masses[i];
            //
            //     for (IScan avgScan : scansToAverage) {
            //         if (avgScan == null) {
            //             continue;
            //         }
            //         ISpectrum s = avgScan.getSpectrum();
            //         if (s == null) {
            //             continue;
            //         }
            //         int[] mzIdxs = s.findMzIdxsWithinPpm(curMass, ppmTolerance);
            //         dm = Double.NEGATIVE_INFINITY;
            //         dmUpdateFactor = 1;
            //         intensityUpdateFactor = 1;
            //         if (mzIdxs != null) {
            //             curInts = s.getIntensities();
            //             curMzs = s.getMZs();
            //             maxIntIdx = -1;
            //             maxIntInVicinity = Double.NEGATIVE_INFINITY;
            //             for (int j = mzIdxs[0]; j <= mzIdxs[1]; j++) {
            //                 if (curInts[j] > maxIntInVicinity) {
            //                     maxIntIdx = j;
            //                 }
            //             }
            //             if (maxIntIdx != -1) {
            //                 intensityUpdateFactor = curInts[maxIntIdx];
            //                 dm = Math.abs(curMass - curMzs[maxIntIdx]);
            //
            //                 dmPpm = dm / (curMass / 1e6d);
            //                 if (dmPpm > ppmTolerance) {
            //                     dmUpdateFactor = 0d;
            //                     throw new IllegalStateException("dmUpdateFactor set to zero, should not happen");
            //                 } else {
            //                     dmUpdateFactor = (1 - Math.pow(dmPpm / ppmTolerance, 2d));
            //                 }
            //             } else {
            //                 throw new IllegalStateException("Strange condition, should never be triggered");
            //             }
            //         } else {
            //             // if masses in the vicinity not found, then penalize
            //             // TODO: this should be dependent on the chosen distribution for mass deviations
            //             // see dmFactor
            //             intensityUpdateFactor = 1;
            //             dmUpdateFactor = (1 - Math.pow(0.5d, 2d));
            //         }
            //
            //         curIntensity = curIntensity * (intensityUpdateFactor * dmUpdateFactor);
            //     }
            // }
            // addPeak(x, y, curIntensity);

            addPeak(x, y, intensities[i]);
            maxValInFullRow[y] = maxInt;
            // if (curIntensity > 1e6) {
            //     addPeak(x, y, curIntensity);
            // }
        }

        if (hasProfile && doProfileModeGapFilling) {
            double pixelSizeMz = getMzSpan() / availableWidth;
            if (pixelSizeMz < 0.05) {
                fillProfileGaps(0, y, pixelSizeMz);
            }
        }
    }

    if (isDoDenoise()) {
        OutputWndPrinter.printErr("DEBUG", String.format("Denoising took on average: %.2fms (%d scans)\n",
                (denoisingTimeCounter) / scansByRtSpanAtMsLevel.size(), scansByRtSpanAtMsLevel.size()));
    }

    if (hasProfile) { // profile mode spectrum
        if (!doProfileModeGapFilling && doMzCloseZoomGapFilling) {
            applySavitzkyGolay(map);
        }
    } else { // !hasProfile => centroided spectrum
        if (doMzCloseZoomGapFilling) {
            applySavitzkyGolay(map);
        }
    }

    findMinMaxIntensities();

    // if we created the full-sized version of the map, then a lot of rows might
    // be zero, because no scan actually mapped to this row of pixels,
    // so we just fill it with the same pixels as in the previous filled row.
    if (doInterpRt) {
        for (int filledRowIdx = 0; filledRowIdx < filledRowIds.length - 1; filledRowIdx++) {
            int rowLo = filledRowIds[filledRowIdx];
            int rowHi = filledRowIds[filledRowIdx + 1];
            for (int rowToFillIdx = rowLo + 1; rowToFillIdx < rowHi; rowToFillIdx++) {
                System.arraycopy(map[rowLo], 0, map[rowToFillIdx], 0, width);
                maxValInFullRow[rowToFillIdx] = maxValInFullRow[rowLo];
            }
        }
    }

    // add a tiny bit to the total intensity, which allows us not to care about
    // edge values when mapping intensities to colors.
    // Adding MIN_NORMAL, as totalIntensity should be a value > 1.0
    totalIntensityMax += 1e-8;

    return true;
}