Example usage for java.util LinkedList peekFirst

List of usage examples for java.util LinkedList peekFirst

Introduction

On this page you can find example usages for java.util LinkedList peekFirst.

Prototype

public E peekFirst() 

Document

Retrieves, but does not remove, the first element of this list, or returns null if this list is empty.
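
peekFirst() differs from getFirst() only on an empty list: getFirst() throws NoSuchElementException, while peekFirst() returns null. Below is a minimal sketch of both behaviors (the class name is illustrative):

import java.util.LinkedList;
import java.util.NoSuchElementException;

public class PeekFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> empty = new LinkedList<>();

        // peekFirst() is null-safe: it returns null rather than throwing.
        System.out.println(empty.peekFirst()); // prints "null"

        // getFirst() throws NoSuchElementException on an empty list.
        try {
            empty.getFirst();
        } catch (NoSuchElementException e) {
            System.out.println("getFirst() threw: " + e);
        }
    }
}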

Usage

From source file:Main.java

public static void main(String[] args) {
    // create a LinkedList
    LinkedList<String> list = new LinkedList<String>();

    // add some elements
    list.add("Hello");
    list.add("from java2s.com");
    list.add("10");

    // print the list
    System.out.println("LinkedList:" + list);

    // peek at the first element
    System.out.println("First element of the list:" + list.peekFirst());
}
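
The code above generates the following output:

LinkedList:[Hello, from java2s.com, 10]
First element of the list:Hello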

From source file:com.act.lcms.v2.MZCollisionCounter.java

public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(MassChargeCalculator.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    File inputFile = new File(cl.getOptionValue(OPTION_INPUT_INCHI_LIST));
    if (!inputFile.exists()) {
        cliUtil.failWithMessage("Input file at does not exist at %s", inputFile.getAbsolutePath());
    }

    List<MassChargeCalculator.MZSource> sources = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(inputFile))) {
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            sources.add(new MassChargeCalculator.MZSource(line));
            if (sources.size() % 1000 == 0) {
                LOGGER.info("Loaded %d sources from input file", sources.size());
            }
        }
    }

    Set<String> considerIons = Collections.emptySet();
    if (cl.hasOption(OPTION_ONLY_CONSIDER_IONS)) {
        List<String> ions = Arrays.asList(cl.getOptionValues(OPTION_ONLY_CONSIDER_IONS));
        LOGGER.info("Only considering ions for m/z calculation: %s", StringUtils.join(ions, ", "));
        considerIons = new HashSet<>(ions);
    }

    TSVWriter<String, Long> tsvWriter = new TSVWriter<>(Arrays.asList("collisions", "count"));
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));

    try {
        LOGGER.info("Loaded %d sources in total from input file", sources.size());

        MassChargeCalculator.MassChargeMap mzMap = MassChargeCalculator.makeMassChargeMap(sources,
                considerIons);

        if (!cl.hasOption(OPTION_COUNT_WINDOW_INTERSECTIONS)) {
            // Do an exact analysis of the m/z collisions if windowing is not specified.

            LOGGER.info("Computing precise collision histogram.");
            Iterable<Double> mzs = mzMap.ionMZIter();
            Map<Integer, Long> collisionHistogram = histogram(
                    StreamSupport.stream(mzs.spliterator(), false).map(mz -> { // See comment about Iterable below.
                        try {
                            return mzMap.ionMZToMZSources(mz).size();
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }));
            List<Integer> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Integer collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision.longValue());
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        } else {
            /* After some deliberation (thanks Gil!), the windowed variant of this calculation counts the number of
             * structures whose 0.01 Da m/z windows (for some set of ions) overlap with each other.
             *
             * For example, let's assume we have five total input structures, and are only searching for one ion.  Let's
             * also assume that three of those structures have m/z A and the remaining two have m/z B.  The windows might
             * look like this in the m/z domain:
             * |----A----|
             *        |----B----|
             * Because A represents three structures and overlaps with B, which represents two, we assign A a count of 5--
             * this is the number of structures we believe could fall into the range of A given our current peak calling
             * approach.  Similarly, B is assigned a count of 5, as the possibility for collision/confusion is symmetric.
             *
             * Note that this is an over-approximation of collisions, as we could more precisely only consider intersections
             * when the exact m/z of B falls within the window around A and vice versa.  However, because we have observed
             * cases where the MS sensor doesn't report structures at exactly the m/z we predict, we employ this weaker
             * definition of intersection to give a slightly pessimistic view of what confusions might be possible. */
            // Compute windows for every m/z.  We don't care about the original mz values since we just want the count.
            List<Double> mzs = mzMap.ionMZsSorted();

            final Double windowHalfWidth;
            if (cl.hasOption(OPTION_WINDOW_HALFWIDTH)) {
                // Don't use get with default for this option, as we want the exact FP value of the default tolerance.
                windowHalfWidth = Double.valueOf(cl.getOptionValue(OPTION_WINDOW_HALFWIDTH));
            } else {
                windowHalfWidth = DEFAULT_WINDOW_TOLERANCE;
            }

            /* Window = (lower bound, upper bound), counter of represented m/z's that collide with this window, and number
             * of representative structures (which will be used in counting collisions). */
            LinkedList<CollisionWindow> allWindows = new LinkedList<CollisionWindow>() {
                {
                    for (Double mz : mzs) {
                        // CPU for memory trade-off: don't re-compute the window bounds over and over and over and over and over.
                        try {
                            add(new CollisionWindow(mz, windowHalfWidth, mzMap.ionMZToMZSources(mz).size()));
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }
                }
            };

            // Sweep line time!  The window ranges are the interesting points.  We just accumulate overlap counts as we go.
            LinkedList<CollisionWindow> workingSet = new LinkedList<>();
            List<CollisionWindow> finished = new LinkedList<>();

            while (allWindows.size() > 0) {
                CollisionWindow thisWindow = allWindows.pop();
                // Remove any windows from the working set that don't overlap with the next window.
                while (workingSet.size() > 0 && workingSet.peekFirst().getMaxMZ() < thisWindow.getMinMZ()) {
                    finished.add(workingSet.pop());
                }

                for (CollisionWindow w : workingSet) {
                    /* Add the size of the new overlapping window's structure count to each of the windows in the working set,
                     * which represents the number of possible confused structures that fall within the overlapping region.
                     * We exclude the window itself as it should already have counted the colliding structures it represents. */
                    w.getAccumulator().add(thisWindow.getStructureCount());

                    /* Reciprocally, add the structure counts of all windows with which the current window overlaps to it. */
                    thisWindow.getAccumulator().add(w.getStructureCount());
                }

                // Now that accumulation is complete, we can safely add the current window.
                workingSet.add(thisWindow);
            }

            // All the interesting events are done, so drop the remaining windows into the finished set.
            finished.addAll(workingSet);

            Map<Long, Long> collisionHistogram = histogram(
                    finished.stream().map(w -> w.getAccumulator().longValue()));
            List<Long> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Long collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision);
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        }
    } finally {
        if (tsvWriter != null) {
            tsvWriter.close();
        }
    }
}

From source file:eu.smartfp7.foursquare.AttendanceCrawler.java

/**
 * The main method takes an arbitrary number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 * 
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 * 
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call has been made for each 
    // venue. Ensures that we get data only once each hour. 
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                // Scan to the last line of the time series file, keeping it in mem.
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too much venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use these credentials.\nExiting now.");
        return;
    }

    while (true) {

        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there are no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /* TODO: by doing this, we keep a representation of the venue dating from the beginning
                         * of the specific crawl. We might want to change this and update this file once
                         * in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    if (e instanceof FoursquareAPIException)
                        if (((FoursquareAPIException) e).getHttp_code().equals("400")
                                && ((FoursquareAPIException) e).getError_detail()
                                        .equals("Venue " + venue_id + " has been deleted")) {
                            city_venues.get(c).remove(venue_id);
                            removeVenue(venue_id, c);
                        } else
                            city_venues_buffer.add(venue_id);

                    continue;
                }
            } // while

            // Every day between 0am and 2am, we repair all the broken time series (if there
            // is something to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}

From source file:eulermind.importer.LineNode.java

private static LineNode reduceToChapterTreeByBlankLine(List<LineNode> lineNodes) {
    LinkedList<LineNode> newlineNodes = new LinkedList<LineNode>();

    //Cap extreme blank-line runs so they don't distort the depth of the chapter tree.
    for (LineNode lineNode : lineNodes) {
        if (lineNode.m_blankLines > 200) {
            lineNode.m_blankLines = 200;
        }
    }
    Iterator<LineNode> iterator = lineNodes.iterator();
    newlineNodes.add(iterator.next());

    //Copy the line nodes into newlineNodes, inserting placeholder nodes so that
    //every blank-line count up to the maximum appears in the sequence.
    {
        int maxBlankLines = 0;
        while (iterator.hasNext()) {
            LineNode lineNode = iterator.next();
            maxBlankLines = Math.max(maxBlankLines, lineNode.m_blankLines);

            //Fill the gap between the previous node's blank-line count and this one's.
            for (int i = newlineNodes.peekLast().m_blankLines + 1; i < lineNode.m_blankLines; i++) {
                newlineNodes.add(new LineNode(i));
            }
            newlineNodes.add(lineNode);
        }

        //Pad past the maximum blank-line count so the final reduction yields a single root.
        for (int i = newlineNodes.peekLast().m_blankLines + 1; i <= maxBlankLines + 1; i++) {
            newlineNodes.add(new LineNode(i));
        }
    }

    //Reduce the padded sequence into a tree using a stack.
    LinkedList<LineNode> stack = new LinkedList<LineNode>();

    for (LineNode newLineNode : newlineNodes) {
        if (!stack.isEmpty() && stack.peekLast().m_blankLines < newLineNode.m_blankLines) {
            List<LineNode> reducedLineNodes = popSameBlankLineNodes(stack);

            for (LineNode reducedLineNode : reducedLineNodes) {
                newLineNode.add(reducedLineNode);
            }
        }

        stack.add(newLineNode);
    }

    assert stack.size() == 1;

    return stack.peekFirst();
}

From source file:ch.zhaw.icclab.tnova.expressionsolver.OTFlyEval.java

String findMin(LinkedList<Double> paramList) {
    Double min = paramList.peekFirst();
    for (int i = 0; i < paramList.size(); i++) {
        if (min > paramList.get(i))
            min = paramList.get(i);
    }
    return min.toString();
}

From source file:ch.zhaw.icclab.tnova.expressionsolver.OTFlyEval.java

String findMax(LinkedList<Double> paramList) {
    Double max = paramList.peekFirst();
    for (int i = 0; i < paramList.size(); i++) {
        if (max < paramList.get(i))
            max = paramList.get(i);
    }
    return max.toString();
}

From source file:syndeticlogic.memento.AbstractPolicyStrategy.java

protected String dumpKeys(String message, LinkedList list) {
    String dump = null;
    StringBuffer sb = new StringBuffer();
    LinkedListNode node = list.peekFirst();
    Cache.CacheNode current = null;

    while (node != null) {
        current = (Cache.CacheNode) node.getValue();
        sb.append(current.getKey());
        node = node.getNext();
    }

    dump = sb.toString();
    LOG.debug(message + dump);
    return dump;
}

From source file:com.offbynull.voip.kademlia.GraphHelper.java

private ArrayList<BitString> removePrefixesForNextLevel(LinkedList<BitString> sortedPrefixes) {
    ArrayList<BitString> ret = new ArrayList<>();

    if (sortedPrefixes.isEmpty()) {
        return ret;
    }

    int hitCount = sortedPrefixes.peekFirst().getBitLength();

    while (!sortedPrefixes.isEmpty()) {
        if (sortedPrefixes.peekFirst().getBitLength() == hitCount) {
            ret.add(sortedPrefixes.removeFirst());
        } else {
            break;
        }
    }

    return ret;
}

From source file:ru.codeinside.gses.activiti.forms.definitions.FormParser.java

void processBlocks(Map<String, PropertyParser> nodes, List<PropertyParser> rootList) throws BuildException {
    final ArrayList<PropertyParser> allPropertyParsers = new ArrayList<PropertyParser>(nodes.values());
    final LinkedList<BlockStartParser> stack = new LinkedList<BlockStartParser>();
    for (final PropertyParser propertyParser : allPropertyParsers) {
        final BlockStartParser block = stack.peekFirst();
        propertyParser.block = block;
        final boolean end = (propertyParser instanceof EndBlockParser);
        if (!end) {
            if (block == null) {
                if (!(propertyParser instanceof SignatureParser)) {
                    rootList.add(propertyParser);
                }
            } else {
                block.items.add(propertyParser);
            }
            if (propertyParser instanceof BlockStartParser) {
                final BlockStartParser start = (BlockStartParser) propertyParser;
                start.items = new ArrayList<PropertyParser>();
                stack.addFirst(start);
            }
        } else {
            if (block == null
                    || !block.property.id.substring(1).equals(propertyParser.property.id.substring(1))) {
                throw new BuildException("   ",
                        propertyParser);
            }
            stack.removeFirst();
        }
    }

    PropertyParser badStart = stack.peekFirst();
    if (badStart != null) {
        throw new BuildException("?   ?", badStart);
    }

    for (PropertyParser propertyParser : allPropertyParsers) {
        if (propertyParser instanceof BlockStartParser) {
            final BlockStartParser start = (BlockStartParser) propertyParser;
            if (start.items.isEmpty()) {
                throw new BuildException("? ", propertyParser);
            }
        }
    }
}

From source file:com.act.lcms.v2.TraceIndexExtractor.java

/**
 * Initiate a data feast of all traces within some window allocation.  OM NOM NOM.
 * @param iter An iterator over an LCMS data file.
 * @return The windows, time points, and per-window traces.
 */
private IndexedTraces runSweepLine(List<Double> targetMZs, Iterator<LCMSSpectrum> iter)
        throws RocksDBException, IOException {
    // Create windows for sweep-linin'.
    List<MZWindow> windows = new ArrayList<MZWindow>() {
        {
            int i = 0;
            for (Double targetMZ : targetMZs) {
                add(new MZWindow(i, targetMZ));
                i++;
            }
        }
    };

    /* We *must* ensure the windows are sorted in m/z order for the sweep line to work.  However, we don't know anything
     * about the input targetMZs list, which may be immutable or may be in some order the client wants to preserve.
     * Rather than mess with that array, we'll sort the windows in our internal array and leave the client's targets as they are.
     */
    Collections.sort(windows, (a, b) -> a.getTargetMZ().compareTo(b.getTargetMZ()));

    List<Double> times = new ArrayList<>();

    List<List<Double>> allTraces = new ArrayList<List<Double>>(windows.size()) {
        {
            for (int i = 0; i < windows.size(); i++) {
                add(new ArrayList<>());
            }
        }
    };

    // Keep an array of accumulators around to reduce the overhead of accessing the trace matrix for accumulation.
    double[] sumIntensitiesInEachWindow = new double[windows.size()];

    int timepointCounter = 0;
    while (iter.hasNext()) {
        LCMSSpectrum spectrum = iter.next();
        Double time = spectrum.getTimeVal();

        // Store one list of the time values so we can knit times and intensity sums later to form XZs.
        times.add(time);

        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            sumIntensitiesInEachWindow[i] = 0.0;
        }

        timepointCounter++;

        if (timepointCounter % 100 == 0) {
            LOGGER.info("Extracted %d timepoints (now at %.3fs)", timepointCounter, time);
        }

        /* We use a sweep-line approach to scanning through the m/z windows so that we can aggregate all intensities in
         * one pass over the current LCMSSpectrum (this saves us one inner loop in our extraction process).  The m/z
         * values in the LCMSSpectrum become our "critical" or "interesting points" over which we sweep our m/z ranges.
         * The next window in m/z order is guaranteed to be the next one we want to consider since we address the points
         * in m/z order as well.  As soon as we've passed out of the range of one of our windows, we discard it.  It is
         * valid for a window to be added to and discarded from the working queue in one application of the work loop. */
        LinkedList<MZWindow> workingQueue = new LinkedList<>();
        // TODO: can we reuse these instead of creating fresh?
        LinkedList<MZWindow> tbdQueue = new LinkedList<>(windows);

        // Assumption: these arrive in m/z order.
        for (Pair<Double, Double> mzIntensity : spectrum.getIntensities()) {
            Double mz = mzIntensity.getLeft();
            Double intensity = mzIntensity.getRight();

            // First, shift any applicable ranges onto the working queue based on their minimum mz.
            while (!tbdQueue.isEmpty() && tbdQueue.peekFirst().getMin() <= mz) {
                workingQueue.add(tbdQueue.pop());
            }

            // Next, remove any ranges we've passed.
            while (!workingQueue.isEmpty() && workingQueue.peekFirst().getMax() < mz) {
                workingQueue.pop();
            }

            if (workingQueue.isEmpty()) {
                if (tbdQueue.isEmpty()) {
                    // If both queues are empty, there are no more windows to consider at all.  On to the next timepoint!
                    break;
                }

                // If there's nothing that happens to fit in this range, skip it!
                continue;
            }

            // The working queue should now hold only ranges that include this m/z value.  Sweep line swept!

            /* Now add this intensity to accumulator value for each of the items in the working queue.
             * By the end of the outer loop, trace(t) = Sum(intensity) | win_min <= m/z <= win_max @ time point # t */
            for (MZWindow window : workingQueue) {
                // TODO: count the number of times we add intensities to each window's accumulator for MS1-style warnings.
                sumIntensitiesInEachWindow[window.getIndex()] += intensity;
            }
        }

        /* Extend allTraces to add a row of accumulated intensity values for this time point.  We build this incrementally
         * because the LCMSSpectrum iterator doesn't tell us how many time points to expect up front. */
        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            allTraces.get(i).add(sumIntensitiesInEachWindow[i]);
        }
    }

    // Trace data has been devoured.  Might want to loosen the belt at this point...
    LOGGER.info("Done extracting %d traces", allTraces.size());

    return new IndexedTraces(windows, times, allTraces);
}