Example usage for java.util TreeMap firstEntry

Introduction

On this page you can find example usages of java.util.TreeMap.firstEntry().

Prototype

public Map.Entry<K, V> firstEntry() 
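
Below is a minimal, self-contained sketch of what firstEntry() returns; the class name TreeMapFirstEntryDemo and the sample data are illustrative only and are not taken from the projects listed under Usage.

import java.util.Map;
import java.util.TreeMap;

public class TreeMapFirstEntryDemo {
    public static void main(String[] args) {
        // A TreeMap keeps its entries sorted by key; firstEntry() returns
        // the entry with the lowest key without removing it.
        TreeMap<Integer, String> map = new TreeMap<>();
        map.put(3, "three");
        map.put(1, "one");
        map.put(2, "two");

        Map.Entry<Integer, String> first = map.firstEntry();
        System.out.println(first.getKey() + " -> " + first.getValue()); // prints "1 -> one"

        // On an empty map, firstEntry() returns null instead of throwing.
        System.out.println(new TreeMap<Integer, String>().firstEntry()); // prints "null"
    }
}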

Usage

From source file: org.apache.accumulo.server.tabletserver.ScanRunState.java

private TreeMap<KeyExtent, SplitInfo> splitTablet(Tablet tablet, byte[] splitPoint) throws IOException {
    long t1 = System.currentTimeMillis();

    TreeMap<KeyExtent, SplitInfo> tabletInfo = tablet.split(splitPoint);
    if (tabletInfo == null) {
        return null;
    }

    log.info("Starting split: " + tablet.getExtent());
    statsKeeper.incrementStatusSplit();
    long start = System.currentTimeMillis();

    Tablet[] newTablets = new Tablet[2];

    // firstEntry() returns the entry with the lowest KeyExtent: the lower half of the split
    Entry<KeyExtent, SplitInfo> first = tabletInfo.firstEntry();
    newTablets[0] = new Tablet(TabletServer.this, new Text(first.getValue().dir), first.getKey(),
            resourceManager.createTabletResourceManager(), first.getValue().datafiles, first.getValue().time,
            first.getValue().initFlushID, first.getValue().initCompactID);

    Entry<KeyExtent, SplitInfo> last = tabletInfo.lastEntry();
    newTablets[1] = new Tablet(TabletServer.this, new Text(last.getValue().dir), last.getKey(),
            resourceManager.createTabletResourceManager(), last.getValue().datafiles, last.getValue().time,
            last.getValue().initFlushID, last.getValue().initCompactID);

    // roll tablet stats over into tablet server's statsKeeper object as
    // historical data
    statsKeeper.saveMinorTimes(tablet.timer);
    statsKeeper.saveMajorTimes(tablet.timer);

    // lose the reference to the old tablet and open two new ones
    synchronized (onlineTablets) {
        onlineTablets.remove(tablet.getExtent());
        onlineTablets.put(newTablets[0].getExtent(), newTablets[0]);
        onlineTablets.put(newTablets[1].getExtent(), newTablets[1]);
    }
    // tell the master
    enqueueMasterMessage(new SplitReportMessage(tablet.getExtent(), newTablets[0].getExtent(),
            new Text("/" + newTablets[0].getLocation().getName()), newTablets[1].getExtent(),
            new Text("/" + newTablets[1].getLocation().getName())));

    statsKeeper.updateTime(Operation.SPLIT, start, 0, false);
    long t2 = System.currentTimeMillis();
    log.info("Tablet split: " + tablet.getExtent() + " size0 " + newTablets[0].estimateTabletSize() + " size1 "
            + newTablets[1].estimateTabletSize() + " time " + (t2 - t1) + "ms");

    return tabletInfo;
}

From source file: org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.java

/**
 * Loads initial store files that were picked up from some physical location pertaining to
 * this store (presumably). Unlike adding files after compaction, assumes empty initial
 * sets, and is forgiving with regard to stripe constraints - at worst, many/all files will
 * go to level 0.
 * @param storeFiles Store files to add.
 */
private void loadUnclassifiedStoreFiles(List<StoreFile> storeFiles) {
    LOG.debug("Attempting to load " + storeFiles.size() + " store files.");
    TreeMap<byte[], ArrayList<StoreFile>> candidateStripes = new TreeMap<byte[], ArrayList<StoreFile>>(
            MAP_COMPARATOR);
    ArrayList<StoreFile> level0Files = new ArrayList<StoreFile>();
    // Separate the files into tentative stripes; then validate. Currently, we rely on metadata.
    // If needed, we could dynamically determine the stripes in future.
    for (StoreFile sf : storeFiles) {
        byte[] startRow = startOf(sf), endRow = endOf(sf);
        // Validate the range and put the files into place.
        if (isInvalid(startRow) || isInvalid(endRow)) {
            insertFileIntoStripe(level0Files, sf); // No metadata - goes to L0.
            ensureLevel0Metadata(sf);
        } else if (!isOpen(startRow) && !isOpen(endRow) && nonOpenRowCompare(startRow, endRow) >= 0) {
            LOG.error("Unexpected metadata - start row [" + Bytes.toString(startRow) + "], end row ["
                    + Bytes.toString(endRow) + "] in file [" + sf.getPath() + "], pushing to L0");
            insertFileIntoStripe(level0Files, sf); // Bad metadata - goes to L0 also.
            ensureLevel0Metadata(sf);
        } else {
            ArrayList<StoreFile> stripe = candidateStripes.get(endRow);
            if (stripe == null) {
                stripe = new ArrayList<StoreFile>();
                candidateStripes.put(endRow, stripe);
            }
            insertFileIntoStripe(stripe, sf);
        }
    }
    // Possible improvement - for variable-count stripes, if all the files are in L0, we can
    // instead create a single, open-ended stripe with all files.

    boolean hasOverlaps = false;
    byte[] expectedStartRow = null; // first stripe can start wherever
    Iterator<Map.Entry<byte[], ArrayList<StoreFile>>> entryIter = candidateStripes.entrySet().iterator();
    while (entryIter.hasNext()) {
        Map.Entry<byte[], ArrayList<StoreFile>> entry = entryIter.next();
        ArrayList<StoreFile> files = entry.getValue();
        // Validate the file start rows, and move the bad ones to level 0.
        for (int i = 0; i < files.size(); ++i) {
            StoreFile sf = files.get(i);
            byte[] startRow = startOf(sf);
            if (expectedStartRow == null) {
                expectedStartRow = startRow; // ensure that first stripe is still consistent
            } else if (!rowEquals(expectedStartRow, startRow)) {
                hasOverlaps = true;
                LOG.warn("Store file doesn't fit into the tentative stripes - expected to start at ["
                        + Bytes.toString(expectedStartRow) + "], but starts at [" + Bytes.toString(startRow)
                        + "], to L0 it goes");
                StoreFile badSf = files.remove(i);
                insertFileIntoStripe(level0Files, badSf);
                ensureLevel0Metadata(badSf);
                --i;
            }
        }
        // Check if any files from the candidate stripe are valid. If so, add a stripe.
        byte[] endRow = entry.getKey();
        if (!files.isEmpty()) {
            expectedStartRow = endRow; // Next stripe must start exactly at that key.
        } else {
            entryIter.remove();
        }
    }

    // In the end, there must be open ends on two sides. If not, and there were no errors i.e.
    // files are consistent, they might be coming from a split. We will treat the boundaries
    // as open keys anyway, and log the message.
    // If there were errors, we'll play it safe and dump everything into L0.
    if (!candidateStripes.isEmpty()) {
        StoreFile firstFile = candidateStripes.firstEntry().getValue().get(0);
        boolean isOpen = isOpen(startOf(firstFile)) && isOpen(candidateStripes.lastKey());
        if (!isOpen) {
            LOG.warn("The range of the loaded files does not cover full key space: from ["
                    + Bytes.toString(startOf(firstFile)) + "], to ["
                    + Bytes.toString(candidateStripes.lastKey()) + "]");
            if (!hasOverlaps) {
                ensureEdgeStripeMetadata(candidateStripes.firstEntry().getValue(), true);
                ensureEdgeStripeMetadata(candidateStripes.lastEntry().getValue(), false);
            } else {
                LOG.warn("Inconsistent files, everything goes to L0.");
                for (ArrayList<StoreFile> files : candidateStripes.values()) {
                    for (StoreFile sf : files) {
                        insertFileIntoStripe(level0Files, sf);
                        ensureLevel0Metadata(sf);
                    }
                }
                candidateStripes.clear();
            }
        }
    }

    // Copy the results into the fields.
    State state = new State();
    state.level0Files = ImmutableList.copyOf(level0Files);
    state.stripeFiles = new ArrayList<ImmutableList<StoreFile>>(candidateStripes.size());
    state.stripeEndRows = new byte[Math.max(0, candidateStripes.size() - 1)][];
    ArrayList<StoreFile> newAllFiles = new ArrayList<StoreFile>(level0Files);
    int i = candidateStripes.size() - 1;
    for (Map.Entry<byte[], ArrayList<StoreFile>> entry : candidateStripes.entrySet()) {
        state.stripeFiles.add(ImmutableList.copyOf(entry.getValue()));
        newAllFiles.addAll(entry.getValue());
        if (i > 0) {
            state.stripeEndRows[state.stripeFiles.size() - 1] = entry.getKey();
        }
        --i;
    }
    state.allFilesCached = ImmutableList.copyOf(newAllFiles);
    this.state = state;
    debugDumpState("Files loaded");
}

From source file: nl.rivm.cib.episim.model.disease.infection.MSEIRSPlot.java

@Override
public void start(final Stage stage) {
    final SIRConfig conf = ConfigFactory.create(SIRConfig.class);
    final double[] t = conf.t();
    final long[] pop = conf.population();
    final double n0 = Arrays.stream(pop).sum();
    final String[] colors = conf.colors(), colors2 = conf.colors2();

    final Pane plot = new Pane();
    plot.setPrefSize(400, 300);
    plot.setMinSize(50, 50);

    final NumberAxis xAxis = new NumberAxis(t[0], t[1], (t[1] - t[0]) / 10);
    final NumberAxis yAxis = new NumberAxis(0, n0, n0 / 10);
    final Pane axes = new Pane();
    axes.prefHeightProperty().bind(plot.heightProperty());
    axes.prefWidthProperty().bind(plot.widthProperty());

    xAxis.setSide(Side.BOTTOM);
    xAxis.setMinorTickVisible(false);
    xAxis.setPrefWidth(axes.getPrefWidth());
    xAxis.prefWidthProperty().bind(axes.widthProperty());
    xAxis.layoutYProperty().bind(axes.heightProperty());

    yAxis.setSide(Side.LEFT);
    yAxis.setMinorTickVisible(false);
    yAxis.setPrefHeight(axes.getPrefHeight());
    yAxis.prefHeightProperty().bind(axes.heightProperty());
    yAxis.layoutXProperty().bind(Bindings.subtract(1, yAxis.widthProperty()));
    axes.getChildren().setAll(xAxis, yAxis);

    final Label lbl = new Label(String.format("R0=%.1f, recovery=%.1ft\nSIR(0)=%s", conf.reproduction(),
            conf.recovery(), Arrays.toString(pop)));
    lbl.setTextAlignment(TextAlignment.CENTER);
    lbl.setTextFill(Color.WHITE);

    final Path[] deterministic = { new Path(), new Path(), new Path() };
    IntStream.range(0, pop.length).forEach(i -> {
        final Color color = Color.valueOf(colors[i]);
        final Path path = deterministic[i];
        path.setStroke(color.deriveColor(0, 1, 1, 0.6));
        path.setStrokeWidth(2);
        path.setClip(new Rectangle(0, 0, plot.getPrefWidth(), plot.getPrefHeight()));
    });

    plot.getChildren().setAll(axes);

    // fill paths with integration estimates
    final double xl = xAxis.getLowerBound(), sx = plot.getPrefWidth() / (xAxis.getUpperBound() - xl),
            yh = plot.getPrefHeight(), sy = yh / (yAxis.getUpperBound() - yAxis.getLowerBound());
    final TreeMap<Double, Integer> iDeterministic = new TreeMap<>();

    MSEIRSTest.deterministic(conf, () -> new DormandPrince853Integrator(1.0E-8, 10, 1.0E-20, 1.0E-20))
            .subscribe(yt -> {
                iDeterministic.put(yt.getKey(), deterministic[0].getElements().size());
                final double[] y = yt.getValue();
                final double x = (yt.getKey() - xl) * sx;
                for (int i = 0; i < y.length; i++) {
                    final double yi = yh - y[i] * sy;
                    final PathElement di = deterministic[i].getElements().isEmpty() ? new MoveTo(x, yi)
                            : new LineTo(x, yi);
                    deterministic[i].getElements().add(di);
                }
            }, e -> LOG.error("Problem", e), () -> plot.getChildren().addAll(deterministic));

    final Path[] stochasticTau = { new Path(), new Path(), new Path() };
    IntStream.range(0, pop.length).forEach(i -> {
        final Color color = Color.valueOf(colors[i]);
        final Path path = stochasticTau[i];
        path.setStroke(color);
        path.setStrokeWidth(1);
        path.setClip(new Rectangle(0, 0, plot.getPrefWidth(), plot.getPrefHeight()));
    });

    final TreeMap<Double, Integer> iStochasticTau = new TreeMap<>();
    MSEIRSTest.stochasticGillespie(conf).subscribe(yt -> {
        final double x = (yt.getKey() - xl) * sx;
        iStochasticTau.put(yt.getKey(), stochasticTau[0].getElements().size());
        final long[] y = yt.getValue();
        for (int i = 0; i < y.length; i++) {
            final double yi = yh - y[i] * sy;
            final ObservableList<PathElement> path = stochasticTau[i].getElements();
            if (path.isEmpty()) {
                path.add(new MoveTo(x, yi)); // first
            } else {
                final PathElement last = path.get(path.size() - 1);
                final double y_prev = last instanceof MoveTo ? ((MoveTo) last).getY() : ((LineTo) last).getY();
                path.add(new LineTo(x, y_prev));
                path.add(new LineTo(x, yi));
            }
        }
    }, e -> LOG.error("Problem", e), () -> plot.getChildren().addAll(stochasticTau));

    final Path[] stochasticRes = { new Path(), new Path(), new Path() };
    IntStream.range(0, pop.length).forEach(i -> {
        final Color color = Color.valueOf(colors2[i]);
        final Path path = stochasticRes[i];
        path.setStroke(color);
        path.setStrokeWidth(1);
        path.setClip(new Rectangle(0, 0, plot.getPrefWidth(), plot.getPrefHeight()));
    });

    final TreeMap<Double, Integer> iStochasticRes = new TreeMap<>();
    MSEIRSTest.stochasticSellke(conf).subscribe(yt -> {
        final double x = (yt.getKey() - xl) * sx;
        iStochasticRes.put(yt.getKey(), stochasticRes[0].getElements().size());
        final long[] y = yt.getValue();
        for (int i = 0; i < y.length; i++) {
            final double yi = yh - y[i] * sy;
            final ObservableList<PathElement> path = stochasticRes[i].getElements();
            if (path.isEmpty()) {
                path.add(new MoveTo(x, yi)); // first
            } else {
                final PathElement last = path.get(path.size() - 1);
                final double y_prev = last instanceof MoveTo ? ((MoveTo) last).getY() : ((LineTo) last).getY();
                path.add(new LineTo(x, y_prev));
                path.add(new LineTo(x, yi));
            }
        }
    }, e -> LOG.error("Problem", e), () -> plot.getChildren().addAll(stochasticRes));

    // auto-scale on stage/plot resize 
    // FIXME scaling around wrong origin, use ScatterChart?
    //         xAxis.widthProperty()
    //               .addListener( (ChangeListener<Number>) ( observable,
    //                  oldValue, newValue ) ->
    //               {
    //                  final double scale = ((Double) newValue)
    //                        / plot.getPrefWidth();
    //                  plot.getChildren().filtered( n -> n instanceof Path )
    //                        .forEach( n ->
    //                        {
    //                           final Path path = (Path) n;
    //                           path.setScaleX( scale );
    //                           path.setTranslateX( (path
    //                                 .getBoundsInParent().getWidth()
    //                                 - path.getLayoutBounds().getWidth())
    //                                 / 2 );
    //                        } );
    //               } );
    //         plot.heightProperty()
    //               .addListener( (ChangeListener<Number>) ( observable,
    //                  oldValue, newValue ) ->
    //               {
    //                  final double scale = ((Double) newValue)
    //                        / plot.getPrefHeight();
    //                  plot.getChildren().filtered( n -> n instanceof Path )
    //                        .forEach( n ->
    //                        {
    //                           final Path path = (Path) n;
    //                           path.setScaleY( scale );
    //                           path.setTranslateY(
    //                                 (path.getBoundsInParent()
    //                                       .getHeight() * (scale - 1))
    //                                       / 2 );
    //                        } );
    //               } );

    final StackPane layout = new StackPane(lbl, plot);
    layout.setAlignment(Pos.TOP_CENTER);
    layout.setPadding(new Insets(50));
    layout.setStyle("-fx-background-color: rgb(35, 39, 50);");

    final Line vertiCross = new Line();
    vertiCross.setStroke(Color.SILVER);
    vertiCross.setStrokeWidth(1);
    vertiCross.setVisible(false);
    axes.getChildren().add(vertiCross);

    final Tooltip tip = new Tooltip("");
    tip.setAutoHide(false);
    tip.hide();
    axes.setOnMouseExited(ev -> tip.hide());
    axes.setOnMouseMoved(ev -> {
        final Double x = (Double) xAxis.getValueForDisplay(ev.getX());
        if (x > xAxis.getUpperBound() || x < xAxis.getLowerBound()) {
            tip.hide();
            vertiCross.setVisible(false);
            return;
        }
        final Double y = (Double) yAxis.getValueForDisplay(ev.getY());
        if (y > yAxis.getUpperBound() || y < yAxis.getLowerBound()) {
            tip.hide();
            vertiCross.setVisible(false);
            return;
        }
        final double xs = xAxis.getDisplayPosition(x);
        vertiCross.setStartX(xs);
        vertiCross.setStartY(yAxis.getDisplayPosition(0));
        vertiCross.setEndX(xs);
        vertiCross.setEndY(yAxis.getDisplayPosition(yAxis.getUpperBound()));
        vertiCross.setVisible(true);
        final int i = (iDeterministic.firstKey() > x ? iDeterministic.firstEntry()
                : iDeterministic.floorEntry(x)).getValue();
        final Object[] yi = Arrays.stream(deterministic).mapToDouble(p -> getY(p, i))
                .mapToObj(yAxis::getValueForDisplay).map(n -> DecimalUtil.toScale(n, 1)).toArray();
        final int j = (iStochasticTau.firstKey() > x ? iStochasticTau.firstEntry()
                : iStochasticTau.floorEntry(x)).getValue();
        final Object[] yj = Arrays.stream(stochasticTau).mapToDouble(p -> getY(p, j))
                .mapToObj(yAxis::getValueForDisplay).map(n -> DecimalUtil.toScale(n, 0)).toArray();
        final int k = (iStochasticRes.firstKey() > x ? iStochasticRes.firstEntry()
                : iStochasticRes.floorEntry(x)).getValue();
        final Object[] yk = Arrays.stream(stochasticRes).mapToDouble(p -> getY(p, k))
                .mapToObj(yAxis::getValueForDisplay).map(n -> DecimalUtil.toScale(n, 0)).toArray();
        final String txt = String.format("SIR(t=%.1f)\n" + "~det%s\n" + "~tau%s\n" + "~res%s", x,
                Arrays.toString(yi), Arrays.toString(yj), Arrays.toString(yk));

        tip.setText(txt);
        tip.show(axes, ev.getScreenX() - ev.getSceneX() + xs, ev.getScreenY() + 15);
    });

    try {
        stage.getIcons().add(new Image(FileUtil.toInputStream("icon.jpg")));
    } catch (final IOException e) {
        LOG.error("Problem", e);
    }
    stage.setTitle("Deterministic vs. Stochastic");
    stage.setScene(new Scene(layout, Color.rgb(35, 39, 50)));
    //         stage.setOnHidden( ev -> tip.hide() );
    stage.show();
}

From source file: io.warp10.continuum.gts.GTSHelper.java

public static List<GeoTimeSerie> chunk(GeoTimeSerie gts, long lastchunk, long chunkwidth, long chunkcount,
        String chunklabel, boolean keepempty, long overlap) throws WarpScriptException {

    if (overlap < 0 || overlap > chunkwidth) {
        throw new WarpScriptException("Overlap cannot exceed chunk width.");
    }

    //
    // Check if 'chunklabel' exists in the GTS labels
    //

    Metadata metadata = gts.getMetadata();

    if (metadata.getLabels().containsKey(chunklabel)) {
        throw new WarpScriptException(
                "Cannot operate on Geo Time Series which already have a label named '" + chunklabel + "'");
    }

    TreeMap<Long, GeoTimeSerie> chunks = new TreeMap<Long, GeoTimeSerie>();

    //
    // If the GTS is bucketized, make sure bucketspan does not exceed chunkwidth
    //

    boolean bucketized = GTSHelper.isBucketized(gts);

    if (bucketized) {
        if (gts.bucketspan > chunkwidth) {
            throw new WarpScriptException(
                    "Cannot operate on Geo Time Series with a bucketspan greater than the chunk width.");
        }
    } else {
        // The GTS is not bucketized and has 0 values; if lastchunk is 0, return an empty list as we
        // are unable to produce chunks
        if (0 == gts.values && 0L == lastchunk) {
            return new ArrayList<GeoTimeSerie>();
        }
    }

    //
    // Set chunkcount to Integer.MAX_VALUE if it's 0
    //

    boolean zeroChunkCount = false;

    if (0 == chunkcount) {
        chunkcount = Integer.MAX_VALUE;
        zeroChunkCount = true;
    }

    //
    // Sort timestamps in reverse order so we can produce all chunks in O(n)
    //

    GTSHelper.sort(gts, true);

    //
    // Loop on the chunks
    //

    // Index in the timestamp array
    int idx = 0;

    long bucketspan = gts.bucketspan;
    int bucketcount = gts.bucketcount;
    long lastbucket = gts.lastbucket;

    //
    // If lastchunk is 0, use lastbucket or the most recent tick
    //

    if (0 == lastchunk) {
        if (isBucketized(gts)) {
            lastchunk = lastbucket;
        } else {
            // Use the most recent tick
            lastchunk = gts.ticks[0];
            // Make sure lastchunk is aligned on 'chunkwidth' boundary
            if (0 != (lastchunk % chunkwidth)) {
                lastchunk = lastchunk - (lastchunk % chunkwidth) + chunkwidth;
            }
        }
    }

    for (long i = 0; i < chunkcount; i++) {

        // If we have no more values and no chunk count was specified, we're done; exit the loop
        if (idx >= gts.values && zeroChunkCount) {
            break;
        }

        // Compute chunk bounds
        long chunkend = lastchunk - i * chunkwidth;
        long chunkstart = chunkend - chunkwidth + 1;

        GeoTimeSerie chunkgts = new GeoTimeSerie(lastbucket, bucketcount, bucketspan, 16);

        // Set metadata for the GTS
        chunkgts.setMetadata(metadata);
        // Add 'chunklabel'
        chunkgts.getMetadata().putToLabels(chunklabel, Long.toString(chunkend));

        if (bucketized) {
            // Chunk is outside the GTS, so it will be empty
            if (lastbucket < chunkstart || chunkend <= lastbucket - (bucketcount * bucketspan)) {
                // Add the (empty) chunk if keepempty is true
                if (keepempty || overlap > 0) {
                    chunks.put(chunkend, chunkgts);
                }
                continue;
            }

            // Set the bucketized parameters in the GTS

            // If bucketspan does not divide chunkwidth, chunks won't be bucketized

            if (0 == chunkwidth % bucketspan) {
                chunkgts.bucketspan = bucketspan;
                chunkgts.lastbucket = chunkend;
                chunkgts.bucketcount = (int) ((chunkend - chunkstart + 1) / bucketspan);
            } else {
                chunkgts.bucketspan = 0L;
                chunkgts.lastbucket = 0L;
                chunkgts.bucketcount = 0;
            }
        }

        //
        // Add the datapoints which fall within the current chunk
        //

        // Advance until the current tick is at or before 'chunkend'
        while (idx < gts.values && gts.ticks[idx] > chunkend) {
            idx++;
        }

        // We've exhausted the values
        if (idx >= gts.values) {
            // Only add the chunk if it is not empty, or if 'keepempty' is set or an overlap is requested
            if (0 != chunkgts.values || (keepempty || overlap > 0)) {
                chunks.put(chunkend, chunkgts);
            }
            continue;
        }

        // The current tick is before the beginning of the current chunk
        if (gts.ticks[idx] < chunkstart) {
            // Only add the chunk if it is not empty, or if 'keepempty' is set or an overlap is requested
            if (0 != chunkgts.values || (keepempty || overlap > 0)) {
                chunks.put(chunkend, chunkgts);
            }
            continue;
        }

        while (idx < gts.values && gts.ticks[idx] >= chunkstart) {
            GTSHelper.setValue(chunkgts, GTSHelper.tickAtIndex(gts, idx), GTSHelper.locationAtIndex(gts, idx),
                    GTSHelper.elevationAtIndex(gts, idx), GTSHelper.valueAtIndex(gts, idx), false);
            idx++;
        }

        // Only add the chunk if it is not empty, or if 'keepempty' is set or an overlap is requested
        if (0 != chunkgts.values || (keepempty || overlap > 0)) {
            chunks.put(chunkend, chunkgts);
        }
    }

    //
    // Handle overlapping if need be.
    // We need to iterate over all ticks and add datapoints to each GTS they belong to
    //

    if (overlap > 0) {

        //
        // Check if we need to add a first and a last chunk
        //

        long ts = GTSHelper.tickAtIndex(gts, 0);

        if (ts <= chunks.firstKey() - chunkwidth) {
            Entry<Long, GeoTimeSerie> currentFirst = chunks.firstEntry();
            GeoTimeSerie firstChunk = currentFirst.getValue().cloneEmpty();
            if (GTSHelper.isBucketized(currentFirst.getValue())) {
                firstChunk.lastbucket = firstChunk.lastbucket - firstChunk.bucketspan;
            }
            chunks.put(currentFirst.getKey() - chunkwidth, firstChunk);
        }

        ts = GTSHelper.tickAtIndex(gts, gts.values - 1);

        if (ts >= chunks.lastKey() - chunkwidth + 1 - overlap) {
            Entry<Long, GeoTimeSerie> currentLast = chunks.lastEntry();
            GeoTimeSerie lastChunk = currentLast.getValue().cloneEmpty();
            if (GTSHelper.isBucketized(currentLast.getValue())) {
                lastChunk.lastbucket = lastChunk.lastbucket + lastChunk.bucketspan;
            }
            chunks.put(currentLast.getKey() + chunkwidth, lastChunk);
        }

        //
        // Put all entries in a list so we can access them randomly
        //

        List<Entry<Long, GeoTimeSerie>> allchunks = new ArrayList<Entry<Long, GeoTimeSerie>>(chunks.entrySet());

        int[] currentSizes = new int[allchunks.size()];

        for (int i = 0; i < currentSizes.length; i++) {
            currentSizes[i] = allchunks.get(i).getValue().values;
        }

        //
        // Iterate over the chunks, completing each one with datapoints from the previous and next overlaps.
        // Remember the timestamps are still in reverse order at this point.
        //

        for (int i = 0; i < allchunks.size(); i++) {
            GeoTimeSerie current = allchunks.get(i).getValue();
            long lowerBound = allchunks.get(i).getKey() - chunkwidth + 1 - overlap;
            long upperBound = allchunks.get(i).getKey() + overlap;
            if (i > 0) {
                GeoTimeSerie prev = allchunks.get(i - 1).getValue();
                for (int j = 0; j < currentSizes[i - 1]; j++) {
                    long timestamp = GTSHelper.tickAtIndex(prev, j);
                    if (timestamp < lowerBound) {
                        break;
                    }
                    GTSHelper.setValue(current, timestamp, GTSHelper.locationAtIndex(prev, j),
                            GTSHelper.elevationAtIndex(prev, j), GTSHelper.valueAtIndex(prev, j), false);
                }
            }
            if (i < allchunks.size() - 1) {
                GeoTimeSerie next = allchunks.get(i + 1).getValue();
                for (int j = currentSizes[i + 1] - 1; j >= 0; j--) {
                    long timestamp = GTSHelper.tickAtIndex(next, j);
                    if (timestamp > upperBound) {
                        break;
                    }
                    GTSHelper.setValue(current, timestamp, GTSHelper.locationAtIndex(next, j),
                            GTSHelper.elevationAtIndex(next, j), GTSHelper.valueAtIndex(next, j), false);
                }
            }
        }
    }

    List<GeoTimeSerie> result = new ArrayList<GeoTimeSerie>();

    for (GeoTimeSerie g : chunks.values()) {
        if (!keepempty && 0 == g.values) {
            continue;
        }
        result.add(g);
    }

    return result;
}