io.nitor.api.backend.cache.CacheHandler.java Source code

Introduction

Here is the source code for io.nitor.api.backend.cache.CacheHandler.java.

Source

/**
 * Copyright 2018-2019 Nitor Creations Oy
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.nitor.api.backend.cache;

import io.vertx.core.Handler;
import io.vertx.core.MultiMap;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.file.AsyncFile;
import io.vertx.core.file.FileSystem;
import io.vertx.core.file.OpenOptions;
import io.vertx.core.http.HttpServerRequest;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.core.json.JsonObject;
import io.vertx.core.streams.WriteStream;
import io.vertx.ext.web.RoutingContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

import static io.vertx.core.http.HttpHeaders.ACCEPT_ENCODING;
import static io.vertx.core.http.HttpHeaders.UPGRADE;
import static io.vertx.core.http.HttpMethod.GET;
import static io.vertx.core.http.HttpMethod.HEAD;
import static java.lang.System.currentTimeMillis;
import static java.nio.file.Files.createDirectories;
import static java.nio.file.Files.delete;
import static java.nio.file.Files.move;
import static java.nio.file.Files.walk;

public class CacheHandler {
    public static final String CTX_KEY_CACHE_EVENTS = "cacheEvents";
    private static final Logger logger = LogManager.getLogger(CacheHandler.class);
    static final OpenOptions CREATE_NEW = new OpenOptions().setCreate(true).setCreateNew(true);
    private static final AtomicInteger filenameCounter = new AtomicInteger();
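    // In-memory index of cached responses, keyed by the normalized URI (plus accept-encoding, see getCacheKey)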
    final ConcurrentHashMap<String, CacheEntry> cachedFiles = new ConcurrentHashMap<>(1024);

    private final Path dir;
    private final FileSystem fileSystem;

    public CacheHandler(JsonObject cacheConf, FileSystem fileSystem) {
        dir = Paths.get(cacheConf.getString("cacheDir", "/tmp/backend")).toAbsolutePath();
        this.fileSystem = fileSystem;
        try {
            createDirectories(dir);
            walk(dir, 1).filter(p -> !p.equals(dir))
                    .forEach(p -> fileSystem.deleteRecursiveBlocking(p.toString(), true));
        } catch (IOException ex) {
            throw new RuntimeException("Failed to initialize cache directory at " + dir);
        }
        logger.info("Caching responses to " + dir);
    }

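    /**
     * Builds the routing handler: it serves eligible GET/HEAD requests straight from the cache,
     * marks cacheable requests with a {@link CacheEvents} instance so the response can be captured,
     * and otherwise passes the request on to the next handler.
     */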
    public Handler<RoutingContext> build() {
        return ctx -> {
            if (!canServeFromCache(ctx)) {
                ctx.next();
                return;
            }
            String key = getCacheKey(ctx);
            CacheEntry entry = cachedFiles.get(key);
            if (entry != null) {
                if (entry.expires < currentTimeMillis()) {
                    cachedFiles.remove(key, entry);
                } else {
                    waitToServeCachedResponse(entry, ctx);
                    return;
                }
            }
            if (cacheableRequest(ctx)) {
                ctx.put(CTX_KEY_CACHE_EVENTS, new CacheEventsImpl(key));
                // TODO allow configuration option to put the entry to cache so that duplicate requests wait for the single result to finish
                // TODO and remember to remove&release the entry in endHandler if no content was cached
                logger.debug("Potentially cacheable request");
            }
            ctx.next();
        };
    }

    /**
 * Note: also normalizes the outgoing Accept-Encoding request header to the same normalized form used in the cache key.
     */
    static String getCacheKey(RoutingContext ctx) {
        String uri = ctx.normalisedPath() + '?' + ctx.request().query();
        String encoding = ctx.request().getHeader(ACCEPT_ENCODING);
        if (encoding == null) {
            return uri;
        }
        StringBuilder normalizedEncoding = new StringBuilder(8);
        if (encoding.contains("br")) {
            normalizedEncoding.append("br");
        }
        if (encoding.contains("gzip")) {
            if (normalizedEncoding.length() > 0) {
                normalizedEncoding.append(',');
            }
            normalizedEncoding.append("gzip");
        }
        ctx.request().headers().set(ACCEPT_ENCODING, normalizedEncoding.toString());
        return uri + '#' + normalizedEncoding;
    }

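    /**
     * Captures a single response body into a temporary file; on success the file is renamed to its
     * final name and the entry is marked ready, otherwise the temporary file is deleted and the
     * entry is removed from the cache.
     */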
    class CacheEventsImpl implements CacheEvents {
        private final String uri;
        CacheEntry entry;
        private AsyncFile file;
        private Path tmpName;

        private boolean success = true;

        public CacheEventsImpl(String uri) {
            this.uri = uri;
        }

        @Override
        public WriteStream<Buffer> createCachingStream(MultiMap headers, long expiryTime) {
            Path finalName = dir.resolve(String.valueOf(filenameCounter.incrementAndGet()));
            tmpName = finalName.resolveSibling(finalName.getFileName() + ".tmp");
            entry = new CacheEntry(uri, headers, finalName, expiryTime);
            file = fileSystem.openBlocking(tmpName.toString(), CREATE_NEW);
            file.exceptionHandler(error -> {
                logger.error("Failed to write to file " + tmpName, error);
                streamFinished(false);
            });
            cachedFiles.put(uri, entry);
            return file;
        }

        @Override
        public void streamFinished(boolean success) {
            synchronized (this) {
                this.success &= success;
                file.close(__ -> streamFinishedImpl());
            }
        }

        private void streamFinishedImpl() {
            boolean success;
            synchronized (this) {
                success = this.success;
            }
            try {
                if (success) {
                    move(tmpName, entry.file);
                    logger.debug("Cached: {} to {}", uri, entry.file);
                } else {
                    delete(tmpName);
                }
            } catch (IOException e) {
                success = false;
                throw new RuntimeException("Cached file " + entry.file + "failed", e);
            } finally {
                if (!success) {
                    cachedFiles.remove(uri, entry);
                    logger.warn("Not cached: caching of content failed");
                }
                entry.fileReady(success);
            }
        }
    }

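    // Waits until the cached file is fully written; falls back to the next handler if caching failed.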
    private void waitToServeCachedResponse(CacheEntry entry, RoutingContext ctx) {
        entry.onReady(result -> {
            if (result.failed()) {
                ctx.next();
            } else {
                serveCachedResponse(result.result(), ctx);
            }
        });
    }

    private void serveCachedResponse(CacheEntry entry, RoutingContext ctx) {
        logger.debug("Returning cached content for {}", entry.uri);
        HttpServerResponse resp = ctx.response();
        resp.headers().setAll(entry.headers);
        if (ctx.request().method() != HEAD) {
            resp.sendFile(entry.file.toString());
        }
    }

    private boolean canServeFromCache(RoutingContext ctx) {
        // TODO: honor request cache-control headers
        return cacheableRequest(ctx) || ctx.request().method() == HEAD;
    }

    private boolean cacheableRequest(RoutingContext ctx) {
        HttpServerRequest req = ctx.request();
        return req.method() == GET && !req.headers().contains(UPGRADE);
    }
}
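
Usage

The handler is meant to sit in front of the route that actually produces the response (for example a reverse proxy). That downstream handler is expected to fetch the CacheEvents instance from the RoutingContext via CacheHandler.CTX_KEY_CACHE_EVENTS and pipe a copy of the response body into the WriteStream returned by createCachingStream. The sketch below shows one possible wiring under those assumptions; the route path, cache directory, and the trivial downstream handler are illustrative only and not part of this class, and the example assumes a Vert.x version (3.6+) where a Router can be passed directly to requestHandler.

import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;

public class CacheHandlerExample {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();

        // Hypothetical configuration; only "cacheDir" is read by CacheHandler.
        JsonObject cacheConf = new JsonObject().put("cacheDir", "/tmp/backend");
        CacheHandler cacheHandler = new CacheHandler(cacheConf, vertx.fileSystem());

        Router router = Router.router(vertx);
        // The cache handler runs first: it either serves a cached file or calls ctx.next().
        router.route("/api/*").handler(cacheHandler.build());
        // A real application would proxy the request here and, when the context contains a
        // CacheEvents instance (ctx.get(CacheHandler.CTX_KEY_CACHE_EVENTS)), also write the
        // upstream response into the stream returned by createCachingStream(...).
        router.route("/api/*").handler(ctx -> ctx.response().end("hello"));

        vertx.createHttpServer().requestHandler(router).listen(8080);
    }
}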