// Java tutorial
/*
 * Copyright 2010 Talis Information Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.talis.storage.s3.cache;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jets3t.service.model.S3Object;
import org.jredis.RedisException;
import org.jredis.ri.alphazero.JRedisClient;

import com.google.inject.Inject;
import com.talis.storage.s3.ChunkHandler;
import com.talis.storage.s3.ExternalizableS3Object;
import com.talis.storage.s3.S3Store;

/**
 * A {@link ChunkHandler} decorator that puts a Redis cache in front of a
 * delegate handler. Reads check the cache first and populate it on a miss;
 * writes pass through to the delegate and then refresh the cache; deletes
 * evict the cache entry before delegating.
 *
 * Caching is strictly best-effort: a Redis failure is logged but never
 * prevents the delegate operation from completing or its result from being
 * returned to the caller.
 */
public class RedisChunkHandler implements ChunkHandler {

    private static final Log LOG = LogFactory.getLog(RedisChunkHandler.class);

    // Seconds before a cached chunk expires on its own; bounds staleness
    // even if eviction is missed. (Previously the literal 3600 appeared in
    // two places.)
    private static final int CACHE_TTL_SECONDS = 3600;

    private final ChunkHandler baseHandler;
    private final JRedisClient client;

    /**
     * @param baseHandler the handler that performs the real chunk storage
     * @param client      Redis connection used as the cache
     */
    @Inject
    public RedisChunkHandler(ChunkHandler baseHandler, JRedisClient client) {
        LOG.debug(String.format("Creating redis chunk handler around %s",
                baseHandler.getClass().getName()));
        this.baseHandler = baseHandler;
        this.client = client;
    }

    @Override
    public String getBucketname() {
        return baseHandler.getBucketname();
    }

    /**
     * Evicts the chunk from the cache (best effort) and then deletes it
     * through the delegate. A Redis failure is logged but does not stop the
     * delete from reaching the delegate.
     */
    @Override
    public void deleteChunk(String key) throws IOException {
        try {
            LOG.debug("Removing chunk from cache");
            client.del(getRedisKey(key));
        } catch (RedisException e) {
            LOG.error("Unable to remove chunk from cache", e);
        }
        LOG.debug("Delegating delete to base handler");
        baseHandler.deleteChunk(key);
    }

    /**
     * Returns the chunk for {@code key}, serving from Redis when possible.
     * On a cache miss the chunk is fetched from the delegate, a clone is
     * written to the cache with a TTL, and the delegate's object is
     * returned to the caller.
     *
     * Cache read/write failures are logged and ignored so that a broken
     * cache never breaks reads (matches writeChunk's best-effort policy;
     * previously a failed cache write here surfaced as an IOException even
     * though the delegate fetch had succeeded).
     *
     * @throws IOException if the delegate cannot supply the chunk
     */
    @Override
    public S3Object getChunk(String key) throws IOException {
        String cacheKey = getRedisKey(key);
        LOG.debug(String.format("Fetching object %s from cache", cacheKey));
        try {
            S3Object object = deserialize(client.get(cacheKey));
            if (null != object) {
                LOG.debug("Found object in cache");
                return object;
            }
        } catch (RedisException e) {
            // Fall through to the delegate; the cache must not break reads.
            LOG.error("Unable to read chunk from cache", e);
        }
        LOG.debug("Not found, delegating fetch to base handler");
        S3Object stored = baseHandler.getChunk(key);
        // Clone before caching: serialization consumes the entity stream,
        // and the caller still needs to read the original's data.
        LOG.debug("Cloning chunk");
        S3Object cached = clone(stored);
        try {
            LOG.debug("Writing chunk to cache");
            writeToCache(cacheKey, cached);
            LOG.debug("Written to cache");
        } catch (RedisException e) {
            LOG.error("Unable to write chunk to cache", e);
        } catch (IOException e) {
            LOG.error("Unable to write chunk to cache", e);
        }
        return stored;
    }

    @Override
    public S3Object[] listChunks(String key) throws IOException {
        // Listing is not cached; always ask the delegate.
        LOG.debug("Delegating list to base handler");
        return baseHandler.listChunks(key);
    }

    @Override
    public InputStream reconstructChunks(S3Object[] chunks) throws IOException {
        LOG.debug("Delegating reconstruct to base handler");
        return baseHandler.reconstructChunks(chunks);
    }

    /**
     * Writes the chunk through the delegate, then refreshes the cache with
     * a clone (best effort). The clone is taken BEFORE delegating because
     * the delegate may consume the chunk's entity stream.
     *
     * @return the object returned by the delegate
     * @throws IOException if cloning fails or the delegate write fails
     */
    @Override
    public S3Object writeChunk(S3Object chunk) throws IOException {
        String cacheKey = getRedisKey(chunk.getKey());
        LOG.debug(String.format("Writing object %s to cache", cacheKey));
        LOG.debug("Cloning chunk");
        S3Object cached = clone(chunk);
        LOG.debug("Delegating write to base handler");
        S3Object written = baseHandler.writeChunk(chunk);
        try {
            LOG.debug("Writing chunk to cache");
            writeToCache(cacheKey, cached);
            LOG.debug("Written to cache");
        } catch (RedisException e) {
            // Best effort: the write already succeeded in the delegate.
            LOG.error("Unable to write chunk to cache", e);
        }
        return written;
    }

    /**
     * Serializes a chunk with standard Java object serialization for cache
     * storage. The chunk must actually be serializable (callers pass an
     * {@link ExternalizableS3Object} produced by {@link #clone(S3Object)}).
     */
    public static byte[] serialize(S3Object chunk) throws IOException {
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        ObjectOutputStream objStream = new ObjectOutputStream(byteStream);
        objStream.writeObject(chunk);
        objStream.flush();
        objStream.close();
        return byteStream.toByteArray();
    }

    /**
     * Reconstructs a chunk from cached bytes. Returns {@code null} — i.e. a
     * cache miss — when the bytes are absent or cannot be parsed.
     *
     * NOTE(review): uses native Java deserialization; acceptable only
     * because the bytes come from this class's own serialize() — never feed
     * this untrusted data.
     */
    public static S3Object deserialize(byte[] bytes) {
        if (null == bytes || bytes.length == 0) {
            // Key not present in Redis. (Previously this path relied on an
            // NPE from ByteArrayInputStream(null) being swallowed below.)
            return null;
        }
        try {
            ObjectInputStream objStream =
                    new ObjectInputStream(new ByteArrayInputStream(bytes));
            return (S3Object) objStream.readObject();
        } catch (Exception e) {
            // Treat unreadable cache entries as misses rather than failing,
            // but keep the cause in the log (previously dropped).
            LOG.warn("Exception parsing cached bytes", e);
            return null;
        }
    }

    /**
     * Serializes {@code cached} and stores it under {@code cacheKey} with
     * the standard TTL. Shared by getChunk and writeChunk, which apply
     * their own error-handling policy around it.
     */
    private void writeToCache(String cacheKey, S3Object cached)
            throws IOException, RedisException {
        client.set(cacheKey, serialize(cached));
        client.expire(cacheKey, CACHE_TTL_SECONDS);
    }

    // Cache keys are the MD5 hex of "bucket/key", keeping them short and
    // uniform regardless of the S3 key's length or characters.
    private String getRedisKey(String key) {
        return DigestUtils.md5Hex(getBucketname() + "/" + key);
    }

    /**
     * Builds a serializable copy of a chunk for caching. Reads the
     * original's entity into memory (and resets its stream to a replayable
     * copy) so both the cache and the caller can consume the data.
     * Content type is forced to the chunk type; see S3Store.TMB_CHUNK_TYPE.
     */
    private S3Object clone(S3Object chunk) throws IOException {
        try {
            S3Object clone = new ExternalizableS3Object();
            clone.setKey(chunk.getKey());
            clone.setBucketName(chunk.getBucketName());
            clone.setContentType(S3Store.TMB_CHUNK_TYPE.toString());
            clone.setDataInputStream(new ByteArrayInputStream(getEntityBytes(chunk)));
            clone.addAllMetadata(chunk.getMetadataMap());
            clone.setETag(chunk.getETag());
            if (null != chunk.getLastModifiedDate()) {
                clone.setLastModifiedDate(chunk.getLastModifiedDate());
            }
            return clone;
        } catch (Exception e) {
            throw new IOException("Unable to clone chunk", e);
        }
    }

    /**
     * Drains the chunk's entity stream into a byte array, then swaps a
     * fresh in-memory stream back onto the chunk so it can still be read
     * by the caller. (Previously toByteArray() was called twice, copying
     * the entity buffer twice.)
     */
    private byte[] getEntityBytes(S3Object chunk) throws IOException {
        try {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            IOUtils.copy(chunk.getDataInputStream(), out);
            byte[] entity = out.toByteArray();
            chunk.setDataInputStream(new ByteArrayInputStream(entity));
            return entity;
        } catch (Exception e) {
            throw new IOException("Unable to read chunk entity", e);
        }
    }
}