// Java tutorial
/*
 * Copyright 2015 Adobe Systems Incorporated
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.razorfish.fluent.contentintelligence.core.servlets;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

import javax.jcr.RepositoryException;
import javax.servlet.Servlet;
import javax.servlet.ServletException;

import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.rekognition.AmazonRekognitionClient;
import com.amazonaws.services.rekognition.model.Attribute;
import com.amazonaws.services.rekognition.model.BoundingBox;
import com.amazonaws.services.rekognition.model.DetectFacesRequest;
import com.amazonaws.services.rekognition.model.DetectFacesResult;
import com.amazonaws.services.rekognition.model.FaceDetail;
import com.day.cq.commons.ImageHelper;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.Rendition;
import com.day.cq.dam.commons.util.DamUtil;
import com.day.cq.wcm.commons.AbstractImageServlet;
import com.day.cq.wcm.foundation.Image;
import com.day.image.Layer;

/**
 * Servlet that serves a "smart crop" of a DAM image: it runs AWS Rekognition
 * face detection on the original rendition, crops the image around the first
 * detected face (falling back to the whole image when no face is found), and
 * streams the result at the requested size.
 *
 * <p>Expected request shape: {@code <imagePath>.<width>.<height>.<ext>} where
 * the two numeric selectors give the target crop size in pixels.
 *
 * <p>The {@link SlingSafeMethodsServlet} shall be used for HTTP methods that
 * are idempotent. For write operations use the {@link SlingAllMethodsServlet}.
 */
@Component(label = "Example smart crop servlet", description = "example smart crop servlet.", metatype = true)
@Properties({
        @Property(label = "Resource Types", description = "Resource Types and Node Types to bind this servlet to.",
                name = "sling.servlet.resourceTypes", value = { "sling/servlet/default" }, propertyPrivate = false),
        @Property(label = "Extension", description = "", name = "sling.servlet.extensions",
                value = { "jpg", "png", "gif" }),
        @Property(name = "sling.servlet.methods", value = { "GET" }, propertyPrivate = true) })
@Service(Servlet.class)
public class SmartCropServlet extends AbstractImageServlet {

    private static final long serialVersionUID = 4394825359530691603L;

    private static final Logger log = LoggerFactory.getLogger(SmartCropServlet.class);

    /**
     * Handles GET requests: parses the {@code width.height} selectors, detects
     * the primary face in the referenced asset, and writes a face-centered crop
     * resized to the requested dimensions.
     *
     * @param request  the Sling request; selectors carry the target crop size
     * @param response receives the cropped image bytes
     * @throws IOException      on response/stream failures
     * @throws ServletException per the servlet contract
     */
    @Override
    protected final void doGet(final SlingHttpServletRequest request, final SlingHttpServletResponse response)
            throws ServletException, IOException {

        String[] selectors = request.getRequestPathInfo().getSelectors();
        // Malformed URLs (missing or non-numeric selectors) are a client error,
        // not a server crash: answer 400 instead of throwing.
        if (selectors.length < 2) {
            response.sendError(400, "Expected two numeric selectors: <width>.<height>");
            return;
        }
        int sizeX;
        int sizeY;
        try {
            sizeX = Integer.parseInt(selectors[0]);
            sizeY = Integer.parseInt(selectors[1]);
        } catch (NumberFormatException e) {
            response.sendError(400, "Selectors must be numeric crop dimensions");
            return;
        }

        String extension = request.getRequestPathInfo().getExtension();
        String resourcePath = request.getRequestPathInfo().getResourcePath();
        // Strip selectors/extension from the resource path; guard against a
        // path with no dot at all.
        int dot = resourcePath.indexOf('.');
        String imagePath = dot >= 0 ? resourcePath.substring(0, dot) : resourcePath;

        log.info("received {} : {} : {}", Arrays.toString(selectors), extension, imagePath);

        String type = getImageType(extension);
        if (type == null) {
            response.sendError(404, "Image type not supported");
            return;
        }
        response.setContentType(type);

        ImageContext context = new ImageContext(request, type);
        Resource resource = context.request.getResourceResolver().getResource(imagePath + "." + extension);
        if (resource == null) {
            response.sendError(404);
            return;
        }
        log.info("resource : {} type {}", resource.getPath(), resource.getResourceType());

        Image image = new Image(resource);

        // Face bounding box as fractions of the full image (Rekognition's
        // normalized coordinates); defaults to the whole image.
        float x1 = 0, y1 = 0, x2 = 1, y2 = 1;

        if (isAsset(resource) || isRendition(resource)) {
            image.setFileReference(image.getPath());
            Rendition rendition = isAsset(resource)
                    ? DamUtil.resolveToAsset(resource).getOriginal()
                    : resource.adaptTo(Rendition.class);
            float[] box = detectPrimaryFaceBox(rendition);
            if (box != null) {
                x1 = box[0];
                y1 = box[1];
                x2 = box[2];
                y2 = box[3];
            }
        }

        if (!image.hasContent()) {
            response.sendError(404);
            return;
        }

        try {
            log.info("image : {}", image.getMimeType());
            Layer layer = image.getLayer(true, false, true);
            int ratioX = layer.getWidth();
            int ratioY = layer.getHeight();

            // Never ask for a crop larger than the source image.
            sizeX = Math.min(sizeX, ratioX);
            sizeY = Math.min(sizeY, ratioY);
            log.info("baseline : {},{}", sizeX, sizeY);

            // Scale the normalized box to pixel coordinates.
            x1 = (int) Math.ceil(x1 * ratioX);
            y1 = (int) Math.ceil(y1 * ratioY);
            x2 = (int) Math.ceil(x2 * ratioX);
            y2 = (int) Math.ceil(y2 * ratioY);
            log.info("detected : {},{},{},{}", (int) x1, (int) y1, (int) x2, (int) y2);
            log.info("calculated : {},{}", (int) (x2 - x1), (int) (y2 - y1));

            // If the requested crop is larger than the face box, grow the box
            // symmetrically. Compute the padding ONCE so both edges move by the
            // same amount (the original recomputed it after moving x1/y1, which
            // skewed the second edge).
            if ((x2 - x1) < sizeX) {
                float pad = (sizeX - (x2 - x1)) / 2;
                x1 -= pad;
                x2 += pad;
                log.info("x adj : {},{},{},{}", (int) x1, (int) y1, (int) x2, (int) y2);
            }
            if ((y2 - y1) < sizeY) {
                float pad = (sizeY - (y2 - y1)) / 2;
                y1 -= pad;
                y2 += pad;
                log.info("y adj : {},{},{},{}", (int) x1, (int) y1, (int) x2, (int) y2);
            }

            // Clamp back inside the image bounds while preserving crop size.
            if (x1 < 0) {
                x2 -= x1;
                x1 = 0;
            }
            if (y1 < 0) {
                y2 -= y1;
                y1 = 0;
            }
            if (x2 > ratioX) {
                x1 -= (x2 - ratioX);
                x2 = ratioX;
            }
            if (y2 > ratioY) {
                y1 -= (y2 - ratioY);
                y2 = ratioY;
            }
            // TODO - handle negative values for bounding box -
            // http://docs.aws.amazon.com/rekognition/latest/dg/API_BoundingBox.html

            log.info("resolved : {},{},{},{}", (int) x1, (int) y1, (int) x2, (int) y2);
            layer.crop(ImageHelper.getCropRect(
                    (int) x1 + "," + (int) y1 + "," + (int) x2 + "," + (int) y2, image.getPath()));
            // After cropping to the face, scale to the requested output size.
            layer.resize(sizeX, sizeY);

            // GIF quality is an index depth (0-255); other formats use 0.0-1.0.
            double quality = "image/gif".equals(image.getMimeType()) ? 255 : 1.0;
            layer.write(image.getMimeType(), quality, response.getOutputStream());
        } catch (RepositoryException e) {
            log.error("Could not create layer", e);
        }
        response.flushBuffer();
    }

    /**
     * Runs AWS Rekognition face detection on the given rendition.
     *
     * @param rendition the rendition whose binary is analyzed
     * @return {@code [left, top, right, bottom]} of the first detected face in
     *         normalized (0..1) coordinates, or {@code null} if no face found
     * @throws IOException if the rendition binary cannot be read
     */
    private float[] detectPrimaryFaceBox(Rendition rendition) throws IOException {
        byte[] data = readFully(rendition);

        DetectFacesRequest dfrequest = new DetectFacesRequest()
                .withImage(new com.amazonaws.services.rekognition.model.Image().withBytes(ByteBuffer.wrap(data)))
                .withAttributes(Attribute.ALL);

        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(
                new ProfileCredentialsProvider().getCredentials());
        rekognitionClient.setSignerRegionOverride("us-east-1");

        DetectFacesResult result = rekognitionClient.detectFaces(dfrequest);
        List<FaceDetail> faceDetails = result.getFaceDetails();
        if (faceDetails.isEmpty()) {
            return null;
        }
        log.info("result {}", Arrays.toString(faceDetails.toArray()));

        // Rekognition reports the box as top-left corner plus width/height.
        BoundingBox bb = faceDetails.get(0).getBoundingBox();
        return new float[] { bb.getLeft(), bb.getTop(),
                bb.getLeft() + bb.getWidth(), bb.getTop() + bb.getHeight() };
    }

    /**
     * Reads the rendition's binary fully into a byte array, closing the stream.
     * A single {@code InputStream.read(byte[])} call may return fewer bytes
     * than requested, so loop until EOF or the buffer is full.
     *
     * @param rendition the rendition to read
     * @return the rendition bytes
     * @throws IOException on read failure
     */
    private static byte[] readFully(Rendition rendition) throws IOException {
        byte[] data = new byte[(int) rendition.getSize()];
        try (InputStream in = rendition.getStream()) {
            int off = 0;
            while (off < data.length) {
                int n = in.read(data, off, data.length - off);
                if (n < 0) {
                    break;
                }
                off += n;
            }
            log.debug("Read : {} of {}", off, data.length);
        }
        return data;
    }

    /**
     * Writes the image layer unmodified (no smart crop) — used by the
     * {@link AbstractImageServlet} rendering path.
     */
    @Override
    protected void writeLayer(SlingHttpServletRequest req, SlingHttpServletResponse resp, ImageContext c, Layer layer)
            throws IOException, RepositoryException {
        Resource res = c.resource;
        log.info("resource : {} type {}", res.getPath(), res.getResourceType());
        Image image = new Image(res);
        log.info("image : {}", image.getMimeType());

        if (!image.hasContent()) {
            resp.sendError(404);
            return;
        }
        image.loadStyleData(c.style);
        layer = image.getLayer(true, false, true);
        // GIF quality is an index depth (0-255); other formats use 0.0-1.0.
        double quality = "image/gif".equals(image.getMimeType()) ? 255 : 1.0;
        layer.write(image.getMimeType(), quality, resp.getOutputStream());
        resp.flushBuffer();
    }

    /** @return {@code true} if the resource adapts to a DAM {@link Rendition}. */
    private boolean isRendition(Resource res) {
        return res.adaptTo(Rendition.class) != null;
    }

    /** @return {@code true} if the resource adapts to a DAM {@link Asset}. */
    private boolean isAsset(Resource res) {
        return res.adaptTo(Asset.class) != null;
    }

    /**
     * Maps a file extension to its MIME type.
     *
     * @param ext lower-case file extension
     * @return the MIME type, or {@code null} if the extension is unsupported
     */
    protected String getImageType(String ext) {
        if ("png".equals(ext)) {
            return "image/png";
        }
        if ("gif".equals(ext)) {
            return "image/gif";
        }
        if ("jpg".equals(ext) || "jpeg".equals(ext)) {
            // "image/jpeg" is the IANA-registered type; "image/jpg" is not valid.
            return "image/jpeg";
        }
        return null;
    }

    /**
     * Unused: {@link #doGet} builds and writes the layer itself.
     */
    @Override
    protected Layer createLayer(ImageContext c) throws RepositoryException, IOException {
        return null;
    }
}