List of usage examples for javax.imageio ImageReader getWidth
public abstract int getWidth(int imageIndex) throws IOException;
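Before the project-specific examples below, here is a minimal, hedged sketch of the typical getWidth(int) call pattern: obtain an ImageReader from an ImageInputStream, read the header-level dimensions without decoding pixel data, and dispose of the reader. This sketch is not taken from any of the projects listed below, and the file name "example.png" is a placeholder.

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

public class ImageSizeExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical input file; replace with a real image path.
        File imageFile = new File("example.png");
        try (ImageInputStream iis = ImageIO.createImageInputStream(imageFile)) {
            Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
            if (!readers.hasNext()) {
                System.err.println("No ImageReader found for " + imageFile);
                return;
            }
            ImageReader reader = readers.next();
            try {
                // Read only the header: getWidth/getHeight do not decode pixel data.
                reader.setInput(iis, true);
                int width = reader.getWidth(0);   // image index 0 = first image in the stream
                int height = reader.getHeight(0);
                System.out.println(width + " x " + height);
            } finally {
                reader.dispose();
            }
        }
    }
}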
From source file:org.dita.dost.writer.ImageMetadataFilter.java
private Attributes readMetadata(final URI imgInput) {
    logger.info("Reading " + imgInput);
    final XMLUtils.AttributesBuilder a = new XMLUtils.AttributesBuilder();
    try {
        InputStream in = null;
        ImageReader r = null;
        ImageInputStream iis = null;
        try {
            in = getInputStream(imgInput);
            iis = ImageIO.createImageInputStream(in);
            final Iterator<ImageReader> i = ImageIO.getImageReaders(iis);
            if (!i.hasNext()) {
                logger.info("Image " + imgInput + " format not supported");
            } else {
                r = i.next();
                r.setInput(iis);
                final int imageIndex = r.getMinIndex();
                a.add(DITA_OT_NS, ATTR_IMAGE_WIDTH, DITA_OT_PREFIX + ":" + ATTR_IMAGE_WIDTH, "CDATA",
                        Integer.toString(r.getWidth(imageIndex)));
                a.add(DITA_OT_NS, ATTR_IMAGE_HEIGHT, DITA_OT_PREFIX + ":" + ATTR_IMAGE_HEIGHT, "CDATA",
                        Integer.toString(r.getHeight(imageIndex)));
                final Element node = (Element) r.getImageMetadata(0).getAsTree("javax_imageio_1.0");
                final NodeList hs = node.getElementsByTagName("HorizontalPixelSize");
                if (hs != null && hs.getLength() == 1) {
                    final float v = Float.parseFloat(((Element) hs.item(0)).getAttribute("value"));
                    final int dpi = Math.round(MM_TO_INCH / v);
                    a.add(DITA_OT_NS, ATTR_HORIZONTAL_DPI, DITA_OT_PREFIX + ":" + ATTR_HORIZONTAL_DPI, "CDATA",
                            Integer.toString(dpi));
                }
                final NodeList vs = node.getElementsByTagName("VerticalPixelSize");
                if (vs != null && vs.getLength() == 1) {
                    final float v = Float.parseFloat(((Element) vs.item(0)).getAttribute("value"));
                    final int dpi = Math.round(MM_TO_INCH / v);
                    a.add(DITA_OT_NS, ATTR_VERTICAL_DPI, DITA_OT_PREFIX + ":" + ATTR_VERTICAL_DPI, "CDATA",
                            Integer.toString(dpi));
                }
            }
        } finally {
            if (r != null) {
                r.dispose();
            }
            if (iis != null) {
                iis.close();
            }
            if (in != null) {
                in.close();
            }
        }
    } catch (final Exception e) {
        logger.error("Failed to read image " + imgInput + " metadata: " + e.getMessage(), e);
    }
    return a.build();
}
From source file:org.exoplatform.wcm.ext.component.activity.FileUIActivity.java
protected int getImageWidth(Node node) {
    int imageWidth = 0;
    try {
        if (node.hasNode(NodetypeConstant.JCR_CONTENT))
            node = node.getNode(NodetypeConstant.JCR_CONTENT);
        ImageReader reader = ImageIO.getImageReadersByMIMEType(mimeType).next();
        ImageInputStream iis = ImageIO.createImageInputStream(node.getProperty("jcr:data").getStream());
        reader.setInput(iis, true);
        imageWidth = reader.getWidth(0);
        iis.close();
        reader.dispose();
    } catch (Exception e) {
        LOG.info("Cannot get node");
    }
    return imageWidth;
}
From source file:org.exoplatform.wcm.notification.plugin.FileActivityChildPlugin.java
private int getImageWidth(Node node) {
    int imageWidth = 0;
    try {
        if (node.hasNode(NodetypeConstant.JCR_CONTENT))
            node = node.getNode(NodetypeConstant.JCR_CONTENT);
        ImageReader reader = ImageIO.getImageReadersByMIMEType(mimeType).next();
        ImageInputStream iis = ImageIO.createImageInputStream(node.getProperty("jcr:data").getStream());
        reader.setInput(iis, true);
        imageWidth = reader.getWidth(0);
        iis.close();
        reader.dispose();
    } catch (RepositoryException | IOException e) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("Can not get image width in " + this.getClass().getName());
        }
    }
    return imageWidth;
}
From source file:org.geotools.gce.imagemosaic.catalogbuilder.CatalogBuilder.java
/**
 * @param resolutionLevels
 * @param numberOfLevels
 * @param coverageReader
 * @param imageioReader
 * @param compareLevels optional resolutionLevels to be compared.
 * @return
 * @throws IndexOutOfBoundsException
 * @throws IOException
 */
private static boolean setupResolutions(final double[][] resolutionLevels, final int numberOfLevels,
        final AbstractGridCoverage2DReader coverageReader, final ImageReader imageioReader,
        final double[][] compareLevels) throws IndexOutOfBoundsException, IOException {
    double[] res = CoverageUtilities
            .getResolution((AffineTransform) coverageReader.getOriginalGridToWorld(PixelInCell.CELL_CORNER));
    resolutionLevels[0][0] = res[0];
    resolutionLevels[1][0] = res[1];
    final boolean checkLevels = compareLevels != null;

    // resolution levels are computed using the raster space scale factors
    if (numberOfLevels >= 1) {
        for (int k = 0; k < numberOfLevels; k++) {
            resolutionLevels[0][k] = resolutionLevels[0][0] * coverageReader.getOriginalGridRange().getSpan(0)
                    / (1.0 * imageioReader.getWidth(k));
            resolutionLevels[1][k] = resolutionLevels[1][0] * coverageReader.getOriginalGridRange().getSpan(1)
                    / (1.0 * imageioReader.getHeight(k));
            if (checkLevels) {
                if (Math.abs(resolutionLevels[0][k] - compareLevels[0][k]) > RESOLUTION_TOLERANCE_FACTOR
                        * compareLevels[0][k]
                        || Math.abs(resolutionLevels[1][k] - compareLevels[1][k]) > RESOLUTION_TOLERANCE_FACTOR
                                * compareLevels[1][k]) {
                    return false;
                }
            }
        }
    }
    return true;
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
private void init(final BoundingBox granuleBBOX, final URL granuleUrl, final ImageReaderSpi suggestedSPI,
        final MultiLevelROI roiProvider, final boolean heterogeneousGranules,
        final boolean handleArtifactsFiltering, final Hints hints) {
    this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX);
    this.granuleUrl = granuleUrl;
    this.roiProvider = roiProvider;
    this.handleArtifactsFiltering = handleArtifactsFiltering;
    filterMe = handleArtifactsFiltering && roiProvider != null;

    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        // get info about the raster we have to read

        // get a stream
        if (cachedStreamSPI == null) {
            cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true);
            if (cachedStreamSPI == null) {
                final File file = DataUtilities.urlToFile(granuleUrl);
                if (file != null) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                    }
                }
                throw new IllegalArgumentException(
                        "Unable to get an input stream for the provided granule " + granuleUrl.toString());
            }
        }
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null) {
            final File file = DataUtilities.urlToFile(granuleUrl);
            if (file != null) {
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                }
            }
            throw new IllegalArgumentException(
                    "Unable to get an input stream for the provided file " + granuleUrl.toString());
        }

        // get a reader and try to cache the suggested SPI first
        if (cachedReaderSPI == null) {
            inStream.mark();
            if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) {
                cachedReaderSPI = suggestedSPI;
                inStream.reset();
            } else {
                inStream.mark();
                reader = ImageIOExt.getImageioReader(inStream);
                if (reader != null)
                    cachedReaderSPI = reader.getOriginatingProvider();
                inStream.reset();
            }
        }
        if (reader == null) {
            if (cachedReaderSPI == null) {
                throw new IllegalArgumentException(
                        "Unable to get a ReaderSPI for the provided input: " + granuleUrl.toString());
            }
            reader = cachedReaderSPI.createReaderInstance();
        }
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file " + granuleUrl.toString());
        boolean ignoreMetadata = customizeReaderInitialization(reader, hints);
        reader.setInput(inStream, false, ignoreMetadata);

        // get selected level and base level dimensions
        final Rectangle originalDimension = Utils.getDimension(0, reader);

        // build the g2W for this tile, in principle we should get it
        // somehow from the tile itself or from the index, but at the moment
        // we do not have such info, hence we assume that it is a simple
        // scale and translate
        this.geMapper = new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX);
        geMapper.setPixelAnchor(PixelInCell.CELL_CENTER); // this is the default behavior but it is nice to write it down anyway
        this.baseGridToWorld = geMapper.createAffineTransform();

        // add the base level
        this.granuleLevels.put(Integer.valueOf(0),
                new GranuleOverviewLevelDescriptor(1, 1, originalDimension.width, originalDimension.height));

        ////////////////////// Setting overviewController ///////////////////////
        if (heterogeneousGranules) {
            // Right now we are setting up overviewsController by assuming that
            // overviews are internal images as happens in TIFF images.
            // We can improve this by leveraging on coverageReaders.

            // Getting the first level descriptor
            final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0);

            // Variables initialization
            final int numberOfOverviews = reader.getNumImages(true) - 1;
            final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform();
            final int width = baseOverviewLevelDescriptor.getWidth();
            final int height = baseOverviewLevelDescriptor.getHeight();
            final double resX = AffineTransform2D.getScaleX0(baseG2W);
            final double resY = AffineTransform2D.getScaleY0(baseG2W);
            final double[] highestRes = new double[] { resX, resY };
            final double[][] overviewsResolution = new double[numberOfOverviews][2];

            // Populating overviews and initializing overviewsController
            for (int i = 0; i < numberOfOverviews; i++) {
                overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1);
                overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1);
            }
            overviewsController = new OverviewsController(highestRes, numberOfOverviews, overviewsResolution);
        }
        //////////////////////////////////////////////////////////////////////////

        if (hints != null && hints.containsKey(Utils.CHECK_AUXILIARY_METADATA)) {
            boolean checkAuxiliaryMetadata = (Boolean) hints.get(Utils.CHECK_AUXILIARY_METADATA);
            if (checkAuxiliaryMetadata) {
                checkPamDataset();
            }
        }
    } catch (IllegalStateException e) {
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    } finally {
        // close/dispose stream and readers
        try {
            if (inStream != null) {
                inStream.close();
            }
        } catch (Throwable e) {
            throw new IllegalArgumentException(e);
        } finally {
            if (reader != null) {
                reader.dispose();
            }
        }
    }
}
From source file:org.geotools.gce.imagemosaic.Utils.java
/**
 * Retrieves the dimensions of the {@link RenderedImage} at index
 * <code>imageIndex</code> for the provided {@link ImageReader} and
 * {@link ImageInputStream}.
 *
 * <p>
 * Notice that none of the input parameters can be <code>null</code> or a
 * {@link NullPointerException} will be thrown. Moreover the
 * <code>imageIndex</code> cannot be negative or an
 * {@link IllegalArgumentException} will be thrown.
 *
 * @param imageIndex the index of the image to get the dimensions for.
 * @param inStream   the {@link ImageInputStream} to use as an input
 * @param reader     the {@link ImageReader} to decode the image dimensions.
 * @return a {@link Rectangle} that contains the dimensions for the image at
 *         index <code>imageIndex</code>
 * @throws IOException in case the {@link ImageReader} or the
 *         {@link ImageInputStream} fail.
 */
static Rectangle getDimension(final int imageIndex, final ImageReader reader) throws IOException {
    Utilities.ensureNonNull("reader", reader);
    if (imageIndex < 0)
        throw new IllegalArgumentException(Errors.format(ErrorKeys.INDEX_OUT_OF_BOUNDS_$1, imageIndex));
    return new Rectangle(0, 0, reader.getWidth(imageIndex), reader.getHeight(imageIndex));
}
From source file:org.geotools.utils.imagemosaic.MosaicIndexBuilder.java
/**
 * Main thread for the mosaic index builder.
 */
public void run() {
    // /////////////////////////////////////////////////////////////////////
    //
    // CREATING INDEX FILE
    //
    // /////////////////////////////////////////////////////////////////////

    // Create a file handler that writes log records to a file called my.log
    FileHandler handler = null;
    try {
        boolean append = true;
        handler = new FileHandler(new StringBuffer(locationPath).append("/error.txt").toString(), append);
        handler.setLevel(Level.SEVERE);
        // Add to the desired logger
        LOGGER.addHandler(handler);

        // Create a set of file names that have to be skipped since these are
        // our metadata files
        final Set<String> skipFiles = new HashSet<String>(
                Arrays.asList(new String[] { indexName + ".shp", indexName + ".dbf", indexName + ".shx",
                        indexName + ".prj", "error.txt", "error.txt.lck", indexName + ".properties" }));

        // Creating temp vars
        ShapefileDataStore index = null;
        Transaction t = new DefaultTransaction();
        // declaring a precision model to adhere to the java double type precision
        PrecisionModel precMod = new PrecisionModel(PrecisionModel.FLOATING);
        GeometryFactory geomFactory = new GeometryFactory(precMod);
        try {
            index = new ShapefileDataStore(
                    new File(locationPath + File.separator + indexName + ".shp").toURI().toURL());
        } catch (MalformedURLException ex) {
            if (LOGGER.isLoggable(Level.SEVERE))
                LOGGER.log(Level.SEVERE, ex.getLocalizedMessage(), ex);
            fireException(ex);
            return;
        }

        final List<File> files = new ArrayList<File>();
        recurse(files, locationPath);

        // /////////////////////////////////////////////////////////////////////
        //
        // Cycling over the files that have been filtered out
        //
        // /////////////////////////////////////////////////////////////////////
        numFiles = files.size();
        String validFileName = null;
        final Iterator<File> filesIt = files.iterator();
        FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
        boolean doneSomething = false;
        for (int i = 0; i < numFiles; i++) {
            StringBuffer message;

            // Check that this file is actually good to go
            final File fileBeingProcessed = ((File) filesIt.next());
            if (!fileBeingProcessed.exists() || !fileBeingProcessed.canRead() || !fileBeingProcessed.isFile()) {
                // send a message
                message = new StringBuffer("Skipped file ").append(files.get(i))
                        .append(" since it seems invalid.");
                if (LOGGER.isLoggable(Level.INFO))
                    LOGGER.info(message.toString());
                fireEvent(message.toString(), ((i * 99.0) / numFiles));
                continue;
            }

            // Has anyone asked us to stop?
            if (getStopThread()) {
                message = new StringBuffer("Stopping requested at file ").append(i).append(" of ")
                        .append(numFiles).append(" files");
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.fine(message.toString());
                }
                fireEvent(message.toString(), ((i * 100.0) / numFiles));
                return;
            }

            // replacing chars on input path
            try {
                validFileName = fileBeingProcessed.getCanonicalPath();
            } catch (IOException e1) {
                fireException(e1);
                return;
            }
            validFileName = validFileName.replace('\\', '/');
            validFileName = validFileName.substring(locationPath.length() + 1,
                    fileBeingProcessed.getAbsolutePath().length());
            if (skipFiles.contains(validFileName))
                continue;
            message = new StringBuffer("Now indexing file ").append(validFileName);
            if (LOGGER.isLoggable(Level.FINE)) {
                LOGGER.fine(message.toString());
            }
            fireEvent(message.toString(), ((i * 100.0) / numFiles));

            try {
                // ////////////////////////////////////////////////////////
                //
                // STEP 1
                // Getting an ImageIO reader for this coverage.
                //
                // ////////////////////////////////////////////////////////
                ImageInputStream inStream = ImageIO.createImageInputStream(fileBeingProcessed);
                if (inStream == null) {
                    if (LOGGER.isLoggable(Level.SEVERE))
                        LOGGER.severe(fileBeingProcessed
                                + " has been skipped since we could not get a stream for it");
                    continue;
                }
                inStream.mark();
                final Iterator<ImageReader> it = ImageIO.getImageReaders(inStream);
                ImageReader r = null;
                if (it.hasNext()) {
                    r = (ImageReader) it.next();
                    r.setInput(inStream);
                } else {
                    // release resources
                    try {
                        inStream.close();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    // try {
                    //     r.dispose();
                    // } catch (Exception e) {
                    //     // ignore exception
                    // }
                    // send a message
                    message = new StringBuffer("Skipped file ").append(files.get(i))
                            .append(": No ImageIO readers available.");
                    if (LOGGER.isLoggable(Level.INFO))
                        LOGGER.info(message.toString());
                    fireEvent(message.toString(), ((i * 99.0) / numFiles));
                    continue;
                }

                // ////////////////////////////////////////////////////////
                //
                // STEP 2
                // Getting a coverage reader for this coverage.
                //
                // ////////////////////////////////////////////////////////
                if (LOGGER.isLoggable(Level.FINE))
                    LOGGER.fine(new StringBuffer("Getting a reader").toString());
                final AbstractGridFormat format = (AbstractGridFormat) GridFormatFinder
                        .findFormat(files.get(i));
                if (format == null || !format.accepts(files.get(i))) {
                    // release resources
                    try {
                        inStream.close();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    try {
                        r.dispose();
                    } catch (Exception e) {
                        // ignore exception
                    }
                    message = new StringBuffer("Skipped file ").append(files.get(i))
                            .append(": File format is not supported.");
                    if (LOGGER.isLoggable(Level.INFO))
                        LOGGER.info(message.toString());
                    fireEvent(message.toString(), ((i * 99.0) / numFiles));
                    continue;
                }
                final AbstractGridCoverage2DReader reader = (AbstractGridCoverage2DReader) format
                        .getReader(files.get(i));
                envelope = (GeneralEnvelope) reader.getOriginalEnvelope();
                actualCRS = reader.getCrs();

                // /////////////////////////////////////////////////////////////////////
                //
                // STEP 3
                // Get the type specifier for this image and check that the
                // image has the correct sample model and color model.
                // If this is the first cycle of the loop we initialize everything.
                //
                // /////////////////////////////////////////////////////////////////////
                final ImageTypeSpecifier its = ((ImageTypeSpecifier) r.getImageTypes(0).next());
                boolean skipFeature = false;
                if (globEnvelope == null) {
                    // /////////////////////////////////////////////////////////////////////
                    //
                    // at the first step we initialize everything that we will
                    // reuse afterwards starting with color models, sample
                    // models, crs, etc....
                    //
                    // /////////////////////////////////////////////////////////////////////
                    defaultCM = its.getColorModel();
                    if (defaultCM instanceof IndexColorModel) {
                        IndexColorModel icm = (IndexColorModel) defaultCM;
                        int numBands = defaultCM.getNumColorComponents();
                        defaultPalette = new byte[3][icm.getMapSize()];
                        icm.getReds(defaultPalette[0]);
                        icm.getGreens(defaultPalette[0]);
                        icm.getBlues(defaultPalette[0]);
                        if (numBands == 4)
                            icm.getAlphas(defaultPalette[0]);
                    }
                    defaultSM = its.getSampleModel();
                    defaultCRS = actualCRS;
                    globEnvelope = new GeneralEnvelope(envelope);

                    // /////////////////////////////////////////////////////////////////////
                    //
                    // getting information about resolution
                    //
                    // /////////////////////////////////////////////////////////////////////
                    // get the dimension of the hr image and build the model
                    // as well as computing the resolution
                    //
                    // resetting reader and recreating stream, turnaround for a
                    // strange imageio bug
                    r.reset();
                    try {
                        inStream.reset();
                    } catch (IOException e) {
                        inStream = ImageIO.createImageInputStream(fileBeingProcessed);
                    }
                    // let's check if we got something now
                    if (inStream == null) {
                        // skip file
                        if (LOGGER.isLoggable(Level.WARNING))
                            LOGGER.warning("Skipping file " + fileBeingProcessed.toString());
                        continue;
                    }
                    r.setInput(inStream);
                    numberOfLevels = r.getNumImages(true);
                    resolutionLevels = new double[2][numberOfLevels];
                    double[] res = getResolution(envelope, new Rectangle(r.getWidth(0), r.getHeight(0)),
                            defaultCRS);
                    resolutionLevels[0][0] = res[0];
                    resolutionLevels[1][0] = res[1];

                    // resolution levels
                    if (numberOfLevels > 1) {
                        for (int k = 0; k < numberOfLevels; k++) {
                            res = getResolution(envelope, new Rectangle(r.getWidth(k), r.getHeight(k)),
                                    defaultCRS);
                            resolutionLevels[0][k] = res[0];
                            resolutionLevels[1][k] = res[1];
                        }
                    }

                    // /////////////////////////////////////////////////////////////////////
                    //
                    // creating the schema
                    //
                    // /////////////////////////////////////////////////////////////////////
                    final SimpleFeatureTypeBuilder featureBuilder = new SimpleFeatureTypeBuilder();
                    featureBuilder.setName("Flag");
                    featureBuilder.setNamespaceURI("http://www.geo-solutions.it/");
                    featureBuilder.add("location", String.class);
                    featureBuilder.add("the_geom", Polygon.class, this.actualCRS);
                    featureBuilder.setDefaultGeometry("the_geom");
                    final SimpleFeatureType simpleFeatureType = featureBuilder.buildFeatureType();
                    // create the schema for the new shape file
                    index.createSchema(simpleFeatureType);
                    // get a feature writer
                    fw = index.getFeatureWriter(t);
                } else {
                    // ////////////////////////////////////////////////////////
                    //
                    // comparing ColorModel
                    // comparing SampleModel
                    // comparing CRSs
                    //
                    // ////////////////////////////////////////////////////////
                    globEnvelope.add(envelope);
                    actualCM = its.getColorModel();
                    actualSM = its.getSampleModel();
                    skipFeature = (i > 0 ? !(CRS.equalsIgnoreMetadata(defaultCRS, actualCRS)) : false);
                    if (skipFeature)
                        LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                                .append(" because CRSs do not match.").toString());
                    skipFeature = checkColorModels(defaultCM, defaultPalette, actualCM);
                    if (skipFeature)
                        LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i))
                                .append(" because color models do not match.").toString());

                    // defaultCM.getNumComponents()==actualCM.getNumComponents()&&
                    // defaultCM.getClass().equals(actualCM.getClass())
                    // && defaultSM.getNumBands() == actualSM.getNumBands()
                    // && defaultSM.getDataType() == actualSM.getDataType() &&
                    //
                    // if (skipFeature)
                    //     LOGGER.warning(new StringBuffer("Skipping image ")
                    //             .append(files.get(i))
                    //             .append(" because cm or sm does not match.").toString());
                    // res = getResolution(envelope, new Rectangle(r.getWidth(0),
                    //         r.getHeight(0)), defaultCRS);
                    // if (Math.abs((resX - res[0]) / resX) > EPS
                    //         || Math.abs(resY - res[1]) > EPS) {
                    //     LOGGER.warning(new StringBuffer("Skipping image ").append(
                    //             files.get(i)).append(
                    //             " because resolutions does not match.").toString());
                    //     skipFeature = true;
                    // }
                }

                // ////////////////////////////////////////////////////////
                //
                // STEP 4
                //
                // create and store features
                //
                // ////////////////////////////////////////////////////////
                if (!skipFeature) {
                    final SimpleFeature feature = fw.next();
                    feature.setAttribute(1,
                            geomFactory.toGeometry(new ReferencedEnvelope((Envelope) envelope)));
                    feature.setAttribute(0,
                            absolute
                                    ? new StringBuilder(this.locationPath).append(File.separatorChar)
                                            .append(validFileName).toString()
                                    : validFileName);
                    fw.write();
                    message = new StringBuffer("Done with file ").append(files.get(i));
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine(message.toString());
                    }
                    message.append('\n');
                    fireEvent(message.toString(), (((i + 1) * 99.0) / numFiles));
                    doneSomething = true;
                } else
                    skipFeature = false;

                // ////////////////////////////////////////////////////////
                //
                // STEP 5
                //
                // release resources
                //
                // ////////////////////////////////////////////////////////
                try {
                    inStream.close();
                } catch (Exception e) {
                    // ignore exception
                }
                try {
                    r.dispose();
                } catch (Exception e) {
                    // ignore exception
                }
                // release resources
                reader.dispose();
            } catch (IOException e) {
                fireException(e);
                break;
            } catch (ArrayIndexOutOfBoundsException e) {
                fireException(e);
                break;
            }
        }

        try {
            if (fw != null)
                fw.close();
            t.commit();
            t.close();
            index.dispose();
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
        }
        createPropertiesFiles(globEnvelope, doneSomething);
    } catch (SecurityException el) {
        fireException(el);
        return;
    } catch (IOException el) {
        fireException(el);
        return;
    } finally {
        try {
            if (handler != null)
                handler.close();
        } catch (Throwable e) {
            // ignore
        }
    }
}
From source file:org.hippoecm.frontend.plugins.gallery.imageutil.ScaleImageOperation.java
/**
 * Creates a scaled version of an image. The given scaling parameters define a bounding box with a certain width
 * and height. Images that do not fit in this box (i.e. are too large) are always scaled down such that they do
 * fit. If the aspect ratio of the original image differs from that of the bounding box, either the width or the
 * height of the scaled image will be less than that of the box.</p>
 * <p> Smaller images are scaled up in the same way as large images are scaled down, but only if upscaling is
 * true. When upscaling is false and the image is smaller than the bounding box, the scaled image will be equal
 * to the original.</p>
 * <p> If the width or height of the scaling parameters is 0 or less, that side of the bounding box does not
 * exist (i.e. is unbounded). If both sides of the bounding box are unbounded, the scaled image will be equal to
 * the original.</p>
 *
 * @param data   the original image data
 * @param reader reader for the image data
 * @param writer writer for the image data
 */
public void execute(InputStream data, ImageReader reader, ImageWriter writer) throws IOException {
    // save the image data in a temporary file so we can reuse the original data as-is if needed without
    // putting all the data into memory
    final File tmpFile = writeToTmpFile(data);
    boolean deleteTmpFile = true;
    log.debug("Stored uploaded image in temporary file {}", tmpFile);

    InputStream dataInputStream = null;
    ImageInputStream imageInputStream = null;
    try {
        dataInputStream = new FileInputStream(tmpFile);
        imageInputStream = new MemoryCacheImageInputStream(dataInputStream);
        reader.setInput(imageInputStream);

        final int originalWidth = reader.getWidth(0);
        final int originalHeight = reader.getHeight(0);

        if (isOriginalVariant()) {
            scaledWidth = originalWidth;
            scaledHeight = originalHeight;
            scaledData = new AutoDeletingTmpFileInputStream(tmpFile);
            deleteTmpFile = false;
        } else {
            BufferedImage scaledImage = getScaledImage(reader, originalWidth, originalHeight);
            ByteArrayOutputStream scaledOutputStream = ImageUtils.writeImage(writer, scaledImage,
                    compressionQuality);
            scaledWidth = scaledImage.getWidth();
            scaledHeight = scaledImage.getHeight();
            scaledData = new ByteArrayInputStream(scaledOutputStream.toByteArray());
        }
    } finally {
        if (imageInputStream != null) {
            imageInputStream.close();
        }
        IOUtils.closeQuietly(dataInputStream);
        if (deleteTmpFile) {
            log.debug("Deleting temporary file {}", tmpFile);
            tmpFile.delete();
        }
    }
}
From source file:org.hippoecm.frontend.plugins.gallery.imageutil.ScaleImageOperationTest.java
private void checkImageDimensions(ScaleImageOperation scaleOp, String mimeType, int expectedWidth,
        int expectedHeight) throws IOException {
    assertEquals(expectedWidth, scaleOp.getScaledWidth());
    assertEquals(expectedHeight, scaleOp.getScaledHeight());

    ImageReader reader = ImageIO.getImageReadersByMIMEType(mimeType).next();
    ImageInputStream iis = null;
    try {
        iis = ImageIO.createImageInputStream(scaleOp.getScaledData());
        reader.setInput(iis);
        assertEquals(scaleOp.getScaledWidth(), reader.getWidth(0));
        assertEquals(scaleOp.getScaledHeight(), reader.getHeight(0));
    } finally {
        if (iis != null) {
            iis.close();
        }
    }
}
From source file:org.jamwiki.parser.image.ImageProcessor.java
/**
 * Retrieve image dimensions. This method simply reads headers so it should perform
 * relatively fast.
 */
protected static Dimension retrieveImageDimensions(File imageFile) throws IOException {
    long start = System.currentTimeMillis();
    if (!imageFile.exists()) {
        logger.info("No file found while determining image dimensions: " + imageFile.getAbsolutePath());
        return null;
    }
    ImageInputStream iis = null;
    Dimension dimensions = null;
    ImageReader reader = null;
    // use a FileInputStream and make sure it gets closed to prevent unclosed file
    // errors on some operating systems
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(imageFile);
        iis = ImageIO.createImageInputStream(fis);
        Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
        if (readers.hasNext()) {
            reader = readers.next();
            reader.setInput(iis, true);
            dimensions = new Dimension(reader.getWidth(0), reader.getHeight(0));
        }
    } finally {
        if (reader != null) {
            reader.dispose();
        }
        if (iis != null) {
            try {
                iis.close();
            } catch (IOException e) {
                // ignore
            }
        }
        IOUtils.closeQuietly(fis);
    }
    if (logger.isDebugEnabled()) {
        long execution = (System.currentTimeMillis() - start);
        logger.debug("Image dimension lookup for " + imageFile.getAbsolutePath() + " took "
                + (execution / 1000.000) + " s");
    }
    return dimensions;
}