List of usage examples for java.awt.image.RenderedImage.getHeight()
int getHeight();
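Before the longer examples below, a minimal hedged sketch of the call itself; the class name and file path are illustrative placeholders, not taken from any of the sources listed here. ImageIO.read returns a BufferedImage, which implements RenderedImage, so getWidth() and getHeight() report its dimensions in pixels.

import java.awt.image.RenderedImage;
import java.io.File;
import javax.imageio.ImageIO;

public class GetHeightExample {
    public static void main(String[] args) throws Exception {
        // BufferedImage implements RenderedImage; "sample.png" is a placeholder path.
        RenderedImage image = ImageIO.read(new File("sample.png"));
        int width = image.getWidth();   // image width in pixels
        int height = image.getHeight(); // image height in pixels
        System.out.println("Size: " + width + " x " + height);
    }
}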
From source file:org.geoserver.wms.map.RenderedImageMapOutputFormatTest.java
/**
 * Test to check if we can successfully create a direct rendered image by using
 * a coverage view as a source, and a symbolizer defining which three bands of the
 * input coverage view can be used for RGB coloring, and in what order.
 */
@Test
public void testStyleUsingChannelsFromCoverageView() throws Exception {
    GetMapRequest request = new GetMapRequest();
    CoordinateReferenceSystem crs = DefaultGeographicCRS.WGS84;
    ReferencedEnvelope bbox = new ReferencedEnvelope(
            new Envelope(-116.90673461649858211, -114.30988665660261461,
                    32.07093728218402617, 33.89032847348440214), crs);
    request.setBbox(bbox);
    request.setSRS("urn:x-ogc:def:crs:EPSG:4326");
    request.setFormat("image/png");

    final WMSMapContent map = new WMSMapContent(request);
    map.setMapWidth(300);
    map.setMapHeight(300);
    map.setTransparent(false);
    map.getViewport().setBounds(bbox);

    StyleBuilder styleBuilder = new StyleBuilder();
    Catalog catalog = getCatalog();

    // Source image
    CoverageInfo ci = catalog.getCoverageByName(SystemTestData.MULTIBAND.getPrefix(),
            SystemTestData.MULTIBAND.getLocalPart());
    GridCoverage2DReader reader = (GridCoverage2DReader) ci.getGridCoverageReader(null, null);
    reader.getCoordinateReferenceSystem();
    Layer sl = new CachedGridReaderLayer(reader,
            styleBuilder.createStyle(styleBuilder.createRasterSymbolizer()));
    map.addLayer(sl);
    RenderedImageMap srcImageMap = this.rasterMapProducer.produceMap(map);
    RenderedImage srcImage = srcImageMap.getImage();

    // CoverageView band creation. We create a coverage view with 6 bands, using
    // the original bands from the multiband coverage.
    // Note that the first three bands are in reverse order of the bands of the source coverage.
    final InputCoverageBand ib0 = new InputCoverageBand("multiband", "2");
    final CoverageBand b0 = new CoverageBand(Collections.singletonList(ib0), "multiband@2", 0,
            CompositionType.BAND_SELECT);
    final InputCoverageBand ib1 = new InputCoverageBand("multiband", "1");
    final CoverageBand b1 = new CoverageBand(Collections.singletonList(ib1), "multiband@1", 1,
            CompositionType.BAND_SELECT);
    final InputCoverageBand ib2 = new InputCoverageBand("multiband", "0");
    final CoverageBand b2 = new CoverageBand(Collections.singletonList(ib2), "multiband@0", 2,
            CompositionType.BAND_SELECT);
    final InputCoverageBand ib3 = new InputCoverageBand("multiband", "0");
    final CoverageBand b3 = new CoverageBand(Collections.singletonList(ib3), "multiband@0", 0,
            CompositionType.BAND_SELECT);
    final InputCoverageBand ib4 = new InputCoverageBand("multiband", "1");
    final CoverageBand b4 = new CoverageBand(Collections.singletonList(ib4), "multiband@1", 1,
            CompositionType.BAND_SELECT);
    final InputCoverageBand ib5 = new InputCoverageBand("multiband", "2");
    final CoverageBand b5 = new CoverageBand(Collections.singletonList(ib5), "multiband@2", 2,
            CompositionType.BAND_SELECT);

    final List<CoverageBand> coverageBands = new ArrayList<CoverageBand>(1);
    coverageBands.add(b0);
    coverageBands.add(b1);
    coverageBands.add(b2);
    coverageBands.add(b3);
    coverageBands.add(b4);
    coverageBands.add(b5);

    CoverageView multiBandCoverageView = new CoverageView("multiband_select", coverageBands);
    CoverageStoreInfo storeInfo = catalog.getCoverageStoreByName("multiband");
    CatalogBuilder builder = new CatalogBuilder(catalog);

    // Reordered bands coverage
    CoverageInfo coverageInfo = multiBandCoverageView.createCoverageInfo("multiband_select", storeInfo, builder);
    coverageInfo.getParameters().put("USE_JAI_IMAGEREAD", "false");
    catalog.add(coverageInfo);
    final LayerInfo layerInfoView = builder.buildLayer(coverageInfo);
    catalog.add(layerInfoView);

    final Envelope env = ci.boundingBox();
    LOGGER.info("about to create map ctx for BasicPolygons with bounds " + env);

    RasterSymbolizer symbolizer = styleBuilder.createRasterSymbolizer();
    ChannelSelection cs = new ChannelSelectionImpl();
    SelectedChannelType red = new SelectedChannelTypeImpl();
    SelectedChannelType green = new SelectedChannelTypeImpl();
    SelectedChannelType blue = new SelectedChannelTypeImpl();

    // We want to create an image where the RGB channels are in reverse order
    // with respect to the band order of the input coverage view.
    // Note that channel names start with index "1".
    red.setChannelName("3");
    green.setChannelName("2");
    blue.setChannelName("1");
    cs.setRGBChannels(new SelectedChannelType[] { red, green, blue });
    symbolizer.setChannelSelection(cs);

    reader = (GridCoverage2DReader) coverageInfo.getGridCoverageReader(null, null);
    reader.getCoordinateReferenceSystem();
    Layer dl = new CachedGridReaderLayer(reader, styleBuilder.createStyle(symbolizer));
    map.removeLayer(sl);
    map.addLayer(dl);
    RenderedImageMap dstImageMap = this.rasterMapProducer.produceMap(map);
    RenderedImage destImage = dstImageMap.getImage();

    int dWidth = destImage.getWidth();
    int dHeight = destImage.getHeight();
    int[] destImageRowBand0 = new int[dWidth * dHeight];
    int[] destImageRowBand1 = new int[destImageRowBand0.length];
    int[] destImageRowBand2 = new int[destImageRowBand0.length];
    destImage.getData().getSamples(0, 0, dWidth, dHeight, 0, destImageRowBand0);
    destImage.getData().getSamples(0, 0, dWidth, dHeight, 1, destImageRowBand1);
    destImage.getData().getSamples(0, 0, dWidth, dHeight, 2, destImageRowBand2);

    int sWidth = srcImage.getWidth();
    int sHeight = srcImage.getHeight();
    int[] srcImageRowBand0 = new int[sWidth * sHeight];
    int[] srcImageRowBand2 = new int[srcImageRowBand0.length];
    srcImage.getData().getSamples(0, 0, sWidth, sHeight, 0, srcImageRowBand0);
    srcImage.getData().getSamples(0, 0, sWidth, sHeight, 2, srcImageRowBand2);

    // Source and result image first bands should be the same. We have reversed the order
    // of the first three bands of the source coverage and then we re-reversed the first
    // three bands using channel selection on the raster symbolizer used for rendering.
    Assert.assertTrue(Arrays.equals(destImageRowBand0, srcImageRowBand0));
    // Result band 0 should not be equal to source image band 2
    Assert.assertFalse(Arrays.equals(destImageRowBand0, srcImageRowBand2));

    srcImageMap.dispose();
    dstImageMap.dispose();
    map.dispose();
}
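For reference, a distilled sketch of the band-extraction pattern used in the assertions above; the helper name is illustrative and not part of the GeoServer source. getWidth() and getHeight() size the sample buffer for a full-band read via Raster.getSamples.

import java.awt.image.RenderedImage;

// Reads one full band of a RenderedImage into an int[] sized from getWidth()/getHeight().
static int[] readBand(RenderedImage image, int band) {
    int w = image.getWidth();
    int h = image.getHeight();
    int[] samples = new int[w * h];
    image.getData().getSamples(0, 0, w, h, band, samples);
    return samples;
}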
From source file:org.geoserver.wps.gs.GeorectifyCoverage.java
@DescribeResults({
        @DescribeResult(name = "result", description = "Georectified raster", type = GridCoverage2D.class),
        @DescribeResult(name = "path", description = "Pathname of the generated raster on the server", type = String.class) })
public Map<String, Object> execute(
        @DescribeParameter(name = "data", description = "Input raster") GridCoverage2D coverage,
        @DescribeParameter(name = "gcp", description = "List of Ground control points. Points are specified as [x,y] or [x,y,z].") String gcps,
        @DescribeParameter(name = "bbox", description = "Bounding box for output", min = 0) Envelope bbox,
        @DescribeParameter(name = "targetCRS", description = "Coordinate reference system to use for the output raster") CoordinateReferenceSystem crs,
        @DescribeParameter(name = "width", description = "Width of output raster in pixels", min = 0) Integer width,
        @DescribeParameter(name = "height", description = "Height of output raster in pixels", min = 0) Integer height,
        @DescribeParameter(name = "warpOrder", min = 0, description = "Order of the warping polynomial (1 to 3)") Integer warpOrder,
        @DescribeParameter(name = "transparent", min = 0, description = "Force output to have transparent background") Boolean transparent,
        @DescribeParameter(name = "store", min = 0, description = "Indicates whether to keep the output file after processing") Boolean store,
        @DescribeParameter(name = "outputPath", min = 0, description = "Pathname where the output file is stored") String outputPath)
        throws IOException {
    GeoTiffReader reader = null;
    List<File> removeFiles = new ArrayList<File>();
    String location = null;
    try {
        File tempFolder = config.getTempFolder();
        File loggingFolder = config.getLoggingFolder();

        // do we have to add the alpha channel?
        boolean forceTransparent = false;
        if (transparent == null) {
            transparent = true;
        }
        ColorModel cm = coverage.getRenderedImage().getColorModel();
        if (cm.getTransparency() == Transparency.OPAQUE && transparent) {
            forceTransparent = true;
        }

        //
        // STEP 1: Getting the dataset to be georectified
        //
        final Object fileSource = coverage.getProperty(GridCoverage2DReader.FILE_SOURCE_PROPERTY);
        if (fileSource != null && fileSource instanceof String) {
            location = (String) fileSource;
        }
        if (location == null) {
            RenderedImage image = coverage.getRenderedImage();
            if (forceTransparent) {
                ImageWorker iw = new ImageWorker(image);
                iw.forceComponentColorModel();
                final ImageLayout tempLayout = new ImageLayout(image);
                tempLayout.unsetValid(ImageLayout.COLOR_MODEL_MASK).unsetValid(ImageLayout.SAMPLE_MODEL_MASK);
                RenderedImage alpha = ConstantDescriptor.create(Float.valueOf(image.getWidth()),
                        Float.valueOf(image.getHeight()), new Byte[] { Byte.valueOf((byte) 255) },
                        new RenderingHints(JAI.KEY_IMAGE_LAYOUT, tempLayout));
                iw.addBand(alpha, false);
                image = iw.getRenderedImage();
                cm = image.getColorModel();
            }
            File storedImageFile = storeImage(image, tempFolder);
            location = storedImageFile.getAbsolutePath();
            removeFiles.add(storedImageFile);
        }

        //
        // STEP 2: Adding Ground Control Points
        //
        final int gcpNum[] = new int[1];
        final String gcp = parseGcps(gcps, gcpNum);
        File vrtFile = addGroundControlPoints(location, gcp, config.getGdalTranslateParameters());
        if (vrtFile == null || !vrtFile.exists() || !vrtFile.canRead()) {
            throw new IOException("Unable to get a valid file with attached Ground Control Points");
        }
        removeFiles.add(vrtFile);

        //
        // STEP 3: Warping
        //
        File warpedFile = warpFile(vrtFile, bbox, crs, width, height, warpOrder, tempFolder, loggingFolder,
                config.getExecutionTimeout(), config.getGdalWarpingParameters());
        if (warpedFile == null || !warpedFile.exists() || !warpedFile.canRead()) {
            throw new IOException("Unable to get a valid georectified file");
        }

        boolean expand = false;
        if (cm instanceof IndexColorModel) {
            expand = true;
        } else if (cm instanceof ComponentColorModel && cm.getNumComponents() == 1
                && cm.getComponentSize()[0] == 1) {
            expand = true;
        }
        if (expand) {
            removeFiles.add(warpedFile);
            warpedFile = expandRgba(warpedFile.getAbsolutePath());
        }

        // if we have the output path move the final file there
        if (Boolean.TRUE.equals(store) && outputPath != null) {
            File output = new File(outputPath);
            if (output.exists()) {
                if (!output.delete()) {
                    throw new WPSException("Output file " + outputPath + " exists but cannot be overwritten");
                }
            } else {
                File parent = output.getParentFile();
                if (!parent.exists()) {
                    if (!parent.mkdirs()) {
                        throw new WPSException("Output file parent directory " + parent.getAbsolutePath()
                                + " does not exist and cannot be created");
                    }
                }
            }
            if (!warpedFile.renameTo(output)) {
                throw new WPSException("Could not move " + warpedFile.getAbsolutePath() + " to " + outputPath
                        + ", it's likely a permission issue");
            }
            warpedFile = output;
        }

        // mark the output file for deletion at the end of request
        if (resourceManager != null && !Boolean.TRUE.equals(store)) {
            resourceManager.addResource(new WPSFileResource(warpedFile));
        }

        //
        // FINAL STEP: Returning the warped gridcoverage
        //
        reader = new GeoTiffReader(warpedFile);
        GridCoverage2D cov = addLocationProperty(reader.read(null), warpedFile);

        Map<String, Object> result = new HashMap<String, Object>();
        result.put("result", cov);
        result.put("path", warpedFile.getAbsolutePath());
        return result;
    } finally {
        if (reader != null) {
            try {
                reader.dispose();
            } catch (Throwable t) {
                // Does nothing
            }
        }
        for (File file : removeFiles) {
            deleteFile(file);
        }
    }
}
From source file:org.geotools.gce.imagecollection.ImageCollectionReaderTest.java
@Test
public void testReader() throws IllegalArgumentException, IOException, NoSuchAuthorityCodeException, CQLException {
    final File file = TestData.file(this, "sample");
    final String string = "PATH='folder1/world.tif'";
    Filter filter = CQL.toFilter(string);
    final ImageCollectionReader reader = new ImageCollectionReader(file);
    final ParameterValue<GridGeometry2D> gg = AbstractGridFormat.READ_GRIDGEOMETRY2D.createValue();
    final GeneralEnvelope envelope = new GeneralEnvelope(new Rectangle(1000, -800, 1000, 400));
    envelope.setCoordinateReferenceSystem(CartesianAuthorityFactory.GENERIC_2D);
    final Rectangle rasterArea = new Rectangle(0, 0, 500, 200);
    final GridEnvelope2D range = new GridEnvelope2D(rasterArea);
    gg.setValue(new GridGeometry2D(range, envelope));
    final ParameterValue<Filter> ff = ImageCollectionFormat.FILTER.createValue();
    ff.setValue(filter);
    final ParameterValue<double[]> background = ImageCollectionFormat.BACKGROUND_VALUES.createValue();
    background.setValue(new double[] { 0 });
    GeneralParameterValue[] params = new GeneralParameterValue[] { ff, gg, background };
    if (reader != null) {
        // reading the coverage
        GridCoverage2D coverage = (GridCoverage2D) reader.read(params);
        RenderedImage image = coverage.getRenderedImage();
        assertTrue(image.getWidth() == 500);
        assertTrue(image.getHeight() == 200);
    }
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/**
 * Load a specified raster as a portion of the granule described by this {@link GranuleDescriptor}.
 *
 * @param imageReadParameters the {@link ImageReadParam} to use for reading.
 * @param index the index to use for the {@link ImageReader}.
 * @param cropBBox the bbox to use for cropping.
 * @param mosaicWorldToGrid the cropping grid to world transform.
 * @param request the incoming request to satisfy.
 * @param hints {@link Hints} to be used for creating this raster.
 * @return a specified raster as a portion of the granule described by this {@link GranuleDescriptor}.
 * @throws IOException in case an error occurs.
 */
public GranuleLoadingResult loadRaster(final ImageReadParam imageReadParameters, final int index,
        final ReferencedEnvelope cropBBox, final MathTransform2D mosaicWorldToGrid,
        final RasterLayerRequest request, final Hints hints) throws IOException {

    if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
        final String name = Thread.currentThread().getName();
        LOGGER.finer("Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString());
    }
    ImageReadParam readParameters = null;
    int imageIndex;
    final boolean useFootprint = roiProvider != null
            && request.getFootprintBehavior() != FootprintBehavior.None;
    Geometry inclusionGeometry = useFootprint ? roiProvider.getFootprint() : null;
    final ReferencedEnvelope bbox = useFootprint
            ? new ReferencedEnvelope(granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()),
                    granuleBBOX.getCoordinateReferenceSystem())
            : granuleBBOX;
    boolean doFiltering = false;
    if (filterMe && useFootprint) {
        doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX);
    }

    // intersection of this tile bound with the current crop bbox
    final ReferencedEnvelope intersection = new ReferencedEnvelope(bbox.intersection(cropBBox),
            cropBBox.getCoordinateReferenceSystem());
    if (intersection.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }

    // check if the requested bbox intersects or overlaps the requested area
    if (useFootprint && inclusionGeometry != null && !JTS.toGeometry(cropBBox).intersects(inclusionGeometry)) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }

    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //

        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null)
            return null;

        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null)
                cachedReaderSPI = reader.getOriginatingProvider();
        } else
            reader = cachedReaderSPI.createReaderInstance();
        if (reader == null) {
            if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
                LOGGER.warning(new StringBuilder("Unable to get a reader for granuleDescriptor ")
                        .append(this.toString()).append(" with request ").append(request.toString())
                        .append(" Resulting in no granule loaded: Empty result").toString());
            }
            return null;
        }

        // set input
        customizeReaderInitialization(reader, hints);
        reader.setInput(inStream);

        // Checking for heterogeneous granules
        if (request.isHeterogeneousGranules()) {
            // create read parameters
            readParameters = new ImageReadParam();

            // override the overviews controller for the base layer
            imageIndex = ReadParamsController.setReadParams(
                    request.spatialRequestHelper.getRequestedResolution(), request.getOverviewPolicy(),
                    request.getDecimationPolicy(), readParameters, request.rasterManager, overviewsController);
        } else {
            imageIndex = index;
            readParameters = imageReadParameters;
        }

        // get selected level and base level dimensions
        final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader);

        // now create the crop grid to world which can be used to decide
        // which source area we need to crop in the selected level taking
        // into account the scale factors imposed by the selection of this
        // level together with the base level grid to world transformation
        AffineTransform2D cropWorldToGrid = new AffineTransform2D(selectedlevel.gridToWorldTransformCorner);
        cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse();

        // computing the crop source area which lives into the
        // selected level raster space, NOTICE that at the end we need to
        // take into account the fact that we might also decimate therefore
        // we cannot just use the crop grid to world but we need to correct
        // it.
        final Rectangle sourceArea = CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds();

        // gutter
        if (selectedlevel.baseToLevelTransform.isIdentity()) {
            sourceArea.grow(2, 2);
        }
        XRectangle2D.intersect(sourceArea, selectedlevel.rasterDimensions, sourceArea); // make sure roundings don't bother us

        // is it empty??
        if (sourceArea.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.fine("Got empty area for granuleDescriptor " + this.toString() + " with request "
                        + request.toString() + " Resulting in no granule loaded: Empty result");
            }
            return null;
        } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
            LOGGER.finer("Loading level " + imageIndex + " with source region: " + sourceArea
                    + " subsampling: " + readParameters.getSourceXSubsampling() + ","
                    + readParameters.getSourceYSubsampling() + " for granule:" + granuleUrl);
        }

        // Setting subsampling
        int newSubSamplingFactor = 0;
        final String pluginName = cachedReaderSPI.getPluginClassName();
        if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) {
            final int ssx = readParameters.getSourceXSubsampling();
            final int ssy = readParameters.getSourceYSubsampling();
            newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy);
            if (newSubSamplingFactor != 0) {
                if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) {
                    newSubSamplingFactor = maxDecimationFactor;
                }
                readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0);
            }
        }

        // set the source region
        readParameters.setSourceRegion(sourceArea);
        RenderedImage raster;
        try {
            // read
            raster = request.getReadType().read(readParameters, imageIndex, granuleUrl,
                    selectedlevel.rasterDimensions, reader, hints, false);
        } catch (Throwable e) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.log(java.util.logging.Level.FINE, "Unable to load raster for granuleDescriptor "
                        + this.toString() + " with request " + request.toString()
                        + " Resulting in no granule loaded: Empty result", e);
            }
            return null;
        }

        // use fixed source area
        sourceArea.setRect(readParameters.getSourceRegion());

        //
        // setting new coefficients to define a new affineTransformation
        // to be applied to the grid to world transformation
        // -----------------------------------------------------------------------------------
        //
        // With respect to the original envelope, the obtained planarImage
        // needs to be rescaled. The scaling factors are computed as the
        // ratio between the cropped source region sizes and the read
        // image sizes.
        //
        // place it in the mosaic using the coords created above
        double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth());
        double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight());
        final AffineTransform decimationScaleTranform = XAffineTransform.getScaleInstance(decimationScaleX,
                decimationScaleY);

        // keep into account translation to work into the selected level raster space
        final AffineTransform afterDecimationTranslateTranform = XAffineTransform
                .getTranslateInstance(sourceArea.x, sourceArea.y);

        // now we need to go back to the base level raster space
        final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform;

        // now create the overall transform
        final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
        finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER);
        if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(backToBaseLevelScaleTransform);
        if (!XAffineTransform.isIdentity(afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(afterDecimationTranslateTranform);
        if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(decimationScaleTranform);

        // adjust roi
        if (useFootprint) {
            ROIGeometry transformed;
            try {
                transformed = roiProvider.getTransformedROI(finalRaster2Model.createInverse());
                if (transformed.getAsGeometry().isEmpty()) {
                    // inset might have killed the geometry fully
                    return null;
                }
                PlanarImage pi = PlanarImage.wrapRenderedImage(raster);
                if (!transformed.intersects(pi.getBounds())) {
                    return null;
                }
                pi.setProperty("ROI", transformed);
                raster = pi;
            } catch (NoninvertibleTransformException e) {
                if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                    LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                            + " due to a problem when managing the ROI");
                return null;
            }
        }

        // keep into account translation factors to place this tile
        finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid);
        final Interpolation interpolation = request.getInterpolation();

        // paranoiac check to avoid that JAI freaks out when computing its internal layout on images that are too small
        Rectangle2D finalLayout = ImageUtilities.layoutHelper(raster, (float) finalRaster2Model.getScaleX(),
                (float) finalRaster2Model.getScaleY(), (float) finalRaster2Model.getTranslateX(),
                (float) finalRaster2Model.getTranslateY(), interpolation);
        if (finalLayout.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                        + " due to jai scale bug creating a null source area");
            return null;
        }

        // apply the affine transform conserving indexed color model
        final RenderingHints localHints = new RenderingHints(JAI.KEY_REPLACE_INDEX_COLOR_MODEL,
                interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE);
        if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) {
            return new GranuleLoadingResult(raster, null, granuleUrl, doFiltering, pamDataset);
        } else {
            //
            // In case we are asked to use certain tile dimensions we tile
            // also at this stage in case the read type is Direct since
            // buffered images come up untiled and this can affect the
            // performances of the subsequent affine operation.
            //
            final Dimension tileDimensions = request.getTileDimensions();
            if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) {
                final ImageLayout layout = new ImageLayout();
                layout.setTileHeight(tileDimensions.width).setTileWidth(tileDimensions.height);
                localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
            } else {
                if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) {
                    final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT);
                    if (layout != null && layout instanceof ImageLayout) {
                        localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone()));
                    }
                }
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) {
                final Object cache = hints.get(JAI.KEY_TILE_CACHE);
                if (cache != null && cache instanceof TileCache)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache));
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) {
                final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER);
                if (scheduler != null && scheduler instanceof TileScheduler)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler));
            }
            boolean addBorderExtender = true;
            if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) {
                final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER);
                if (extender != null && extender instanceof BorderExtender) {
                    localHints.add(new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender));
                    addBorderExtender = false;
                }
            }
            // BORDER extender
            if (addBorderExtender) {
                localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS);
            }

            ImageWorker iw = new ImageWorker(raster);
            iw.setRenderingHints(localHints);
            iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues());
            return new GranuleLoadingResult(iw.getRenderedImage(), null, granuleUrl, doFiltering, pamDataset);
        }
    } catch (IllegalStateException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(), e);
        }
        return null;
    } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(), e);
        }
        return null;
    } catch (TransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(), e);
        }
        return null;
    } finally {
        try {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) {
                inStream.close();
            }
        } finally {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) {
                reader.dispose();
            }
        }
    }
}
From source file:org.mrgeo.rasterops.GeoTiffExporter.java
public static void export(final RenderedImage image, final Bounds bounds, final OutputStream os,
        final boolean replaceNan, final String xmp, final Number nodata) throws IOException {
    OpImageRegistrar.registerMrGeoOps();

    final TIFFEncodeParam param = new TIFFEncodeParam();
    // The version of GDAL that Legion is using requires a tile size > 1
    param.setTileSize(image.getTileWidth(), image.getTileHeight());
    param.setWriteTiled(true);

    // if the image only has 1 pixel, the value of this pixel changes after compressing (especially
    // if this pixel is the nodata value, e.g. -9999 changes to -8192 when reading the image back).
    // So don't compress if the image has only 1 pixel.
    if (image.getWidth() > 1 && image.getHeight() > 1) {
        // Deflate lossless compression (also known as "Zip-in-TIFF")
        param.setCompression(TIFFEncodeParam.COMPRESSION_DEFLATE);
        param.setDeflateLevel(Deflater.BEST_COMPRESSION);
    }

    final GeoTIFFDirectory dir = new GeoTIFFDirectory();
    // GTModelTypeGeoKey : using geographic coordinate system.
    dir.addGeoKey(new XTIFFField(1024, XTIFFField.TIFF_SHORT, 1, new char[] { 2 }));
    // GTRasterTypeGeoKey : pixel is point
    dir.addGeoKey(new XTIFFField(1025, XTIFFField.TIFF_SHORT, 1, new char[] { 1 }));
    // GeographicTypeGeoKey : 4326 WGS84
    dir.addGeoKey(new XTIFFField(2048, XTIFFField.TIFF_SHORT, 1, new char[] { 4326 }));
    dir.addGeoKey(new XTIFFField(2049, XTIFFField.TIFF_ASCII, 7, new String[] { "WGS 84" }));
    // GeogAngularUnitsGeoKey : Angular Degree
    dir.addGeoKey(new XTIFFField(2054, XTIFFField.TIFF_SHORT, 1, new char[] { 9102 }));
    if (xmp != null) {
        final byte[] b = xmp.getBytes("UTF8");
        dir.addField(new XTIFFField(700, XTIFFField.TIFF_BYTE, b.length, b));
    }
    dir.getFields();

    final double[] tiePoints = new double[6];
    tiePoints[0] = 0.0;
    tiePoints[1] = 0.0;
    tiePoints[2] = 0.0;
    tiePoints[3] = bounds.getMinX();
    tiePoints[4] = bounds.getMaxY();
    tiePoints[5] = 0.0;
    dir.setTiepoints(tiePoints);

    final double[] pixelScale = new double[3];
    pixelScale[0] = bounds.getWidth() / image.getWidth();
    pixelScale[1] = bounds.getHeight() / image.getHeight();
    pixelScale[2] = 0;
    dir.setPixelScale(pixelScale);

    final Vector<TIFFField> fields = toTiffField(dir.getFields());

    RenderedImage output = image;
    final String[] nullValues = new String[1];
    switch (image.getSampleModel().getDataType()) {
    case DataBuffer.TYPE_DOUBLE:
        nullValues[0] = Double.toString(nodata.doubleValue());
        if (replaceNan) {
            output = ReplaceNanDescriptor.create(image, nodata.doubleValue());
        }
        // Tiff exporter doesn't handle doubles. Yuck!
        output = ConvertToFloatDescriptor.create(output);
        // Double.NaN (our default nodata on ingest) should not be written out as nodata on export
        // (i.e. GeoTiffs imported without NODATA metadata field should be exported as such)
        if (!Double.isNaN(nodata.doubleValue())) {
            fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        }
        break;
    case DataBuffer.TYPE_FLOAT:
        nullValues[0] = Double.toString(nodata.floatValue());
        if (replaceNan) {
            output = ReplaceNanDescriptor.create(image, nodata.floatValue());
        }
        // Float.NaN (our default nodata on ingest) should not be written out as nodata on export
        // (i.e. GeoTiffs imported without NODATA metadata field should be exported as such)
        if (!Float.isNaN(nodata.floatValue())) {
            fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        }
        break;
    case DataBuffer.TYPE_INT:
    case DataBuffer.TYPE_USHORT:
    case DataBuffer.TYPE_SHORT:
    case DataBuffer.TYPE_BYTE:
        nullValues[0] = Integer.toString(nodata.intValue());
        fields.add(new TIFFField(NULL_TAG, XTIFFField.TIFF_ASCII, 1, nullValues));
        break;
    }
    param.setExtraFields(fields.toArray(new TIFFField[0]));

    EncodeDescriptor.create(output, os, "TIFF", param, null);
}
From source file:org.pentaho.reporting.engine.classic.core.modules.output.pageable.pdf.internal.PdfGraphics2D.java
/**
 * @noinspection UseOfObsoleteCollectionType
 * @see Graphics2D#drawRenderedImage(RenderedImage, AffineTransform)
 */
@Override
public void drawRenderedImage(final RenderedImage img, final AffineTransform xform) {
    final BufferedImage image;
    if (img instanceof BufferedImage) {
        image = (BufferedImage) img;
    } else {
        final ColorModel cm = img.getColorModel();
        final int width = img.getWidth();
        final int height = img.getHeight();
        final WritableRaster raster = cm.createCompatibleWritableRaster(width, height);
        final boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
        final Hashtable properties = new Hashtable();
        final String[] keys = img.getPropertyNames();
        if (keys != null) {
            final int keyCount = keys.length;
            for (int i = 0; i < keyCount; i++) {
                properties.put(keys[i], img.getProperty(keys[i]));
            }
        }
        final BufferedImage result = new BufferedImage(cm, raster, isAlphaPremultiplied, properties);
        img.copyData(raster);
        image = result;
    }
    drawImage(image, xform, null);
}
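The conversion above is a common pattern; here is a hedged, standalone sketch of it (the method name and use of generics are illustrative, not part of the Pentaho source). getWidth() and getHeight() size the compatible raster that receives the copied pixel data.

import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.RenderedImage;
import java.awt.image.WritableRaster;
import java.util.Hashtable;

// Converts any RenderedImage into a BufferedImage by copying its data into a
// raster sized from getWidth()/getHeight().
static BufferedImage toBufferedImage(RenderedImage img) {
    if (img instanceof BufferedImage) {
        return (BufferedImage) img;
    }
    ColorModel cm = img.getColorModel();
    WritableRaster raster = cm.createCompatibleWritableRaster(img.getWidth(), img.getHeight());
    Hashtable<String, Object> properties = new Hashtable<>();
    String[] keys = img.getPropertyNames();
    if (keys != null) {
        for (String key : keys) {
            properties.put(key, img.getProperty(key));
        }
    }
    img.copyData(raster);
    return new BufferedImage(cm, raster, cm.isAlphaPremultiplied(), properties);
}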
From source file:org.photovault.dcraw.AHDInterpolateOp.java
static private ImageLayout layoutHelper(RenderedImage src, int downSample) {
    int width = src.getWidth() / downSample;
    int height = src.getHeight() / downSample;
    PixelInterleavedSampleModel sampleModel = new PixelInterleavedSampleModel(DataBuffer.TYPE_USHORT, width,
            height, 3, width * 3, new int[] { 0, 1, 2 });
    ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB);
    ColorModel c = new ComponentColorModel(cs, false, false, ColorModel.OPAQUE, DataBuffer.TYPE_USHORT);
    ImageLayout il = new ImageLayout(0, 0, 256, 256, sampleModel, c);
    il.setWidth(width);
    il.setHeight(height);
    return il;
}
From source file:org.photovault.image.ImageIOImage.java
/**
 * Load the image and/or metadata.
 * @param loadImage Load the image pixel data if <CODE>true</CODE>
 * @param loadMetadata Load image metadata if <CODE>true</CODE>.
 * @param minWidth Minimum width of the loaded image
 * @param minHeight Minimum height of the loaded image
 * @param isLowQualityAllowed If <code>true</code>, use larger subsampling to speed up loading.
 */
private void load(boolean loadImage, boolean loadMetadata, int minWidth, int minHeight,
        boolean isLowQualityAllowed) {
    if (f != null && f.canRead()) {
        ImageReader reader = getImageReader(f);
        if (reader != null) {
            log.debug("Creating stream");
            ImageInputStream iis = null;
            try {
                iis = ImageIO.createImageInputStream(f);
                reader.setInput(iis, false, false);
                width = reader.getWidth(0);
                height = reader.getHeight(0);
                if (loadImage) {
                    RenderedImage ri = null;
                    if (isLowQualityAllowed) {
                        ri = readExifThumbnail(f);
                        if (ri == null || !isOkForThumbCreation(ri.getWidth(), ri.getHeight(), minWidth,
                                minHeight, reader.getAspectRatio(0), 0.01)) {
                            /*
                             * EXIF thumbnail either did not exist or was unusable, try to read
                             * subsampled version of original
                             */
                            ri = readSubsampled(reader, minWidth, minHeight);
                        }
                    } else {
                        /*
                         * High quality image is requested. If the image is very large, use
                         * subsampling anyway to decrease memory consumption & speed up interactive
                         * operations. Anyway, most often user just views image at screen resolution
                         */
                        ImageReadParam param = reader.getDefaultReadParam();
                        if (minWidth * 2 < width && minHeight * 2 < height) {
                            param.setSourceSubsampling(2, 2, 0, 0);
                        }
                        ri = reader.read(0, param);
                    }
                    if (ri != null) {
                        /*
                         * TODO: JAI seems to have problems in doing convolutions for large image
                         * tiles. Split image to reasonably sized tiles as a workaround for this.
                         */
                        ri = new TiledImage(ri, 256, 256);
                        image = new RenderedImageAdapter(ri);
                        originalSampleModel = image.getSampleModel();
                        originalColorModel = image.getColorModel();
                        final float[] DEFAULT_KERNEL_1D = { 0.25f, 0.5f, 0.25f };
                        ParameterBlock pb = new ParameterBlock();
                        KernelJAI kernel = new KernelJAI(DEFAULT_KERNEL_1D.length, DEFAULT_KERNEL_1D.length,
                                DEFAULT_KERNEL_1D.length / 2, DEFAULT_KERNEL_1D.length / 2, DEFAULT_KERNEL_1D,
                                DEFAULT_KERNEL_1D);
                        pb.add(kernel);
                        BorderExtender extender = BorderExtender.createInstance(BorderExtender.BORDER_COPY);
                        RenderingHints hints = JAI.getDefaultInstance().getRenderingHints();
                        if (hints == null) {
                            hints = new RenderingHints(JAI.KEY_BORDER_EXTENDER, extender);
                        } else {
                            hints.put(JAI.KEY_BORDER_EXTENDER, extender);
                        }
                        RenderedOp filter = new RenderedOp("convolve", pb, hints);
                        // javax.media.jai.operator.BoxFilterDescriptor.create( null, new Integer(2), new Integer(2), new Integer(0), new Integer(0), null );

                        // Add the subsampling operation.
                        pb = new ParameterBlock();
                        pb.addSource(filter);
                        pb.add(new Float(0.5F)).add(new Float(0.5F));
                        pb.add(new Float(0.0F)).add(new Float(0.0F));
                        pb.add(Interpolation.getInstance(Interpolation.INTERP_NEAREST));
                        RenderedOp downSampler = new RenderedOp("scale", pb, null);
                        renderableImage = RenderableDescriptor.createRenderable(image, downSampler, null, null,
                                null, null, null);
                    } else {
                        image = null;
                        renderableImage = null;
                    }
                    imageIsLowQuality = isLowQualityAllowed;
                }
                if (loadMetadata) {
                    readImageMetadata(reader);
                }
            } catch (Exception ex) {
                log.warn(ex.getMessage());
                ex.printStackTrace();
                return;
            }
        }
    }
}
From source file:org.photovault.imginfo.CreateCopyImageCommand.java
/**
 * Execute the command.
 * @throws CommandException If no image suitable for using as a source can be found
 * or if saving the created image does not succeed.
 */
public void execute() throws CommandException {
    // Find the image used as source for the new instance
    PhotoInfoDAO photoDAO = daoFactory.getPhotoInfoDAO();
    VolumeDAO volDAO = daoFactory.getVolumeDAO();
    photo = photoDAO.findByUUID(photoUuid);
    Set<ImageOperations> operationsNotApplied = EnumSet.copyOf(operationsToApply);
    ImageDescriptorBase srcImageDesc = photo.getOriginal();

    // Find a suitable image for using as source if the original has not
    // yet been loaded.
    if (img == null) {
        ImageFile srcImageFile = srcImageDesc.getFile();
        File src = srcImageFile.findAvailableCopy();
        if (src == null && !createFromOriginal) {
            srcImageDesc = photo.getPreferredImage(EnumSet.noneOf(ImageOperations.class), operationsToApply,
                    maxWidth, maxHeight, Integer.MAX_VALUE, Integer.MAX_VALUE);
            if (srcImageDesc != null) {
                srcImageFile = srcImageDesc.getFile();
                src = srcImageFile.findAvailableCopy();
                operationsNotApplied.removeAll(((CopyImageDescriptor) srcImageDesc).getAppliedOperations());
            }
        }
        if (src == null) {
            throw new CommandException("No suitable image file found");
        }

        // Create the image for the instance
        PhotovaultImageFactory imgFactory = new PhotovaultImageFactory();
        try {
            img = imgFactory.create(src, false, false);
        } catch (PhotovaultException ex) {
            throw new CommandException(ex.getMessage());
        }
    }

    if (operationsNotApplied.contains(ImageOperations.CROP)) {
        img.setCropBounds(photo.getCropBounds());
        img.setRotation(photo.getPrefRotation());
    }
    if (operationsNotApplied.contains(ImageOperations.COLOR_MAP)) {
        ChannelMapOperation channelMap = photo.getColorChannelMapping();
        if (channelMap != null) {
            img.setColorAdjustment(channelMap);
        }
    }
    if (operationsNotApplied.contains(ImageOperations.COLOR_MAP) && img instanceof RawImage) {
        RawImage ri = (RawImage) img;
        ri.setRawSettings(photo.getRawSettings());
    }
    RenderedImage renderedDst = img.getRenderedImage(maxWidth, maxHeight, lowQualityAllowed);

    // Determine correct file name for the image & save it
    if (volumeUuid != null) {
        VolumeBase vol = volDAO.findById(volumeUuid, false);
        dstFile = vol.getInstanceName(photo, "jpg");
    }
    if (dstFile == null) {
        throw new CommandException("Either destination file or volume must be specified");
    }
    ImageFileDAO ifDAO = daoFactory.getImageFileDAO();
    ImageFile dstImageFile = new ImageFile();
    ifDAO.makePersistent(dstImageFile);
    CopyImageDescriptor dstImage = new CopyImageDescriptor(dstImageFile, "image#0", photo.getOriginal());
    ImageDescriptorDAO idDAO = daoFactory.getImageDescriptorDAO();
    idDAO.makePersistent(dstImage);
    if (operationsToApply.contains(ImageOperations.COLOR_MAP)) {
        dstImage.setColorChannelMapping(photo.getColorChannelMapping());
    }
    if (operationsToApply.contains(ImageOperations.CROP)) {
        dstImage.setCropArea(photo.getCropBounds());
        dstImage.setRotation(photo.getPrefRotation());
    }
    if (operationsToApply.contains(ImageOperations.RAW_CONVERSION)) {
        dstImage.setRawSettings(photo.getRawSettings());
    }
    dstImage.setWidth(renderedDst.getWidth());
    dstImage.setHeight(renderedDst.getHeight());
    ((CopyImageDescriptor) dstImageFile.getImages().get("image#0")).setOriginal(photo.getOriginal());
    byte[] xpmData = createXMPMetadata(dstImageFile);
    try {
        saveImage(dstFile, renderedDst, xpmData);
    } catch (PhotovaultException ex) {
        throw new CommandException(ex.getMessage());
    } finally {
        img.dispose();
    }

    /* Check if the resulting image file is already known & create a new one if not */
    byte[] hash = ImageFile.calcHash(dstFile);
    dstImageFile.setHash(hash);

    /* Store location of created file in database */
    if (volume != null) {
        dstImageFile.addLocation(volume.getFileLocation(dstFile));
    }

    /*
     * Ensure that the photo is initialized in memory as it is used as a detached object
     * after closing our persistence context.
     */
    if (!photo.hasThumbnail()) {
        log.error("No valid thumbnail available!!!");
    }
}
From source file:org.sensorML.process.WMS_Process.java
/**
 * Executes the process algorithm on the inputs and sets the output data.
 */
public void execute() throws ProcessException {
    RenderedImage renderedImage = null;
    try {
        initRequest();
        //switchBytes();
        if (log.isDebugEnabled())
            log.debug(owsUtils.buildURLQuery(request));
        URLConnection urlCon = owsUtils.sendGetRequest(request);

        // Check on mimeType catches all three types (blank, inimage, xml)
        // of OGC service exceptions
        String mimeType = urlCon.getContentType();
        if (mimeType.contains("xml") || mimeType.startsWith("application")) {
            OGCExceptionReader reader = new OGCExceptionReader();
            reader.parseException(urlCon.getInputStream());
        } else {
            // use JAI MemorySeekableStream for better performance
            dataStream = new MemoryCacheSeekableStream(urlCon.getInputStream());

            // Create the ParameterBlock and add the SeekableStream to it.
            ParameterBlock pb = new ParameterBlock();
            pb.add(dataStream);

            // decode image using JAI
            RenderedOp rop = JAI.create("stream", pb);
            if (rop != null) {
                renderedImage = rop.createInstance();

                // put data buffer in output datablock
                byte[] data = ((DataBufferByte) renderedImage.getData().getDataBuffer()).getData();
                ((DataBlockByte) outputImage.getData()).setUnderlyingObject(data);
            }
        }

        // adjust width and height of the output
        int width = 0;
        int height = 0;
        if (renderedImage != null) {
            width = renderedImage.getWidth();
            height = renderedImage.getHeight();
        }
        outputWidth.getData().setIntValue(width);
        outputHeight.getData().setIntValue(height);
        output.combineDataBlocks();
    } catch (Exception e) {
        throw new ProcessException("Error while requesting data from WMS server: " + request.getGetServer(), e);
    } finally {
        endRequest();
    }
}