Usage examples for javax.imageio.stream.ImageInputStream.mark()
void mark();
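mark() records the current stream position; a later reset() returns to the most recent unmatched mark. Unlike InputStream.mark(int), there is no read-limit argument, every ImageInputStream supports marking, and marks may be nested like a stack; reset() fails with an IOException only if the marked position has since been discarded via flushBefore(). A minimal, self-contained sketch of those semantics (the file name is just a placeholder, not one of the inputs used in the examples below):

import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.imageio.stream.ImageInputStream;

public class MarkResetDemo {
    public static void main(String[] args) throws IOException {
        try (ImageInputStream iis = ImageIO.createImageInputStream(new File("sample.img"))) {
            iis.mark();                                  // outer mark at position 0
            int firstByte = iis.read();                  // consume one byte
            iis.mark();                                  // inner mark at position 1 (marks nest)
            iis.skipBytes(16);
            iis.reset();                                 // back to the inner mark
            System.out.println(iis.getStreamPosition()); // 1 for a non-empty file
            iis.reset();                                 // back to the outer mark
            System.out.println(iis.getStreamPosition()); // 0
            System.out.println("first byte: " + firstByte);
        }
    }
}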
From source file:ch5ImageReaderSpi.java
/**
 * This method gets called when an application wants to see if the input
 * image's format can be decoded by this ImageReader. In this case, we'll
 * simply check the first byte of data to see if it's a 5, which is the
 * format type's magic number.
 */
public boolean canDecodeInput(Object input) {
    boolean reply = false;
    ImageInputStream iis = (ImageInputStream) input;
    iis.mark(); // mark where we are in the ImageInputStream
    try {
        String magicNumber = iis.readLine().trim();
        iis.reset(); // reset the stream back to the marked location
        if (magicNumber.equals("5"))
            reply = true;
    } catch (IOException exception) {
        // ignored: an unreadable stream simply means "cannot decode"
    }
    return reply;
}
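Note that the example above resets inside the try block, so a failed read leaves the stream wherever the exception occurred, while canDecodeInput() is generally expected to hand the stream back at its original position. A variant that performs the reset in a finally block; startsWith is a hypothetical helper, not part of the chapter's code:

import java.io.IOException;
import java.util.Arrays;
import javax.imageio.stream.ImageInputStream;

final class MagicNumberProbe {
    /** Hypothetical helper: true if the stream starts with the given magic bytes. */
    static boolean startsWith(ImageInputStream iis, byte[] magic) throws IOException {
        iis.mark();                   // remember the current position
        try {
            byte[] header = new byte[magic.length];
            iis.readFully(header);    // may throw EOFException on a short stream
            return Arrays.equals(header, magic);
        } finally {
            iis.reset();              // restore the position even when the read fails
        }
    }
}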
From source file:org.apache.xmlgraphics.image.loader.impl.imageio.ImageLoaderImageIO.java
/** {@inheritDoc} */ public Image loadImage(ImageInfo info, Map hints, ImageSessionContext session) throws ImageException, IOException { RenderedImage imageData = null; IIOException firstException = null; IIOMetadata iiometa = (IIOMetadata) info.getCustomObjects().get(ImageIOUtil.IMAGEIO_METADATA); boolean ignoreMetadata = (iiometa != null); boolean providerIgnoresICC = false; Source src = session.needSource(info.getOriginalURI()); ImageInputStream imgStream = ImageUtil.needImageInputStream(src); try {/*w ww. j av a 2 s . c o m*/ Iterator iter = ImageIO.getImageReaders(imgStream); while (iter.hasNext()) { ImageReader reader = (ImageReader) iter.next(); try { imgStream.mark(); ImageReadParam param = reader.getDefaultReadParam(); reader.setInput(imgStream, false, ignoreMetadata); final int pageIndex = ImageUtil.needPageIndexFromURI(info.getOriginalURI()); try { if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) { imageData = reader.read(pageIndex, param); } else { imageData = reader.read(pageIndex, param); //imageData = reader.readAsRenderedImage(pageIndex, param); //TODO Reenable the above when proper listeners are implemented //to react to late pixel population (so the stream can be closed //properly). } if (iiometa == null) { iiometa = reader.getImageMetadata(pageIndex); } providerIgnoresICC = checkProviderIgnoresICC(reader.getOriginatingProvider()); break; //Quit early, we have the image } catch (IndexOutOfBoundsException indexe) { throw new ImageException("Page does not exist. Invalid image index: " + pageIndex); } catch (IllegalArgumentException iae) { //Some codecs like com.sun.imageio.plugins.wbmp.WBMPImageReader throw //IllegalArgumentExceptions when they have trouble parsing the image. throw new ImageException("Error loading image using ImageIO codec", iae); } catch (IIOException iioe) { if (firstException == null) { firstException = iioe; } else { log.debug("non-first error loading image: " + iioe.getMessage()); } } try { //Try fallback for CMYK images BufferedImage bi = getFallbackBufferedImage(reader, pageIndex, param); imageData = bi; firstException = null; //Clear exception after successful fallback attempt break; } catch (IIOException iioe) { //ignore } imgStream.reset(); } finally { reader.dispose(); } } } finally { ImageUtil.closeQuietly(src); //TODO Some codecs may do late reading. } if (firstException != null) { throw new ImageException("Error while loading image: " + firstException.getMessage(), firstException); } if (imageData == null) { throw new ImageException("No ImageIO ImageReader found ."); } ColorModel cm = imageData.getColorModel(); Color transparentColor = null; if (cm instanceof IndexColorModel) { //transparent color will be extracted later from the image } else { if (providerIgnoresICC && cm instanceof ComponentColorModel) { // Apply ICC Profile to Image by creating a new image with a new // color model. 
ICC_Profile iccProf = tryToExctractICCProfile(iiometa); if (iccProf != null) { ColorModel cm2 = new ComponentColorModel(new ICC_ColorSpace(iccProf), cm.hasAlpha(), cm.isAlphaPremultiplied(), cm.getTransparency(), cm.getTransferType()); WritableRaster wr = Raster.createWritableRaster(imageData.getSampleModel(), null); imageData.copyData(wr); BufferedImage bi = new BufferedImage(cm2, wr, cm2.isAlphaPremultiplied(), null); imageData = bi; cm = cm2; } } // ImageIOUtil.dumpMetadataToSystemOut(iiometa); // Retrieve the transparent color from the metadata if (iiometa != null && iiometa.isStandardMetadataFormatSupported()) { Element metanode = (Element) iiometa.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName); Element dim = ImageIOUtil.getChild(metanode, "Transparency"); if (dim != null) { Element child; child = ImageIOUtil.getChild(dim, "TransparentColor"); if (child != null) { String value = child.getAttribute("value"); if (value == null || value.length() == 0) { //ignore } else if (cm.getNumColorComponents() == 1) { int gray = Integer.parseInt(value); transparentColor = new Color(gray, gray, gray); } else { StringTokenizer st = new StringTokenizer(value); transparentColor = new Color(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken())); } } } } } if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) { return new ImageBuffered(info, (BufferedImage) imageData, transparentColor); } else { return new ImageRendered(info, imageData, transparentColor); } }
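ImageLoaderImageIO marks the stream before handing it to each candidate ImageReader and resets it before trying the next one, so every reader sees the image data from the same position. A stripped-down sketch of that loop using only the plain ImageIO registry; the FOP-specific fallbacks (CMYK handling, metadata and ICC extraction, non-IOException codec failures) are omitted and readWithFallback is a hypothetical helper:

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

final class ReaderFallback {
    /** Tries each registered reader in turn until one decodes the first image. */
    static BufferedImage readWithFallback(ImageInputStream iis) throws IOException {
        Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
        while (readers.hasNext()) {
            ImageReader reader = readers.next();
            iis.mark();                             // remember where the image data starts
            try {
                reader.setInput(iis, false, true);  // seekForwardOnly=false, ignoreMetadata=true
                return reader.read(0);
            } catch (IOException decodeFailure) {
                iis.reset();                        // rewind so the next reader sees the same bytes
            } finally {
                reader.dispose();
            }
        }
        return null;                                // no registered reader could decode the stream
    }
}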
From source file:org.apache.xmlgraphics.image.loader.impl.ImageLoaderRawCCITTFax.java
/** {@inheritDoc} */
public Image loadImage(ImageInfo info, Map hints, ImageSessionContext session)
        throws ImageException, IOException {
    if (!MimeConstants.MIME_TIFF.equals(info.getMimeType())) {
        throw new IllegalArgumentException(
                "ImageInfo must be from a image with MIME type: " + MimeConstants.MIME_TIFF);
    }
    int fillOrder = 1;
    int compression = TIFFImage.COMP_NONE;
    long stripOffset;
    long stripLength;
    TIFFDirectory dir;

    Source src = session.needSource(info.getOriginalURI());
    ImageInputStream in = ImageUtil.needImageInputStream(src);
    in.mark();
    try {
        SeekableStream seekable = new SeekableStreamAdapter(in);
        dir = new TIFFDirectory(seekable, 0);
        TIFFField fld;
        fld = dir.getField(TIFFImageDecoder.TIFF_COMPRESSION);
        if (fld != null) {
            compression = fld.getAsInt(0);
            switch (compression) {
            case TIFFImage.COMP_FAX_G3_1D:
            case TIFFImage.COMP_FAX_G3_2D:
            case TIFFImage.COMP_FAX_G4_2D:
                break;
            default:
                log.debug("Unsupported compression " + compression);
                throw new ImageException("ImageLoader doesn't support TIFF compression: " + compression);
            }
        }

        //Read information used for raw embedding
        fld = dir.getField(TIFFImageDecoder.TIFF_FILL_ORDER);
        if (fld != null) {
            fillOrder = fld.getAsInt(0);
        }

        int stripCount;
        fld = dir.getField(TIFFImageDecoder.TIFF_ROWS_PER_STRIP);
        if (fld == null) {
            stripCount = 1;
        } else {
            stripCount = (int) (info.getSize().getHeightPx() / fld.getAsLong(0));
        }
        if (stripCount > 1) {
            log.debug("More than one strip found in TIFF image.");
            throw new ImageException("ImageLoader doesn't support multiple strips");
        }

        stripOffset = dir.getField(TIFFImageDecoder.TIFF_STRIP_OFFSETS).getAsLong(0);
        stripLength = dir.getField(TIFFImageDecoder.TIFF_STRIP_BYTE_COUNTS).getAsLong(0);
    } finally {
        in.reset();
    }

    in.seek(stripOffset);
    InputStream subin = new SubInputStream(ImageUtil.needInputStream(src), stripLength, true);
    if (fillOrder == 2) {
        //Decorate to flip bit order
        subin = new FillOrderChangeInputStream(subin);
    }

    ImageRawCCITTFax rawImage = new ImageRawCCITTFax(info, subin, compression);

    //Strip stream from source as we pass it on internally
    ImageUtil.removeStreams(src);
    return rawImage;
}
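The CCITT loader parses the TIFF directory through a SeekableStream adapter while the stream is marked, resets, and only then seeks to the strip it found, so the header inspection never disturbs what the raw-embedding code reads. The sketch below keeps just that mark/parse/reset/seek skeleton; the 4-byte offset at the start of the stream is an invented layout for illustration, not real TIFF structure, and positionAtPayload is a hypothetical helper:

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

final class PayloadLocator {
    /** Reads a (pretend) offset field, rewinds, then jumps straight to the payload. */
    static void positionAtPayload(ImageInputStream in) throws IOException {
        in.mark();                                 // remember the current position
        long payloadOffset;
        try {
            payloadOffset = in.readUnsignedInt();  // pretend the header starts with an offset
        } finally {
            in.reset();                            // undo the header read even if it fails
        }
        in.seek(payloadOffset);                    // seek() positions explicitly, independent of marks
    }
}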
From source file:org.apache.xmlgraphics.image.loader.impl.ImageLoaderRawJPEG.java
/** {@inheritDoc} */ public Image loadImage(ImageInfo info, Map hints, ImageSessionContext session) throws ImageException, IOException { if (!MimeConstants.MIME_JPEG.equals(info.getMimeType())) { throw new IllegalArgumentException( "ImageInfo must be from a image with MIME type: " + MimeConstants.MIME_JPEG); }//from www .j a va 2 s . co m ColorSpace colorSpace = null; boolean appeFound = false; int sofType = 0; ByteArrayOutputStream iccStream = null; Source src = session.needSource(info.getOriginalURI()); ImageInputStream in = ImageUtil.needImageInputStream(src); JPEGFile jpeg = new JPEGFile(in); in.mark(); try { outer: while (true) { int reclen; int segID = jpeg.readMarkerSegment(); if (log.isTraceEnabled()) { log.trace("Seg Marker: " + Integer.toHexString(segID)); } switch (segID) { case EOI: log.trace("EOI found. Stopping."); break outer; case SOS: log.trace("SOS found. Stopping early."); //TODO Not sure if this is safe break outer; case SOI: case NULL: break; case SOF0: //baseline case SOF1: //extended sequential DCT case SOF2: //progressive (since PDF 1.3) case SOFA: //progressive (since PDF 1.3) sofType = segID; if (log.isTraceEnabled()) { log.trace("SOF: " + Integer.toHexString(sofType)); } in.mark(); try { reclen = jpeg.readSegmentLength(); in.skipBytes(1); //data precision in.skipBytes(2); //height in.skipBytes(2); //width int numComponents = in.readUnsignedByte(); if (numComponents == 1) { colorSpace = ColorSpace.getInstance(ColorSpace.CS_GRAY); } else if (numComponents == 3) { colorSpace = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB); } else if (numComponents == 4) { colorSpace = DeviceCMYKColorSpace.getInstance(); } else { throw new ImageException("Unsupported ColorSpace for image " + info + ". The number of components supported are 1, 3 and 4."); } } finally { in.reset(); } in.skipBytes(reclen); break; case APP2: //ICC (see ICC1V42.pdf) in.mark(); try { reclen = jpeg.readSegmentLength(); // Check for ICC profile byte[] iccString = new byte[11]; in.readFully(iccString); in.skipBytes(1); //string terminator (null byte) if ("ICC_PROFILE".equals(new String(iccString, "US-ASCII"))) { in.skipBytes(2); //chunk sequence number and total number of chunks int payloadSize = reclen - 2 - 12 - 2; if (ignoreColorProfile(hints)) { log.debug("Ignoring ICC profile data in JPEG"); in.skipBytes(payloadSize); } else { byte[] buf = new byte[payloadSize]; in.readFully(buf); if (iccStream == null) { if (log.isDebugEnabled()) { log.debug("JPEG has an ICC profile"); DataInputStream din = new DataInputStream(new ByteArrayInputStream(buf)); log.debug("Declared ICC profile size: " + din.readInt()); } //ICC profiles can be split into several chunks //so collect in a byte array output stream iccStream = new ByteArrayOutputStream(); } iccStream.write(buf); } } } finally { in.reset(); } in.skipBytes(reclen); break; case APPE: //Adobe-specific (see 5116.DCT_Filter.pdf) in.mark(); try { reclen = jpeg.readSegmentLength(); // Check for Adobe header byte[] adobeHeader = new byte[5]; in.readFully(adobeHeader); if ("Adobe".equals(new String(adobeHeader, "US-ASCII"))) { // The reason for reading the APPE marker is that Adobe Photoshop // generates CMYK JPEGs with inverted values. The correct thing // to do would be to interpret the values in the marker, but for now // only assume that if APPE marker is present and colorspace is CMYK, // the image is inverted. 
appeFound = true; } } finally { in.reset(); } in.skipBytes(reclen); break; default: jpeg.skipCurrentMarkerSegment(); } } } finally { in.reset(); } ICC_Profile iccProfile = buildICCProfile(info, colorSpace, iccStream); if (iccProfile == null && colorSpace == null) { throw new ImageException("ColorSpace could not be identified for JPEG image " + info); } boolean invertImage = false; if (appeFound && colorSpace.getType() == ColorSpace.TYPE_CMYK) { if (log.isDebugEnabled()) { log.debug("JPEG has an Adobe APPE marker. Note: CMYK Image will be inverted. (" + info.getOriginalURI() + ")"); } invertImage = true; } ImageRawJPEG rawImage = new ImageRawJPEG(info, ImageUtil.needInputStream(src), sofType, colorSpace, iccProfile, invertImage); return rawImage; }
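ImageLoaderRawJPEG nests its marks: an outer mark wraps the whole marker scan, and an inner mark brackets each segment so that, after peeking at the segment length, the code can reset and skip the whole segment in one go (a JPEG length field counts its own two bytes). A simplified sketch of that nested pattern; peekSegments is a hypothetical helper and real marker handling (SOI, EOI, SOS, stand-alone markers) is left out:

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

final class SegmentScanner {
    /** Walks up to maxSegments length-prefixed segments, leaving the stream untouched. */
    static void peekSegments(ImageInputStream in, int maxSegments) throws IOException {
        in.mark();                                    // outer mark: undo the whole scan at the end
        try {
            for (int i = 0; i < maxSegments; i++) {
                int marker = in.readUnsignedShort();
                in.mark();                            // inner mark: just after the marker
                int length = in.readUnsignedShort();  // peek at the segment length
                in.reset();                           // back to just after the marker...
                in.skipBytes(length);                 // ...so the skip covers the whole segment
                System.out.printf("marker 0x%04x, %d bytes%n", marker, length);
            }
        } finally {
            in.reset();                               // outer reset: restore the original position
        }
    }
}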
From source file:org.apache.xmlgraphics.image.loader.impl.PreloaderTIFF.java
private ImageInfo createImageInfo(String uri, ImageInputStream in, ImageContext context)
        throws IOException, ImageException {
    ImageInfo info = null;
    in.mark();
    try {
        int pageIndex = ImageUtil.needPageIndexFromURI(uri);
        SeekableStream seekable = new SeekableStreamAdapter(in);
        TIFFDirectory dir;
        try {
            dir = new TIFFDirectory(seekable, pageIndex);
        } catch (IllegalArgumentException iae) {
            String errorMessage = MessageFormat.format("Subimage {0} does not exist.",
                    new Object[] { new Integer(pageIndex) });
            throw new SubImageNotFoundException(errorMessage);
        }
        int width = (int) dir.getFieldAsLong(TIFFImageDecoder.TIFF_IMAGE_WIDTH);
        int height = (int) dir.getFieldAsLong(TIFFImageDecoder.TIFF_IMAGE_LENGTH);
        ImageSize size = new ImageSize();
        size.setSizeInPixels(width, height);
        int unit = 2; //inch is default
        if (dir.isTagPresent(TIFFImageDecoder.TIFF_RESOLUTION_UNIT)) {
            unit = (int) dir.getFieldAsLong(TIFFImageDecoder.TIFF_RESOLUTION_UNIT);
        }
        if (unit == 2 || unit == 3) {
            float xRes, yRes;
            TIFFField fldx = dir.getField(TIFFImageDecoder.TIFF_X_RESOLUTION);
            TIFFField fldy = dir.getField(TIFFImageDecoder.TIFF_Y_RESOLUTION);
            if (fldx == null || fldy == null) {
                unit = 2;
                xRes = context.getSourceResolution();
                yRes = xRes;
            } else {
                xRes = fldx.getAsFloat(0);
                yRes = fldy.getAsFloat(0);
            }
            if (unit == 2) {
                size.setResolution(xRes, yRes); //Inch
            } else {
                size.setResolution(UnitConv.in2mm(xRes) / 10, UnitConv.in2mm(yRes) / 10); //Centimeters
            }
        } else {
            size.setResolution(context.getSourceResolution());
        }
        size.calcSizeFromPixels();
        if (log.isTraceEnabled()) {
            log.trace("TIFF image detected: " + size);
        }
        info = new ImageInfo(uri, MimeConstants.MIME_TIFF);
        info.setSize(size);

        TIFFField fld;
        fld = dir.getField(TIFFImageDecoder.TIFF_COMPRESSION);
        if (fld != null) {
            int compression = fld.getAsInt(0);
            if (log.isTraceEnabled()) {
                log.trace("TIFF compression: " + compression);
            }
            info.getCustomObjects().put("TIFF_COMPRESSION", new Integer(compression));
        }

        fld = dir.getField(TIFFImageDecoder.TIFF_TILE_WIDTH);
        if (fld != null) {
            if (log.isTraceEnabled()) {
                log.trace("TIFF is tiled");
            }
            info.getCustomObjects().put("TIFF_TILED", Boolean.TRUE);
        }

        int stripCount;
        fld = dir.getField(TIFFImageDecoder.TIFF_ROWS_PER_STRIP);
        if (fld == null) {
            stripCount = 1;
        } else {
            stripCount = (int) Math.ceil(size.getHeightPx() / (double) fld.getAsLong(0));
        }
        if (log.isTraceEnabled()) {
            log.trace("TIFF has " + stripCount + " strips.");
        }
        info.getCustomObjects().put("TIFF_STRIP_COUNT", new Integer(stripCount));

        try {
            //Check if there is a next page
            new TIFFDirectory(seekable, pageIndex + 1);
            info.getCustomObjects().put(ImageInfo.HAS_MORE_IMAGES, Boolean.TRUE);
            if (log.isTraceEnabled()) {
                log.trace("TIFF is multi-page.");
            }
        } catch (IllegalArgumentException iae) {
            info.getCustomObjects().put(ImageInfo.HAS_MORE_IMAGES, Boolean.FALSE);
        }
    } finally {
        in.reset();
    }
    return info;
}
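The TIFF preloader only needs size and resolution, so it marks, probes, and resets in a finally block, handing the stream to later loaders at its original position. A generic sketch of such a non-destructive probe built on the standard ImageIO registry; probeSize is a hypothetical helper:

import java.awt.Dimension;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

final class SizeProbe {
    /** Returns the pixel size of the first image, or null, without consuming the stream. */
    static Dimension probeSize(ImageInputStream in) throws IOException {
        in.mark();                                // the caller's position must survive the probe
        try {
            Iterator<ImageReader> readers = ImageIO.getImageReaders(in);
            if (!readers.hasNext()) {
                return null;
            }
            ImageReader reader = readers.next();
            try {
                reader.setInput(in, false, true); // seekForwardOnly=false keeps the mark safe from flushing
                return new Dimension(reader.getWidth(0), reader.getHeight(0));
            } finally {
                reader.dispose();
            }
        } finally {
            in.reset();                           // hand the stream back where we found it
        }
    }
}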
From source file:org.dcm4che2.tool.dcm2dcm.Dcm2Dcm.java
/**
 * Read the raw raster data from the input stream without applying a VOI LUT,
 * if one is present.
 */
private Raster readRaster(java.io.InputStream inputStream, int frame)
        throws IOException, NoSuchFieldException, IllegalAccessException {
    ImageInputStream imageInputStream = ImageIO.createImageInputStream(inputStream);

    // Mark the stream position, since we may need to rewind
    imageInputStream.mark();

    DicomImageReader regularReader = (DicomImageReader) new DicomImageReaderSpi().createReaderInstance();
    regularReader.setInput(imageInputStream);

    // Make sure we read the DICOM data, so that the 'compressed' flag is properly set.
    DicomStreamMetaData metadata = (DicomStreamMetaData) regularReader.getStreamMetadata();
    metadata.getDicomObject();

    DicomImageReadParam params = (DicomImageReadParam) regularReader.getDefaultReadParam();

    // If the reader has determined that this is compressed data, it will enter a conditional block and
    // potentially throw an exception in readRaster.
    // We check here and avoid that block if the compressed flag is true.
    if (isCompressed(regularReader)) {
        // Rewind the stream
        imageInputStream.reset();

        // Use the custom reader
        DicomImageReaderNoVoiLut reader = (DicomImageReaderNoVoiLut) new DicomImageReaderNoVoiLutSpi()
                .createReaderInstance();
        reader.setInput(imageInputStream);
        BufferedImage image16 = reader.read(frame, params);
        return image16.getData();
    } else {
        return regularReader.readRaster(frame, params);
    }
}
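The DICOM example marks once up front, lets the regular reader consume stream metadata, and rewinds only when it has to switch to the no-VOI-LUT reader, so the second reader starts from the very first byte. A rough sketch of that inspect-then-rewind-then-decode flow using plain ImageIO (the dcm4che readers from the example are not assumed to be on the classpath); inspectThenDecode is a hypothetical helper:

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

final class InspectThenDecode {
    static BufferedImage inspectThenDecode(File file) throws IOException {
        try (ImageInputStream iis = ImageIO.createImageInputStream(file)) {
            if (iis == null) {
                return null;                         // no stream SPI for this input
            }
            iis.mark();                              // remember the start of the image data
            Iterator<ImageReader> it = ImageIO.getImageReaders(iis);
            if (!it.hasNext()) {
                return null;
            }
            ImageReader probe = it.next();
            probe.setInput(iis);
            int width = probe.getWidth(0);           // header access moves the stream position
            probe.dispose();

            iis.reset();                             // rewind before the real decode
            ImageReader reader = ImageIO.getImageReaders(iis).next();
            try {
                reader.setInput(iis);
                System.out.println("decoding image of width " + width);
                return reader.read(0);
            } finally {
                reader.dispose();
            }
        }
    }
}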
From source file:org.geotools.gce.imagecollection.Utils.java
/**
 * Look for an {@link ImageReader} instance that is able to read the
 * provided {@link ImageInputStream}, which must be non null.
 *
 * <p>
 * In case no reader is found, <code>null</code> is returned.
 *
 * @param inStream
 *            an instance of {@link ImageInputStream} for which we need to
 *            find a suitable {@link ImageReader}.
 * @return a suitable instance of {@link ImageReader} or <code>null</code>
 *         if one cannot be found.
 */
static ImageReader getReader(final ImageInputStream inStream) {
    Utilities.ensureNonNull("inStream", inStream);
    // get a reader
    inStream.mark();
    final Iterator<ImageReader> readersIt = ImageIO.getImageReaders(inStream);
    if (!readersIt.hasNext()) {
        return null;
    }
    return readersIt.next();
}
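getReader() marks the stream before ImageIO.getImageReaders() but leaves the mark unmatched, relying on the caller (or the SPIs' own mark/reset discipline) to restore the position. A variant that pairs the mark with a reset in a finally block, so the caller always gets the stream back where it was; findReader is a hypothetical helper:

import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

final class ReaderLookup {
    /** Like the method above, but always restores the stream position before returning. */
    static ImageReader findReader(ImageInputStream inStream) throws IOException {
        inStream.mark();
        try {
            Iterator<ImageReader> readers = ImageIO.getImageReaders(inStream);
            return readers.hasNext() ? readers.next() : null;
        } finally {
            inStream.reset();                     // pop the mark and restore the position
        }
    }
}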
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
private void init(final BoundingBox granuleBBOX, final URL granuleUrl, final ImageReaderSpi suggestedSPI, final MultiLevelROI roiProvider, final boolean heterogeneousGranules, final boolean handleArtifactsFiltering, final Hints hints) { this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX); this.granuleUrl = granuleUrl; this.roiProvider = roiProvider; this.handleArtifactsFiltering = handleArtifactsFiltering; filterMe = handleArtifactsFiltering && roiProvider != null; // create the base grid to world transformation ImageInputStream inStream = null; ImageReader reader = null;//ww w. ja v a2 s. c o m try { // //get info about the raster we have to read // // get a stream if (cachedStreamSPI == null) { cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true); if (cachedStreamSPI == null) { final File file = DataUtilities.urlToFile(granuleUrl); if (file != null) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.log(Level.WARNING, Utils.getFileInfo(file)); } } throw new IllegalArgumentException( "Unable to get an input stream for the provided granule " + granuleUrl.toString()); } } assert cachedStreamSPI != null : "no cachedStreamSPI available!"; inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory()); if (inStream == null) { final File file = DataUtilities.urlToFile(granuleUrl); if (file != null) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.log(Level.WARNING, Utils.getFileInfo(file)); } } throw new IllegalArgumentException( "Unable to get an input stream for the provided file " + granuleUrl.toString()); } // get a reader and try to cache the suggested SPI first if (cachedReaderSPI == null) { inStream.mark(); if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) { cachedReaderSPI = suggestedSPI; inStream.reset(); } else { inStream.mark(); reader = ImageIOExt.getImageioReader(inStream); if (reader != null) cachedReaderSPI = reader.getOriginatingProvider(); inStream.reset(); } } if (reader == null) { if (cachedReaderSPI == null) { throw new IllegalArgumentException( "Unable to get a ReaderSPI for the provided input: " + granuleUrl.toString()); } reader = cachedReaderSPI.createReaderInstance(); } if (reader == null) throw new IllegalArgumentException( "Unable to get an ImageReader for the provided file " + granuleUrl.toString()); boolean ignoreMetadata = customizeReaderInitialization(reader, hints); reader.setInput(inStream, false, ignoreMetadata); //get selected level and base level dimensions final Rectangle originalDimension = Utils.getDimension(0, reader); // build the g2W for this tile, in principle we should get it // somehow from the tile itself or from the index, but at the moment // we do not have such info, hence we assume that it is a simple // scale and translate this.geMapper = new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX); geMapper.setPixelAnchor(PixelInCell.CELL_CENTER);//this is the default behavior but it is nice to write it down anyway this.baseGridToWorld = geMapper.createAffineTransform(); // add the base level this.granuleLevels.put(Integer.valueOf(0), new GranuleOverviewLevelDescriptor(1, 1, originalDimension.width, originalDimension.height)); ////////////////////// Setting overviewController /////////////////////// if (heterogeneousGranules) { // // // // Right now we are setting up overviewsController by assuming that // overviews are internal images as happens in TIFF images // We can improve this by leveraging on coverageReaders // // // // 
Getting the first level descriptor final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0); // Variables initialization final int numberOfOvervies = reader.getNumImages(true) - 1; final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform(); final int width = baseOverviewLevelDescriptor.getWidth(); final int height = baseOverviewLevelDescriptor.getHeight(); final double resX = AffineTransform2D.getScaleX0(baseG2W); final double resY = AffineTransform2D.getScaleY0(baseG2W); final double[] highestRes = new double[] { resX, resY }; final double[][] overviewsResolution = new double[numberOfOvervies][2]; // Populating overviews and initializing overviewsController for (int i = 0; i < numberOfOvervies; i++) { overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1); overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1); } overviewsController = new OverviewsController(highestRes, numberOfOvervies, overviewsResolution); } ////////////////////////////////////////////////////////////////////////// if (hints != null && hints.containsKey(Utils.CHECK_AUXILIARY_METADATA)) { boolean checkAuxiliaryMetadata = (Boolean) hints.get(Utils.CHECK_AUXILIARY_METADATA); if (checkAuxiliaryMetadata) { checkPamDataset(); } } } catch (IllegalStateException e) { throw new IllegalArgumentException(e); } catch (IOException e) { throw new IllegalArgumentException(e); } finally { // close/dispose stream and readers try { if (inStream != null) { inStream.close(); } } catch (Throwable e) { throw new IllegalArgumentException(e); } finally { if (reader != null) { reader.dispose(); } } } }
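GranuleDescriptor brackets each SPI probe with mark()/reset(): first the suggested SPI's canDecodeInput(), then, if that fails, a registry lookup, so both probes start from the same position before the reader's input is finally set. A simplified sketch of that probe-and-cache logic using the plain ImageIO registry in place of GeoTools' ImageIOExt; resolveReaderSpi is a hypothetical helper:

import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.spi.ImageReaderSpi;
import javax.imageio.stream.ImageInputStream;

final class ReaderSpiResolver {
    /** Probes a preferred SPI first, then the registry, restoring the position after each probe. */
    static ImageReaderSpi resolveReaderSpi(ImageInputStream inStream, ImageReaderSpi preferred)
            throws IOException {
        if (preferred != null) {
            inStream.mark();
            boolean accepted = preferred.canDecodeInput(inStream);
            inStream.reset();
            if (accepted) {
                return preferred;
            }
        }
        inStream.mark();
        try {
            Iterator<ImageReader> readers = ImageIO.getImageReaders(inStream);
            if (readers.hasNext()) {
                ImageReader reader = readers.next();
                ImageReaderSpi spi = reader.getOriginatingProvider();
                reader.dispose();
                return spi;
            }
            return null;
        } finally {
            inStream.reset();
        }
    }
}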
From source file:org.geotools.utils.imagemosaic.MosaicIndexBuilder.java
/** * Main thread for the mosaic index builder. *//*from w ww . jav a2 s. c o m*/ public void run() { // ///////////////////////////////////////////////////////////////////// // // CREATING INDEX FILE // // ///////////////////////////////////////////////////////////////////// // ///////////////////////////////////////////////////////////////////// // // Create a file handler that write log record to a file called // my.log // // ///////////////////////////////////////////////////////////////////// FileHandler handler = null; try { boolean append = true; handler = new FileHandler(new StringBuffer(locationPath).append("/error.txt").toString(), append); handler.setLevel(Level.SEVERE); // Add to the desired logger LOGGER.addHandler(handler); // ///////////////////////////////////////////////////////////////////// // // Create a set of file names that have to be skipped since these are // our metadata files // // ///////////////////////////////////////////////////////////////////// final Set<String> skipFiles = new HashSet<String>( Arrays.asList(new String[] { indexName + ".shp", indexName + ".dbf", indexName + ".shx", indexName + ".prj", "error.txt", "error.txt.lck", indexName + ".properties" })); // ///////////////////////////////////////////////////////////////////// // // Creating temp vars // // ///////////////////////////////////////////////////////////////////// ShapefileDataStore index = null; Transaction t = new DefaultTransaction(); // declaring a preciosion model to adhere the java double type // precision PrecisionModel precMod = new PrecisionModel(PrecisionModel.FLOATING); GeometryFactory geomFactory = new GeometryFactory(precMod); try { index = new ShapefileDataStore( new File(locationPath + File.separator + indexName + ".shp").toURI().toURL()); } catch (MalformedURLException ex) { if (LOGGER.isLoggable(Level.SEVERE)) LOGGER.log(Level.SEVERE, ex.getLocalizedMessage(), ex); fireException(ex); return; } final List<File> files = new ArrayList<File>(); recurse(files, locationPath); // ///////////////////////////////////////////////////////////////////// // // Cycling over the files that have filtered out // // ///////////////////////////////////////////////////////////////////// numFiles = files.size(); String validFileName = null; final Iterator<File> filesIt = files.iterator(); FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null; boolean doneSomething = false; for (int i = 0; i < numFiles; i++) { StringBuffer message; // // // // Check that this file is actually good to go // // // final File fileBeingProcessed = ((File) filesIt.next()); if (!fileBeingProcessed.exists() || !fileBeingProcessed.canRead() || !fileBeingProcessed.isFile()) { // send a message message = new StringBuffer("Skipped file ").append(files.get(i)) .append(" snce it seems invalid."); if (LOGGER.isLoggable(Level.INFO)) LOGGER.info(message.toString()); fireEvent(message.toString(), ((i * 99.0) / numFiles)); continue; } // // // // Anyone has asked us to stop? 
// // // if (getStopThread()) { message = new StringBuffer("Stopping requested at file ").append(i).append(" of ") .append(numFiles).append(" files"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), ((i * 100.0) / numFiles)); return; } // replacing chars on input path try { validFileName = fileBeingProcessed.getCanonicalPath(); } catch (IOException e1) { fireException(e1); return; } validFileName = validFileName.replace('\\', '/'); validFileName = validFileName.substring(locationPath.length() + 1, fileBeingProcessed.getAbsolutePath().length()); if (skipFiles.contains(validFileName)) continue; message = new StringBuffer("Now indexing file ").append(validFileName); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), ((i * 100.0) / numFiles)); try { // //////////////////////////////////////////////////////// // // // STEP 1 // Getting an ImageIO reader for this coverage. // // // //////////////////////////////////////////////////////// ImageInputStream inStream = ImageIO.createImageInputStream(fileBeingProcessed); if (inStream == null) { if (LOGGER.isLoggable(Level.SEVERE)) LOGGER.severe(fileBeingProcessed + " has been skipped since we could not get a stream for it"); continue; } inStream.mark(); final Iterator<ImageReader> it = ImageIO.getImageReaders(inStream); ImageReader r = null; if (it.hasNext()) { r = (ImageReader) it.next(); r.setInput(inStream); } else { // release resources try { inStream.close(); } catch (Exception e) { // ignore exception } // try { // r.dispose(); // } catch (Exception e) { // // ignore exception // } // send a message message = new StringBuffer("Skipped file ").append(files.get(i)) .append(":No ImageIO readeres avalaible."); if (LOGGER.isLoggable(Level.INFO)) LOGGER.info(message.toString()); fireEvent(message.toString(), ((i * 99.0) / numFiles)); continue; } // //////////////////////////////////////////////////////// // // STEP 2 // Getting a coverage reader for this coverage. // // //////////////////////////////////////////////////////// if (LOGGER.isLoggable(Level.FINE)) LOGGER.fine(new StringBuffer("Getting a reader").toString()); final AbstractGridFormat format = (AbstractGridFormat) GridFormatFinder .findFormat(files.get(i)); if (format == null || !format.accepts(files.get(i))) { // release resources try { inStream.close(); } catch (Exception e) { // ignore exception } try { r.dispose(); } catch (Exception e) { // ignore exception } message = new StringBuffer("Skipped file ").append(files.get(i)) .append(": File format is not supported."); if (LOGGER.isLoggable(Level.INFO)) LOGGER.info(message.toString()); fireEvent(message.toString(), ((i * 99.0) / numFiles)); continue; } final AbstractGridCoverage2DReader reader = (AbstractGridCoverage2DReader) format .getReader(files.get(i)); envelope = (GeneralEnvelope) reader.getOriginalEnvelope(); actualCRS = reader.getCrs(); // ///////////////////////////////////////////////////////////////////// // // STEP 3 // Get the type specifier for this image and the check that the // image has the correct sample model and color model. // If this is the first cycle of the loop we initialize // eveything. 
// // ///////////////////////////////////////////////////////////////////// final ImageTypeSpecifier its = ((ImageTypeSpecifier) r.getImageTypes(0).next()); boolean skipFeature = false; if (globEnvelope == null) { // ///////////////////////////////////////////////////////////////////// // // at the first step we initialize everything that we will // reuse afterwards starting with color models, sample // models, crs, etc.... // // ///////////////////////////////////////////////////////////////////// defaultCM = its.getColorModel(); if (defaultCM instanceof IndexColorModel) { IndexColorModel icm = (IndexColorModel) defaultCM; int numBands = defaultCM.getNumColorComponents(); defaultPalette = new byte[3][icm.getMapSize()]; icm.getReds(defaultPalette[0]); icm.getGreens(defaultPalette[0]); icm.getBlues(defaultPalette[0]); if (numBands == 4) icm.getAlphas(defaultPalette[0]); } defaultSM = its.getSampleModel(); defaultCRS = actualCRS; globEnvelope = new GeneralEnvelope(envelope); // ///////////////////////////////////////////////////////////////////// // // getting information about resolution // // ///////////////////////////////////////////////////////////////////// // // // // get the dimension of the hr image and build the model // as well as // computing the resolution // // // resetting reader and recreating stream, turnaround for a // strange imageio bug r.reset(); try { inStream.reset(); } catch (IOException e) { inStream = ImageIO.createImageInputStream(fileBeingProcessed); } //let's check if we got something now if (inStream == null) { //skip file if (LOGGER.isLoggable(Level.WARNING)) LOGGER.warning("Skipping file " + fileBeingProcessed.toString()); continue; } r.setInput(inStream); numberOfLevels = r.getNumImages(true); resolutionLevels = new double[2][numberOfLevels]; double[] res = getResolution(envelope, new Rectangle(r.getWidth(0), r.getHeight(0)), defaultCRS); resolutionLevels[0][0] = res[0]; resolutionLevels[1][0] = res[1]; // resolutions levels if (numberOfLevels > 1) { for (int k = 0; k < numberOfLevels; k++) { res = getResolution(envelope, new Rectangle(r.getWidth(k), r.getHeight(k)), defaultCRS); resolutionLevels[0][k] = res[0]; resolutionLevels[1][k] = res[1]; } } // ///////////////////////////////////////////////////////////////////// // // creating the schema // // ///////////////////////////////////////////////////////////////////// final SimpleFeatureTypeBuilder featureBuilder = new SimpleFeatureTypeBuilder(); featureBuilder.setName("Flag"); featureBuilder.setNamespaceURI("http://www.geo-solutions.it/"); featureBuilder.add("location", String.class); featureBuilder.add("the_geom", Polygon.class, this.actualCRS); featureBuilder.setDefaultGeometry("the_geom"); final SimpleFeatureType simpleFeatureType = featureBuilder.buildFeatureType(); // create the schema for the new shape file index.createSchema(simpleFeatureType); // get a feature writer fw = index.getFeatureWriter(t); } else { // //////////////////////////////////////////////////////// // // comparing ColorModel // comparing SampeModel // comparing CRSs // //////////////////////////////////////////////////////// globEnvelope.add(envelope); actualCM = its.getColorModel(); actualSM = its.getSampleModel(); skipFeature = (i > 0 ? 
!(CRS.equalsIgnoreMetadata(defaultCRS, actualCRS)) : false); if (skipFeature) LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i)) .append(" because CRSs do not match.").toString()); skipFeature = checkColorModels(defaultCM, defaultPalette, actualCM); if (skipFeature) LOGGER.warning(new StringBuffer("Skipping image ").append(files.get(i)) .append(" because color models do not match.").toString()); // defaultCM.getNumComponents()==actualCM.getNumComponents()&& // defaultCM.getClass().equals(actualCM.getClass()) // && defaultSM.getNumBands() == actualSM // .getNumBands() // && defaultSM.getDataType() == actualSM // .getDataType() && // // if (skipFeature) // LOGGER // .warning(new StringBuffer("Skipping image ") // .append(files.get(i)) // .append( // " because cm or sm does not match.") // .toString()); // res = getResolution(envelope, new // Rectangle(r.getWidth(0), // r.getHeight(0)), defaultCRS); // if (Math.abs((resX - res[0]) / resX) > EPS // || Math.abs(resY - res[1]) > EPS) { // LOGGER.warning(new StringBuffer("Skipping image // ").append( // files.get(i)).append( // " because resolutions does not match.") // .toString()); // skipFeature = true; // } } // //////////////////////////////////////////////////////// // // STEP 4 // // create and store features // // //////////////////////////////////////////////////////// if (!skipFeature) { final SimpleFeature feature = fw.next(); feature.setAttribute(1, geomFactory.toGeometry(new ReferencedEnvelope((Envelope) envelope))); feature.setAttribute( 0, absolute ? new StringBuilder(this.locationPath).append(File.separatorChar) .append(validFileName).toString() : validFileName); fw.write(); message = new StringBuffer("Done with file ").append(files.get(i)); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } message.append('\n'); fireEvent(message.toString(), (((i + 1) * 99.0) / numFiles)); doneSomething = true; } else skipFeature = false; // //////////////////////////////////////////////////////// // // STEP 5 // // release resources // // //////////////////////////////////////////////////////// try { inStream.close(); } catch (Exception e) { // ignore exception } try { r.dispose(); } catch (Exception e) { // ignore exception } // release resources reader.dispose(); } catch (IOException e) { fireException(e); break; } catch (ArrayIndexOutOfBoundsException e) { fireException(e); break; } } try { if (fw != null) fw.close(); t.commit(); t.close(); index.dispose(); } catch (IOException e) { LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e); } createPropertiesFiles(globEnvelope, doneSomething); } catch (SecurityException el) { fireException(el); return; } catch (IOException el) { fireException(el); return; } finally { try { if (handler != null) handler.close(); } catch (Throwable e) { // ignore } } }
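MosaicIndexBuilder marks before the reader lookup and later tries to reset; if reset() fails it simply recreates the ImageInputStream from the file (the source comments call this a workaround for an ImageIO quirk). A small sketch of that reset-or-reopen fallback; rewindOrReopen is a hypothetical helper:

import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.imageio.stream.ImageInputStream;

final class StreamRewinder {
    /** Rewinds to the mark, or reopens the file if the old stream can no longer be reset. */
    static ImageInputStream rewindOrReopen(ImageInputStream in, File source) throws IOException {
        try {
            in.reset();                      // normal case: jump back to the mark
            return in;
        } catch (IOException resetFailure) {
            in.close();                      // the old stream is in an unknown state
            ImageInputStream fresh = ImageIO.createImageInputStream(source);
            if (fresh != null) {
                fresh.mark();                // re-establish the mark on the new stream
            }
            return fresh;
        }
    }
}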
From source file:org.geotools.utils.imageoverviews.OverviewsEmbedder.java
public void run() { // did we create a local private tile cache or not? boolean localTileCache = false; //// www .j ava2 s. co m // creating the image to use for the successive // subsampling // TileCache baseTC = JAI.getDefaultInstance().getTileCache(); if (baseTC == null) { localTileCache = true; final long tilecacheSize = super.getTileCacheSize(); baseTC = JAI.createTileCache(); baseTC.setMemoryCapacity(tilecacheSize); baseTC.setMemoryThreshold(0.75f); } // // CHECK INPUT DIRECTORIES/FILES // if (sourcePath == null) { fireEvent("Provided sourcePath is null", overallProgress); return; } // getting an image input stream to the file final File file = new File(sourcePath); final File[] files; int numFiles = 1; StringBuilder message; if (!file.canRead() || !file.exists()) { fireEvent("Provided file " + file.getAbsolutePath() + " cannot be read or does not exist", 100); return; } if (file.isDirectory()) { if (wildcardString == null) { fireEvent("Provided wildcardString is null", 100); return; } final FileFilter fileFilter = new WildcardFileFilter(wildcardString); files = file.listFiles(fileFilter); numFiles = files.length; if (numFiles <= 0) { message = new StringBuilder("No files to process!"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), 100); } } else files = new File[] { file }; if (files == null || files.length == 0) { fireEvent("Unable to find input files for the provided wildcard " + wildcardString + " and input path " + sourcePath, 100); return; } // setting step overallProgressStep = 100 * 1.0 / numFiles; // // ADDING OVERVIEWS TO ALL FOUND FILES // for (fileBeingProcessed = 0; fileBeingProcessed < numFiles; fileBeingProcessed++) { message = new StringBuilder("Managing file ").append(fileBeingProcessed).append(" of ") .append(files[fileBeingProcessed]).append(" files"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } overallProgress = overallProgressStep * fileBeingProcessed; fireEvent(message.toString(), overallProgress); if (getStopThread()) { message = new StringBuilder("Stopping requested at file ").append(fileBeingProcessed) .append(" of ").append(numFiles).append(" files"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); return; } ImageInputStream stream = null; ImageWriter writer = null; ImageOutputStream streamOut = null; RenderedOp currentImage = null; RenderedOp newImage = null; try { File localFile = files[fileBeingProcessed]; // // get a stream // stream = ImageIO.createImageInputStream(localFile); if (stream == null) { message = new StringBuilder("Unable to create an input stream for file") .append(files[fileBeingProcessed]); if (LOGGER.isLoggable(Level.SEVERE)) { LOGGER.severe(message.toString()); } fireEvent(message.toString(), overallProgress); break; } stream.mark(); // // get a reader // final Iterator<ImageReader> it = ImageIO.getImageReaders(stream); if (!it.hasNext()) { message = new StringBuilder("Unable to find a reader for file") .append(files[fileBeingProcessed]); if (LOGGER.isLoggable(Level.SEVERE)) { LOGGER.severe(message.toString()); } fireEvent(message.toString(), overallProgress); break; } final ImageReader reader = (ImageReader) it.next(); stream.reset(); stream.mark(); // is it a geotiff reader or not? 
if (!reader.getFormatName().toLowerCase().startsWith("tif")) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("Discarding input file " + files[fileBeingProcessed] + " since it is not a proper tif file."); } continue; } // // set input // reader.setInput(stream); ImageLayout layout = null; // tiling the image if needed int actualTileW = reader.getTileWidth(0); int actualTileH = reader.getTileHeight(0); if (!reader.isImageTiled(0) || (reader.isImageTiled(0) && (actualTileH != tileH && tileH != -1) || (actualTileW != tileW && tileW != -1))) { message = new StringBuilder("Retiling image ").append(fileBeingProcessed); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); layout = Utils.createTiledLayout(tileW, tileH, 0, 0); } stream.reset(); reader.reset(); reader.dispose(); // // output image stream // if (externalOverviews) { // create a sibling file localFile = new File(localFile.getParent(), FilenameUtils.getBaseName(localFile.getAbsolutePath()) + ".tif.ovr"); } streamOut = ImageIOExt.createImageOutputStream(null, localFile); if (streamOut == null) { message = new StringBuilder("Unable to acquire an ImageOutputStream for the file ") .append(files[fileBeingProcessed].toString()); if (LOGGER.isLoggable(Level.SEVERE)) { LOGGER.severe(message.toString()); } fireEvent(message.toString(), 100); break; } // // Preparing to write the set of images. First of all I write // the first image ` // // getting a writer for this reader writer = TIFF_IMAGE_WRITER_SPI.createWriterInstance(); writer.setOutput(streamOut); writer.addIIOWriteProgressListener(writeProgressListener); writer.addIIOWriteWarningListener(writeProgressListener); ImageWriteParam param = writer.getDefaultWriteParam(); // // setting tiling on the first image using writing parameters // if (tileH != -1 & tileW != -1) { param.setTilingMode(ImageWriteParam.MODE_EXPLICIT); param.setTiling(tileW, tileH, 0, 0); } else { param.setTilingMode(ImageWriteParam.MODE_EXPLICIT); param.setTiling(actualTileW, actualTileH, 0, 0); } if (this.compressionScheme != null && !Double.isNaN(compressionRatio)) { param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); param.setCompressionType(compressionScheme); param.setCompressionQuality((float) this.compressionRatio); } final RenderingHints newHints = new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout); newHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, baseTC)); // read base image ParameterBlock pbjRead = new ParameterBlock(); pbjRead.add(stream); pbjRead.add(Integer.valueOf(0)); pbjRead.add(Boolean.FALSE); pbjRead.add(Boolean.FALSE); pbjRead.add(Boolean.FALSE); pbjRead.add(null); pbjRead.add(null); pbjRead.add(null); pbjRead.add(null); currentImage = JAI.create("ImageRead", pbjRead, newHints); message = new StringBuilder("Read original image ").append(fileBeingProcessed); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); int i = 0; // // OVERVIEWS CYLE // for (overviewInProcess = 0; overviewInProcess < numSteps; overviewInProcess++) { message = new StringBuilder("Subsampling step ").append(overviewInProcess + 1) .append(" of image ").append(fileBeingProcessed); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); // paranoiac check if (currentImage.getWidth() / downsampleStep <= 0 || currentImage.getHeight() / downsampleStep <= 0) break; // SCALE // subsampling the input image using the chosen algorithm final 
SubsampleAlgorithm algorithm = SubsampleAlgorithm.valueOf(scaleAlgorithm); switch (algorithm) { case Average: newImage = Utils.scaleAverage(currentImage, baseTC, downsampleStep, borderExtender); break; case Filtered: newImage = Utils.filteredSubsample(currentImage, baseTC, downsampleStep, lowPassFilter); break; case Bilinear: newImage = Utils.subsample(currentImage, baseTC, new InterpolationBilinear(), downsampleStep, borderExtender); break; case Bicubic: newImage = Utils.subsample(currentImage, baseTC, new InterpolationBicubic(2), downsampleStep, borderExtender); break; case Nearest: newImage = Utils.subsample(currentImage, baseTC, new InterpolationNearest(), downsampleStep, borderExtender); break; default: throw new IllegalArgumentException("Invalid scaling algorithm " + scaleAlgorithm);//cannot get here } //set relevant metadata IIOMetadata imageMetadata = null; if (writer instanceof TIFFImageWriter) { imageMetadata = writer.getDefaultImageMetadata(new ImageTypeSpecifier(newImage), param); if (imageMetadata != null) ((TIFFImageMetadata) imageMetadata).addShortOrLongField( BaselineTIFFTagSet.TAG_NEW_SUBFILE_TYPE, BaselineTIFFTagSet.NEW_SUBFILE_TYPE_REDUCED_RESOLUTION); } // write out if (!externalOverviews || i > 0) writer.writeInsert(-1, new IIOImage(newImage, null, imageMetadata), param); else writer.write(null, new IIOImage(newImage, null, imageMetadata), param); message = new StringBuilder("Step ").append(overviewInProcess + 1).append(" of image ") .append(fileBeingProcessed).append(" done!"); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); // switching images currentImage.dispose(); //dispose old image currentImage = newImage; i++; } overallProgress = 100; // close message message = new StringBuilder("Done with image ").append(fileBeingProcessed); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(message.toString()); } fireEvent(message.toString(), overallProgress); } catch (Throwable e) { fireException(e); } finally { // clean up // clean caches if they are local if (localTileCache && baseTC != null) try { baseTC.flush(); } catch (Exception e) { } // // free everything try { if (streamOut != null) streamOut.close(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } try { if (writer != null) writer.dispose(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } try { if (currentImage != null) currentImage.dispose(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } try { if (newImage != null) newImage.dispose(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } try { if (stream != null) stream.close(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } } } if (LOGGER.isLoggable(Level.FINE)) LOGGER.fine("Done!!!"); }
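OverviewsEmbedder resets and immediately marks again before each new consumer of the stream (the reader lookup, then the JAI "ImageRead" operation). Because each reset() consumes the most recent unmatched mark, a fresh mark() is needed if the stream is to be rewound once more later on. A minimal sketch of that idiom, with a hypothetical helper name:

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

final class RewindAndRemark {
    /** Rewinds to the most recent mark and immediately marks again for the next consumer. */
    static void rewindAndRemark(ImageInputStream stream) throws IOException {
        stream.reset();   // back to the previously marked position (this consumes the mark)
        stream.mark();    // make the same position recoverable once more
    }
}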