List of usage examples for java.awt.image RenderedImage getSampleModel
SampleModel getSampleModel();
From source file:fr.gael.dhus.datastore.processing.impl.ProcessingUtils.java
/** * Cut the quicklook that have a big width/height ratio. * Each sub-part is dispatched on multiple bands. * * @param quick_look the quick_look to be cut * @param max_ratio the maximum ratio between quick_look width and height. * @param margin the margin between each band. default 5. */// w w w.ja v a 2 s.com public static RenderedImage cutQuickLook(RenderedImage input_image, double max_ratio, int margin) { ColorModel color_model = input_image.getColorModel(); if ((color_model == null) && (input_image.getSampleModel() != null)) { color_model = ColorRenderer.createColorModel(input_image.getSampleModel()); } BufferedImage quick_look; try { quick_look = PlanarImage.wrapRenderedImage(input_image).getAsBufferedImage( new Rectangle(input_image.getWidth(), input_image.getHeight()), color_model); } catch (Exception e) { logger.error("Problem getting buffered image.", e); throw new IllegalArgumentException("Problem getting buffered image", e); } if ((quick_look != null) && ((quick_look.getWidth() > 0) && (quick_look.getHeight() > 0))) { //Compute width/height ratio int ql_width = quick_look.getWidth(); int ql_height = quick_look.getHeight(); int ratio = (int) Math.sqrt(Math.max(ql_width, ql_height) / Math.min(ql_width, ql_height)); //Check if the quicklook has a strong width/height ratio if ((ratio < max_ratio) || (ratio <= 1)) return PlanarImage.wrapRenderedImage(quick_look); /** * Cut the wider side (width or height) into "ratio" bands. 
* Ex: If height = 3 * width then we cut 3 bands along columns * So height' = height / 3 (extract 1 band / 3 from height) * width' = width * 3 (dispatch 3 bands along lines) */ int width = ql_width; //width of the bands int height = ql_height; //height of the bands if (ql_width < ql_height) //cut along height { width = (ql_width + margin) * ratio; height = ql_height / ratio; } else //cut along width { width = ql_width / ratio; height = (ql_height + margin) * ratio; } //Dispatch the sub-parts BufferedImage quick_look_cut = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); Graphics2D g2 = quick_look_cut.createGraphics(); for (int k = 0; k < ratio; k++) { BufferedImage ql_band = null; //Dispatch on columns if (ql_width < ql_height) { ql_band = quick_look.getSubimage(0, (k * ql_height) / ratio, ql_width, ql_height / ratio); g2.drawImage(ql_band, null, k * (ql_width + margin), 0); } //Dispatch on lines else { ql_band = quick_look.getSubimage((k * ql_width) / ratio, 0, ql_width / ratio, ql_height); g2.drawImage(ql_band, null, 0, k * (ql_height + margin)); } } //for each band g2.dispose(); return PlanarImage.wrapRenderedImage(quick_look_cut); } return PlanarImage.wrapRenderedImage(quick_look); }
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/** * Setup Image Compression related fields depending on the compression properties. * //from ww w. j a v a 2s .c om * @param subheader the {@link ImageSubheader} to be set * @param compression the specified {@link WriteCompression} * @param isJP2 whether a JP2 * @param ri the renderedImage * @param fis * @return * @throws IOException */ private static double setImageCompression(final ImageSubheader subheader, final WriteCompression compression, final RenderedImage ri, final FileImageInputStreamExt fis) throws IOException { double ratio = Double.NaN; final int numBits = ri.getSampleModel().getSampleSize(0); if (compression != null && compression != WriteCompression.UNCOMPRESSED) { // Setting up compression type and compression ratio NITFUtilities.setField("IC", subheader.getImageCompression(), NITFUtilities.Consts.COMPRESSION_JP2); if (fis != null) { final long codeStreamSize = fis.length(); final long imageSize = ri.getWidth() * ri.getHeight() * ri.getSampleModel().getNumBands(); ratio = codeStreamSize / (double) (imageSize / (double) numBits); } String comrat = ""; if (compression.getCompression() == Compression.NUMERICALLY_LOSSLESS) { String ratioString = Double.toString(ratio); ratioString = ratioString.replace(".", ""); ratioString = ratioString.replace(",", ""); ratioString = "N0" + ratioString.substring(0, 2); // ImproveMe comrat = ratioString; } else if (compression == WriteCompression.RATIO_15_1) { // Wait for NGA feedbacks to fix this value comrat = NITFUtilities.Consts.COMPRESSION_L005; } else if (compression.toString().endsWith("VL")) { // Visually lossless uses a static value comrat = NITFUtilities.Consts.COMPRESSION_V039; } NITFUtilities.setField("COMRAT", subheader.getCompressionRate(), comrat); } else { NITFUtilities.setField("IC", subheader.getImageCompression(), NITFUtilities.Consts.COMPRESSION_NONE); } return ratio; }
From source file:com.bc.ceres.jai.opimage.ReinterpretOpImage.java
/**
 * Factory for {@link ReinterpretOpImage}. Uses the {@link ImageLayout} found
 * in the configuration hints when present; otherwise derives a single-band,
 * pixel-interleaved layout whose data type matches the reinterpretation.
 *
 * @param source             the source image
 * @param factor             scaling factor
 * @param offset             scaling offset
 * @param scalingType        the scaling type
 * @param interpretationType the interpretation type
 * @param config             rendering configuration (may be null)
 * @return the new reinterpret op-image
 */
static RenderedImage create(RenderedImage source, double factor, double offset, ScalingType scalingType,
        InterpretationType interpretationType, Map<Object, Object> config) {
    Object layoutHint = (config == null) ? null : config.get(JAI.KEY_IMAGE_LAYOUT);
    final ImageLayout layout;
    if (layoutHint instanceof ImageLayout) {
        // Caller provided an explicit layout — honour it.
        layout = (ImageLayout) layoutHint;
    } else {
        // Build a one-band layout sized on the source tiling, typed per the
        // target data type of the requested reinterpretation.
        final int targetType = ReinterpretDescriptor.getTargetDataType(source.getSampleModel().getDataType(),
                factor, offset, scalingType, interpretationType);
        final PixelInterleavedSampleModel sm = new PixelInterleavedSampleModel(targetType,
                source.getTileWidth(), source.getTileHeight(), 1, source.getTileWidth(), new int[] { 0 });
        layout = ReinterpretDescriptor.createTargetImageLayout(source, sm);
    }
    return new ReinterpretOpImage(source, layout, config, factor, offset, scalingType, interpretationType);
}
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/**
 * Populates the NITF image subheader (dimensions, blocking, classification and
 * the other per-image fields) from the wrapped image, then delegates the
 * compression fields to {@code setImageCompression} and the band fields to
 * {@code setImageBands}.
 *
 * @param imageWrapper the wrapper holding the image and its metadata
 * @param subheader    the {@link ImageSubheader} to populate
 * @param compression  the requested compression (null or UNCOMPRESSED means no JP2)
 * @param fis          stream over the already-written JP2 codestream, used to
 *                     compute the compression ratio (may be null)
 * @return the compression ratio reported by {@code setImageCompression}
 *         ({@code Double.NaN} when not computed)
 * @throws IOException   on I/O failures while measuring the codestream
 * @throws NITFException on NITF field-setting failures
 */
private static double initImageSubHeader(final ImageWrapper imageWrapper, final ImageSubheader subheader,
        final WriteCompression compression, final FileImageInputStreamExt fis)
        throws IOException, NITFException {
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "Populating ImageSubHeader");
    }
    final RenderedImage ri = imageWrapper.getImage();
    final List<String> comments = imageWrapper.getComments();

    // Rows, cols, blocks and bits: JP2 output is tiled, so the block grid is
    // derived from the default tile size; uncompressed output is a single block.
    final boolean isJP2 = compression != null && compression != WriteCompression.UNCOMPRESSED;
    final int nCols = ri.getWidth();
    final int nRows = ri.getHeight();
    final int nBits = ri.getSampleModel().getSampleSize(0);
    final String numBlocksPerRow = isJP2
            ? String.valueOf((int) Math.ceil((double) nCols / NITFUtilities.DEFAULT_TILE_WIDTH))
            : String.valueOf(1);
    final String numBlocksPerCol = isJP2
            ? String.valueOf((int) Math.ceil((double) nRows / NITFUtilities.DEFAULT_TILE_HEIGHT))
            : String.valueOf(1);
    final String numPixelsPerVertBlock = isJP2 ? String.valueOf(NITFUtilities.DEFAULT_TILE_HEIGHT)
            : "0000"; // As per specification
    final String numPixelsPerHorizBlock = isJP2 ? String.valueOf(NITFUtilities.DEFAULT_TILE_WIDTH)
            : "0000"; // As per specification
    double ratio = Double.NaN;

    // Identification and security fields.
    NITFUtilities.setField("IM", subheader.getFilePartType(), "IM");
    NITFUtilities.setField("IID1", subheader.getImageId(), imageWrapper.getId());
    NITFUtilities.setField("IDATIM", subheader.getImageDateAndTime(), imageWrapper.getDateTime());
    NITFUtilities.setField("IID2", subheader.getImageTitle(), imageWrapper.getTitle());
    NITFUtilities.setField("ISCLAS", subheader.getImageSecurityClass(),
            imageWrapper.getSecurityClassification());
    NITFUtilities.setField("ISCLSY", subheader.getSecurityGroup().getClassificationSystem(),
            imageWrapper.getSecurityClassificationSystem());
    NITFUtilities.setField("ENCRYP", subheader.getEncrypted(),
            Integer.toString(imageWrapper.getEncrypted()));
    NITFUtilities.setField("ISORCE", subheader.getImageSource(), imageWrapper.getSource());

    // Pixel structure fields.
    NITFUtilities.setField("NROWS", subheader.getNumRows(), String.valueOf(nRows));
    NITFUtilities.setField("NCOLS", subheader.getNumCols(), String.valueOf(nCols));
    NITFUtilities.setField("PVTYPE", subheader.getPixelValueType(), NITFUtilities.Consts.DEFAULT_PVTYPE);
    NITFUtilities.setField("IREP", subheader.getImageRepresentation(),
            imageWrapper.getRepresentation().toString());
    NITFUtilities.setField("ICAT", subheader.getImageCategory(), imageWrapper.getImageCategory().toString());
    NITFUtilities.setField("ABPP", subheader.getActualBitsPerPixel(), Integer.toString(nBits));
    NITFUtilities.setField("PJUST", subheader.getPixelJustification(), imageWrapper.getPixelJustification());
    NITFUtilities.setField("ICORDS", subheader.getImageCoordinateSystem(),
            imageWrapper.getImageCoordinateSystem());
    NITFUtilities.setField("IGEOLO", subheader.getCornerCoordinates(), imageWrapper.getIgeolo());

    // Optional free-text image comments, inserted in order.
    if (comments != null && !comments.isEmpty()) {
        int i = 0;
        for (String comment : comments) {
            subheader.insertImageComment(comment, i++);
        }
    }

    // Compression (IC/COMRAT) and band (IREPBANDn/...) fields are delegated.
    ratio = setImageCompression(subheader, compression, ri, fis);
    setImageBands(subheader, imageWrapper);

    // Blocking, display-level and magnification fields.
    NITFUtilities.setField("ISYNC", subheader.getImageSyncCode(), NITFUtilities.Consts.ZERO);
    NITFUtilities.setField("IMODE", subheader.getImageMode(), NITFUtilities.Consts.DEFAULT_IMODE);
    NITFUtilities.setField("NBPR", subheader.getNumBlocksPerRow(), numBlocksPerRow);
    NITFUtilities.setField("NBPC", subheader.getNumBlocksPerCol(), numBlocksPerCol);
    NITFUtilities.setField("NPPBH", subheader.getNumPixelsPerHorizBlock(), numPixelsPerHorizBlock);
    NITFUtilities.setField("NPPBV", subheader.getNumPixelsPerVertBlock(), numPixelsPerVertBlock);
    NITFUtilities.setField("NBPP", subheader.getNumBitsPerPixel(), Integer.toString(nBits));
    NITFUtilities.setField("IDLVL", subheader.getImageDisplayLevel(), NITFUtilities.Consts.ONE);
    NITFUtilities.setField("IALVL", subheader.getImageAttachmentLevel(), NITFUtilities.Consts.ZERO);
    NITFUtilities.setField("ILOC", subheader.getImageLocation(), NITFUtilities.Consts.ZERO);
    NITFUtilities.setField("IMAG", subheader.getImageMagnification(), imageWrapper.getImageMagnification());
    return ratio;
}
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/** * Do the real write operation (writing images, texts, ...) * //from w w w . j a v a 2 s . co m * @param record * @param images * @param shp * @param fis * @param text * @return * @throws NITFException * @throws IOException */ private boolean writeNITF(final Record record, final List<ImageWrapper> images, final ShapeFileWrapper shp, final FileImageInputStreamExt fis, final List<TextWrapper> texts) throws NITFException, IOException { final int numImages = images.size(); ImageWrapper image = images.get(0); RenderedImage ri = image.getImage(); WriteCompression compression = image.getCompression(); int nBands = ri.getSampleModel().getNumBands(); boolean written = false; Writer writer = new Writer(); IOHandle handle = new IOHandle(outputFile.getCanonicalPath(), IOHandle.NITF_ACCESS_WRITEONLY, IOHandle.NITF_CREATE); byte[] shapeFileData = null; final boolean isJP2 = !(compression == WriteCompression.UNCOMPRESSED); if (shp != null) { shapeFileData = getShapeData(record, shp); } boolean prepared = false; if (isJP2) { // // // // get the JP2 Codestream previously written with Kakadu and transfer its content within // the NITF imageSegment // // // WriteHandler codeStream = null; IOInterface io; final int size = (int) fis.length(); io = new IOFileInputStream(fis); writer.prepare(record, handle); if (shapeFileData != null) { writeData(shapeFileData, writer); } codeStream = new StreamIOWriteHandler(io, 0, size); writer.setImageWriteHandler(0, codeStream); prepared = true; } if (!isJP2 || numImages > 1) { if (!prepared) { writer.prepare(record, handle); } if (numImages == 1) { // setup a Writer if (shapeFileData != null) { writeData(shapeFileData, writer); } ImageSource imageSource = new ImageSource(); nitf.ImageWriter imageWriter = writer.getNewImageWriter(0); boolean[] successes = new boolean[nBands]; final boolean isMono = images.get(0).getImage().getSampleModel().getNumBands() == 1; if (isMono) { DataBufferByte dbb = (DataBufferByte) ri.getData().getDataBuffer(); 
BandSource bs = new MemorySource(dbb.getData(), dbb.getSize(), 0, 0, 0); successes[0] = imageSource.addBand(bs); } else { for (int i = 0; i < nBands; i++) { RenderedImage band = BandSelectDescriptor.create(ri, new int[] { i }, null); DataBufferByte dbb = (DataBufferByte) band.getData().getDataBuffer(); BandSource bs = new MemorySource(dbb.getData(), dbb.getSize(), 0, 0, 0); successes[i] = imageSource.addBand(bs); } } imageWriter.attachSource(imageSource); } else { ImageWrapper img = images.get(1); ri = img.getImage(); nBands = ri.getSampleModel().getNumBands(); ImageSource imageSource = new ImageSource(); nitf.ImageWriter imageWriter2 = writer.getNewImageWriter(1); boolean[] successes = new boolean[nBands]; DataBufferByte dbb = (DataBufferByte) ri.getData().getDataBuffer(); BandSource bs = new MemorySource(dbb.getData(), dbb.getSize(), 0, 0, 0); successes[0] = imageSource.addBand(bs); imageWriter2.attachSource(imageSource); } } // Adding text if (texts != null && !texts.isEmpty()) { int i = 0; for (TextWrapper text : texts) { byte[] textContent = text.getTextContent(); if (textContent != null) { SegmentWriter textWriter = writer.getNewTextWriter(i++); SegmentSource source = SegmentSource.makeSegmentMemorySource(textContent, textContent.length, 0, 0); textWriter.attachSource(source); } } } written = writer.write(); if (handle != null) { handle.close(); } return written; }
From source file:it.geosolutions.imageio.plugins.nitronitf.NITFImageWriter.java
/** * Encode a RenderedImage as a JP2K codestream on the specified outputFile, using the proper set of compression parameters. * //w w w . j ava 2 s .c o m * @param outputFile * @param compression * @param ri * @throws FileNotFoundException * @throws IOException */ private void prepareJP2Image(final RenderedImage ri, final File outputFile, final WriteCompression compression) throws FileNotFoundException, IOException { JP2KKakaduImageWriter kakaduWriter = null; try { // TODO: Check PAN/MULTI can really be known from number of bands final int numBands = ri.getSampleModel().getNumBands(); final boolean isMulti = numBands == 1 ? false : true; kakaduWriter = new JP2KKakaduImageWriter(KAKADU_SPI); kakaduWriter.setOutput(outputFile); JP2KKakaduImageWriteParam param = NITFUtilities.getCompressionParam(kakaduWriter, compression, isMulti); kakaduWriter.write(null, new IIOImage(ri, null, null), param); } finally { if (kakaduWriter != null) { try { kakaduWriter.dispose(); } catch (Throwable t) { } } } }
From source file:it.geosolutions.geobatch.destination.vulnerability.VulnerabilityComputation.java
/** * Method used for merging the input Rasters into a 2 images, one for human targets and the other for not human targets * /*from ww w . j a v a2 s. c o m*/ * @param humanTargets * @param notHumanTargets * @param bandPerTargetH * @param bandPerTargetNH * @throws IOException * @throws java.awt.geom.NoninvertibleTransformException * @throws TransformException * @throws MismatchedDimensionException */ public RenderedImage[] rasterCalculation(Map<Integer, TargetInfo> bandPerTargetH, Map<Integer, TargetInfo> bandPerTargetNH) throws IOException, java.awt.geom.NoninvertibleTransformException, MismatchedDimensionException, TransformException { // Initialization of the images RenderedImage humanTargets = null; RenderedImage notHumanTargets = null; String basePath = System.getProperty(RASTER_PATH_PROP, ""); if (!basePath.equals("")) { basePath = basePath + File.separator + codicePartner; } // Read of the resources Map vulnerabilityConf = (Map) readResourceFromXML("/vulnerability.xml"); // Vulnerability engine used for extracting the Targets VulnerabilityStatsEngine vsengine = new VulnerabilityStatsEngine(basePath, vulnerabilityConf, dataStore, DISTANCE_TYPE_NAME, pixelArea); // Target Map Map<String, TargetInfo> targetInfo = vsengine.getTargetInfo(); /* * Creation of 2 images: one for the HUMAN TARGETS and the other for NOT HUMAN TARGETS */ // List of Human Targets List<RenderedImage> humanList = new ArrayList<RenderedImage>(); // List of Not Human Targets List<RenderedImage> notHumanList = new ArrayList<RenderedImage>(); // Counters indicating which band is associated to the TargetInfo and // Image int humanBandCounter = 0; int notHumanBandCounter = 0; // Iterator on all the targets Iterator<String> rasterIter = targetInfo.keySet().iterator(); // Initializations of the parameters for merging the input rasters Envelope2D globalBBOXHuman = null; Envelope2D globalBBOXNotHuman = null; List<AffineTransform> tfHuman = new ArrayList<AffineTransform>(); List<AffineTransform> 
tfNotHuman = new ArrayList<AffineTransform>(); AffineTransform g2WHuman = null; AffineTransform g2WNotHuman = null; // Cycle on all the rasters while (rasterIter.hasNext()) { // save the ID of this target String targetID = rasterIter.next(); // Load the target manager, init its status and check if the actual // distance is a valid distance for it TargetInfo info = targetInfo.get(targetID); // Getting of the transformation parameters GridGeometry2D gg2D = info.getGG2D(); Envelope2D envelope = gg2D.getEnvelope2D(); AffineTransform w2g = (AffineTransform) gg2D.getCRSToGrid2D(PixelOrientation.UPPER_LEFT); // getting information about current Target TargetManager manager = info.getManager(); // Image associated to the current target RenderedImage newImage = info.getRaster(); // Image data type int imgDataType = newImage.getSampleModel().getDataType(); // Check if the image really exists if (newImage != null) { // If the target is human if (manager.isHumanTarget()) { // Other check for ensuring the target is correct if (imgDataType != DataBuffer.TYPE_FLOAT) { System.out.println("Wrong data type"); } // perform union if (globalBBOXHuman == null) { globalBBOXHuman = new Envelope2D(envelope); } else { globalBBOXHuman.include(envelope); } // Selection of the first g2w transform as the global one if (g2WHuman == null) { g2WHuman = (AffineTransform) gg2D.getGridToCRS2D(PixelOrientation.UPPER_LEFT); } // Creation of the transformation from destination Raster space to source Raster space AffineTransform temp = new AffineTransform(w2g); temp.concatenate(g2WHuman); tfHuman.add(temp); // Addition of the TargetInfo of this target bandPerTargetH.put(humanBandCounter, info); // Update of the bandCounter humanBandCounter++; // Addition of the image to the associated list humanList.add(newImage); } else { // Other check for ensuring the target is correct if (imgDataType != DataBuffer.TYPE_BYTE) { System.out.println("Wrong data type"); } // perform union if (globalBBOXNotHuman == null) { 
globalBBOXNotHuman = envelope; } else { globalBBOXNotHuman.include(envelope); } // Selection of the first g2w transform as the global one if (g2WNotHuman == null) { g2WNotHuman = (AffineTransform) gg2D.getGridToCRS2D(PixelOrientation.UPPER_LEFT); } // Creation of the transformation from destination Raster space to source Raster space AffineTransform temp = new AffineTransform(w2g); temp.concatenate(g2WNotHuman); tfNotHuman.add(temp); // Addition of the TargetInfo of this target bandPerTargetNH.put(notHumanBandCounter, info); // Update of the bandCounter notHumanBandCounter++; // Addition of the image to the associated list notHumanList.add(newImage); } } } // computing final raster space for the two targets GridGeometry2D humanGG2D = new GridGeometry2D(PixelInCell.CELL_CORNER, new AffineTransform2D(g2WHuman), globalBBOXHuman, null); globalBBOXHuman = humanGG2D.getEnvelope2D(); // take into account integer pixel roundings GridGeometry2D noHumanGG2D = new GridGeometry2D(PixelInCell.CELL_CORNER, new AffineTransform2D(g2WNotHuman), globalBBOXNotHuman, null); globalBBOXNotHuman = noHumanGG2D.getEnvelope2D(); // take into account integer pixel roundings // BandMerge of the images RenderedImage[] imagesHuman = new RenderedImage[humanList.size()]; RenderedImage[] imagesNotHuman = new RenderedImage[notHumanList.size()]; // Setting of the final layout ImageLayout layoutH = new ImageLayout2(); GridEnvelope2D gridRange2D = humanGG2D.getGridRange2D(); layoutH.setMinX(gridRange2D.x); layoutH.setMinY(gridRange2D.y); layoutH.setWidth(gridRange2D.width); layoutH.setHeight(gridRange2D.height); // Definition of the TileCache RenderingHints hintsH = new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache()); // Setting of the layout as hint hintsH.put(JAI.KEY_IMAGE_LAYOUT, layoutH); // Merging of the input human targets humanTargets = BandMergeDescriptor.create(null, 0, hintsH, tfHuman, humanList.toArray(imagesHuman)); // Setting of the final layout ImageLayout 
layoutNH = new ImageLayout2(); gridRange2D = noHumanGG2D.getGridRange2D(); layoutNH.setMinX(gridRange2D.x); layoutNH.setMinY(gridRange2D.y); layoutNH.setWidth(gridRange2D.width); layoutNH.setHeight(gridRange2D.height); // Definition of the TileCache RenderingHints hintsNH = new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache()); hintsNH.put(JAI.KEY_IMAGE_LAYOUT, layoutNH); // Merging of the input not human targets notHumanTargets = BandMergeDescriptor.create(null, 0, hintsNH, tfNotHuman, notHumanList.toArray(imagesNotHuman)); // cache the final images humanTargets = NullDescriptor.create(humanTargets, new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache())); notHumanTargets = NullDescriptor.create(notHumanTargets, new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache())); // Clearing of the initial lists notHumanList.clear(); humanList.clear(); // create a new array of the new images return new RenderedImage[] { humanTargets, notHumanTargets }; }
From source file:org.apache.fop.render.pcl.PCLGenerator.java
/**
 * Paint a bitmap at the current cursor position. The bitmap must be a
 * monochrome (1-bit) bitmap image.
 * Three encoding paths are used, fastest first: a direct copy of the packed
 * 1-bit buffer, a per-scanline byte copy, and a per-pixel sample fallback.
 * @param img the bitmap image (must be 1-bit b/w)
 * @param resolution the resolution of the image (must be a PCL resolution)
 * @throws IOException In case of an I/O error
 * @throws IllegalArgumentException if the resolution is not a valid PCL
 *         resolution or the image is not monochrome
 */
public void paintMonochromeBitmap(RenderedImage img, int resolution) throws IOException {
    if (!isValidPCLResolution(resolution)) {
        throw new IllegalArgumentException("Invalid PCL resolution: " + resolution);
    }
    boolean monochrome = isMonochromeImage(img);
    if (!monochrome) {
        throw new IllegalArgumentException("img must be a monochrome image");
    }
    setRasterGraphicsResolution(resolution);
    // Start raster graphics: height, width, and begin-transfer command.
    writeCommand("*r0f" + img.getHeight() + "t" + img.getWidth() + "s1A");
    Raster raster = img.getData();
    Encoder encoder = new Encoder(img);
    // Transfer graphics data
    int imgw = img.getWidth();
    IndexColorModel cm = (IndexColorModel) img.getColorModel();
    if (cm.getTransferType() == DataBuffer.TYPE_BYTE) {
        DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
        MultiPixelPackedSampleModel packedSampleModel = new MultiPixelPackedSampleModel(DataBuffer.TYPE_BYTE,
                img.getWidth(), img.getHeight(), 1);
        // Fast path: the image already stores 8 pixels per byte in one bank,
        // so whole bytes can be streamed without per-pixel work.
        if (img.getSampleModel().equals(packedSampleModel) && dataBuffer.getNumBanks() == 1) {
            //Optimized packed encoding
            byte[] buf = dataBuffer.getData();
            int scanlineStride = packedSampleModel.getScanlineStride();
            int idx = 0;
            // Palette order decides bit polarity: invert when index 0 is the
            // lighter (white) entry so that set bits always mean black.
            int c0 = toGray(cm.getRGB(0));
            int c1 = toGray(cm.getRGB(1));
            boolean zeroIsWhite = c0 > c1;
            for (int y = 0, maxy = img.getHeight(); y < maxy; y++) {
                for (int x = 0, maxx = scanlineStride; x < maxx; x++) {
                    if (zeroIsWhite) {
                        encoder.add8Bits(buf[idx]);
                    } else {
                        encoder.add8Bits((byte) ~buf[idx]);
                    }
                    idx++;
                }
                encoder.endLine();
            }
        } else {
            //Optimized non-packed encoding: one byte per pixel, copied per line.
            for (int y = 0, maxy = img.getHeight(); y < maxy; y++) {
                byte[] line = (byte[]) raster.getDataElements(0, y, imgw, 1, null);
                for (int x = 0, maxx = imgw; x < maxx; x++) {
                    encoder.addBit(line[x] == 0);
                }
                encoder.endLine();
            }
        }
    } else {
        //Safe but slow fallback: per-pixel sample access.
        for (int y = 0, maxy = img.getHeight(); y < maxy; y++) {
            for (int x = 0, maxx = imgw; x < maxx; x++) {
                int sample = raster.getSample(x, y, 0);
                encoder.addBit(sample == 0);
            }
            encoder.endLine();
        }
    }
    // End raster graphics
    writeCommand("*rB");
}
From source file:org.apache.xmlgraphics.image.loader.impl.imageio.ImageLoaderImageIO.java
/**
 * {@inheritDoc}
 * Iterates over all ImageIO readers claiming to support the stream, keeping
 * the first successful decode; on an IIOException a CMYK fallback decode is
 * attempted before resetting the stream and trying the next reader. An ICC
 * profile from the metadata is applied when the provider is known to ignore
 * it, and a transparent color is extracted from standard metadata when present.
 */
public Image loadImage(ImageInfo info, Map hints, ImageSessionContext session)
        throws ImageException, IOException {
    RenderedImage imageData = null;
    // First decode failure is remembered and rethrown only if no reader succeeds.
    IIOException firstException = null;
    IIOMetadata iiometa = (IIOMetadata) info.getCustomObjects().get(ImageIOUtil.IMAGEIO_METADATA);
    boolean ignoreMetadata = (iiometa != null);
    boolean providerIgnoresICC = false;
    Source src = session.needSource(info.getOriginalURI());
    ImageInputStream imgStream = ImageUtil.needImageInputStream(src);
    try {
        Iterator iter = ImageIO.getImageReaders(imgStream);
        while (iter.hasNext()) {
            ImageReader reader = (ImageReader) iter.next();
            try {
                // Mark so the stream can be rewound for the next candidate reader.
                imgStream.mark();
                ImageReadParam param = reader.getDefaultReadParam();
                reader.setInput(imgStream, false, ignoreMetadata);
                final int pageIndex = ImageUtil.needPageIndexFromURI(info.getOriginalURI());
                try {
                    if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) {
                        imageData = reader.read(pageIndex, param);
                    } else {
                        imageData = reader.read(pageIndex, param);
                        //imageData = reader.readAsRenderedImage(pageIndex, param);
                        //TODO Reenable the above when proper listeners are implemented
                        //to react to late pixel population (so the stream can be closed
                        //properly).
                    }
                    if (iiometa == null) {
                        iiometa = reader.getImageMetadata(pageIndex);
                    }
                    providerIgnoresICC = checkProviderIgnoresICC(reader.getOriginatingProvider());
                    break; //Quit early, we have the image
                } catch (IndexOutOfBoundsException indexe) {
                    throw new ImageException("Page does not exist. Invalid image index: " + pageIndex);
                } catch (IllegalArgumentException iae) {
                    //Some codecs like com.sun.imageio.plugins.wbmp.WBMPImageReader throw
                    //IllegalArgumentExceptions when they have trouble parsing the image.
                    throw new ImageException("Error loading image using ImageIO codec", iae);
                } catch (IIOException iioe) {
                    if (firstException == null) {
                        firstException = iioe;
                    } else {
                        log.debug("non-first error loading image: " + iioe.getMessage());
                    }
                }
                try {
                    //Try fallback for CMYK images
                    BufferedImage bi = getFallbackBufferedImage(reader, pageIndex, param);
                    imageData = bi;
                    firstException = null; //Clear exception after successful fallback attempt
                    break;
                } catch (IIOException iioe) {
                    //ignore — fall through to retry with the next reader
                }
                imgStream.reset();
            } finally {
                reader.dispose();
            }
        }
    } finally {
        ImageUtil.closeQuietly(src);
        //TODO Some codecs may do late reading.
    }
    if (firstException != null) {
        throw new ImageException("Error while loading image: " + firstException.getMessage(), firstException);
    }
    if (imageData == null) {
        throw new ImageException("No ImageIO ImageReader found .");
    }
    ColorModel cm = imageData.getColorModel();
    Color transparentColor = null;
    if (cm instanceof IndexColorModel) {
        //transparent color will be extracted later from the image
    } else {
        if (providerIgnoresICC && cm instanceof ComponentColorModel) {
            // Apply ICC Profile to Image by creating a new image with a new
            // color model.
            ICC_Profile iccProf = tryToExctractICCProfile(iiometa);
            if (iccProf != null) {
                ColorModel cm2 = new ComponentColorModel(new ICC_ColorSpace(iccProf), cm.hasAlpha(),
                        cm.isAlphaPremultiplied(), cm.getTransparency(), cm.getTransferType());
                WritableRaster wr = Raster.createWritableRaster(imageData.getSampleModel(), null);
                imageData.copyData(wr);
                BufferedImage bi = new BufferedImage(cm2, wr, cm2.isAlphaPremultiplied(), null);
                imageData = bi;
                cm = cm2;
            }
        }
        // ImageIOUtil.dumpMetadataToSystemOut(iiometa);
        // Retrieve the transparent color from the metadata
        if (iiometa != null && iiometa.isStandardMetadataFormatSupported()) {
            Element metanode = (Element) iiometa.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName);
            Element dim = ImageIOUtil.getChild(metanode, "Transparency");
            if (dim != null) {
                Element child;
                child = ImageIOUtil.getChild(dim, "TransparentColor");
                if (child != null) {
                    String value = child.getAttribute("value");
                    if (value == null || value.length() == 0) {
                        //ignore — empty attribute means no transparent color declared
                    } else if (cm.getNumColorComponents() == 1) {
                        // Grayscale: a single value replicated over RGB.
                        int gray = Integer.parseInt(value);
                        transparentColor = new Color(gray, gray, gray);
                    } else {
                        // RGB: three whitespace-separated component values.
                        StringTokenizer st = new StringTokenizer(value);
                        transparentColor = new Color(Integer.parseInt(st.nextToken()),
                                Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));
                    }
                }
            }
        }
    }
    if (ImageFlavor.BUFFERED_IMAGE.equals(this.targetFlavor)) {
        return new ImageBuffered(info, (BufferedImage) imageData, transparentColor);
    } else {
        return new ImageRendered(info, imageData, transparentColor);
    }
}
From source file:org.freecine.filmscan.ScanStrip.java
/**
 * Try to find perforation corners using a (modified) Hough transform.
 * After the transform, matching pairs of top and bottom corners are found
 * and clustered into the pointClusters list.
 * The image is first gradient-filtered (3x3 Sobel-style kernels), then each
 * strong edge pixel votes for candidate corner centers over a range of radii.
 * To bound memory, the accumulators are sliding windows of accumHeight rows
 * that are scanned for local maxima and zeroed as the window advances.
 */
void houghTransform() {
    // Gradient kernels (the original comment called this "Siebel" — these are
    // the standard 3x3 Sobel x/y kernels).
    KernelJAI sxKernel = new KernelJAI(3, 3,
            new float[] { -1.0f, 0.0f, 1.0f, -2.0f, 0.0f, 2.0f, -1.0f, 0.0f, 1.0f });
    KernelJAI syKernel = new KernelJAI(3, 3,
            new float[] { -1.0f, -2.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 2.0f, 1.0f });
    // Convolve in double precision to keep gradient accuracy.
    RenderedImage dblImg = FormatDescriptor.create(stripImage, DataBuffer.TYPE_DOUBLE, null);
    RenderedImage sxImg = ConvolveDescriptor.create(dblImg, sxKernel, null);
    RenderedImage syImg = ConvolveDescriptor.create(dblImg, syKernel, null);
    SampleModel sm = sxImg.getSampleModel();
    int nbands = sm.getNumBands();
    double[] sxPixel = new double[nbands];
    double[] syPixel = new double[nbands];
    /* We are interested only in the left side of the strip as the
       perforations are there */
    Rectangle perfArea = new Rectangle(0, 0, stripImage.getWidth() / 4, stripImage.getHeight());
    RectIter sxIter = RectIterFactory.create(sxImg, perfArea);
    RectIter syIter = RectIterFactory.create(syImg, perfArea);
    int width = (int) perfArea.getWidth();
    int height = (int) perfArea.getHeight();
    /* We use 2 accumulators - one for detecting the upper right corner, one
       for the lower right corner. As the original is huge and the details we
       are looking for are tiny, we use a sliding window that stores only the
       relevant part of the accumulator. */
    int accumHeight = (int) maxCornerRadius + 2;
    int[][] startAccum = new int[(int) (maxCornerRadius - minCornerRadius)][width * accumHeight];
    int[][] endAccum = new int[(int) (maxCornerRadius - minCornerRadius)][width * accumHeight];
    List<Point> startCorners = new ArrayList<Point>();
    List<Point> endCorners = new ArrayList<Point>();
    int y = 0;
    int maxVal = 0;
    if (analysisListener != null) {
        analysisListener.scanAnalysisProgress(0, height);
    }
    while (!sxIter.nextLineDone() && !syIter.nextLineDone()) {
        if (y % 1000 == 0 && y > 0) {
            System.out.println("" + y + " lines analyzed");
        }
        sxIter.startPixels();
        syIter.startPixels();
        int x = 0;
        while (!sxIter.nextPixelDone() && !syIter.nextPixelDone()) {
            sxIter.getPixel(sxPixel);
            syIter.getPixel(syPixel);
            // Squared gradient magnitude; compare against the squared threshold
            // to avoid a sqrt on every pixel.
            double isq = sxPixel[0] * sxPixel[0] + syPixel[0] * syPixel[0];
            if (isq > EDGE_MIN_GRADIENT * EDGE_MIN_GRADIENT) {
                // This seems like a border
                if (syPixel[0] <= 0 && sxPixel[0] >= 0) {
                    // Upper right corner candidate: vote for possible centers
                    // along the inward gradient direction for each radius.
                    double intensity = Math.sqrt(isq);
                    for (double r = minCornerRadius; r < maxCornerRadius; r += 1.0) {
                        double cx = (double) x - r * sxPixel[0] / intensity;
                        double cy = (double) y - r * syPixel[0] / intensity;
                        if (cx > 0.0) {
                            // Row within the sliding accumulator window.
                            int accumLine = (int) cy % accumHeight;
                            startAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine]++;
                            if (startAccum[(int) (r - minCornerRadius)][(int) cx
                                    + width * accumLine] > maxVal) {
                                maxVal = startAccum[(int) (r - minCornerRadius)][(int) cx
                                        + width * accumLine];
                            }
                        }
                    }
                }
                if (syPixel[0] >= 0 && sxPixel[0] >= 0) {
                    // Lower right corner candidate
                    double intensity = Math.sqrt(isq);
                    for (double r = minCornerRadius; r < maxCornerRadius; r += 1.0) {
                        double cx = (double) x - r * sxPixel[0] / intensity;
                        double cy = (double) y - r * syPixel[0] / intensity;
                        if (cx > 0.0 && cy > 0.0) {
                            int accumLine = (int) cy % accumHeight;
                            endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine]++;
                            if (endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine] > maxVal) {
                                maxVal = endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine];
                            }
                        }
                    }
                }
            }
            x++;
        }
        y++;
        /* 1 line processed - check if there are corner candidates in the
           accumulator line we are going to overwrite */
        int y2 = y - accumHeight;
        int l = y % accumHeight;
        if (y2 > 0) {
            for (int n = 0; n < perfArea.getWidth(); n++) {
                for (int r = 0; r < (int) (maxCornerRadius - minCornerRadius); r++) {
                    if (startAccum[r][n + width * l] >= CORNER_MIN_HOUGH) {
                        // Is this a local maxima?
                        int val = startAccum[r][n + width * l];
                        if (val == getLocalMaxima(startAccum, r, n, y, width)) {
                            startCorners.add(new Point(n, y));
                            System.out.println(String.format("Found corner, quality = %d, r = %d, (%d, %d)",
                                    val, r, n, y));
                            // imageDataSingleArray[n+width*y] = (byte) 0xff;
                        }
                    }
                    if (endAccum[r][n + width * l] > CORNER_MIN_HOUGH) {
                        // Is this a local maxima?
                        int val = endAccum[r][n + width * l];
                        if (val == getLocalMaxima(endAccum, r, n, y2, width)) {
                            endCorners.add(new Point(n, y2));
                            System.out.println(String.format(
                                    "Found end corner, quality = %d, r = %d, (%d, %d)", val, r, n, y2));
                            // imageDataSingleArray[n+width*y2] = (byte) 0x80;
                        }
                    }
                }
            }
        }
        // Zero the line just analyzed - it will be reused for the next line
        for (int n = 0; n < perfArea.getWidth(); n++) {
            for (int r = 0; r < (int) (maxCornerRadius - minCornerRadius); r++) {
                startAccum[r][n + width * (y % accumHeight)] = 0;
                endAccum[r][n + width * (y % accumHeight)] = 0;
            }
        }
        if ((y % 100 == 1) && analysisListener != null) {
            analysisListener.scanAnalysisProgress(y - 1, height);
        }
    }
    if (analysisListener != null) {
        analysisListener.scanAnalysisProgress(height, height);
    }
    /* Find perforations, i.e. pairs of start and end corners that are within
       the specified range from each other */
    for (Point sp : startCorners) {
        for (Point ep : endCorners) {
            if (ep.y - sp.y > CC_MAX_DIST) {
                break;
            }
            if (Math.abs(ep.x - sp.x) < 10 && ep.y - sp.y > CC_MIN_DIST) {
                Perforation p = new Perforation();
                // Perforation center is the midpoint of the corner pair.
                p.x = (ep.x + sp.x) >> 1;
                p.y = (ep.y + sp.y) >> 1;
                // imageDataSingleArray[p.x+width*p.y] = (byte) 0x40;
                addPointToCluster(p.x, p.y);
            }
        }
    }
    System.out.println(String.format("%d clusters:", pointClusters.size()));
    for (PointCluster c : pointClusters) {
        System.out.println(String.format(" (%d, %d) %d points", c.getCentroidX(), c.getCentroidY(),
                c.getPointCount()));
        // imageDataSingleArray[c.getCentroidX()+width*c.getCentroidY()] = (byte) 0xff;
    }
}