Example usage for java.awt.image BufferedImage getData

List of usage examples for java.awt.image BufferedImage getData

Introduction

In this page you can find the example usage for java.awt.image BufferedImage getData.

Prototype

public Raster getData() 

Source Link

Document

Returns the image as one large tile.

Usage

From source file:org.opencastproject.videosegmenter.impl.jmf.ImageComparator.java

/**
 * Returns <code>true</code> if <code>image</code> differs from <code>previousImage</code>. In order to be treated a
 * different image, the <code>rgb</code> values of at least <code>changesThreshold</code> pixels must have changed.
 * <p>
 * Note that <code>image</code> might contain an altered version of the image, which will facilitate in the comparison
 * next time when <code>image</code> is the previous image.
 *
 * @param previousImage
 *          the previous image (may be <code>null</code> on the very first frame)
 * @param image
 *          the new image
 * @param timestamp
 *          the image timestamp
 *
 * @return <code>true</code> if the two images are different
 */
public boolean isDifferent(BufferedImage previousImage, BufferedImage image, long timestamp) {
    boolean differsFromCurrentScene = false;
    BufferedImage edgeImage = getEdgedImage(image);

    if (previousImage == null) {
        // No reference frame yet: every first frame starts a segment
        differsFromCurrentScene = true;
        logger.debug("First segment started");
    } else if (previousImage.getWidth() != image.getWidth() || previousImage.getHeight() != image.getHeight()) {
        // A resolution change always counts as a scene change
        differsFromCurrentScene = true;
        String currentResolution = previousImage.getWidth() + "x" + previousImage.getHeight();
        String newResolution = image.getWidth() + "x" + image.getHeight();
        logger.warn("Resolution change detected ({} -> {})", currentResolution, newResolution);
    } else {
        int changes = 0;
        // Multiply as long to avoid int overflow for very large images
        long pixels = (long) image.getWidth() * image.getHeight();
        long changesThresholdPixels = (long) (pixels * changesThreshold);

        imagecomparison: for (int x = 0; x < image.getWidth(); x++) {
            for (int y = 0; y < image.getHeight(); y++) {
                if (edgeImage.getRGB(x, y) != previousImage.getRGB(x, y)) {
                    changes++;
                    if (changes > changesThresholdPixels) {
                        differsFromCurrentScene = true;
                        // When collecting statistics, keep scanning so 'changes' stays exact
                        if (!collectStatistics)
                            break imagecomparison;
                    }
                }
            }
        }

        float percentage = ((float) changes) / ((float) pixels);
        if (differsFromCurrentScene)
            logger.debug("Differences found at {} s ({} change to previous image)", timestamp,
                    percentageNf.format(percentage));
        else
            logger.debug("Found {} changes at {} s to the previous frame", percentageNf.format(percentage),
                    timestamp);

        comparisons++;
        totalChanges += percentage;
    }

    // Write the images to disk for debugging and verification purposes
    if (tempDir != null) {
        try {
            FileUtils.forceMkdir(tempDir);
            ImageIO.write(image, "jpg", new File(tempDir, "image-" + timestamp + ".jpg"));
            ImageIO.write(edgeImage, "jpg", new File(tempDir, "image-" + timestamp + "-edged.jpg"));
        } catch (IOException e) {
            // Fixed: the original concatenated tempDir onto the "{}" pattern (so the
            // placeholder was never filled) and printed the stack trace to stderr.
            // Pass tempDir as a parameter and the exception as the logged cause.
            logger.warn("Error writing intermediary images to {}", tempDir, e);
        }
    }

    // Copy the edged image back into 'image' so the next comparison sees it
    image.getRaster().setRect(edgeImage.getData());

    return differsFromCurrentScene;
}

From source file:org.springframework.cloud.stream.app.object.detection.processor.ObjectDetectionTensorflowInputConverter.java

/**
 * Converts encoded image bytes into a rank-4 uint8 tensor of shape
 * [BATCH_SIZE, height, width, CHANNELS] with RGB channel order.
 *
 * @param imageBytes the encoded image (any format ImageIO can decode)
 * @return the image as a {@code Tensor<UInt8>}
 * @throws IOException if the bytes cannot be decoded
 * @throws IllegalArgumentException if the decoded image is not 3-byte BGR
 */
private static Tensor<UInt8> makeImageTensor(byte[] imageBytes) throws IOException {
    ByteArrayInputStream is = new ByteArrayInputStream(imageBytes);
    BufferedImage img = ImageIO.read(is);

    if (img.getType() != BufferedImage.TYPE_3BYTE_BGR) {
        throw new IllegalArgumentException(
                String.format("Expected 3-byte BGR encoding in BufferedImage, found %d", img.getType()));
    }
    // Use getRaster() instead of getData(): getData() copies the whole raster just to
    // read its backing array. getRaster() exposes the image's own byte buffer; the
    // image is local to this method, so mutating that buffer in place is safe.
    byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    // ImageIO.read produces BGR-encoded images, while the model expects RGB.
    bgrToRgb(data);

    // Expand dimensions since the model expects images to have shape: [1, None, None, 3]
    long[] shape = new long[] { BATCH_SIZE, img.getHeight(), img.getWidth(), CHANNELS };

    return Tensor.create(UInt8.class, shape, ByteBuffer.wrap(data));
}

From source file:org.squidy.designer.util.ImageUtils.java

/**
 * Computes a {@link Shape} covering every visible pixel of the given image: the union of
 * 1x1 rectangles at each pixel whose alpha sample is greater than zero. Images without an
 * alpha band are treated as fully opaque.
 *
 * @param image the image to trace
 * @return the shape of the image's visible pixels, or <code>null</code> if the image is
 *         completely transparent
 */
public static Shape getShapeOfImage(BufferedImage image) {
    // Work on a snapshot of the pixel data
    Raster data = image.getData();
    // Fixed: the previous version read band 3 unconditionally, which threw
    // ArrayIndexOutOfBoundsException for images without an alpha band
    // (e.g. TYPE_INT_RGB). Such images are treated as fully opaque instead.
    boolean hasAlpha = data.getNumBands() > 3;
    // Per-pixel sample buffer, reused across getPixel calls
    int[] lookAt = null;
    // Marks each visible pixel with its coordinates (null = transparent)
    Point2D[][] pointMap = new Point2D[data.getWidth()][data.getHeight()];
    // The first visible pixel found; becomes the path's starting point
    Point2D from = null;
    GeneralPath path = new GeneralPath();

    // First pass: record which pixels are visible
    for (int y = 0; y < data.getHeight(); y++) {
        for (int x = 0; x < data.getWidth(); x++) {
            lookAt = data.getPixel(x, y, lookAt);
            int a = hasAlpha ? lookAt[3] : 255;
            if (a > 0) {
                pointMap[x][y] = new Point2D.Double(x, y);
                if (from == null) {
                    from = pointMap[x][y];
                }
            } else {
                pointMap[x][y] = null;
            }
        }
    }

    // Second pass: build the path from the recorded pixels
    if (from != null) {
        path.moveTo(from.getX(), from.getY());
        for (int y = 0; y < data.getHeight(); y++) {
            for (int x = 0; x < data.getWidth(); x++) {
                if (pointMap[x][y] != null) {
                    // One unit rectangle per visible pixel
                    path.append(new Rectangle2D.Double(pointMap[x][y].getX(), pointMap[x][y].getY(), 1, 1),
                            true);
                }
            }
        }
        path.closePath();
        // TODO: Put in the middle
        return path;
    }
    // No visible pixel at all
    return null;
}

From source file:pl.edu.icm.visnow.lib.basic.viewers.Viewer2D.Display2DPanel.java

/**
 * Renders this panel into an offscreen RGB image and writes it to the given file in the
 * requested format (PNG, YUV movie frame, or JPEG by default).
 *
 * @param fileName the destination file path
 * @param format one of FORMAT_PNG, FORMAT_YUV, or any other value for JPEG
 */
public void writeImage(String fileName, int format) {
    BufferedImage img = null;
    try {
        img = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_RGB);
        // Suppress write side effects while painting offscreen; restore in finally so
        // an exception in paintComponent cannot leave the flag stuck on true.
        dontWrite = true;
        try {
            paintComponent(img.getGraphics());
        } finally {
            dontWrite = false;
        }
        File file = new File(fileName);
        switch (format) {
        case FORMAT_PNG:
            ImageUtilities.writePng(img, file);
            break;
        case FORMAT_YUV:
            // Fixed: removed a dead getPixels() call that copied the entire pixel
            // array into a local that was never used; the saver reads 'img' directly.
            if (yuvSaver == null || yuvSaver.getHeight() != img.getHeight()
                    || yuvSaver.getWidth() != img.getWidth()) {
                yuvSaver = new YUVSaver(img.getWidth(), img.getHeight(), fileName);
            }
            yuvSaver.saveEncoded(img, controlsFrame.getMovieCreationPanel().getCurrentFrameNumber());
            break;
        default:
            ImageIO.write(img, ImageFormat.JPEG_FORMAT.getExtension(), file);
            break;
        }

    } catch (FileNotFoundException ex) {
        // Intentionally ignored: error display was disabled here (see project history);
        // the write silently fails when the target file cannot be created.
    } catch (IOException ex) {
        // Intentionally ignored: same best-effort policy as above.
    }
}

From source file:pl.edu.icm.visnow.lib.utils.ImageUtilities.java

/**
 * Combines a color image and an alpha image into a single ARGB image: the RGB bands are
 * taken from <code>src</code> and the alpha band from the first band of <code>alpha</code>.
 *
 * @param src the image supplying the color channels
 * @param alpha the image supplying the alpha channel (its band 0 is used)
 * @return a new TYPE_INT_ARGB image of the same size as <code>src</code>
 */
public static BufferedImage addAlpha(BufferedImage src, BufferedImage alpha) {
    int width = src.getWidth();
    int height = src.getHeight();

    BufferedImage result = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);

    WritableRaster target = result.getWritableTile(0, 0);
    // View onto bands 0-2 (red, green, blue) of the target raster
    WritableRaster colorBands = target.createWritableChild(0, 0, width, height, 0, 0, new int[] { 0, 1, 2 });
    // View onto band 3 (alpha) of the target raster
    WritableRaster alphaBand = target.createWritableChild(0, 0, width, height, 0, 0, new int[] { 3 });
    colorBands.setRect(src.getData());
    alphaBand.setRect(alpha.getData());

    result.releaseWritableTile(0, 0);

    return result;
}

From source file:software.uncharted.image.ImageProcessing.java

/**
 * histogramByteHash -- compute a color histogram of an image
 *                      returned as a byte[]
 * @param img/* w  w w  .j ava 2 s .  c o m*/
 * @return byte[] histogram
 */
public static byte[] histogramByteHash(BufferedImage img) {

    Raster raster = img.getData();
    int h = raster.getHeight();
    int w = raster.getWidth();
    int components = img.getColorModel().getNumComponents();
    int pixels = w * h;
    int[] colors = new int[pixels * components];
    raster.getPixels(0, 0, w, h, colors);
    int[] counts = new int[DISTINCT_COLORS];
    int grayScaleCount = 0;
    for (int i = 0; i < DISTINCT_COLORS; i++)
        counts[i] = 0;

    int cIndx = 0; // 'colours' array index
    for (int i = 0; i < pixels; i++) {
        int r = colors[cIndx] / COLOR_DIVISOR; // quantizes down to 'COLOR_DEPTH' range
        int g = (colors[cIndx + 1]) / COLOR_DIVISOR;
        int b = (colors[cIndx + 2]) / COLOR_DIVISOR;
        int truncColor = (r * COLOR_DEPTH + g) * COLOR_DEPTH + b; // converts 3D histogram values to 1D concatenated histogram
        counts[truncColor]++;
        if (r == g && r == b)
            grayScaleCount++;
        cIndx += components;
    }
    byte[] result = new byte[DISTINCT_COLORS];

    if (grayScaleCount > pixels * 0.95) {
        //---- grayscale image detected!
        // set black and white hist bins == max value
        // and set all other bins == hist values along one of the colour axes
        // (since r-axis vals = g-axis = b-axis for grayscale)
        counts[0] = pixels;
        counts[DISTINCT_COLORS - 1] = pixels;
        for (int i = 1; i < DISTINCT_COLORS - 1; i++) {
            counts[i] = 0;
        }
        for (int i = 0; i < pixels; i++) {
            int idx = colors[i * components] * (DISTINCT_COLORS - 2) / 256 + 1;
            counts[idx]++;
        }
    }

    //---- normalize final histogram
    for (int i = 0; i < DISTINCT_COLORS; i++) {
        //int count = (int)Math.ceil((counts[i]*RATIO_MULTIPLIER)/pixels);
        int count = (int) Math.round((counts[i] * RATIO_MULTIPLIER) / ((double) pixels * HIST_NORM_FACTOR));
        result[i] = (byte) (Math.min(count, RATIO_MULTIPLIER) & 0xFF); // Min here to handle potential saturation of hist values
    }

    return result;
}

From source file:tooltip.ImageComparison.java

/**
 * Opens the activation page in Firefox, downloads the image referenced by the page, and
 * compares it pixel-by-pixel against a local reference image, printing the result.
 *
 * @throws Exception if the page, reference file, or image URL cannot be read
 */
public static void setUp() throws Exception {
    boolean ret = true;
    System.out.println("Inside Setup C:\\Comcast Project Docs\\Automation\\CAAP AUTOMATION\\Selenium");

    WebDriver driver = new FirefoxDriver();
    try {
        driver.manage().window().maximize();
        driver.get("https://activator-web-qaauto.g1.app.cloud.comcast.net/Activate/comFlow");
        File url = new File("C:/Users/ajavva001c/HSD/unpacked.png");
        // Fixed: the stream was never closed; try-with-resources releases it
        BufferedImage bufImgOne;
        try (FileInputStream fi = new FileInputStream(url)) {
            bufImgOne = ImageIO.read(fi);
        }
        String s1 = driver.findElement(By.xpath("//*[@id='responsive']/div/div/div[2]/div/ul/li[2]/img"))
                .getAttribute("src");
        URL urls = new URL(s1);
        System.out.println(urls);
        BufferedImage bufImgOne1 = ImageIO.read(urls);
        Raster image = bufImgOne.getData();
        Raster image1 = bufImgOne1.getData();
        // Fixed: the images differ when ANY of band count, width, or height differ.
        // The original used &&, which only reported a mismatch when all three differed.
        if (image.getNumBands() != image1.getNumBands() || image.getWidth() != image1.getWidth()
                || image.getHeight() != image1.getHeight()) {
            ret = false;
            System.out.println("fail");
        } else {
            // Compare every sample; stop at the first difference
            search: for (int i = 0; i < image.getNumBands(); ++i) {
                for (int x = 0; x < image.getWidth(); ++x) {
                    for (int y = 0; y < image.getHeight(); ++y) {
                        if (image.getSample(x, y, i) != image1.getSample(x, y, i)) {
                            ret = false;
                            break search;
                        }
                    }
                }
            }
            System.out.println(ret);
        }
    } finally {
        // Always release the browser, even when the comparison throws
        driver.quit();
    }
}

From source file:webservices.RetrieveImg.java

/**
 * REST endpoint that loads the PNG stored for the user id in the posted JSON, encodes its
 * raw pixel bytes, and returns the encoded string; returns "not eshta" on any failure.
 *
 * @param user a JSON string containing an integer "userId" field
 * @return the encoded image, or "not eshta" on error
 */
@POST
@Path("/hat")
public String retreiveImgg(@FormParam(value = "user") String user) {
    try {
        JSONObject o;
        BufferedImage img;
        byte[] imageBytes = null;
        try {
            o = new JSONObject(user);
            String path = "D://" + o.getInt("userId") + ".png";
            URL url = new File(path).toURI().toURL();

            img = ImageIO.read(url);

            // NOTE(review): this assumes the decoded image is backed by a
            // DataBufferByte; ImageIO may return an int-backed image (e.g.
            // TYPE_INT_ARGB), in which case this cast throws ClassCastException.
            // Confirm the type of the stored PNGs.
            imageBytes = ((DataBufferByte) img.getData().getDataBuffer()).getData();

        } catch (JSONException ex) {
            Logger.getLogger(RetrieveImg.class.getName()).log(Level.SEVERE, null, ex);
        }
        // Fixed: when the JSON could not be parsed, imageBytes stayed null and flowed
        // into encodeImage(null); bail out with the error response instead.
        if (imageBytes == null) {
            return "not eshta";
        }
        // Fixed: the original printed the array reference; log something meaningful
        System.out.println("image bytes: " + imageBytes.length);

        return encodeImage(imageBytes);
    } catch (IOException ex) {
        Logger.getLogger(SaveImg.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "not eshta";
}