Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usages of the org.opencv.core.Mat.rows() method.

Prototype

public int rows() 
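
rows() returns the number of rows in the matrix, i.e. the image height in pixels, while cols() returns the width. Below is a minimal sketch of querying it, assuming the OpenCV native library is available; the input file name is hypothetical.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatRowsDemo {
    public static void main(String[] args) {
        // load the native OpenCV library before using any Mat-based API
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat image = Imgcodecs.imread("input.png"); // hypothetical input file
        // rows() is the image height, cols() the image width
        System.out.println("Size: " + image.cols() + " x " + image.rows());
    }
}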

Usage

From source file:de.vion.eyetracking.cameracalib.calibration.opencv.CameraCalibrator.java

private void renderFrame(Mat rgbaFrame) {
    drawPoints(rgbaFrame);

    Core.putText(rgbaFrame, "Captured: " + this.mCornersBuffer.size(),
            new Point(rgbaFrame.cols() / 3 * 2, rgbaFrame.rows() * 0.1), Core.FONT_HERSHEY_SIMPLEX, 1.0,
            new Scalar(255, 255, 0));
}

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Centers the given image on a black 300x500 (width x height) CV_8UC3 container.
 * @param image the image to center
 * @return the container Mat with the image copied into its middle
 */
public static Mat bringImageToStdSize(Mat image) {
    //create the square container
    int dstWidth = 300;
    int dstHeight = 500;
    Mat dst = new Mat(dstHeight, dstWidth, CvType.CV_8UC3, new Scalar(0, 0, 0));
    //ProcessImages.displayImage(ProcessImages.Mat2BufferedImage(dst),"background");
    //Put the image into the container, roi is the new position
    Rect roi = new Rect((dstWidth - image.cols()) / 2, (dstHeight - image.rows()) / 2, image.cols(),
            image.rows());
    Mat targetROI = new Mat(dst, roi);
    image.copyTo(targetROI);
    //          ProcessImages.displayImage(ProcessImages.Mat2BufferedImage(dst),"Standardized");
    return dst;
}
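
A hypothetical call site for the helper above, assuming an input Mat of type CV_8UC3 that fits inside the 300x500 container; the file name is illustrative:

    Mat input = Imgcodecs.imread("left.png");                   // hypothetical path
    Mat padded = ProcessImages.bringImageToStdSize(input);
    System.out.println(padded.cols() + " x " + padded.rows());  // prints 300 x 500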

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Converts a Mat into a BufferedImage.
 * @param m the source Mat (single-channel gray or 3-channel BGR)
 * @return the resulting BufferedImage
 */
public static BufferedImage Mat2BufferedImage(Mat m) {
    // source: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
    // Fastest code
    // The output can be assigned either to a BufferedImage or to an Image

    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;

}
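
A hedged sketch of how such a conversion is typically consumed in Swing; the input file and window setup are illustrative:

    Mat frame = Imgcodecs.imread("frame.png");   // hypothetical input
    BufferedImage img = ProcessImages.Mat2BufferedImage(frame);
    JFrame window = new JFrame("Preview");
    window.add(new JLabel(new ImageIcon(img)));
    window.pack();
    window.setVisible(true);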

From source file:detectiontest.ImageDisplayer.java

public static BufferedImage toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:dfmDrone.examples.fitEllipseExample.java

public static Image toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1)
        type = BufferedImage.TYPE_3BYTE_BGR;

    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:digimesh.xbee.gui.MeshGui.java

private BufferedImage Mat2BufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:digitalassistant.Panel.java

/**
 * Converts/writes a Mat into a BufferedImage.
 *
 * @param matrix Mat of type CV_8UC3 or CV_8UC1
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 */
public BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb  
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }
    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);
    return image;
}
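
A hypothetical call illustrating the null return for unsupported channel counts; panel and frameBgra are assumed, pre-existing objects:

    BufferedImage out = panel.matToBufferedImage(frameBgra);
    if (out == null) {
        // the Mat had neither 1 nor 3 channels (e.g. CV_8UC4); convert it before calling
    }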

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using 
 * the high resolution image.
 * @param src- High Resolution image of the book
 * @return Rectangle delineating the book
 */
public Rect highRes(Mat src) {
    Mat dst = src.clone();
    Imgproc.blur(src, dst, new Size(100.0, 100.0), new Point(-1, -1), Core.BORDER_REPLICATE);
    Imgproc.threshold(dst, dst, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 8);
    }
    new IViewer("HighRes Contours ", BReaderTools.bufferedImageFromMat(color));

    Point center = new Point(src.cols() / 2, src.rows() / 2);
    //Check hierarchy tree
    int[] res = polySearch(center, hierarchy, contours, 0);
    while (res[0] != 1 && res[2] != -1) {
        res = polySearch(center, hierarchy, contours, res[2]);
        if (res[0] == 1)
            break;
    }

    MatOfInt tHull = new MatOfInt();
    int index = 0;
    if (res[1] != -1) {
        index = res[1];
    }
    Imgproc.convexHull(contours.get(index), tHull);

    //get bounding box
    MatOfPoint cont = contours.get(index);
    Point[] points = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        points[i] = new Point(cont.get(pIndex, 0));
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(points));
    return out;
}
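
A hedged sketch of applying the bounding box above, assuming an existing BaseSegmentation instance and a grayscale high-resolution scan; the file name is illustrative:

    Mat scan = Imgcodecs.imread("book_scan.png", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical
    Rect bookBox = segmentation.highRes(scan);   // segmentation: existing BaseSegmentation
    Mat book = new Mat(scan, bookBox);           // crop to the detected book region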

From source file:edu.soict.hust.k57.mmdb.components.HistogramImageBulder.java

private ImageIcon createImageIcon(Mat hist, int bin, Channel c) {
    int hist_w = 150; // width of the histogram image
    int hist_h = 100; // height of the histogram image
    int bin_w = (int) Math.round(hist_w * 1.0 / bin);

    Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC3, new Scalar(80, 60, 60));
    Mat normalizeHist = hist.clone();
    Core.normalize(normalizeHist, normalizeHist, 0, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());

    Scalar scalar = null;
    switch (c) {
    case B:
        scalar = new Scalar(255, 0, 0);
        break;
    case G:
        scalar = new Scalar(0, 255, 0);
        break;
    case R:
        scalar = new Scalar(0, 0, 255);
    }

    for (int i = 1; i < bin; i++) {
        Imgproc.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])), scalar, 1, 8, 0);
        Imgproc.line(histImage, new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i, 0)[0])), scalar, 1, 8, 0);
    }
    MatOfByte buffer = new MatOfByte();
    Imgcodecs.imencode(".png", histImage, buffer);
    return new ImageIcon(buffer.toArray());
}
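
A hedged sketch of producing the hist argument expected above for one channel; since createImageIcon is private, this would run inside HistogramImageBulder, and the image and bin count are illustrative:

    Mat bgr = Imgcodecs.imread("photo.png");     // hypothetical BGR input
    int bin = 32;
    Mat hist = new Mat();
    Imgproc.calcHist(Arrays.asList(bgr), new MatOfInt(0), new Mat(),
            hist, new MatOfInt(bin), new MatOfFloat(0, 256));  // channel 0 = blue
    ImageIcon icon = createImageIcon(hist, bin, Channel.B);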