Example usage for org.opencv.core Mat width

List of usage examples for org.opencv.core Mat width

Introduction

On this page you can find example usages of the org.opencv.core.Mat width() method.

Prototype

public int width() 
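
Below is a minimal, self-contained sketch (not taken from any of the source files listed on this page) showing what width() reports; the dimensions are arbitrary and the native library is whatever your OpenCV build provides.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatWidthExample {
    public static void main(String[] args) {
        // load the native OpenCV library before using any Mat functionality
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Mat is constructed as (rows, cols, type): 480 rows and 640 columns here
        Mat m = Mat.zeros(480, 640, CvType.CV_8UC3);

        System.out.println(m.width());                // 640 (number of columns)
        System.out.println(m.height());               // 480 (number of rows)
        System.out.println(m.width() == m.cols());    // true
        System.out.println(m.height() == m.rows());   // true
    }
}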

Usage

From source file:com.trandi.opentld.TLDView.java

License:Apache License

@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // useful to see what we're actually working with...
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                Log.i(Util.TAG, "Working Ration: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again!
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);

                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));

                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image (needs converting at the same time)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(Util.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Imgproc.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}

From source file:com.wallerlab.compcellscope.calcDPCTask.java

License:BSD License

protected Long doInBackground(Mat... matrix_list) {
    //int count = urls.length;
    Mat in1 = matrix_list[0];
    Mat in2 = matrix_list[1];
    Mat outputMat = matrix_list[2];

    // Note: the Mat constructor is Mat(rows, cols, type), so rows and cols are swapped here
    // for non-square inputs; the copyTo() calls below reallocate Mat1 and Mat2 to match the
    // source size, so the swap only affects the initial (discarded) allocation.
    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Normalize to 0-2.0
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Normalize to 0-255
    dpcImgF.convertTo(output, CvType.CV_8UC1); // Convert back to 8-bit; expanded to RGBA below
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(outputMat, maskedImg);
    output.release();
    maskedImg.release();
    return null;
}

From source file:com.wallerlab.compcellscope.MultiModeViewActivity.java

License:BSD License

public Mat calcDPC(Mat in1, Mat in2, Mat out) {
    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Normalize to 0-2.0
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Normalize to 0-255
    dpcImgF.convertTo(output, CvType.CV_8UC1); // Convert back to 8-bit; expanded to RGBA below
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(out, maskedImg);
    output.release();
    maskedImg.release();
    return out;
}

From source file:cv.FaceDetector.java

License:Open Source License

private BufferedImage matToBufferedImage(Mat matImage) {
    BufferedImage image = new BufferedImage(matImage.width(), matImage.height(), BufferedImage.TYPE_3BYTE_BGR);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    byte[] data = dataBuffer.getData();
    matImage.get(0, 0, data);
    return image;
}

From source file:cv.recon.util.MatFXUtils.java

License:Open Source License

/**
 * Convert from OpenCV Mat to JavaFX WritableImage to be displayed in
 * ImageView.
 * @param mat Mat to be converted
 * @param writableImage Optional WritableImage; if non-null, the Mat will be
 * written into this WritableImage
 * @return A new WritableImage for use in JavaFX, or null if a WritableImage
 * was already supplied (the result is then written into it)
 */
public static WritableImage toFXImage(Mat mat, WritableImage writableImage) {
    int width = mat.width();
    int height = mat.height();
    int channels = mat.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    mat.get(0, 0, sourcePixels);

    BufferedImage bufferedImage;
    if (mat.channels() > 1) {
        bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    } else {
        bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
    }

    final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    if (writableImage == null) {
        WritableImage outputImage = SwingFXUtils.toFXImage(bufferedImage, null);
        return outputImage;
    } else {
        SwingFXUtils.toFXImage(bufferedImage, writableImage);
        return null;
    }
}
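
A short usage sketch for the helper above, illustrating both calling conventions described in the javadoc; the Mat named frame and the JavaFX ImageView named imageView are hypothetical and stand in for whatever your application provides.

// 1) Let the helper allocate and return a new WritableImage:
WritableImage fxImage = MatFXUtils.toFXImage(frame, null);
imageView.setImage(fxImage);

// 2) Reuse an existing WritableImage of matching size; the helper then returns null
//    and writes the converted pixels into the supplied image instead:
WritableImage reusable = new WritableImage(frame.width(), frame.height());
MatFXUtils.toFXImage(frame, reusable);
imageView.setImage(reusable);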

From source file:cx.uni.jk.mms.iaip.mat.MatModel.java

License:Open Source License

/**
 * Loads an image from a file into this model.
 *
 * The image file type must be supported by ImageIO and must be 8-bit
 * grayscale due to limitations of the methods used. The image must be of even
 * width and even height in order to be processed by OpenCV's DCT/IDCT
 * methods.
 * 
 * This implementation uses {@link Path} instead of {@link File} in order to
 * read the jar from the inside.
 * 
 * @param path
 * @throws IllegalSizeException
 * @throws IOException
 * @throws UnsupportedImageTypeException
 */
public void loadImage(Path path) throws IllegalSizeException, IOException, UnsupportedImageTypeException {
    this.logger
            .fine(String.format("MatModel \"%s\" loading image from path %s", this.getName(), path.toString()));

    Mat matRead = null;

    matRead = this.loadAndDecodeImageWithJavaImageIO(path);
    // matRead = loadImageWithJavaImageIOAndDecodeWithOpenCV(path);
    // matRead = loadImageWithOpenCV(path);

    this.logger.finer("image type = " + matRead.type());
    this.logger.finer("image channels = " + matRead.channels());
    this.logger.finer("image depth = " + matRead.depth());

    /** images must have size larger than 0x0 */
    if (matRead.width() <= 0 || matRead.height() <= 0) {
        throw new IllegalSizeException("Image must have width and height > 0.");
    }

    /** DCT/IDCT require even width and even height, so reject odd dimensions */
    if (matRead.width() % 2 == 1 || matRead.height() % 2 == 1) {
        throw new IllegalSizeException("Image must have even width and even height to perform DCT/IDCT.");
    }

    /** we need a float mat to do DCT/IDCT */
    this.mat = matRead; // just a reference
    this.logger.finer("convert to internal format");
    this.mat.convertTo(this.mat, MAT_TYPE);
    this.logger.finer("image type = " + this.mat.type());
    this.logger.finer("image channels = " + this.mat.channels());
    this.logger.finer("image depth = " + this.mat.depth());

    /** remember last file loaded successfully */
    this.lastPath = path;
}
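
A brief usage sketch for loadImage; the MatModel constructor and the image path are assumptions for illustration, and the file must satisfy the 8-bit grayscale, even-dimension constraints described in the javadoc above.

MatModel model = new MatModel();   // constructor shown here is an assumption
try {
    model.loadImage(Paths.get("images/sample_gray_512x512.png"));   // hypothetical path
} catch (IllegalSizeException e) {
    // empty image or odd width/height: DCT/IDCT cannot be applied
    System.err.println("Unsuitable image size: " + e.getMessage());
} catch (UnsupportedImageTypeException | IOException e) {
    System.err.println("Could not load image: " + e.getMessage());
}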

From source file:de.hu_berlin.informatik.spws2014.mapever.entzerrung.CornerDetector.java

License:Open Source License

/**
 * Guesses the most likely corners of a distorted map within an image.
 * Expects OpenCV to be initialized.
 * The results are already pretty good but could probably be improved
 * by tweaking the parameters or adding some additional line filtering
 * criteria (like requiring them to be roughly parallel, for instance...)
 * 
 * @param gray_img A grayscale image in OpenCV's Mat format.
 * @return An array of probable corner points in the following form: {x0,y0,x1,y1,x2,y2,x3,y3} or null on error.
 **/
public static Point[] guess_corners(Mat gray_img) {
    Mat lines = new Mat();
    Imgproc.Canny(gray_img, gray_img, THRESHOLD0, THRESHOLD1, APERTURE_SIZE, false);
    Imgproc.HoughLinesP(gray_img, lines, RHO, THETA, HOUGH_THRESHOLD,
            Math.min(gray_img.cols(), gray_img.rows()) / MIN_LINE_LENGTH_FRACTION, MAX_LINE_GAP);

    double[][] edge_lines = filter_lines(lines, gray_img.size());

    Point[] ret_val = new Point[4];
    ret_val[0] = find_intercept_point(edge_lines[0], edge_lines[2]);
    ret_val[1] = find_intercept_point(edge_lines[0], edge_lines[3]);
    ret_val[2] = find_intercept_point(edge_lines[1], edge_lines[3]);
    ret_val[3] = find_intercept_point(edge_lines[1], edge_lines[2]);

    // do sanity checks and return null on invalid coordinates
    for (int i = 0; i < 4; i++) {
        // check if coordinates are outside image boundaries
        if (ret_val[i].x < 0 || ret_val[i].y < 0 || ret_val[i].x > gray_img.width()
                || ret_val[i].y > gray_img.height()) {
            return null;
        }

        // check if point equal to other point
        for (int j = i + 1; j < 4; j++) {
            if (ret_val[j].x == ret_val[i].x && ret_val[j].y == ret_val[i].y) {
                return null;
            }
        }
    }

    return ret_val;
}
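
A usage sketch for guess_corners; the input file name is hypothetical, and the OpenCV 2.4-style Highgui.imread call matches the API used elsewhere on this page. Note that the method runs Canny on its argument in place, so pass a copy if the grayscale image is still needed.

Mat color = Highgui.imread("map_photo.jpg");   // hypothetical input image
Mat gray = new Mat();
Imgproc.cvtColor(color, gray, Imgproc.COLOR_BGR2GRAY);

Point[] corners = CornerDetector.guess_corners(gray.clone());   // clone: the input Mat is modified
if (corners == null) {
    System.out.println("No plausible set of corners found");
} else {
    for (Point p : corners) {
        System.out.println("corner at (" + p.x + ", " + p.y + ")");
    }
}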

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Trims image by trimSize
 * @param image
 * @param trimSize
 * @return
 */
public static Mat cropImageHorizontal(Mat image, int trimSize) {

    //         System.out.println("Initial image width "+image.width());
    //         System.out.println("Initial image height "+image.height());

    Rect roi = new Rect(2 * trimSize, 0, image.width() - 4 * trimSize, image.height());

    Mat result = image.submat(roi);

    //         System.out.println("Trimmed image width "+ result.width());
    //         System.out.println("Trimmed image height "+result.height());
    //         displayImage(ProcessImages.Mat2BufferedImage(result),"Cropped  Image");
    return result;

}
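
A one-line usage sketch (the trim size of 20 is arbitrary): since the ROI starts at 2 * trimSize and keeps width - 4 * trimSize columns, 2 * trimSize columns are dropped from each side, so the input must be wider than 4 * trimSize.

Mat trimmed = ProcessImages.cropImageHorizontal(image, 20);   // removes 40 columns from each side of "image"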

From source file:digitalassistant.Panel.java

public void run() {
    try {
        while (true) {

            if (curr_image.equalsIgnoreCase("nitial_image")) {
                ImageIcon icon = new ImageIcon(ImageIO.read(new File(
                        "C:\\Users\\sandeep\\Documents\\NetBeansProjects\\DigitalAssistant\\src\\digitalassistant\\initial_image.jpg")));
                image_label.setIcon(icon);

            } else {
                System.out.println("Hello, OpenCV");

                // Load the native library.
                //System.loadLibrary("opencv_java244");
                //System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
                VideoCapture camera = new VideoCapture(0);
                System.out.println("inage width" + image_label.getWidth());

                camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, image_label.getWidth());
                camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, image_label.getHeight());
                Thread.sleep(1000);
                camera.open(0); //Useless
                if (!camera.isOpened()) {
                    System.out.println("Camera Error");
                } else {
                    System.out.println("Camera OK?");
                }

                Mat frame = new Mat();

                // camera.grab();
                //System.out.println("Frame Grabbed");
                // camera.retrieve(frame);
                //System.out.println("Frame Decoded");
                System.out.println("Frame Obtained");

                /* No difference
                 camera.release();
                 */
                System.out.println("Captured Frame Width " + frame.width());
                // JFrame frame1 = new JFrame("BasicPanel");
                // frame1.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                // DetectFace f = new DetectFace();

                int count = 10;
                while (true) {
                    camera.read(frame);
                    // Core.putText(frame,count+"", new Point(frame.width()/4,frame.height()/4), 3, 2,new Scalar(0, 255, 0),3);

                    //f.face_detect(frame);
                    BufferedImage image = matToBufferedImage(frame);

                    ImageIcon icon = new ImageIcon(image);

                    icon.getImage().flush();

                    image_label.setIcon(icon);
                    // Thread.sleep(500);
                    //count--;

                }
                // camera.release();
                // curr_image = "initial_image";

            }

            //  ImageIcon icon =new ImageIcon(ImageIO.read( new File("C:\\Users\\sandeep\\Documents\\NetBeansProjects\\DigitalAssistant\\src\\digitalassistant\\initial_image.jpg")) );
            // image_label.setIcon(icon);

            // camera.read(frame);
            // Highgui.imwrite("camera.jpg", frame);
            // frame1.setVisible(false);
            // System.out.println("OK");
        }
    } catch (Exception e) {
        System.out.println(e);
    }
}

From source file:digitalassistant.Panel.java

public void capture() {
    try {
        System.out.println("Hello, OpenCV");
        // Load the native library.
        //System.loadLibrary("opencv_java244");
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        VideoCapture camera = new VideoCapture(0);
        Thread.sleep(1000);
        camera.open(0); //Useless
        if (!camera.isOpened()) {
            System.out.println("Camera Error");
        } else {
            System.out.println("Camera OK?");
        }

        Mat frame = new Mat();

        // camera.grab();
        //System.out.println("Frame Grabbed");
        // camera.retrieve(frame);
        //System.out.println("Frame Decoded");
        System.out.println("Frame Obtained");

        /* No difference
         camera.release();
         */
        System.out.println("Captured Frame Width " + frame.width());
        JFrame frame1 = new JFrame("BasicPanel");
        frame1.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // DetectFace f = new DetectFace();
        int count = 15;

        while (count > 0) {
            camera.read(frame);
            //f.face_detect(frame);
            frame1.setSize(frame.width(), frame.height());
            //  Core.putText(frame,count+"", new Point(frame.width()/4,frame.height()/4), 3, 2,new Scalar(0, 255, 0),3);
            // Core.rectangle(frame, new Point(frame.width()/4,frame.height()/4), new Point(frame.width()/4+300,frame.height()/4 +300), new Scalar(0, 255, 0));
            Panel panel1 = new Panel(frame);
            frame1.setContentPane(panel1);
            frame1.setVisible(true);
            Thread.sleep(100);
            count--;
        }

        // camera.read(frame);
        // Highgui.imwrite("camera.jpg", frame);
        // frame1.setVisible(false);
        // System.out.println("OK");
    } catch (Exception e) {
        System.out.println(e);
    }
}