Example usage for org.opencv.core Mat empty

List of usage examples for org.opencv.core Mat empty

Introduction

On this page you can find example usages of the org.opencv.core Mat.empty() method.

Prototype

public boolean empty() 

Source Link

Usage

From source file:mx.iteso.desi.vision.ImagesMatUtils.java

License:Apache License

/**
 * Converts an OpenCV {@link Mat} into a Swing {@link JLabel} that displays it.
 *
 * @param mat the source matrix; may be empty
 * @return a {@code JLabel} wrapping the decoded image, or {@code null} when the
 *         matrix is empty, the stream cannot be decoded, or conversion fails
 */
public static JLabel MatToJLabel(Mat mat) {

    if (mat.empty()) {
        return null;
    }

    // try-with-resources: the stream was previously never closed (leak)
    try (InputStream in = MatToInputStream(mat)) {
        BufferedImage bufImage = ImageIO.read(in);
        if (bufImage == null) {
            // ImageIO.read returns null when no registered reader can decode
            // the stream; previously this produced a broken ImageIcon
            return null;
        }
        return new JLabel(new ImageIcon(bufImage));
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}

From source file:opencvdemos.BallGame.java

License:Apache License

/**
 * Grabs a frame from the capture device, thresholds it in HSV space using the
 * UI slider values, cleans the mask with morphological operators, draws a
 * crosshair on each detected object, bounces the ball off detected objects,
 * and returns the annotated frame.
 *
 * @return the processed frame as an AWT {@link Image}, or {@code null} when
 *         the capture is closed or no frame could be read
 */
private Image grabFrame() {
    // Init everything
    Image imageToShow = null;
    Mat frame = new Mat();

    // Check if the capture is open
    if (this.capture.isOpened()) {
        try {
            // Read the current frame
            this.capture.read(frame);
            // Flip image for easy object manipulation
            Core.flip(frame, frame, 1);

            // If the frame is not empty, process it
            if (!frame.empty()) {
                // Init
                Mat blurredImage = new Mat();
                Mat hsvImage = new Mat();
                Mat mask = new Mat();
                Mat morphOutput = new Mat();

                // Remove some noise
                Imgproc.blur(frame, blurredImage, new Size(7, 7));

                // Convert the frame to HSV
                Imgproc.cvtColor(blurredImage, hsvImage, Imgproc.COLOR_BGR2HSV);

                // Get thresholding values from the UI
                // Remember: H ranges 0-180, S and V range 0-255
                Scalar minValues = new Scalar(this.hueStart.getValue(), this.saturationStart.getValue(),
                        this.valueStart.getValue());
                Scalar maxValues = new Scalar(this.hueStop.getValue(), this.saturationStop.getValue(),
                        this.valueStop.getValue());

                // Show the current selected HSV range
                String valuesToPrint = "Hue range: " + minValues.val[0] + "-" + maxValues.val[0]
                        + ". Sat. range: " + minValues.val[1] + "-" + maxValues.val[1] + ". Value range: "
                        + minValues.val[2] + "-" + maxValues.val[2];
                hsvCurrentValues.setText(valuesToPrint);

                // Threshold HSV image to select object
                Core.inRange(hsvImage, minValues, maxValues, mask);
                // Show the partial output
                maskImage.getGraphics().drawImage(this.mat2Image(mask), 0, 0, 205, 154, null);

                // Morphological operators
                // Dilate with large element, erode with small ones.
                // BUG FIX: previously every erode/dilate call read from `mask`,
                // so the second pass of each operator never compounded and the
                // erosion result was discarded by the first dilate. Chain the
                // passes through morphOutput instead.
                Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(24, 24));
                Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(12, 12));

                Imgproc.erode(mask, morphOutput, erodeElement);
                Imgproc.erode(morphOutput, morphOutput, erodeElement);

                Imgproc.dilate(morphOutput, morphOutput, dilateElement);
                Imgproc.dilate(morphOutput, morphOutput, dilateElement);

                // Show the partial output
                morphImage.getGraphics().drawImage(this.mat2Image(morphOutput), 0, 0, 205, 154, null);

                // Find the object(s) contours and show them
                frame = this.findAndDrawObjects(morphOutput, frame);

                // Calculate centers and move ball.
                // findContours mutates its input, so work on a copy.
                Mat temp = new Mat();
                morphOutput.copyTo(temp);
                List<MatOfPoint> contours = new ArrayList<>();
                Imgproc.findContours(temp, contours, new Mat(), Imgproc.RETR_EXTERNAL,
                        Imgproc.CHAIN_APPROX_SIMPLE);
                for (int i = 0; i < contours.size(); i++) {
                    Rect objectBoundingRectangle = Imgproc.boundingRect(contours.get(i));
                    int x = objectBoundingRectangle.x + objectBoundingRectangle.width / 2;
                    int y = objectBoundingRectangle.y + objectBoundingRectangle.height / 2;

                    // Bounce the ball once per frame if it lies inside a
                    // detected object's bounding box
                    if (!ballChanged) {
                        if (b.x > objectBoundingRectangle.x
                                && b.x < objectBoundingRectangle.x + objectBoundingRectangle.width
                                && b.y > objectBoundingRectangle.y
                                && b.y < objectBoundingRectangle.y + objectBoundingRectangle.height) {
                            b.dx = -b.dx;
                            b.dy = -b.dy;
                            ballChanged = true;
                        }
                    }

                    // Show crosshair at the object's center
                    Imgproc.circle(frame, new Point(x, y), 20, new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x, y - 25), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x, y + 25), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x - 25, y), new Scalar(0, 255, 0), 2);
                    Imgproc.line(frame, new Point(x, y), new Point(x + 25, y), new Scalar(0, 255, 0), 2);
                    Imgproc.putText(frame, "Tracking object at (" + x + "," + y + ")", new Point(x, y), 1, 1,
                            new Scalar(255, 0, 0), 2);
                }
                ballChanged = false;

                // Move and draw the ball, keeping its direction but refreshing
                // its speed from the UI slider
                if (b.dx < 0)
                    b.dx = ballSpeed.getValue() * -1;
                else
                    b.dx = ballSpeed.getValue();
                if (b.dy < 0)
                    b.dy = ballSpeed.getValue() * -1;
                else
                    b.dy = ballSpeed.getValue();
                b.move();
                Imgproc.circle(frame, new Point(b.x, b.y), b.r, new Scalar(255, 0, 255), -1);

                // convert the Mat object (OpenCV) to Image (Java AWT)
                imageToShow = mat2Image(frame);
            }

        } catch (Exception e) {
            // log the error
            System.err.println("Exception during the frame elaboration: " + e);
        }
    }

    return imageToShow;
}

From source file:org.lasarobotics.vision.detection.ObjectDetection.java

License:Open Source License

/**
 * Analyzes an object in preparation to search for the object in a frame.
 * <p/>/*from w w  w  .  j  a va 2 s . c om*/
 * This method should be called in an initialize() method.
 * Calling the analyzeObject method twice will overwrite the previous objectAnalysis.
 * <p/>
 * It is recommended to use a GFTT (Good Features To Track) detector for this phase.
 *
 * @param object Object image
 * @return The object descriptor matrix to be piped into locateObject() later
 */
public ObjectAnalysis analyzeObject(Mat object) throws IllegalArgumentException {
    Mat descriptors = new Mat();
    MatOfKeyPoint keypoints = new MatOfKeyPoint();

    Log.d("FTCVision", "Analyzing object...");

    if (object == null || object.empty()) {
        throw new IllegalArgumentException("Object image cannot be empty!");
    }

    //Detect object keypoints
    detector.detect(object, keypoints);

    //Extract object keypoints
    extractor.compute(object, keypoints, descriptors);

    return new ObjectAnalysis(keypoints, descriptors, object);
}

From source file:org.pidome.client.userdetection.faces.FD_Controller.java

/**
 * Get a frame from the opened video stream (if any)
 *
 * @return the {@link Image} to show/*from  www  .j ava 2s .c om*/
 */
private Image grabFrame() {
    // init everything
    Image imageToShow = null;
    Mat frame = new Mat();

    // check if the capture is open
    if (this.capture.isOpened()) {
        try {
            // read the current frame
            this.capture.read(frame);

            // if the frame is not empty, process it
            if (!frame.empty()) {
                // face detection
                this.detectAndDisplay(frame);
                // convert the Mat object (OpenCV) to Image (JavaFX)
                imageToShow = mat2Image(frame);
                if (startDetection) {
                    matchClip(ImgTools.fromFXImage(imageToShow, null));
                }
                if (saveImage) {
                    savingImage = true;
                    clipSaveFace(imageToShow);
                    saveImage = false;
                }
                faceRect = null;
            }

        } catch (Exception e) {
            // log the (full) error
            System.err.print("ERROR");
            e.printStackTrace();
        }
    }

    return imageToShow;
}

From source file:org.pidome.client.video.capture.CaptureControllerVideoSource.java

/**
 * Get a frame from the opened video stream (if any)
 * @return the {@link Mat} captured/* w w w .  java  2  s .c  o  m*/
 */
private Mat grabFrame() throws IOException {
    Mat frame = new Mat();
    if (this.capture.isOpened()) {
        this.capture.read(frame);
        if (!frame.empty()) {
            return frame;
        } else {
            return null;
        }
    } else {
        return null;
    }
}

From source file:org.pooledtimeseries.PoT.java

License:Apache License

/**
 * Computes one histogram of optical flow (a w_d x h_d x o_d array) per frame
 * of the given video. Frame 0 contributes an all-zero histogram because flow
 * needs two consecutive frames.
 *
 * @param filename path of the video file to process
 * @param w_d      horizontal bin count
 * @param h_d      vertical bin count
 * @param o_d      orientation bin count
 * @return one histogram per frame; a single zero histogram when the file
 *         could not be opened
 * @throws PoTException if the file opened but no frame could be read
 */
static ArrayList<double[][][]> getOpticalHistograms(Path filename, int w_d, int h_d, int o_d)
        throws PoTException {
    ArrayList<double[][][]> histograms = new ArrayList<double[][][]>();
    VideoCapture capture = null;

    try {
        LOG.info("opening video file " + filename.toString());
        capture = new VideoCapture(filename.toString());

        if (!capture.isOpened()) {
            LOG.warning("video file " + filename.getFileName() + " could not be opened.");
            double[][][] hist = new double[w_d][h_d][o_d];
            histograms.add(hist);
        } else {
            // working buffers, reused across frames
            Mat original_frame = new Mat();
            Mat frame = new Mat();
            Mat frame_gray = new Mat();
            Mat prev_frame_gray = new Mat();
            MatOfPoint2f flow = new MatOfPoint2f();

            // compute a list of histograms of optical flow, one per frame
            for (int frame_index = 0;; frame_index++) {
                // capture the next video image; an empty frame means EOF
                capture.read(original_frame);

                if (original_frame.empty()) {
                    if (frame_index == 0) {
                        throw new PoTException("Could not read the video file");
                    }
                    break;
                }

                // resize the captured frame and convert it to gray scale
                Imgproc.resize(original_frame, frame, new Size(frame_width, frame_height));
                Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);

                double[][][] hist = new double[w_d][h_d][o_d];
                histograms.add(hist);

                // from frame #2 onwards: flow against the previous frame
                if (frame_index > 0) {
                    Video.calcOpticalFlowFarneback(prev_frame_gray, frame_gray, flow, 0.5, 1, 10, 2, 7, 1.5,
                            0); // 0.5, 1, 15, 2, 7, 1.5, 0

                    // update histogram of optical flows
                    updateOpticalHistogram(histograms.get(frame_index), flow);
                }

                // swap gray buffers so frame_gray can be reused next iteration
                Mat temp_frame = prev_frame_gray;
                prev_frame_gray = frame_gray;
                frame_gray = temp_frame;
            }
        }
    } catch (PoTException e) {
        // BUG FIX: the broad catch below previously swallowed PoTException,
        // so the declared `throws PoTException` could never reach the caller.
        throw e;
    } catch (Exception e) {
        e.printStackTrace();
        LOG.log(Level.SEVERE, "Exception in getOpticalHistograms ", e);
    } finally {
        // release the native capture handle on every path (previously leaked
        // whenever an exception was thrown)
        if (capture != null) {
            capture.release();
        }
    }
    return histograms;
}

From source file:org.pooledtimeseries.PoT.java

License:Apache License

/**
 * Computes one histogram of gradients (a w_d x h_d x o_d array) per frame of
 * the given video.
 *
 * @param filename path of the video file to process
 * @param w_d      horizontal bin count
 * @param h_d      vertical bin count
 * @param o_d      orientation bin count
 * @return one histogram per frame; a single zero histogram when the file
 *         could not be opened
 * @throws PoTException if the file opened but no frame could be read
 */
static ArrayList<double[][][]> getGradientHistograms(Path filename, int w_d, int h_d, int o_d)
        throws PoTException {
    ArrayList<double[][][]> histograms = new ArrayList<double[][][]>();

    VideoCapture capture = new VideoCapture(filename.toString());

    try {
        if (!capture.isOpened()) {
            LOG.warning("video file not opened.");

            double[][][] hist = new double[w_d][h_d][o_d];
            histograms.add(hist);
        } else {
            // working buffers, reused across frames
            Mat original_frame = new Mat();
            Mat resized = new Mat();
            Mat resized_gray = new Mat();

            // build a list of histograms of gradients, one per frame
            for (int i = 0;; i++) {
                // capture the next video image; an empty frame means EOF.
                // BUG FIX: the empty check was previously duplicated in a
                // redundant nested `if` — simplified to a single test.
                capture.read(original_frame);
                if (original_frame.empty()) {
                    if (i == 0) {
                        throw new PoTException("Could not read the video file");
                    }
                    break;
                }

                double[][][] hist = new double[w_d][h_d][o_d];

                // resize the frame and convert it to gray scale
                Imgproc.resize(original_frame, resized, new Size(frame_width, frame_height));
                Imgproc.cvtColor(resized, resized_gray, Imgproc.COLOR_BGR2GRAY);

                ArrayList<double[][]> gradients = computeGradients(resized_gray, o_d);
                updateGradientHistogram(hist, gradients);

                histograms.add(hist);
            }
        }
    } finally {
        // release the native capture handle on every path (previously leaked
        // when PoTException was thrown)
        capture.release();
    }

    return histograms;
}

From source file:qupath.opencv.DetectCytokeratinCV.java

License:Open Source License

/**
 * Extracts the combined contour area from a binary image.
 *
 * @param mat the input image
 * @return the {@link Area} covered by all detected contours, or {@code null}
 *         when the matrix is empty or contains no contours
 */
public static Area getArea(final Mat mat) {
    if (mat.empty()) {
        return null;
    }

    // Identify all contours (RETR_TREE keeps the nesting hierarchy)
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    try {
        Imgproc.findContours(mat, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
        if (contours.isEmpty()) {
            return null;
        }

        Area area = new Area();
        updateArea(contours, hierarchy, area, 0, 0);
        return area;
    } finally {
        // release native memory on every exit path
        hierarchy.release();
    }
}

From source file:src.main.java.org.roomwatcher.watcher.Window.java

/**
 * Entry point: loads the native OpenCV library, builds the Swing window, then
 * loops forever grabbing frames from the default camera, running the detector
 * on each one and repainting the watcher panel.
 */
public static void main(String arg[]) throws InterruptedException {
    // The native library must be loaded before any OpenCV class is used.
    System.loadLibrary("opencv_java2410");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

    frame.setSize(400, 400);
    frame.setLocationRelativeTo(null);

    Processor processor = new Processor();
    Watcher watcher = new Watcher();

    watcher.add(peopleNumberLabel);
    frame.setContentPane(watcher);

    frame.setVisible(true);

    //-- 2. Read the video stream
    Mat cameraImage = new Mat();
    VideoCapture capture = new VideoCapture(0);

    if (!capture.isOpened()) {
        return;
    }
    for (;;) {
        capture.read(cameraImage);
        if (cameraImage.empty()) {
            continue;
        }
        // Fit the window to the captured frame plus some chrome
        frame.setSize(cameraImage.width() + 40, cameraImage.height() + 160);
        //-- 3. Apply the classifier to the captured image
        cameraImage = processor.detect(cameraImage);
        //-- 4. Display the image
        watcher.MatToBufferedImage(cameraImage); // We could look at the error...
        watcher.repaint();
    }
}

From source file:usefull.backgroundModel.java

License:LGPL

/**
 * Entry point: opens a video source (hardware camera 0, falling back to
 * "files/video.mp4"), builds a Mixture-of-Gaussians background model, and
 * displays the live frame alongside the extracted foreground until the
 * source runs out of frames.
 */
public static void main(String[] args) throws InterruptedException {

    // load the core OpenCV library by name

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create the video capture device object (opened below)

    VideoCapture cap = new VideoCapture();

    // index of the hardware device to try first

    int CAM_TO_USE = 0;

    // working image buffers, reused across frames

    Mat frame = new Mat();
    Mat foreground = new Mat();
    Mat fg_mask = new Mat();

    // Mixture of Gaussians background model

    BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG();

    // try to open the first capture device (0)

    try {
        cap.open(CAM_TO_USE);
    } catch (Exception e1) {
        System.out.println("No webcam attached");

        // otherwise try opening a video file

        try {
            cap.open("files/video.mp4");
        } catch (Exception e2) {
            System.out.println("No video file found");
        }
    }

    // if a video capture source is now open

    if (cap.isOpened()) {
        // create the display windows

        Imshow imsS = new Imshow("from video Source ... ");
        Imshow imsF = new Imshow("Foreground");

        boolean keepProcessing = true;

        while (keepProcessing) {
            // grab and decode the next frame from the video source

            cap.grab();
            cap.retrieve(frame);

            // if the frame is valid (not end of video for example)

            if (!(frame.empty())) {

                // add it to the background model with a learning rate of 0.1

                MoG.apply(frame, fg_mask, 0.1);

                // convert/expand the single-channel foreground mask
                // (1 = foreground / 0 = background) to a 3-channel version

                Imgproc.cvtColor(fg_mask, fg_mask, Imgproc.COLOR_GRAY2BGR);

                // logically AND it with the original frame to keep colour
                // pixels only in the foreground regions

                Core.bitwise_and(frame, fg_mask, foreground);

                // display images with a delay of 40ms (i.e. 1000 ms / 25 = 25 fps)

                imsS.showImage(frame);
                imsF.showImage(foreground);

                Thread.sleep(40);

            } else {
                // end of video (or read failure) -- stop the loop
                keepProcessing = false;
            }
        }

    } else {
        System.out.println("error cannot open any capture source - exiting");
    }

    // close down the capture source correctly

    cap.release();

}